[Feature-10273][Datasource] Add dataSource for AWS Athena (#11157)

This commit is contained in:
ZhaoGuodong 2022-09-16 09:59:20 +08:00 committed by GitHub
parent 925e2fa551
commit 86291bb5e6
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
23 changed files with 619 additions and 9 deletions

View File

@ -275,6 +275,10 @@ export default {
title: 'Oracle',
link: '/en-us/docs/dev/user_doc/guide/datasource/oracle.html',
},
{
title: 'Amazon Athena',
link: '/en-us/docs/dev/user_doc/guide/datasource/athena.html',
},
],
},
{
@ -883,6 +887,10 @@ export default {
title: 'Spark',
link: '/zh-cn/docs/dev/user_doc/guide/datasource/spark.html',
},
{
title: 'Amazon Athena',
link: '/zh-cn/docs/dev/user_doc/guide/datasource/athena.html',
},
],
},
{

View File

@ -0,0 +1,22 @@
# AWS Athena
![AWS Athena](../../../../img/new_ui/dev/datasource/athena.png)
## Datasource Parameters
| **Datasource** | **Description** |
| --- | --- |
| Datasource | Select ATHENA. |
| Datasource name | Enter the name of the DataSource. |
| Description | Enter a description of the DataSource. |
| Username | Set the AWS access key. |
| Password | Set the AWS secret access key. |
| AwsRegion | Set the AWS region. |
| Database name | Enter the database name of the ATHENA connection. |
| Jdbc connection parameters | Parameter settings for ATHENA connection, in JSON format. |
## Native Supported
- No. Before use, read the `Datasource Center` section of [datasource-setting](../howto/datasource-setting.md) to learn how to activate this datasource.
- JDBC driver configuration reference document [athena-connect-with-jdbc](https://docs.amazonaws.cn/athena/latest/ug/connect-with-jdbc.html)
- Driver download link [SimbaAthenaJDBC-2.0.31.1000/AthenaJDBC42.jar](https://s3.cn-north-1.amazonaws.com.cn/athena-downloads-cn/drivers/JDBC/SimbaAthenaJDBC-2.0.31.1000/AthenaJDBC42.jar)

View File

@ -0,0 +1,19 @@
# AWS Athena 数据源
![AWS Athena](../../../../img/new_ui/dev/datasource/athena.png)
- 数据源:选择 ATHENA
- 数据源名称:输入数据源的名称
- 描述:输入数据源的描述
- 用户名:设置连接 AWS 的 access key
- 密码:设置连接 AWS 的 secret access key
- AwsRegion: 设置连接 AWS 的区域
- 数据库名:输入连接 ATHENA 的数据库名称
- Jdbc 连接参数:用于 ATHENA 连接的参数设置,以 JSON 形式填写
## 是否原生支持
- 否,使用前请参考 [数据源配置](../howto/datasource-setting.md) 中的 "数据源中心" 章节激活数据源。
- JDBC驱动配置参考文档 [athena-connect-with-jdbc](https://docs.amazonaws.cn/athena/latest/ug/connect-with-jdbc.html)
- 驱动下载链接 [SimbaAthenaJDBC-2.0.31.1000/AthenaJDBC42.jar](https://s3.cn-north-1.amazonaws.com.cn/athena-downloads-cn/drivers/JDBC/SimbaAthenaJDBC-2.0.31.1000/AthenaJDBC42.jar)

Binary file not shown.

After

Width:  |  Height:  |  Size: 424 KiB

View File

@ -68,6 +68,11 @@
<artifactId>dolphinscheduler-datasource-redshift</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-datasource-athena</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-datasource-spark</artifactId>

View File

@ -41,7 +41,7 @@ public abstract class AbstractDataSourceProcessor implements DataSourceProcessor
@Override
public void checkDatasourceParam(BaseDataSourceParamDTO baseDataSourceParamDTO) {
checkHost(baseDataSourceParamDTO.getHost());
checkDatasourcePatter(baseDataSourceParamDTO.getDatabase());
checkDatabasePatter(baseDataSourceParamDTO.getDatabase());
checkOther(baseDataSourceParamDTO.getOther());
}
@ -61,9 +61,9 @@ public abstract class AbstractDataSourceProcessor implements DataSourceProcessor
*
* @param database database name
*/
protected void checkDatasourcePatter(String database) {
protected void checkDatabasePatter(String database) {
if (!DATABASE_PATTER.matcher(database).matches()) {
throw new IllegalArgumentException("datasource name illegal");
throw new IllegalArgumentException("database name illegal");
}
}

View File

@ -0,0 +1,44 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>dolphinscheduler-datasource-plugin</artifactId>
<groupId>org.apache.dolphinscheduler</groupId>
<version>dev-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>dolphinscheduler-datasource-athena</artifactId>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-spi</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-datasource-api</artifactId>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,30 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.datasource.athena;
import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel;
import org.apache.dolphinscheduler.spi.datasource.DataSourceClient;
import org.apache.dolphinscheduler.spi.enums.DbType;
/**
 * Datasource channel for AWS Athena: turns persisted connection parameters
 * into a live {@link AthenaDataSourceClient}.
 */
public class AthenaDataSourceChannel implements DataSourceChannel {

    @Override
    public DataSourceClient createDataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType) {
        final AthenaDataSourceClient client = new AthenaDataSourceClient(baseConnectionParam, dbType);
        return client;
    }
}

View File

@ -0,0 +1,36 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.datasource.athena;
import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel;
import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory;
import com.google.auto.service.AutoService;
/**
 * SPI factory that registers the Athena datasource channel with the plugin
 * framework; discovered at runtime through {@link AutoService}.
 */
@AutoService(DataSourceChannelFactory.class)
public class AthenaDataSourceChannelFactory implements DataSourceChannelFactory {

    /** Unique plugin name under which this datasource type is looked up. */
    @Override
    public String getName() {
        return "athena";
    }

    /** Build a new channel instance for this datasource type. */
    @Override
    public DataSourceChannel create() {
        return new AthenaDataSourceChannel();
    }
}

View File

@ -0,0 +1,28 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.datasource.athena;
import org.apache.dolphinscheduler.plugin.datasource.api.client.CommonDataSourceClient;
import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.enums.DbType;
/**
 * JDBC client for AWS Athena datasources.
 *
 * All behaviour (driver loading, connection checking, query execution) is
 * inherited unchanged from {@link CommonDataSourceClient}; this subclass only
 * exists so the plugin framework has an Athena-specific client type.
 */
public class AthenaDataSourceClient extends CommonDataSourceClient {
// @param baseConnectionParam resolved Athena connection parameters (JDBC URL, credentials)
// @param dbType expected to be DbType.ATHENA — TODO confirm the framework never passes another type
public AthenaDataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType) {
super(baseConnectionParam, dbType);
}
}

View File

@ -0,0 +1,49 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.datasource.athena.param;
import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam;
/**
 * Persisted connection parameters for an AWS Athena datasource.
 *
 * Extends the generic {@link BaseConnectionParam} with the AWS region, which
 * is the only Athena-specific piece of connection information (the JDBC URL
 * is derived from it: {@code jdbc:awsathena://AwsRegion=<region>;}).
 */
public class AthenaConnectionParam extends BaseConnectionParam {

    // AWS region the Athena endpoint lives in, e.g. "cn-north-1"
    protected String awsRegion;

    public String getAwsRegion() {
        return awsRegion;
    }

    public void setAwsRegion(String awsRegion) {
        this.awsRegion = awsRegion;
    }

    /**
     * Human-readable dump of the connection parameters for logging.
     *
     * The password field holds the (encoded) AWS secret access key; it is
     * deliberately masked here so that log statements and debug output never
     * leak the credential.
     */
    @Override
    public String toString() {
        return "AthenaConnectionParam{"
                + "user='" + user + '\''
                + ", password='******'"
                + ", address='" + address + '\''
                + ", database='" + database + '\''
                + ", jdbcUrl='" + jdbcUrl + '\''
                + ", driverLocation='" + driverLocation + '\''
                + ", driverClassName='" + driverClassName + '\''
                + ", validationQuery='" + validationQuery + '\''
                + ", other='" + other + '\''
                + ", awsRegion='" + awsRegion + '\''
                + '}';
    }
}

View File

@ -0,0 +1,35 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.datasource.athena.param;
import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.spi.enums.DbType;
import lombok.Data;
import lombok.EqualsAndHashCode;
/**
 * UI-facing parameter DTO for an Athena datasource.
 *
 * Compared to the generic {@link BaseDataSourceParamDTO} it adds only the AWS
 * region. Lombok generates the accessors and an equals/hashCode that includes
 * the superclass fields (callSuper = true).
 */
@Data
@EqualsAndHashCode(callSuper = true)
public class AthenaDataSourceParamDTO extends BaseDataSourceParamDTO {
// AWS region used to build the jdbc:awsathena:// URL, e.g. "cn-north-1"
protected String awsRegion;
@Override
public DbType getType() {
return DbType.ATHENA;
}
}

View File

@ -0,0 +1,158 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.datasource.athena.param;
import com.google.auto.service.AutoService;
import org.apache.commons.collections4.MapUtils;
import org.apache.dolphinscheduler.plugin.datasource.api.datasource.AbstractDataSourceProcessor;
import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO;
import org.apache.dolphinscheduler.plugin.datasource.api.datasource.DataSourceProcessor;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils;
import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam;
import org.apache.dolphinscheduler.spi.datasource.ConnectionParam;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.utils.Constants;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import org.apache.dolphinscheduler.spi.utils.StringUtils;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@AutoService(DataSourceProcessor.class)
public class AthenaDataSourceProcessor extends AbstractDataSourceProcessor {
@Override
public BaseDataSourceParamDTO castDatasourceParamDTO(String paramJson) {
return JSONUtils.parseObject(paramJson, AthenaDataSourceParamDTO.class);
}
@Override
public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) {
AthenaConnectionParam
connectionParams = (AthenaConnectionParam) this.createConnectionParams(connectionJson);
AthenaDataSourceParamDTO
athenaDatasourceParamDTO = new AthenaDataSourceParamDTO();
athenaDatasourceParamDTO.setAwsRegion(connectionParams.getAwsRegion());
athenaDatasourceParamDTO.setDatabase(connectionParams.getDatabase());
athenaDatasourceParamDTO.setUserName(connectionParams.getUser());
athenaDatasourceParamDTO.setOther(this.parseOther(connectionParams.getOther()));
return athenaDatasourceParamDTO;
}
@Override
public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) {
AthenaDataSourceParamDTO athenaParam = (AthenaDataSourceParamDTO) datasourceParam;
String address = String.format("%s%s=%s;", Constants.JDBC_ATHENA, "AwsRegion", athenaParam.getAwsRegion());
AthenaConnectionParam
athenaConnectionParam = new AthenaConnectionParam();
athenaConnectionParam.setUser(athenaParam.getUserName());
athenaConnectionParam.setPassword(PasswordUtils.encodePassword(athenaParam.getPassword()));
athenaConnectionParam.setAwsRegion(athenaParam.getAwsRegion());
athenaConnectionParam.setOther(this.transformOther(athenaParam.getOther()));
athenaConnectionParam.setAddress(address);
athenaConnectionParam.setJdbcUrl(address);
athenaConnectionParam.setDatabase(athenaParam.getDatabase());
athenaConnectionParam.setDriverClassName(this.getDatasourceDriver());
athenaConnectionParam.setValidationQuery(this.getValidationQuery());
athenaConnectionParam.setProps(athenaParam.getOther());
return athenaConnectionParam;
}
@Override
public ConnectionParam createConnectionParams(String connectionJson) {
return JSONUtils.parseObject(connectionJson, AthenaConnectionParam.class);
}
@Override
public String getDatasourceDriver() {
return Constants.COM_ATHENA_JDBC_DRIVER;
}
@Override
public String getValidationQuery() {
return Constants.ATHENA_VALIDATION_QUERY;
}
@Override
public String getJdbcUrl(ConnectionParam connectionParam) {
AthenaConnectionParam
athenaConnectionParam = (AthenaConnectionParam) connectionParam;
if (!StringUtils.isEmpty(athenaConnectionParam.getOther())) {
return String.format("%s%s", athenaConnectionParam.getJdbcUrl(), athenaConnectionParam.getOther());
}
return athenaConnectionParam.getJdbcUrl();
}
@Override
public Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException {
AthenaConnectionParam athenaConnectionParam = (AthenaConnectionParam) connectionParam;
Class.forName(this.getDatasourceDriver());
return DriverManager.getConnection(this.getJdbcUrl(connectionParam),
athenaConnectionParam.getUser(), PasswordUtils.decodePassword(athenaConnectionParam.getPassword()));
}
@Override
public DbType getDbType() {
return DbType.ATHENA;
}
@Override
public DataSourceProcessor create() {
return new AthenaDataSourceProcessor();
}
private String transformOther(Map<String, String> otherMap) {
if (MapUtils.isNotEmpty(otherMap)) {
List<String> list = new ArrayList<>(otherMap.size());
otherMap.forEach((key, value) -> list.add(String.format("%s=%s", key, value)));
return String.join(Constants.SEMICOLON, list);
}
return null;
}
private Map<String, String> parseOther(String other) {
Map<String, String> otherMap = new LinkedHashMap<>();
if (StringUtils.isEmpty(other)) {
return otherMap;
}
String[] configs = other.split(Constants.SEMICOLON);
for (String config : configs) {
otherMap.put(config.split(Constants.EQUAL_SIGN)[0], config.split(Constants.EQUAL_SIGN)[1]);
}
return otherMap;
}
@Override
protected void checkHost(String host) {
// Do not need to set the host, nothing to do
}
@Override
protected void checkDatabasePatter(String database) {
// Do not need to set the database, nothing to do
}
}

View File

@ -0,0 +1,120 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.datasource.athena.param;
import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceClientProvider;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.CommonUtils;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils;
import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils;
import org.apache.dolphinscheduler.spi.enums.DbType;
import org.apache.dolphinscheduler.spi.utils.Constants;
import java.sql.DriverManager;
import java.util.HashMap;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
/**
 * Unit tests for {@link AthenaDataSourceProcessor}. PowerMock is used to stub
 * the static {@link PasswordUtils} methods so no real encryption key is needed.
 */
@RunWith(PowerMockRunner.class)
@PrepareForTest({Class.class, DriverManager.class, DataSourceUtils.class, CommonUtils.class, DataSourceClientProvider.class, PasswordUtils.class})
public class AthenaDataSourceProcessorTest {
private final AthenaDataSourceProcessor athenaDataSourceProcessor = new AthenaDataSourceProcessor();
// DTO -> connection params: address/jdbcUrl must embed the AWS region
@Test
public void testCreateConnectionParams() {
Map<String, String> props = new HashMap<>();
props.put("LogLevel", "6");
AthenaDataSourceParamDTO athenaDataSourceParamDTO = new AthenaDataSourceParamDTO();
athenaDataSourceParamDTO.setDatabase("");
athenaDataSourceParamDTO.setUserName("awsuser");
athenaDataSourceParamDTO.setPassword("123456");
athenaDataSourceParamDTO.setAwsRegion("cn-north-1");
athenaDataSourceParamDTO.setOther(props);
PowerMockito.mockStatic(PasswordUtils.class);
PowerMockito.when(PasswordUtils.encodePassword(Mockito.anyString())).thenReturn("test");
AthenaConnectionParam connectionParams = (AthenaConnectionParam) this.athenaDataSourceProcessor
.createConnectionParams(athenaDataSourceParamDTO);
Assert.assertEquals("jdbc:awsathena://AwsRegion=cn-north-1;", connectionParams.getAddress());
Assert.assertEquals("jdbc:awsathena://AwsRegion=cn-north-1;", connectionParams.getJdbcUrl());
}
// JSON -> connection params round trip preserves user and region
@Test
public void testCreateConnectionParams2() {
String connectionJson = "{\"user\":\"awsuser\",\"password\":\"123456\",\"address\":\"jdbc:awsathena://AwsRegion=cn-north-1;\""
+ ",\"database\":\"\",\"jdbcUrl\":\"jdbc:awsathena://AwsRegion=cn-north-1;\", \"awsRegion\":\"cn-north-1\"}";
AthenaConnectionParam connectionParams = (AthenaConnectionParam) this.athenaDataSourceProcessor
.createConnectionParams(connectionJson);
Assert.assertNotNull(connectionParams);
Assert.assertEquals("awsuser", connectionParams.getUser());
Assert.assertEquals("cn-north-1", connectionParams.getAwsRegion());
}
@Test
public void testGetDatasourceDriver() {
Assert.assertEquals(Constants.COM_ATHENA_JDBC_DRIVER, this.athenaDataSourceProcessor.getDatasourceDriver());
}
// extra driver options ("other") are appended directly after the base URL
@Test
public void testGetJdbcUrl() {
AthenaConnectionParam athenaConnectionParam = new AthenaConnectionParam();
athenaConnectionParam.setJdbcUrl("jdbc:awsathena://AwsRegion=cn-north-1;");
athenaConnectionParam.setOther("LogLevel=6;LogPath=/tmp;");
Assert.assertEquals("jdbc:awsathena://AwsRegion=cn-north-1;LogLevel=6;LogPath=/tmp;",
this.athenaDataSourceProcessor.getJdbcUrl(athenaConnectionParam));
}
// empty "other" leaves the base URL untouched
@Test
public void testGetJdbcUrlNoOther() {
AthenaConnectionParam athenaConnectionParam = new AthenaConnectionParam();
athenaConnectionParam.setJdbcUrl("jdbc:awsathena://AwsRegion=cn-north-1;");
athenaConnectionParam.setOther("");
Assert.assertEquals("jdbc:awsathena://AwsRegion=cn-north-1;",
this.athenaDataSourceProcessor.getJdbcUrl(athenaConnectionParam));
}
@Test
public void testGetDbType() {
Assert.assertEquals(DbType.ATHENA, this.athenaDataSourceProcessor.getDbType());
}
@Test
public void testGetValidationQuery() {
Assert.assertEquals(Constants.ATHENA_VALIDATION_QUERY, this.athenaDataSourceProcessor.getValidationQuery());
}
// connection JSON -> UI DTO: user, region and database are restored (password is not)
@Test
public void testCreateDatasourceParamDTO() {
String connectionJson = "{\"user\":\"awsuser\",\"password\":\"123456\",\"address\":\"jdbc:awsathena://AwsRegion=cn-north-1;\""
+ ",\"database\":\"\",\"jdbcUrl\":\"jdbc:awsathena://AwsRegion=cn-north-1;\", \"awsRegion\":\"cn-north-1\"}";
AthenaDataSourceParamDTO athenaDataSourceParamDTO = (AthenaDataSourceParamDTO) this.athenaDataSourceProcessor
.createDatasourceParamDTO(connectionJson);
Assert.assertEquals("awsuser", athenaDataSourceParamDTO.getUserName());
Assert.assertEquals("cn-north-1", athenaDataSourceParamDTO.getAwsRegion());
Assert.assertEquals("", athenaDataSourceParamDTO.getDatabase());
}
}

View File

@ -41,6 +41,7 @@
<module>dolphinscheduler-datasource-api</module>
<module>dolphinscheduler-datasource-all</module>
<module>dolphinscheduler-datasource-redshift</module>
<module>dolphinscheduler-datasource-athena</module>
</modules>
<dependencyManagement>

View File

@ -17,14 +17,14 @@
package org.apache.dolphinscheduler.spi.enums;
import com.baomidou.mybatisplus.annotation.EnumValue;
import com.google.common.base.Functions;
import static java.util.stream.Collectors.toMap;
import java.util.Arrays;
import java.util.Map;
import java.util.NoSuchElementException;
import static java.util.stream.Collectors.toMap;
import com.baomidou.mybatisplus.annotation.EnumValue;
import com.google.common.base.Functions;
public enum DbType {
MYSQL(0, "mysql"),
@ -38,6 +38,7 @@ public enum DbType {
PRESTO(8, "presto"),
H2(9, "h2"),
REDSHIFT(10,"redshift"),
ATHENA(11,"athena"),
;
@EnumValue

View File

@ -162,6 +162,7 @@ public class Constants {
public static final String COM_DB2_JDBC_DRIVER = "com.ibm.db2.jcc.DB2Driver";
public static final String COM_PRESTO_JDBC_DRIVER = "com.facebook.presto.jdbc.PrestoDriver";
public static final String COM_REDSHIFT_JDBC_DRIVER = "com.amazon.redshift.jdbc42.Driver";
public static final String COM_ATHENA_JDBC_DRIVER = "com.simba.athena.jdbc.Driver";
/**
@ -176,6 +177,7 @@ public class Constants {
public static final String DB2_VALIDATION_QUERY = "select 1 from sysibm.sysdummy1";
public static final String PRESTO_VALIDATION_QUERY = "select 1";
public static final String REDHIFT_VALIDATION_QUERY = "select 1";
public static final String ATHENA_VALIDATION_QUERY = "select 1";
/**
* jdbc url
@ -190,6 +192,7 @@ public class Constants {
public static final String JDBC_DB2 = "jdbc:db2://";
public static final String JDBC_PRESTO = "jdbc:presto://";
public static final String JDBC_REDSHIFT = "jdbc:redshift://";
public static final String JDBC_ATHENA = "jdbc:awsathena://";
public static final String ADDRESS = "address";
public static final String DATABASE = "database";

View File

@ -62,5 +62,7 @@ export default {
user_name_tips: 'Please enter your username',
user_password: 'Password',
user_password_tips: 'Please enter your password',
aws_region: 'Aws Region',
aws_region_tips: 'Please enter AwsRegion',
jdbc_format_tips: 'jdbc connection parameters is not a correct JSON format'
}

View File

@ -59,5 +59,7 @@ export default {
user_name_tips: '请输入用户名',
user_password: '密码',
user_password_tips: '请输入密码',
aws_region: 'AwsRegion',
aws_region_tips: '请输入AwsRegion',
jdbc_format_tips: 'jdbc连接参数不是一个正确的JSON格式'
}

View File

@ -26,6 +26,7 @@ type IDataBase =
| 'DB2'
| 'PRESTO'
| 'REDSHIFT'
| 'ATHENA'
interface IDataSource {
id?: number
@ -40,6 +41,7 @@ interface IDataSource {
loginUserKeytabPath?: string
userName?: string
password?: string
awsRegion?: string
database?: string
connectType?: string
other?: object

View File

@ -125,6 +125,9 @@ const DetailModal = defineComponent({
detailForm,
rules,
requiredDataBase,
showHost,
showPort,
showAwsRegion,
showConnectType,
showPrincipal,
loading,
@ -194,6 +197,7 @@ const DetailModal = defineComponent({
/>
</NFormItem>
<NFormItem
v-show={showHost}
label={t('datasource.ip')}
path='host'
show-require-mark
@ -208,6 +212,7 @@ const DetailModal = defineComponent({
/>
</NFormItem>
<NFormItem
v-show={showPort}
label={t('datasource.port')}
path='port'
show-require-mark
@ -296,6 +301,20 @@ const DetailModal = defineComponent({
placeholder={t('datasource.user_password_tips')}
/>
</NFormItem>
<NFormItem
v-show={showAwsRegion}
label={t('datasource.aws_region')}
path='awsRegion'
show-require-mark
>
<NInput
allowInput={this.trim}
v-model={[detailForm.awsRegion, 'value']}
type='text'
maxlength={60}
placeholder={t('datasource.aws_region_tips')}
/>
</NFormItem>
<NFormItem
label={t('datasource.database_name')}
path='database'

View File

@ -52,6 +52,9 @@ export function useForm(id?: number) {
detailFormRef: ref(),
detailForm: { ...initialValues },
requiredDataBase: true,
showHost: true,
showPort: true,
showAwsRegion: false,
showConnectType: false,
showPrincipal: false,
rules: {
@ -66,7 +69,7 @@ export function useForm(id?: number) {
host: {
trigger: ['input'],
validator() {
if (!state.detailForm.host) {
if (!state.detailForm.host && state.showHost) {
return new Error(t('datasource.ip_tips'))
}
}
@ -74,7 +77,7 @@ export function useForm(id?: number) {
port: {
trigger: ['input'],
validator() {
if (!state.detailForm.port) {
if (!state.detailForm.port && state.showPort) {
return new Error(t('datasource.port_tips'))
}
}
@ -95,6 +98,14 @@ export function useForm(id?: number) {
}
}
},
awsRegion: {
trigger: ['input'],
validator() {
if (!state.detailForm.awsRegion && state.showAwsRegion) {
return new Error(t('datasource.aws_region_tips'))
}
}
},
database: {
trigger: ['input'],
validator() {
@ -126,10 +137,15 @@ export function useForm(id?: number) {
state.detailForm.port = options.previousPort || options.defaultPort
state.detailForm.type = type
state.requiredDataBase = (type !== 'POSTGRESQL' && type !== 'ATHENA')
state.showHost = type !== 'ATHENA'
state.showPort = type !== 'ATHENA'
state.showAwsRegion = type === 'ATHENA'
if (type === 'ORACLE' && !id) {
state.detailForm.connectType = 'ORACLE_SERVICE_NAME'
}
state.requiredDataBase = type !== 'POSTGRESQL'
state.showConnectType = type === 'ORACLE'
if (type === 'HIVE' || type === 'SPARK') {
@ -219,6 +235,11 @@ export const datasourceType: IDataBaseOptionKeys = {
value: 'REDSHIFT',
label: 'REDSHIFT',
defaultPort: 5439
},
ATHENA: {
value: 'ATHENA',
label: 'ATHENA',
defaultPort: 0
}
}

View File

@ -86,6 +86,11 @@ export function useDatasource(
id: 9,
code: 'REDSHIFT',
disabled: false
},
{
id: 10,
code: 'ATHENA',
disabled: false
}
]