Merge pull request #36 from Baoqi/bwu_clickhouse

Add ClickHouse data source support and fix a DB connection leak
escheduler 2019-04-12 22:12:34 +08:00 committed by GitHub
commit 22cd959521
15 changed files with 173 additions and 12 deletions

View File

@@ -38,6 +38,7 @@ import org.springframework.transaction.annotation.Transactional;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.*;
/**
@@ -217,6 +218,9 @@ public class DataSourceService extends BaseService{
case POSTGRESQL:
separator = "&";
break;
case CLICKHOUSE:
separator = "&";
break;
default:
separator = "&";
break;
@@ -367,6 +371,10 @@ public class DataSourceService extends BaseService{
datasource = JSONObject.parseObject(parameter, SparkDataSource.class);
Class.forName(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER);
break;
case CLICKHOUSE:
datasource = JSONObject.parseObject(parameter, ClickHouseDataSource.class);
Class.forName(Constants.COM_CLICKHOUSE_JDBC_DRIVER);
break;
default:
break;
}
@@ -392,6 +400,11 @@ public class DataSourceService extends BaseService{
Connection con = getConnection(type, parameter);
if (con != null) {
isConnection = true;
try {
con.close();
} catch (SQLException e) {
logger.error("close connection fail at DataSourceService::checkConnection()", e);
}
}
return isConnection;
}
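
For comparison, the same leak-free check can be written with try-with-resources (Java 7+), which closes the connection even if an exception escapes. A minimal sketch, not the committed code, assuming getConnection(type, parameter) is the helper used above and that SQLException is its only checked exception:

    private boolean checkConnection(DbType type, String parameter) {
        // try-with-resources closes con automatically; close() is skipped when con
        // is null, so the explicit null-guarded close above becomes unnecessary
        try (Connection con = getConnection(type, parameter)) {
            return con != null;
        } catch (SQLException e) {
            logger.error("check connection failed", e);
            return false;
        }
    }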
@@ -428,7 +441,7 @@ public class DataSourceService extends BaseService{
String address = buildAddress(type, host, port);
String jdbcUrl = address + "/" + database;
String separator = "";
- if (Constants.MYSQL.equals(type.name()) || Constants.POSTGRESQL.equals(type.name())) {
+ if (Constants.MYSQL.equals(type.name()) || Constants.POSTGRESQL.equals(type.name()) || Constants.CLICKHOUSE.equals(type.name())) {
separator = "&";
} else if (Constants.HIVE.equals(type.name()) || Constants.SPARK.equals(type.name())) {
separator = ";";
@@ -479,6 +492,9 @@ public class DataSourceService extends BaseService{
}
sb.deleteCharAt(sb.length() - 1);
}
} else if (Constants.CLICKHOUSE.equals(type.name())) {
sb.append(Constants.JDBC_CLICKHOUSE);
sb.append(host).append(":").append(port);
}
return sb.toString();
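
To make the new ClickHouse branch concrete, here is the address and URL the code above assembles; host, port and database are made-up values:

    String address = Constants.JDBC_CLICKHOUSE + "192.168.220.189" + ":" + "8123";
    // address -> "jdbc:clickhouse://192.168.220.189:8123"
    String jdbcUrl = address + "/" + "default";
    // jdbcUrl -> "jdbc:clickhouse://192.168.220.189:8123/default"
    // any extra connection properties are then appended using the "&" separator chosen above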

View File

@@ -82,6 +82,7 @@ public class Constants {
public static final String ORG_POSTGRESQL_DRIVER = "org.postgresql.Driver";
public static final String COM_MYSQL_JDBC_DRIVER = "com.mysql.jdbc.Driver";
public static final String ORG_APACHE_HIVE_JDBC_HIVE_DRIVER = "org.apache.hive.jdbc.HiveDriver";
public static final String COM_CLICKHOUSE_JDBC_DRIVER = "ru.yandex.clickhouse.ClickHouseDriver";
/**
* database type
@@ -90,6 +91,7 @@ public class Constants {
public static final String POSTGRESQL = "POSTGRESQL";
public static final String HIVE = "HIVE";
public static final String SPARK = "SPARK";
public static final String CLICKHOUSE = "CLICKHOUSE";
/**
* jdbc url
@@ -97,6 +99,7 @@ public class Constants {
public static final String JDBC_MYSQL = "jdbc:mysql://";
public static final String JDBC_POSTGRESQL = "jdbc:postgresql://";
public static final String JDBC_HIVE_2 = "jdbc:hive2://";
public static final String JDBC_CLICKHOUSE = "jdbc:clickhouse://";
public static final String ADDRESS = "address";

View File

@@ -371,6 +371,21 @@
<groupId>com.github.oshi</groupId>
<artifactId>oshi-core</artifactId>
</dependency>
<dependency>
<groupId>ru.yandex.clickhouse</groupId>
<artifactId>clickhouse-jdbc</artifactId>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>

View File

@@ -602,15 +602,19 @@ public final class Constants {
public static final String JDBC_POSTGRESQL_CLASS_NAME = "org.postgresql.Driver";
/**
- * postgresql
+ * hive
*/
public static final String JDBC_HIVE_CLASS_NAME = "org.apache.hive.jdbc.HiveDriver";
/**
- * postgresql
+ * spark
*/
public static final String JDBC_SPARK_CLASS_NAME = "org.apache.hive.jdbc.HiveDriver";
/**
* ClickHouse
*/
public static final String JDBC_CLICKHOUSE_CLASS_NAME = "ru.yandex.clickhouse.ClickHouseDriver";
/**
* spark params constant

View File

@@ -25,6 +25,7 @@ public enum DbType {
* 1 postgresql
* 2 hive
* 3 spark
* 4 clickhouse
*/
- MYSQL, POSTGRESQL, HIVE, SPARK
+ MYSQL, POSTGRESQL, HIVE, SPARK, CLICKHOUSE
}
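
The numbering in the enum's comment tracks declaration order, which suggests the type is stored by ordinal; appending CLICKHOUSE at the end therefore keeps previously stored values stable. A one-line illustration:

    // declaration order matches the comment: MYSQL=0 ... CLICKHOUSE=4
    assert DbType.CLICKHOUSE.ordinal() == 4;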

View File

@@ -0,0 +1,75 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.escheduler.common.job.db;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
/**
* ClickHouse data source
*/
public class ClickHouseDataSource extends BaseDataSource {
private static final Logger logger = LoggerFactory.getLogger(ClickHouseDataSource.class);
/**
* gets the JDBC url for the data source connection
* @return the JDBC url
*/
@Override
public String getJdbcUrl() {
String jdbcUrl = getAddress();
if (jdbcUrl.lastIndexOf("/") != (jdbcUrl.length() - 1)) {
jdbcUrl += "/";
}
jdbcUrl += getDatabase();
if (StringUtils.isNotEmpty(getOther())) {
jdbcUrl += "?" + getOther();
}
return jdbcUrl;
}
/**
* test whether the data source can be connected successfully
* @throws Exception
*/
@Override
public void isConnectable() throws Exception {
Connection con = null;
try {
Class.forName("ru.yandex.clickhouse.ClickHouseDriver");
con = DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword());
} finally {
if (con != null) {
try {
con.close();
} catch (SQLException e) {
logger.error("ClickHouse datasource try conn close conn error", e);
throw e;
}
}
}
}
}
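
A minimal usage sketch of the URL building above, assuming BaseDataSource exposes setters matching the getters used here (setAddress, setDatabase and setOther are assumptions, as are all values):

    ClickHouseDataSource ds = new ClickHouseDataSource();
    ds.setAddress("jdbc:clickhouse://192.168.220.189:8123"); // no trailing "/"
    ds.setDatabase("default");
    ds.setOther("socket_timeout=300000");
    // getJdbcUrl() inserts the missing "/" and appends "?" plus the extra parameters:
    // jdbc:clickhouse://192.168.220.189:8123/default?socket_timeout=300000
    String url = ds.getJdbcUrl();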

View File

@@ -39,6 +39,8 @@ public class DataSourceFactory {
return JSONUtils.parseObject(parameter, HiveDataSource.class);
case SPARK:
return JSONUtils.parseObject(parameter, SparkDataSource.class);
case CLICKHOUSE:
return JSONUtils.parseObject(parameter, ClickHouseDataSource.class);
default:
return null;
}
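
For completeness, a hedged example of the new factory branch in use; the method name getDatasource is assumed from the surrounding code, and the JSON keys mirror the BaseDataSource fields referenced elsewhere in this commit:

    String parameter = "{\"address\":\"jdbc:clickhouse://192.168.220.189:8123\","
            + "\"database\":\"default\",\"user\":\"default\",\"password\":\"\"}";
    // parses the JSON connection parameters into a ClickHouseDataSource
    BaseDataSource ds = DataSourceFactory.getDatasource(DbType.CLICKHOUSE, parameter);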

View File

@@ -22,6 +22,7 @@ import cn.escheduler.common.enums.DbType;
import cn.escheduler.common.enums.Direct;
import cn.escheduler.common.enums.TaskTimeoutStrategy;
import cn.escheduler.common.job.db.BaseDataSource;
import cn.escheduler.common.job.db.ClickHouseDataSource;
import cn.escheduler.common.job.db.MySQLDataSource;
import cn.escheduler.common.job.db.PostgreDataSource;
import cn.escheduler.common.process.Property;
@@ -111,6 +112,11 @@ public class ProcedureTask extends AbstractTask {
}else if (DbType.POSTGRESQL.name().equals(dataSource.getType().name())){
baseDataSource = JSONObject.parseObject(dataSource.getConnectionParams(),PostgreDataSource.class);
Class.forName(Constants.JDBC_POSTGRESQL_CLASS_NAME);
}else if (DbType.CLICKHOUSE.name().equals(dataSource.getType().name())){
// NOTE: ClickHouse does not support stored procedures or UDFs yet,
// but the JDBC driver is still loaded to keep this code path consistent with the other DB types
baseDataSource = JSONObject.parseObject(dataSource.getConnectionParams(),ClickHouseDataSource.class);
Class.forName(Constants.JDBC_CLICKHOUSE_CLASS_NAME);
}
// get jdbc connection

View File

@@ -120,6 +120,9 @@ public class SqlTask extends AbstractTask {
}else if (DbType.SPARK.name().equals(dataSource.getType().name())){
baseDataSource = JSONObject.parseObject(dataSource.getConnectionParams(),SparkDataSource.class);
Class.forName(Constants.JDBC_SPARK_CLASS_NAME);
}else if (DbType.CLICKHOUSE.name().equals(dataSource.getType().name())){
baseDataSource = JSONObject.parseObject(dataSource.getConnectionParams(),ClickHouseDataSource.class);
Class.forName(Constants.JDBC_CLICKHOUSE_CLASS_NAME);
}
Map<Integer,Property> sqlParamMap = new HashMap<Integer,Property>();

View File

@@ -52,21 +52,45 @@ public class SqlExecutorTest {
@Test
public void test() throws Exception {
String nodeName = "mysql sql test";
String taskAppId = "51_11282_263978";
String tenantCode = "hdfs";
Integer taskInstId = 263978;
sharedTestSqlTask(nodeName, taskAppId, tenantCode, taskInstId);
}
@Test
public void testClickhouse() throws Exception {
String nodeName = "ClickHouse sql test";
String taskAppId = "1_11_20";
String tenantCode = "default";
Integer taskInstId = 20;
sharedTestSqlTask(nodeName, taskAppId, tenantCode, taskInstId);
}
/**
* Basic test template for SQL tasks, mainly used to test different DBMS types
* @param nodeName node name for selected task
* @param taskAppId task app id
* @param tenantCode tenant code
* @param taskInstId task instance id
* @throws Exception
*/
private void sharedTestSqlTask(String nodeName, String taskAppId, String tenantCode, Integer taskInstId) throws Exception {
TaskProps taskProps = new TaskProps();
taskProps.setTaskDir("");
// processDefineId_processInstanceId_taskInstanceId
taskProps.setTaskAppId("51_11282_263978");
taskProps.setTaskAppId(taskAppId);
// set tenant -> task execute linux user
taskProps.setTenantCode("hdfs");
taskProps.setTenantCode(tenantCode);
taskProps.setTaskStartTime(new Date());
taskProps.setTaskTimeout(360000);
- taskProps.setTaskInstId(263978);
- taskProps.setNodeName("mysql sql test");
+ taskProps.setTaskInstId(taskInstId);
+ taskProps.setNodeName(nodeName);
- TaskInstance taskInstance = processDao.findTaskInstanceById(263978);
+ TaskInstance taskInstance = processDao.findTaskInstanceById(taskInstId);
String taskJson = taskInstance.getTaskJson();
TaskNode taskNode = JSONObject.parseObject(taskJson, TaskNode.class);

View File

@@ -6,7 +6,7 @@
<m-datasource
ref="refDs"
@on-dsData="_onDsData"
- :supportType="['MYSQL','POSTGRESQL']"
+ :supportType="['MYSQL','POSTGRESQL','CLICKHOUSE']"
:data="{ type:type,datasource:datasource }">
</m-datasource>
</div>

View File

@@ -13,6 +13,7 @@
<x-radio :label="'POSTGRESQL'">POSTGRESQL</x-radio>
<x-radio :label="'HIVE'">HIVE</x-radio>
<x-radio :label="'SPARK'">SPARK</x-radio>
<x-radio :label="'CLICKHOUSE'">CLICKHOUSE</x-radio>
</x-radio-group>
</template>
</m-list-box-f>

View File

@@ -66,6 +66,11 @@ export default {
id: 3,
code: 'SPARK',
disabled: false
},
{
id: 4,
code: 'CLICKHOUSE',
disabled: false
}
],
// Alarm interface

View File

@@ -20,7 +20,7 @@ import io from '@/module/io'
export default {
/**
* Data source creation
* @param "type": string,//MYSQL, POSTGRESQL, HIVE
* @param "type": string,//MYSQL, POSTGRESQL, HIVE, SPARK, CLICKHOUSE
* @param "name": string,
* @param "desc": string,
* @param "parameter":string //{"address":"jdbc:hive2://192.168.220.189:10000","autoReconnect":"true","characterEncoding":"utf8","database":"default","initialTimeout":3000,"jdbcUrl":"jdbc:hive2://192.168.220.189:10000/default","maxReconnect":10,"password":"","useUnicode":true,"user":"hive"}
@@ -49,7 +49,7 @@ export default {
},
/**
* Query data source list - no paging
* @param "type": string//MYSQL, POSTGRESQL, HIVE
* @param "type": string//MYSQL, POSTGRESQL, HIVE, SPARK, CLICKHOUSE
*/
getDatasourcesList ({ state }, payload) {
return new Promise((resolve, reject) => {

View File

@@ -366,6 +366,12 @@
<version>3.5.0</version>
</dependency>
<dependency>
<groupId>ru.yandex.clickhouse</groupId>
<artifactId>clickhouse-jdbc</artifactId>
<version>0.1.52</version>
</dependency>
</dependencies>
</dependencyManagement>