Merge remote-tracking branch 'upstream/1.3.1-release' into 131

commit d7b768d157
@@ -20,7 +20,7 @@ package org.apache.dolphinscheduler.server.entity;
 import org.apache.dolphinscheduler.dao.entity.UdfFunc;
 
 import java.io.Serializable;
-import java.util.List;
+import java.util.Map;
 
 /**
  * SQL Task ExecutionContext
@@ -38,9 +38,9 @@ public class SQLTaskExecutionContext implements Serializable {
      */
     private String connectionParams;
     /**
-     * udf function list
+     * udf function tenant code map
      */
-    private List<UdfFunc> udfFuncList;
+    private Map<UdfFunc,String> udfFuncTenantCodeMap;
 
 
     public int getWarningGroupId() {
@@ -51,12 +51,12 @@ public class SQLTaskExecutionContext implements Serializable {
         this.warningGroupId = warningGroupId;
     }
 
-    public List<UdfFunc> getUdfFuncList() {
-        return udfFuncList;
+    public Map<UdfFunc, String> getUdfFuncTenantCodeMap() {
+        return udfFuncTenantCodeMap;
     }
 
-    public void setUdfFuncList(List<UdfFunc> udfFuncList) {
-        this.udfFuncList = udfFuncList;
+    public void setUdfFuncTenantCodeMap(Map<UdfFunc, String> udfFuncTenantCodeMap) {
+        this.udfFuncTenantCodeMap = udfFuncTenantCodeMap;
     }
 
     public String getConnectionParams() {
@@ -72,7 +72,7 @@ public class SQLTaskExecutionContext implements Serializable {
         return "SQLTaskExecutionContext{" +
                 "warningGroupId=" + warningGroupId +
                 ", connectionParams='" + connectionParams + '\'' +
-                ", udfFuncList=" + udfFuncList +
+                ", udfFuncTenantCodeMap=" + udfFuncTenantCodeMap +
                 '}';
     }
 }
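Note: taken together, the four hunks above replace the UDF list on SQLTaskExecutionContext with a map from each UdfFunc to the tenant code that owns its resource. A consolidated sketch of the resulting UDF-related members, reconstructed from the diff only (unchanged members such as warningGroupId and connectionParams are elided here):

    import org.apache.dolphinscheduler.dao.entity.UdfFunc;

    import java.io.Serializable;
    import java.util.Map;

    public class SQLTaskExecutionContext implements Serializable {

        /**
         * udf function tenant code map: key is udf function, value is tenant code
         */
        private Map<UdfFunc, String> udfFuncTenantCodeMap;

        public Map<UdfFunc, String> getUdfFuncTenantCodeMap() {
            return udfFuncTenantCodeMap;
        }

        public void setUdfFuncTenantCodeMap(Map<UdfFunc, String> udfFuncTenantCodeMap) {
            this.udfFuncTenantCodeMap = udfFuncTenantCodeMap;
        }
    }
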
@@ -324,7 +324,13 @@ public class TaskPriorityQueueConsumer extends Thread{
             }
 
             List<UdfFunc> udfFuncList = processService.queryUdfFunListByids(udfFunIdsArray);
-            sqlTaskExecutionContext.setUdfFuncList(udfFuncList);
+            Map<UdfFunc,String> udfFuncMap = new HashMap<>();
+            for(UdfFunc udfFunc : udfFuncList) {
+                String tenantCode = processService.queryTenantCodeByResName(udfFunc.getResourceName(), ResourceType.UDF);
+                udfFuncMap.put(udfFunc,tenantCode);
+            }
+
+            sqlTaskExecutionContext.setUdfFuncTenantCodeMap(udfFuncMap);
         }
     }
 
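Note: the consumer now resolves a tenant code per UDF resource instead of relying on a single task-level tenant code. A minimal standalone sketch of that pairing step; the resolver and resource names below are hypothetical stand-ins for processService.queryTenantCodeByResName, which is the call the hunk actually uses:

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.function.Function;

    public class UdfTenantPairingSketch {

        // Pair each UDF resource name with the tenant code that owns it.
        static Map<String, String> pairWithTenantCode(List<String> udfResourceNames,
                                                      Function<String, String> resolveTenantCode) {
            Map<String, String> udfTenantCodeMap = new HashMap<>();
            for (String resourceName : udfResourceNames) {
                udfTenantCodeMap.put(resourceName, resolveTenantCode.apply(resourceName));
            }
            return udfTenantCodeMap;
        }

        public static void main(String[] args) {
            // hypothetical resources and resolver, for illustration only
            Map<String, String> map = pairWithTenantCode(
                    Arrays.asList("udf_a.jar", "udf_b.jar"),
                    resourceName -> "tenant_of_" + resourceName);
            System.out.println(map);
        }
    }
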
@@ -366,7 +372,7 @@ public class TaskPriorityQueueConsumer extends Thread{
 
         if (baseParam != null) {
             List<ResourceInfo> projectResourceFiles = baseParam.getResourceFilesList();
-            if (projectResourceFiles != null) {
+            if (CollectionUtils.isNotEmpty(projectResourceFiles)) {
 
                 // filter the resources that the resource id equals 0
                 Set<ResourceInfo> oldVersionResources = projectResourceFiles.stream().filter(t -> t.getId() == 0).collect(Collectors.toSet());
@@ -16,6 +16,7 @@
  */
 package org.apache.dolphinscheduler.server.utils;
 
+import org.apache.commons.collections.MapUtils;
 import org.apache.dolphinscheduler.common.Constants;
 import org.apache.dolphinscheduler.common.utils.CollectionUtils;
 import org.apache.dolphinscheduler.common.utils.HadoopUtils;
@@ -24,10 +25,8 @@ import org.apache.dolphinscheduler.dao.entity.UdfFunc;
 import org.slf4j.Logger;
 
 import java.text.MessageFormat;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
+import java.util.*;
+import java.util.stream.Collectors;
 
 import static org.apache.dolphinscheduler.common.utils.CollectionUtils.isNotEmpty;
 
@@ -43,34 +42,23 @@ public class UDFUtils {
 
     /**
      * create function list
-     * @param udfFuncs udf functions
-     * @param tenantCode tenant code
+     * @param udfFuncTenantCodeMap key is udf function,value is tenant code
      * @param logger logger
      * @return create function list
      */
-    public static List<String> createFuncs(List<UdfFunc> udfFuncs, String tenantCode,Logger logger){
+    public static List<String> createFuncs(Map<UdfFunc,String> udfFuncTenantCodeMap, Logger logger){
 
-        if (CollectionUtils.isEmpty(udfFuncs)){
+        if (MapUtils.isEmpty(udfFuncTenantCodeMap)){
             logger.info("can't find udf function resource");
             return null;
         }
-        // get hive udf jar path
-        String hiveUdfJarPath = HadoopUtils.getHdfsUdfDir(tenantCode);
-        logger.info("hive udf jar path : {}" , hiveUdfJarPath);
-
-        // is the root directory of udf defined
-        if (StringUtils.isEmpty(hiveUdfJarPath)) {
-            logger.error("not define hive udf jar path");
-            throw new RuntimeException("hive udf jar base path not defined ");
-        }
-        Set<String> resources = getFuncResouces(udfFuncs);
         List<String> funcList = new ArrayList<>();
 
         // build jar sql
-        buildJarSql(funcList, resources, hiveUdfJarPath);
+        buildJarSql(funcList, udfFuncTenantCodeMap);
 
         // build temp function sql
-        buildTempFuncSql(funcList, udfFuncs);
+        buildTempFuncSql(funcList, udfFuncTenantCodeMap.keySet().stream().collect(Collectors.toList()));
 
         return funcList;
     }
@@ -78,18 +66,20 @@ public class UDFUtils {
     /**
      * build jar sql
      * @param sqls sql list
-     * @param resources resource set
-     * @param uploadPath upload path
+     * @param udfFuncTenantCodeMap key is udf function,value is tenant code
      */
-    private static void buildJarSql(List<String> sqls, Set<String> resources, String uploadPath) {
+    private static void buildJarSql(List<String> sqls, Map<UdfFunc,String> udfFuncTenantCodeMap) {
         String defaultFS = HadoopUtils.getInstance().getConfiguration().get(Constants.FS_DEFAULTFS);
+
+        Set<Map.Entry<UdfFunc,String>> entries = udfFuncTenantCodeMap.entrySet();
+        for (Map.Entry<UdfFunc,String> entry : entries) {
+            String uploadPath = HadoopUtils.getHdfsUdfDir(entry.getValue());
             if (!uploadPath.startsWith("hdfs:")) {
                 uploadPath = defaultFS + uploadPath;
             }
-
-        for (String resource : resources) {
-            sqls.add(String.format("add jar %s/%s", uploadPath, resource));
+            sqls.add(String.format("add jar %s%s", uploadPath, entry.getKey().getResourceName()));
         }
 
     }
 
     /**
@@ -106,20 +96,5 @@
         }
     }
 
-    /**
-     * get the resource names of all functions
-     * @param udfFuncs udf function list
-     * @return
-     */
-    private static Set<String> getFuncResouces(List<UdfFunc> udfFuncs) {
-        Set<String> resources = new HashSet<>();
-
-        for (UdfFunc udfFunc : udfFuncs) {
-            resources.add(udfFunc.getResourceName());
-        }
-
-        return resources;
-    }
-
 
 }
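Note: with per-entry tenant codes, buildJarSql now emits one "add jar" statement per UDF, resolving the jar path from that UDF's own tenant directory instead of a single shared upload path. A rough standalone sketch of the statements the new flow produces; the HDFS directory layout returned by HadoopUtils.getHdfsUdfDir is an assumption here (the diff does not show it), and the resource/tenant names are hypothetical:

    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class BuildJarSqlSketch {

        // Stand-in for HadoopUtils.getHdfsUdfDir(tenantCode); the real path layout is an assumption.
        static String hdfsUdfDir(String tenantCode) {
            return "/dolphinscheduler/" + tenantCode + "/udfs/";
        }

        // Mirrors the new buildJarSql loop: one "add jar" per (resource, tenant) entry.
        static List<String> buildJarSql(Map<String, String> resourceTenantCodeMap, String defaultFS) {
            List<String> sqls = new ArrayList<>();
            for (Map.Entry<String, String> entry : resourceTenantCodeMap.entrySet()) {
                String uploadPath = hdfsUdfDir(entry.getValue());
                if (!uploadPath.startsWith("hdfs:")) {
                    uploadPath = defaultFS + uploadPath;
                }
                sqls.add(String.format("add jar %s%s", uploadPath, entry.getKey()));
            }
            return sqls;
        }

        public static void main(String[] args) {
            Map<String, String> map = new LinkedHashMap<>();
            map.put("udf_a.jar", "tenant_a");   // hypothetical resource / tenant pairs
            map.put("udf_b.jar", "tenant_b");
            buildJarSql(map, "hdfs://nameservice1").forEach(System.out::println);
            // prints, under these assumptions:
            // add jar hdfs://nameservice1/dolphinscheduler/tenant_a/udfs/udf_a.jar
            // add jar hdfs://nameservice1/dolphinscheduler/tenant_b/udfs/udf_b.jar
        }
    }
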
@@ -132,8 +132,7 @@ public class SqlTask extends AbstractTask {
                 .map(this::getSqlAndSqlParamsMap)
                 .collect(Collectors.toList());
 
-        List<String> createFuncs = UDFUtils.createFuncs(sqlTaskExecutionContext.getUdfFuncList(),
-                taskExecutionContext.getTenantCode(),
+        List<String> createFuncs = UDFUtils.createFuncs(sqlTaskExecutionContext.getUdfFuncTenantCodeMap(),
                 logger);
 
         // execute sql task