Mirror of https://gitee.com/dolphinscheduler/DolphinScheduler.git (synced 2024-12-03 04:39:00 +08:00)
DataxTask process test modify (#2162)
* 1, master persistent task; 2, extract master and worker communication model
* add license
* modify javadoc error
* TaskExecutionContext create modify
* buildAckCommand taskInstanceId not set modify
* java doc error modify
* add comment
* ExecutorManager interface add generic type
* add TaskInstanceCacheManager to receive Worker report result
* TaskInstance setExecutePath
* add TaskInstanceCacheManager to receive Worker Task result report
* TaskInstanceCacheManager add remove method
* add license
* add dispatch task method
* AbstractCommandExecutor remove db access
* taskInstanceCache is null, need load from db
* 1, worker TaskProps replaced with TaskExecutionContext; 2, Master kill Task, KillTaskProcessor modify
* worker remove db
* ShellTask modify
* master persistence processId and appIds
* master add kill task logic
* javadoc error modify
* remove chinese log
* executeDirectly method add Override
* remote module modify
* TaskKillResponseProcessor command type modify
* create buildKillCommand
* host add host:port format
* TaskAckProcessor modify
* task priority refactor
* remove ITaskQueue
* TaskPriority refactor
* remove logs
* WorkerServer refactor
* MasterSchedulerService modify
* WorkerConfig listen port modify
* modify master and worker listen port
* cancelTaskInstance set TaskExecutionContext host, logPath, executePath
* Encapsulate the parameters required by sqltask
* 1, Encapsulate the parameters required by sqltask; 2, SQLTask optimization
* AbstractTask modify
* ProcedureTask optimization
* MasterSchedulerService modify
* TaskUpdateQueueConsumer modify
* test
* DataxTask process run debug
* add protobuf dependency, MR, Spark task etc. need this
* TaskUpdateQueueConsumer modify

Co-authored-by: qiaozhanwei <qiaozhanwei@analysys.com.cn>
This commit is contained in:
parent 607ec3d174
commit 71b11e2c08
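The recurring theme of the commit message is that the master now resolves everything a task needs (datasource details, paths, ids) into an execution context and ships it to the worker, so the worker no longer queries the database. A minimal sketch of that split, with simplified names that are illustrative stand-ins and not DolphinScheduler's actual API:

    // Illustrative sketch only; class and method names are simplified stand-ins,
    // not the project's real classes.
    import java.io.Serializable;

    public class MasterWorkerSplitSketch {

        // Everything the worker needs travels inside this serializable context.
        static class TaskCtx implements Serializable {
            int taskInstanceId;
            String executePath;
            String logPath;
            String sourceConnectionParams;   // resolved by the master from the DB
            String targetConnectionParams;
        }

        // Master side: the only place that talks to the database.
        interface ContextBuilder {
            TaskCtx build(int taskInstanceId);
        }

        // Worker side: runs the task using only what the context carries.
        interface TaskRunner {
            void run(TaskCtx ctx);
        }
    }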
@@ -36,10 +36,6 @@
             <groupId>org.apache.dolphinscheduler</groupId>
             <artifactId>dolphinscheduler-common</artifactId>
             <exclusions>
-                <exclusion>
-                    <artifactId>protobuf-java</artifactId>
-                    <groupId>com.google.protobuf</groupId>
-                </exclusion>
                 <exclusion>
                     <groupId>io.netty</groupId>
                     <artifactId>netty</artifactId>
@@ -22,6 +22,7 @@ import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
 import org.apache.dolphinscheduler.common.enums.TaskType;
 import org.apache.dolphinscheduler.common.enums.UdfType;
 import org.apache.dolphinscheduler.common.model.TaskNode;
+import org.apache.dolphinscheduler.common.task.datax.DataxParameters;
 import org.apache.dolphinscheduler.common.task.procedure.ProcedureParameters;
 import org.apache.dolphinscheduler.common.task.sql.SqlParameters;
 import org.apache.dolphinscheduler.common.thread.Stopper;
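The newly imported DataxParameters is the task-definition side of the picture: judging from the getters used in the next hunk (getDataSource(), getDataTarget()), it carries at least the ids of the source and target datasources. A sketch of just that part, with field names inferred from those getters; the real class in dolphinscheduler-common presumably holds more (the DataX SQL, target table, and so on):

    // Inferred sketch of the fields DataxParameters must expose for the code below;
    // anything beyond these two ids is not assumed here.
    public class DataxParametersSketch {
        private int dataSource;   // id of the source datasource
        private int dataTarget;   // id of the target datasource

        public int getDataSource() { return dataSource; }
        public int getDataTarget() { return dataTarget; }
    }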
@@ -178,7 +179,19 @@ public class TaskUpdateQueueConsumer extends Thread{

        // DATAX task
        if (taskType == TaskType.DATAX){
            DataxParameters dataxParameters = JSONObject.parseObject(taskNode.getParams(), DataxParameters.class);

            DataSource dataSource = processService.findDataSourceById(dataxParameters.getDataSource());
            DataSource dataTarget = processService.findDataSourceById(dataxParameters.getDataTarget());

            dataxTaskExecutionContext.setDataSourceId(dataxParameters.getDataSource());
            dataxTaskExecutionContext.setSourcetype(dataSource.getType().getCode());
            dataxTaskExecutionContext.setSourceConnectionParams(dataSource.getConnectionParams());

            dataxTaskExecutionContext.setDataTargetId(dataxParameters.getDataTarget());
            dataxTaskExecutionContext.setTargetType(dataTarget.getType().getCode());
            dataxTaskExecutionContext.setTargetConnectionParams(dataTarget.getConnectionParams());
        }
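The setters used in this branch imply the shape of DataxTaskExecutionContext: ids, database type codes, and serialized connection params for both the source and the target. A sketch of that carrier object limited to the fields the hunk actually touches (the real class may hold more, and being part of the context sent to the worker it would need to be serializable):

    // Fields inferred from the setters in the DATAX branch above; nothing beyond
    // what the diff shows is assumed.
    import java.io.Serializable;

    public class DataxTaskExecutionContextSketch implements Serializable {
        private int dataSourceId;
        private int sourcetype;                 // DbType code of the source
        private String sourceConnectionParams;  // serialized connection params of the source
        private int dataTargetId;
        private int targetType;                 // DbType code of the target
        private String targetConnectionParams;  // serialized connection params of the target

        public void setDataSourceId(int dataSourceId) { this.dataSourceId = dataSourceId; }
        public void setSourcetype(int sourcetype) { this.sourcetype = sourcetype; }
        public void setSourceConnectionParams(String p) { this.sourceConnectionParams = p; }
        public void setDataTargetId(int dataTargetId) { this.dataTargetId = dataTargetId; }
        public void setTargetType(int targetType) { this.targetType = targetType; }
        public void setTargetConnectionParams(String p) { this.targetConnectionParams = p; }
    }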
@@ -216,8 +216,7 @@ public class DataxTask extends AbstractTask {
      * @return collection of datax job config JSONObject
      * @throws SQLException if error throws SQLException
      */
-    private List<JSONObject> buildDataxJobContentJson()
-            throws SQLException {
+    private List<JSONObject> buildDataxJobContentJson() throws SQLException {
         DataxTaskExecutionContext dataxTaskExecutionContext = taskExecutionContext.getDataxTaskExecutionContext();
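On the worker side, buildDataxJobContentJson() now reads the context instead of hitting the database. A simplified, assumption-laden sketch of what consuming the serialized connection params could look like, using plain JDBC for illustration; the real DataxTask presumably goes through DolphinScheduler's own datasource helpers rather than DriverManager, and the JSON key names below are illustrative, not the project's actual format:

    // Hypothetical helper: turns the connection-params JSON carried in the context
    // into a JDBC connection. Key names are assumptions made for the sketch.
    import java.sql.Connection;
    import java.sql.DriverManager;
    import com.alibaba.fastjson.JSONObject;

    public class DataxContextConsumerSketch {

        static Connection openConnection(String connectionParamsJson) throws Exception {
            JSONObject params = JSONObject.parseObject(connectionParamsJson);
            return DriverManager.getConnection(
                    params.getString("jdbcUrl"),
                    params.getString("user"),
                    params.getString("password"));
        }
    }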