From fee49557242a13307961e4280b404fbc846241de Mon Sep 17 00:00:00 2001 From: xiangzihao <460888207@qq.com> Date: Wed, 4 Sep 2024 21:22:34 +0800 Subject: [PATCH] [DSIP-68] Unify the table structure and table fields in backend and frontend ui sections (#16544) --- .github/workflows/api-test.yml | 27 +- .github/workflows/backend.yml | 139 +-- .github/workflows/e2e.yml | 34 +- .../mysql/docker-compose-base.yaml | 35 + .../schema-check/mysql/running-test.sh | 54 ++ .../workflows/schema-check/mysql/start-job.sh | 57 ++ .../postgresql/docker-compose-base.yaml | 34 + .../schema-check/postgresql/running-test.sh | 54 ++ .../schema-check/postgresql/start-job.sh | 45 + .github/workflows/unit-test.yml | 4 +- .gitignore | 1 + docs/docs/en/about/glossary.md | 4 +- docs/docs/en/architecture/task-structure.md | 2 +- .../docs/en/contribute/architecture-design.md | 2 +- docs/docs/en/guide/parameter/context.md | 4 +- docs/docs/en/guide/project/project-list.md | 16 +- docs/docs/en/guide/resource/task-group.md | 2 +- docs/docs/en/guide/task/sub-process.md | 10 +- docs/docs/en/guide/upgrade/incompatible.md | 2 + docs/docs/en/guide/upgrade/upgrade.md | 2 +- docs/docs/zh/about/glossary.md | 2 +- docs/docs/zh/architecture/task-structure.md | 2 +- .../docs/zh/contribute/architecture-design.md | 2 +- docs/docs/zh/guide/parameter/context.md | 4 +- docs/docs/zh/guide/resource/task-group.md | 2 +- docs/docs/zh/guide/task/sub-process.md | 10 +- docs/docs/zh/guide/upgrade/incompatible.md | 2 + docs/docs/zh/guide/upgrade/upgrade.md | 2 +- .../{sub_process.png => sub_workflow.png} | Bin .../api/test/cases/ExecutorAPITest.java | 46 +- .../api/test/cases/ProjectAPITest.java | 2 + .../api/test/cases/SchedulerAPITest.java | 28 +- .../api/test/cases/TenantAPITest.java | 2 + .../api/test/cases/WorkerGroupAPITest.java | 2 + .../test/cases/WorkflowDefinitionAPITest.java | 128 +-- .../test/cases/WorkflowInstanceAPITest.java | 114 +-- .../api/test/pages/workflow/ExecutorPage.java | 28 +- 
.../test/pages/workflow/SchedulerPage.java | 4 +- ...nPage.java => WorkflowDefinitionPage.java} | 36 +- ...ncePage.java => WorkflowInstancePage.java} | 22 +- .../test/resources/workflow-json/test.json | 10 +- dolphinscheduler-api-test/pom.xml | 13 +- .../audit/constants/AuditLogConstants.java | 7 +- .../api/audit/enums/AuditType.java | 33 +- .../impl/ScheduleAuditOperatorImpl.java | 2 +- .../impl/WorkflowAuditOperatorImpl.java | 4 +- ...=> WorkflowInstanceAuditOperatorImpl.java} | 4 +- .../controller/DataAnalysisController.java | 4 +- .../api/controller/ExecutorController.java | 100 +- .../api/controller/SchedulerController.java | 72 +- .../api/controller/TaskGroupController.java | 8 +- .../controller/TaskInstanceController.java | 18 +- .../WorkflowDefinitionController.java | 16 +- .../WorkflowInstanceController.java | 39 +- .../controller/WorkflowLineageController.java | 16 +- .../WorkflowTaskRelationController.java | 46 +- .../controller/v2/StatisticsV2Controller.java | 6 +- .../v2/TaskInstanceV2Controller.java | 8 +- ... 
=> WorkflowTaskRelationV2Controller.java} | 13 +- .../controller/v2/WorkflowV2Controller.java | 6 +- .../dolphinscheduler/api/dto/ClusterDto.java | 9 +- .../api/dto/DagDataSchedule.java | 22 +- .../api/dto/DefineUserDto.java | 21 +- .../api/dto/DynamicSubWorkflowDto.java | 2 +- .../api/dto/WorkflowDefinitionDto.java | 26 - .../api/dto/WorkflowInstanceDto.java | 26 - .../dto/schedule/ScheduleCreateRequest.java | 14 +- .../dto/schedule/ScheduleFilterRequest.java | 8 +- .../dto/schedule/ScheduleUpdateRequest.java | 6 +- .../TaskInstanceQueryRequest.java | 14 +- .../TaskRelationCreateRequest.java | 7 +- .../TaskRelationFilterRequest.java | 2 +- .../dto/workflow/WorkflowCreateRequest.java | 2 +- .../dto/workflow/WorkflowUpdateRequest.java | 8 +- .../WorkflowInstanceQueryRequest.java | 2 +- .../api/python/PythonGateway.java | 32 +- .../api/service/impl/ExecutorServiceImpl.java | 59 +- .../api/service/impl/ProjectServiceImpl.java | 2 +- .../service/impl/SchedulerServiceImpl.java | 47 +- .../impl/TaskDefinitionServiceImpl.java | 16 +- .../service/impl/TaskInstanceServiceImpl.java | 6 +- .../api/service/impl/TenantServiceImpl.java | 2 +- .../service/impl/WorkerGroupServiceImpl.java | 6 +- .../impl/WorkflowDefinitionServiceImpl.java | 64 +- .../impl/WorkflowInstanceServiceImpl.java | 76 +- .../impl/WorkflowLineageServiceImpl.java | 50 +- .../impl/WorkflowTaskRelationServiceImpl.java | 57 +- .../BackfillWorkflowRequestTransformer.java | 2 +- .../dolphinscheduler/api/vo/ScheduleVO.java | 72 +- .../api/vo/TaskDefinitionVO.java | 8 +- .../resources/dynamic-task-type-config.yaml | 2 +- .../resources/i18n/messages_en_US.properties | 6 +- .../resources/i18n/messages_zh_CN.properties | 8 +- .../src/main/resources/task-type-config.yaml | 2 +- .../DataAnalysisControllerTest.java | 5 +- .../controller/SchedulerControllerTest.java | 8 +- .../controller/WorkerGroupControllerTest.java | 2 +- .../WorkflowInstanceControllerTest.java | 32 +- ...InstanceExecuteFunctionControllerTest.java | 43 
+- .../WorkflowTaskRelationControllerTest.java | 7 +- .../v2/TaskInstanceV2ControllerTest.java | 10 +- .../api/service/SchedulerServiceTest.java | 12 +- .../TaskDefinitionServiceImplTest.java | 23 +- .../api/service/TaskInstanceServiceTest.java | 8 +- .../WorkflowDefinitionServiceTest.java | 30 +- ...lowInstanceExecuteFunctionServiceTest.java | 24 +- .../service/WorkflowInstanceServiceTest.java | 162 ++-- .../WorkflowTaskLineageServiceTest.java | 9 +- .../WorkflowTaskRelationServiceTest.java | 179 ++-- .../common/constants/CommandKeyConstants.java | 2 +- .../common/constants/Constants.java | 8 +- .../sql/ClasspathSqlScriptParserTest.java | 2 +- .../common/utils/CodeGenerateUtilsTest.java | 4 +- .../src/test/resources/sql/mysql_ddl.sql | 8 +- .../src/test/resources/sql/mysql_dml.sql | 4 +- .../apache/dolphinscheduler/dao/AlertDao.java | 50 +- .../dolphinscheduler/dao/entity/Alert.java | 53 +- .../dolphinscheduler/dao/entity/Command.java | 30 +- .../dolphinscheduler/dao/entity/DagData.java | 39 +- .../dao/entity/DependentLineageTask.java | 4 +- .../entity/DependentWorkflowDefinition.java | 23 +- .../dao/entity/DqExecuteResult.java | 105 +-- .../dao/entity/DqTaskStatisticsValue.java | 57 +- .../dao/entity/ErrorCommand.java | 73 +- .../dolphinscheduler/dao/entity/Schedule.java | 66 +- .../dao/entity/TaskGroupQueue.java | 53 +- .../dao/entity/TaskInstance.java | 134 +-- .../UserWithWorkflowDefinitionCode.java | 4 +- .../dao/entity/WorkflowAlertContent.java | 58 +- .../dao/entity/WorkflowDefinition.java | 68 +- .../dao/entity/WorkflowDefinitionLog.java | 8 +- .../dao/entity/WorkflowInstance.java | 71 +- .../dao/entity/WorkflowInstanceRelation.java | 52 +- .../dao/entity/WorkflowTaskLineage.java | 2 +- .../dao/entity/WorkflowTaskRelation.java | 57 +- .../dao/entity/WorkflowTaskRelationLog.java | 34 +- .../dao/mapper/ProjectMapper.java | 6 +- .../dao/mapper/RelationSubWorkflowMapper.java | 7 +- .../dao/mapper/ScheduleMapper.java | 34 +- 
.../dao/mapper/TaskDefinitionMapper.java | 6 +- .../dao/mapper/TaskGroupQueueMapper.java | 2 +- .../dao/mapper/TaskInstanceMapper.java | 22 +- .../dao/mapper/UserMapper.java | 4 +- .../mapper/WorkflowDefinitionLogMapper.java | 10 +- .../dao/mapper/WorkflowDefinitionMapper.java | 26 +- .../dao/mapper/WorkflowInstanceMapper.java | 124 ++- .../WorkflowInstanceRelationMapper.java | 12 +- .../dao/mapper/WorkflowTaskLineageMapper.java | 2 +- .../mapper/WorkflowTaskRelationLogMapper.java | 31 +- .../mapper/WorkflowTaskRelationMapper.java | 86 +- .../dao/repository/TaskDefinitionDao.java | 4 +- .../impl/TaskDefinitionDaoImpl.java | 11 +- .../impl/TaskDefinitionLogDaoImpl.java | 2 +- .../repository/impl/TaskInstanceDaoImpl.java | 6 +- .../impl/WorkflowDefinitionLogDaoImpl.java | 2 +- .../impl/WorkflowInstanceDaoImpl.java | 14 +- .../impl/WorkflowTaskRelationLogDaoImpl.java | 2 +- .../dao/utils/TaskInstanceUtils.java | 8 +- .../dao/mapper/AlertMapper.xml | 6 +- .../dao/mapper/CommandMapper.xml | 22 +- .../dao/mapper/DqExecuteResultMapper.xml | 28 +- .../dao/mapper/ErrorCommandMapper.xml | 19 +- .../dao/mapper/ProjectMapper.xml | 14 +- .../dao/mapper/RelationSubWorkflowMapper.xml | 2 +- .../dao/mapper/ScheduleMapper.xml | 68 +- .../dao/mapper/TaskDefinitionMapper.xml | 10 +- .../dao/mapper/TaskGroupQueueMapper.xml | 22 +- .../dao/mapper/TaskInstanceMapper.xml | 44 +- .../dao/mapper/UserMapper.xml | 14 +- .../mapper/WorkflowDefinitionLogMapper.xml | 22 +- .../dao/mapper/WorkflowDefinitionMapper.xml | 42 +- .../dao/mapper/WorkflowInstanceMapper.xml | 155 ++-- .../mapper/WorkflowInstanceRelationMapper.xml | 28 +- .../dao/mapper/WorkflowTaskLineageMapper.xml | 20 +- .../mapper/WorkflowTaskRelationLogMapper.xml | 54 +- .../dao/mapper/WorkflowTaskRelationMapper.xml | 147 ++- .../resources/sql/dolphinscheduler_h2.sql | 173 ++-- .../resources/sql/dolphinscheduler_mysql.sql | 200 ++-- .../sql/dolphinscheduler_postgresql.sql | 168 ++-- .../mysql/dolphinscheduler_ddl.sql | 82 +- 
.../mysql/dolphinscheduler_dml.sql | 7 + .../postgresql/dolphinscheduler_ddl.sql | 89 +- .../postgresql/dolphinscheduler_dml.sql | 7 + .../dao/entity/ErrorCommandTest.java | 16 +- .../dao/mapper/CommandMapperTest.java | 14 +- .../dao/mapper/ErrorCommandMapperTest.java | 4 +- .../dao/mapper/ScheduleMapperTest.java | 25 +- .../dao/mapper/TaskDefinitionMapperTest.java | 4 +- .../dao/mapper/TaskGroupQueueMapperTest.java | 2 +- .../dao/mapper/TaskInstanceMapperTest.java | 31 +- .../dao/mapper/UserMapperTest.java | 4 +- .../WorkflowDefinitionLogMapperTest.java | 8 +- .../mapper/WorkflowInstanceMapMapperTest.java | 20 +- .../mapper/WorkflowInstanceMapperTest.java | 35 +- .../mapper/WorkflowTaskLineageMapperTest.java | 4 +- .../WorkflowTaskRelationLogMapperTest.java | 8 +- .../WorkflowTaskRelationMapperTest.java | 10 +- .../repository/impl/CommandDaoImplTest.java | 8 +- .../impl/TaskGroupQueueDaoImplTest.java | 2 +- .../impl/WorkflowInstanceDaoImplTest.java | 4 +- .../cases/ClickhouseDataSourceE2ETest.java | 2 + .../e2e/cases/ClusterE2ETest.java | 2 + .../e2e/cases/EnvironmentE2ETest.java | 2 + .../e2e/cases/FileManageE2ETest.java | 2 + .../e2e/cases/HiveDataSourceE2ETest.java | 2 + .../e2e/cases/MysqlDataSourceE2ETest.java | 2 + .../e2e/cases/PostgresDataSourceE2ETest.java | 2 + .../e2e/cases/ProjectE2ETest.java | 2 + .../e2e/cases/QueueE2ETest.java | 2 + .../e2e/cases/SqlServerDataSourceE2ETest.java | 2 + .../e2e/cases/TenantE2ETest.java | 2 + .../e2e/cases/TokenE2ETest.java | 2 + .../e2e/cases/UserE2ETest.java | 2 + .../e2e/cases/WorkerGroupE2ETest.java | 2 + .../e2e/cases/WorkflowE2ETest.java | 6 +- .../e2e/cases/WorkflowHttpTaskE2ETest.java | 3 + .../e2e/cases/WorkflowJavaTaskE2ETest.java | 2 + .../e2e/cases/WorkflowSwitchE2ETest.java | 2 + .../e2e/cases/tasks/PythonTaskE2ETest.java | 2 + .../e2e/cases/tasks/ShellTaskE2ETest.java | 2 + .../e2e/pages/project/ProjectDetailPage.java | 8 +- .../project/workflow/TaskInstanceTab.java | 2 +- 
.../workflow/WorkflowDefinitionTab.java | 10 +- .../pages/project/workflow/WorkflowForm.java | 4 +- dolphinscheduler-e2e/pom.xml | 7 + .../master/engine/TaskGroupCoordinator.java | 6 +- .../handler/AbstractCommandHandler.java | 4 +- .../handler/ReRunWorkflowCommandHandler.java | 2 +- .../RecoverFailureTaskCommandHandler.java | 2 +- .../handler/RunWorkflowCommandHandler.java | 2 +- .../WorkflowFailoverCommandHandler.java | 2 +- .../runnable/AbstractTaskInstanceFactory.java | 8 +- .../runnable/TaskExecutionContextBuilder.java | 10 +- .../task/runnable/TaskExecutionRunnable.java | 4 +- .../WorkflowSuccessLifecycleListener.java | 8 +- .../trigger/WorkflowBackfillTrigger.java | 16 +- ...flowInstanceRecoverFailureTaskTrigger.java | 6 +- ...flowInstanceRecoverSuspendTaskTrigger.java | 6 +- .../WorkflowInstanceRepeatTrigger.java | 6 +- .../trigger/WorkflowManualTrigger.java | 16 +- .../trigger/WorkflowScheduleTrigger.java | 16 +- .../master/failover/WorkflowFailover.java | 6 +- .../master/metrics/MasterServerMetrics.java | 2 +- .../server/master/metrics/TaskMetrics.java | 2 +- .../registry/MasterWaitingStrategy.java | 2 +- ...TaskInstanceDispatchOperationFunction.java | 2 +- ...icITaskInstancePauseOperationFunction.java | 2 +- .../AsyncMasterTaskDelayQueueLooper.java | 2 +- .../runner/execute/MasterTaskExecutor.java | 4 +- ...askInstanceExecuteDispatchEventSender.java | 2 +- ...TaskInstanceExecuteRunningEventSender.java | 2 +- ...askInstanceExecutionFailedEventSender.java | 2 +- ...askInstanceExecutionKilledEventSender.java | 2 +- ...askInstanceExecutionPausedEventSender.java | 2 +- ...skInstanceExecutionSuccessEventSender.java | 2 +- .../task/condition/ConditionLogicTask.java | 2 +- .../ConditionLogicTaskPluginFactory.java | 2 +- .../DependentAsyncTaskExecuteFunction.java | 2 +- .../DependentLogicTaskPluginFactory.java | 2 +- .../DynamicAsyncTaskExecuteFunction.java | 4 +- .../task/dynamic/DynamicCommandUtils.java | 6 +- .../runner/task/dynamic/DynamicLogicTask.java | 16 
+- .../task/fake/LogicFakeTaskPluginFactory.java | 4 +- .../subworkflow/SubWorkflowLogicTask.java | 20 +- .../SubWorkflowLogicTaskPluginFactory.java | 2 +- .../trigger/SubWorkflowManualTrigger.java | 2 +- .../SwitchLogicTaskPluginFactory.java | 2 +- .../server/master/utils/DependentExecute.java | 16 +- .../master/utils/WorkflowInstanceUtils.java | 2 +- .../server/master/ParamsTest.java | 2 +- .../server/master/it/Repository.java | 4 +- .../it/cases/WorkflowInstancePauseIT.java | 4 +- .../cases/WorkflowInstanceRecoverPauseIT.java | 2 +- .../cases/WorkflowInstanceRecoverStopIT.java | 2 +- .../it/cases/WorkflowInstanceStopIT.java | 4 +- .../master/it/cases/WorkflowSchedulingIT.java | 4 +- .../master/it/cases/WorkflowStartIT.java | 8 +- .../GlobalTaskDispatchWaitingQueueTest.java | 2 +- .../task/dynamic/DynamicCommandUtilsTest.java | 8 +- .../utils/WorkflowInstanceUtilsTest.java | 2 +- ...rkflow_with_sub_workflow_task_success.yaml | 16 +- ...rkflow_with_sub_workflow_task_success.yaml | 16 +- ...rkflow_with_sub_workflow_task_success.yaml | 16 +- ...orkflow_with_sub_workflow_task_failed.yaml | 12 +- ...rkflow_with_sub_workflow_task_success.yaml | 12 +- ...rkflow_with_sub_workflow_task_success.yaml | 16 +- .../scheduler/quartz/ProcessScheduleTask.java | 4 +- .../service/alert/WorkflowAlertManager.java | 86 +- .../service/command/CommandServiceImpl.java | 4 +- .../service/cron/CronUtils.java | 2 +- .../service/expand/CuringParamsService.java | 8 +- .../expand/CuringParamsServiceImpl.java | 17 +- .../TimePlaceholderResolverExpandService.java | 2 +- ...ePlaceholderResolverExpandServiceImpl.java | 2 +- .../service/process/ProcessService.java | 83 +- .../service/process/ProcessServiceImpl.java | 878 ++++-------------- .../{ProcessDag.java => WorkflowDag.java} | 47 +- .../StandByTaskInstancePriorityQueue.java | 2 +- .../service/queue/TaskPriority.java | 139 +-- .../subworkflow/SubWorkflowServiceImpl.java | 6 +- .../service/utils/DagHelper.java | 68 +- 
.../service/utils/ParamUtils.java | 16 +- .../service/utils/ProcessUtils.java | 6 +- .../alert/WorkflowAlertManagerTest.java | 8 +- .../command/MessageServiceImplTest.java | 4 +- .../expand/CuringParamsServiceTest.java | 8 +- .../service/process/ProcessServiceTest.java | 82 +- .../StandByTaskInstancePriorityQueueTest.java | 2 +- .../service/utils/DagHelperTest.java | 46 +- .../src/main/resources/application.yaml | 8 +- .../plugin/task/api/TaskExecutionContext.java | 126 +-- .../task/api/k8s/impl/K8sTaskExecutor.java | 6 +- .../api/parameters/DynamicParameters.java | 9 +- ...meters.java => SubWorkflowParameters.java} | 17 +- .../api/task/SubWorkflowLogicTaskChannel.java | 4 +- .../SubWorkflowLogicTaskChannelFactory.java | 2 +- .../plugin/task/api/utils/LogUtils.java | 6 +- .../plugin/task/dq/DataQualityTask.java | 8 +- .../tools/demo/ProcessDefinitionDemo.java | 4 +- .../tools/lineage/MigrateLineageService.java | 4 +- .../{sub_process.png => sub_workflow.png} | Bin ...ocess_hover.png => sub_workflow_hover.png} | Bin dolphinscheduler-ui/src/locales/en_US/home.ts | 4 +- .../src/locales/en_US/project.ts | 12 +- dolphinscheduler-ui/src/locales/zh_CN/home.ts | 4 +- .../src/locales/zh_CN/project.ts | 10 +- .../src/service/modules/audit/types.ts | 2 +- .../src/service/modules/data-quality/types.ts | 10 +- .../src/service/modules/executors/index.ts | 8 +- .../src/service/modules/executors/types.ts | 16 +- .../modules/projects-analysis/index.ts | 4 +- .../modules/projects-analysis/types.ts | 4 +- .../src/service/modules/schedules/index.ts | 6 +- .../src/service/modules/schedules/types.ts | 14 +- .../src/service/modules/task-group/types.ts | 4 +- .../service/modules/task-instances/types.ts | 18 +- .../index.ts | 87 +- .../types.ts | 4 +- .../index.ts | 46 +- .../types.ts | 16 +- .../index.ts | 18 +- .../types.ts | 8 +- .../src/store/project/task-node.ts | 6 +- .../src/store/project/task-type.ts | 4 +- .../src/store/project/types.ts | 2 +- 
.../data-quality/task-result/use-table.ts | 8 +- .../views/home/components/definition-card.tsx | 18 +- dolphinscheduler-ui/src/views/home/index.tsx | 37 +- ...finition.ts => use-workflow-definition.ts} | 10 +- ...process-state.ts => use-workflow-state.ts} | 20 +- .../statistics/list-command-table.tsx | 8 +- .../statistics/list-error-command-table.tsx | 8 +- .../dependencies/use-dependencies.ts | 22 +- .../overview/components/definition-card.tsx | 18 +- .../src/views/projects/overview/index.tsx | 36 +- ...finition.ts => use-workflow-definition.ts} | 10 +- ...process-state.ts => use-workflow-state.ts} | 20 +- .../task/components/node/detail-modal.tsx | 20 +- .../task/components/node/fields/index.ts | 2 +- .../components/node/fields/use-child-node.ts | 24 +- .../components/node/fields/use-dependent.ts | 42 +- .../task/components/node/fields/use-switch.ts | 10 +- .../node/fields/use-task-definition.ts | 6 +- ...e-process-name.ts => use-workflow-name.ts} | 28 +- .../task/components/node/format-data.ts | 14 +- .../task/components/node/tasks/index.ts | 4 +- .../components/node/tasks/use-datasync.ts | 4 +- .../task/components/node/tasks/use-dms.ts | 4 +- .../task/components/node/tasks/use-dynamic.ts | 2 +- .../components/node/tasks/use-hive-cli.ts | 4 +- .../task/components/node/tasks/use-java.ts | 4 +- .../task/components/node/tasks/use-pytorch.ts | 4 +- ...use-sub-process.ts => use-sub-workflow.ts} | 6 +- .../projects/task/components/node/types.ts | 4 +- .../projects/task/constants/task-type.ts | 6 +- .../projects/task/instance/batch-task.tsx | 14 +- .../projects/task/instance/stream-task.tsx | 4 +- .../src/views/projects/task/instance/types.ts | 2 +- .../task/instance/use-stream-table.ts | 12 +- .../views/projects/task/instance/use-table.ts | 24 +- .../components/dag/dag-save-modal.tsx | 40 +- .../components/dag/dag-startup-param.tsx | 2 +- .../workflow/components/dag/dag-toolbar.tsx | 14 +- .../workflow/components/dag/dag.module.scss | 8 +- 
.../workflow/components/dag/index.tsx | 20 +- .../projects/workflow/components/dag/types.ts | 18 +- .../components/dag/use-business-mapper.ts | 2 +- .../components/dag/use-custom-cell-builder.ts | 4 +- .../components/dag/use-node-status.ts | 4 +- .../workflow/components/dag/use-task-edit.ts | 54 +- .../definition/components/start-modal.tsx | 6 +- .../definition/components/timing-modal.tsx | 10 +- .../workflow/definition/components/types.ts | 2 +- .../definition/components/use-form.ts | 6 +- .../definition/components/use-modal.ts | 26 +- .../definition/components/use-table.ts | 2 +- .../workflow/definition/create/index.tsx | 4 +- .../workflow/definition/detail/index.tsx | 14 +- .../projects/workflow/definition/index.tsx | 2 +- .../workflow/definition/timing/index.tsx | 2 +- .../workflow/definition/timing/types.ts | 2 +- .../workflow/definition/timing/use-table.ts | 18 +- .../workflow/definition/tree/index.tsx | 6 +- .../projects/workflow/definition/types.ts | 6 +- .../projects/workflow/definition/use-table.ts | 4 +- .../instance/components/table-action.tsx | 6 +- .../instance/components/variables-view.tsx | 8 +- ...on.tsx => workflow-instance-condition.tsx} | 36 +- .../workflow/instance/detail/index.tsx | 12 +- .../workflow/instance/gantt/use-gantt.ts | 2 +- .../projects/workflow/instance/index.tsx | 6 +- .../views/projects/workflow/instance/types.ts | 2 +- .../projects/workflow/instance/use-table.ts | 39 +- .../timing/components/timing-condition.tsx | 34 +- .../views/projects/workflow/timing/index.tsx | 4 +- .../views/projects/workflow/timing/types.ts | 2 +- .../views/resource/task-group/queue/index.tsx | 6 +- .../resource/task-group/queue/use-table.ts | 2 +- .../TaskExecutionDispatchEventSender.java | 2 +- .../TaskExecutionFailedEventSender.java | 2 +- .../TaskExecutionKilledEventSender.java | 2 +- .../TaskExecutionPausedEventSender.java | 2 +- .../TaskExecutionSuccessEventSender.java | 2 +- ...skInstanceExecutionRunningEventSender.java | 2 +- 
.../worker/runner/WorkerTaskExecutor.java | 4 +- ...TaskInstanceDispatchOperationFunction.java | 4 +- .../utils/TaskExecutionContextUtils.java | 6 +- .../worker/utils/TaskFilesTransferUtils.java | 4 +- .../runner/DefaultWorkerTaskExecutorTest.java | 4 +- .../utils/TaskExecutionContextUtilsTest.java | 12 +- .../utils/TaskFilesTransferUtilsTest.java | 18 +- 431 files changed, 3934 insertions(+), 5244 deletions(-) create mode 100644 .github/workflows/schema-check/mysql/docker-compose-base.yaml create mode 100644 .github/workflows/schema-check/mysql/running-test.sh create mode 100644 .github/workflows/schema-check/mysql/start-job.sh create mode 100644 .github/workflows/schema-check/postgresql/docker-compose-base.yaml create mode 100644 .github/workflows/schema-check/postgresql/running-test.sh create mode 100644 .github/workflows/schema-check/postgresql/start-job.sh rename docs/img/tasks/icons/{sub_process.png => sub_workflow.png} (100%) rename dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/{ProcessDefinitionPage.java => WorkflowDefinitionPage.java} (71%) rename dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/{ProcessInstancePage.java => WorkflowInstancePage.java} (73%) rename dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/{ProcessInstanceAuditOperatorImpl.java => WorkflowInstanceAuditOperatorImpl.java} (94%) rename dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/{ProcessTaskRelationV2Controller.java => WorkflowTaskRelationV2Controller.java} (94%) delete mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/WorkflowDefinitionDto.java delete mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/WorkflowInstanceDto.java rename 
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/{ProcessDag.java => WorkflowDag.java} (62%) rename dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/{SubProcessParameters.java => SubWorkflowParameters.java} (70%) rename dolphinscheduler-ui/public/images/task-icons/{sub_process.png => sub_workflow.png} (100%) rename dolphinscheduler-ui/public/images/task-icons/{sub_process_hover.png => sub_workflow_hover.png} (100%) rename dolphinscheduler-ui/src/service/modules/{process-definition => workflow-definition}/index.ts (58%) rename dolphinscheduler-ui/src/service/modules/{process-definition => workflow-definition}/types.ts (96%) rename dolphinscheduler-ui/src/service/modules/{process-instances => workflow-instances}/index.ts (61%) rename dolphinscheduler-ui/src/service/modules/{process-instances => workflow-instances}/types.ts (89%) rename dolphinscheduler-ui/src/service/modules/{process-task-relation => workflow-task-relation}/index.ts (75%) rename dolphinscheduler-ui/src/service/modules/{process-task-relation => workflow-task-relation}/types.ts (87%) rename dolphinscheduler-ui/src/views/home/{use-process-definition.ts => use-workflow-definition.ts} (83%) rename dolphinscheduler-ui/src/views/home/{use-process-state.ts => use-workflow-state.ts} (79%) rename dolphinscheduler-ui/src/views/projects/overview/{use-process-definition.ts => use-workflow-definition.ts} (83%) rename dolphinscheduler-ui/src/views/projects/overview/{use-process-state.ts => use-workflow-state.ts} (80%) rename dolphinscheduler-ui/src/views/projects/task/components/node/fields/{use-process-name.ts => use-workflow-name.ts} (80%) rename dolphinscheduler-ui/src/views/projects/task/components/node/tasks/{use-sub-process.ts => use-sub-workflow.ts} (94%) rename dolphinscheduler-ui/src/views/projects/workflow/instance/components/{process-instance-condition.tsx => workflow-instance-condition.tsx} (84%) 
diff --git a/.github/workflows/api-test.yml b/.github/workflows/api-test.yml index 900dab8d7e..90a2e9aa17 100644 --- a/.github/workflows/api-test.yml +++ b/.github/workflows/api-test.yml @@ -89,26 +89,35 @@ jobs: strategy: matrix: case: - - name: Tenant + - name: TenantAPITest class: org.apache.dolphinscheduler.api.test.cases.TenantAPITest - - name: WorkerGroup + - name: WorkerGroupAPITest class: org.apache.dolphinscheduler.api.test.cases.WorkerGroupAPITest - - name: Project + - name: ProjectAPITest class: org.apache.dolphinscheduler.api.test.cases.ProjectAPITest - - name: Workflow - class: org.apache.dolphinscheduler.api.test.cases.ProcessDefinitionAPITest - - name: Scheduler + - name: WorkflowDefinitionAPITest + class: org.apache.dolphinscheduler.api.test.cases.WorkflowDefinitionAPITest + - name: SchedulerAPITest class: org.apache.dolphinscheduler.api.test.cases.SchedulerAPITest - - name: Executor + - name: ExecutorAPITest class: org.apache.dolphinscheduler.api.test.cases.ExecutorAPITest - - name: ProcessInstance - class: org.apache.dolphinscheduler.api.test.cases.ProcessInstanceAPITest + - name: WorkflowInstanceAPITest + class: org.apache.dolphinscheduler.api.test.cases.WorkflowInstanceAPITest env: RECORDING_PATH: /tmp/recording-${{ matrix.case.name }} steps: - uses: actions/checkout@v4 with: submodules: true + - name: Set up JDK 11 + uses: actions/setup-java@v4 + with: + java-version: 11 + distribution: 'adopt' + - name: Collect Workflow Telemetry + uses: ./.github/actions/workflow-telemetry-action + with: + comment_on_pr: false - name: Cache local Maven repository uses: actions/cache@v4 with: diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml index 52ccc6c431..0e98226e2a 100644 --- a/.github/workflows/backend.yml +++ b/.github/workflows/backend.yml @@ -132,136 +132,35 @@ jobs: run: | /bin/bash ${{ matrix.case.script }} schema-check: - runs-on: ubuntu-latest - if: ${{ (needs.paths-filter.outputs.db-schema == 'true') || (github.event_name 
== 'push') }} - timeout-minutes: 20 + name: ${{ matrix.case.name }}-${{ matrix.version }} needs: build - services: - mysql: - image: mysql:5.7 - env: - MYSQL_ROOT_PASSWORD: mysql - MYSQL_DATABASE: dolphinscheduler_dev - ports: - - 3306:3306 - options: --name=mysql --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=3 - postgres: - image: postgres:15 - env: - POSTGRES_PASSWORD: postgres - POSTGRES_DB: dolphinscheduler_dev - ports: - - 5432:5432 - options: --name=postgres --health-cmd=pg_isready --health-interval=10s --health-timeout=5s --health-retries=5 + runs-on: ubuntu-latest + timeout-minutes: 20 strategy: fail-fast: false matrix: - db: ["mysql", "postgresql"] version: ["3.1.9", "3.2.0"] + case: + - name: schema-check-with-mysql + script: .github/workflows/schema-check/mysql/start-job.sh + - name: schema-check-with-postgresql + script: .github/workflows/schema-check/postgresql/start-job.sh steps: - - name: Set up JDK 8 - uses: actions/setup-java@v4 + - uses: actions/checkout@v4 with: - java-version: 8 - distribution: 'adopt' - - name: Install Atlas and Create Dir - run: | - mkdir -p dolphinscheduler/dev dolphinscheduler/${{ matrix.version }} - curl -sSf https://atlasgo.sh | sh - - name: Download Tarball + submodules: true + - name: Collect Workflow Telemetry + uses: ./.github/actions/workflow-telemetry-action + with: + comment_on_pr: false + - name: Download Binary Package uses: actions/download-artifact@v4 with: name: binary-package-8 - path: dolphinscheduler/dev - - name: Set Env + path: ds_schema_check_test/dev + - name: Running Schema Check run: | - VERSION=${{ matrix.version }} - echo "DATABASE_VERSION=${VERSION//\./}" >> $GITHUB_ENV - - name: Prepare - run: | - wget https://archive.apache.org/dist/dolphinscheduler/${{ matrix.version }}/apache-dolphinscheduler-${{ matrix.version }}-bin.tar.gz -P dolphinscheduler/${{ matrix.version }} - tar -xzf dolphinscheduler/${{ matrix.version }}/apache-dolphinscheduler-${{ 
matrix.version }}-bin.tar.gz -C dolphinscheduler/${{ matrix.version }} --strip-components 1 - tar -xzf dolphinscheduler/dev/apache-dolphinscheduler-*-bin.tar.gz -C dolphinscheduler/dev --strip-components 1 - - if [[ ${{ matrix.db }} == "mysql" ]]; then - MYSQL_JDBC_URL="https://repo.maven.apache.org/maven2/mysql/mysql-connector-java/8.0.16/mysql-connector-java-8.0.16.jar" - MYSQL_JDBC_JAR="mysql-connector-java-8.0.16.jar" - wget ${MYSQL_JDBC_URL} -O /tmp/${MYSQL_JDBC_JAR} - for base_dir in dolphinscheduler/dev dolphinscheduler/${{ matrix.version }}; do - if [[ $base_dir == *"dolphinscheduler/2"* ]]; then - cp /tmp/${MYSQL_JDBC_JAR} ${base_dir}/lib - else - for d in alert-server api-server master-server worker-server tools; do - cp /tmp/${MYSQL_JDBC_JAR} ${base_dir}/${d}/libs - done - fi - done - docker exec -i mysql mysql -uroot -pmysql -e "create database dolphinscheduler_${{ env.DATABASE_VERSION }}"; - else - docker exec -i postgres psql -U postgres -c "create database dolphinscheduler_${{ env.DATABASE_VERSION }};" - fi - - name: Check - run: | - if [[ $DATABASE_VERSION -lt 300 ]]; then - chmod +x dolphinscheduler/dev/tools/bin/upgrade-schema.sh dolphinscheduler/${{ matrix.version }}/script/create-dolphinscheduler.sh - else - chmod +x dolphinscheduler/dev/tools/bin/upgrade-schema.sh dolphinscheduler/${{ matrix.version }}/tools/bin/upgrade-schema.sh - fi - if [[ ${{ matrix.db }} == "mysql" ]]; then - export DATABASE="mysql" - export SPRING_DATASOURCE_DRIVER_CLASS_NAME="com.mysql.cj.jdbc.Driver" - export SPRING_DATASOURCE_URL="jdbc:mysql://127.0.0.1:3306/dolphinscheduler_dev?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true&useSSL=false" - export SPRING_DATASOURCE_USERNAME="root" - export SPRING_DATASOURCE_PASSWORD="mysql" - bash dolphinscheduler/dev/tools/bin/upgrade-schema.sh - - export SPRING_DATASOURCE_URL="jdbc:mysql://127.0.0.1:3306/dolphinscheduler_${{ env.DATABASE_VERSION 
}}?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true&useSSL=false" - if [[ $DATABASE_VERSION -lt 300 ]]; then - bash dolphinscheduler/${{ matrix.version }}/script/create-dolphinscheduler.sh - else - bash dolphinscheduler/${{ matrix.version }}/tools/bin/upgrade-schema.sh - fi - bash dolphinscheduler/dev/tools/bin/upgrade-schema.sh - - atlas_result=$(atlas schema diff \ - --from "mysql://root:mysql@127.0.0.1:3306/dolphinscheduler_${{ env.DATABASE_VERSION }}" \ - --to "mysql://root:mysql@127.0.0.1:3306/dolphinscheduler_dev") - if [[ ${atlas_result} != *"Schemas are synced"* ]]; then - echo "================================================================================================" - echo " !!!!! For Contributors !!!!!" - echo "================================================================================================" - echo "Database schema not sync, please add below change in the latest version of dolphinscheduler-dao/src/main/resources/sql/upgrade directory" - echo "${atlas_result}" - exit 1 - fi - else - export DATABASE="postgresql" - export SPRING_DATASOURCE_DRIVER_CLASS_NAME="org.postgresql.Driver" - export SPRING_DATASOURCE_URL="jdbc:postgresql://127.0.0.1:5432/dolphinscheduler_dev" - export SPRING_DATASOURCE_USERNAME="postgres" - export SPRING_DATASOURCE_PASSWORD="postgres" - bash dolphinscheduler/dev/tools/bin/upgrade-schema.sh - - export SPRING_DATASOURCE_URL="jdbc:postgresql://127.0.0.1:5432/dolphinscheduler_${{ env.DATABASE_VERSION }}" - if [[ $DATABASE_VERSION -lt 300 ]]; then - bash dolphinscheduler/${{ matrix.version }}/script/create-dolphinscheduler.sh - else - bash dolphinscheduler/${{ matrix.version }}/tools/bin/upgrade-schema.sh - fi - bash dolphinscheduler/dev/tools/bin/upgrade-schema.sh - - atlas_result=$(atlas schema diff \ - --from "postgres://postgres:postgres@127.0.0.1:5432/dolphinscheduler_${{ env.DATABASE_VERSION }}?search_path=public&sslmode=disable" \ - --to 
"postgres://postgres:postgres@127.0.0.1:5432/dolphinscheduler_dev?search_path=public&sslmode=disable") - if [[ ${atlas_result} != *"Schemas are synced"* ]]; then - echo "================================================================================================" - echo " !!!!! For Contributors !!!!!" - echo "================================================================================================" - echo "Database schema not sync, please add below change in the latest version in dolphinscheduler-dao/src/main/resources/sql/upgrade directory" - echo "${atlas_result}" - exit 1 - fi - fi + /bin/bash ${{ matrix.case.script }} ${{ matrix.version }} result: name: Build runs-on: ubuntu-latest @@ -275,7 +174,7 @@ jobs: echo "Skip Build!" exit 0 fi - if [[ ${{ needs.build.result }} != 'success' || ${{ needs.cluster-test.result }} != 'success' ]]; then + if [[ ${{ needs.build.result }} != 'success' || ${{ needs.cluster-test.result }} != 'success' || ${{ needs.schema-check.result }} != 'success' ]]; then echo "Build Failed!" 
exit -1 fi diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index 565187572a..a24cf820f8 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -92,45 +92,45 @@ jobs: strategy: matrix: case: - - name: Tenant + - name: TenantE2ETest class: org.apache.dolphinscheduler.e2e.cases.TenantE2ETest - - name: User + - name: UserE2ETest class: org.apache.dolphinscheduler.e2e.cases.UserE2ETest - - name: WorkerGroup + - name: WorkerGroupE2ETest class: org.apache.dolphinscheduler.e2e.cases.WorkerGroupE2ETest - - name: Project + - name: ProjectE2ETest class: org.apache.dolphinscheduler.e2e.cases.ProjectE2ETest - - name: Queue + - name: QueueE2ETest class: org.apache.dolphinscheduler.e2e.cases.QueueE2ETest - - name: Environment + - name: EnvironmentE2ETest class: org.apache.dolphinscheduler.e2e.cases.EnvironmentE2ETest - - name: Cluster + - name: ClusterE2ETest class: org.apache.dolphinscheduler.e2e.cases.ClusterE2ETest - - name: Token + - name: TokenE2ETest class: org.apache.dolphinscheduler.e2e.cases.TokenE2ETest - - name: Workflow + - name: WorkflowE2ETest class: org.apache.dolphinscheduler.e2e.cases.WorkflowE2ETest - - name: WorkflowHttp + - name: WorkflowHttpTaskE2ETest class: org.apache.dolphinscheduler.e2e.cases.WorkflowHttpTaskE2ETest - - name: WorkflowJava + - name: WorkflowJavaTaskE2ETest class: org.apache.dolphinscheduler.e2e.cases.WorkflowJavaTaskE2ETest # - name: WorkflowForSwitch # class: org.apache.dolphinscheduler.e2e.cases.WorkflowSwitchE2ETest - - name: FileManage + - name: FileManageE2ETest class: org.apache.dolphinscheduler.e2e.cases.FileManageE2ETest - - name: MysqlDataSource + - name: MysqlDataSourceE2ETest class: org.apache.dolphinscheduler.e2e.cases.MysqlDataSourceE2ETest - - name: ClickhouseDataSource + - name: ClickhouseDataSourceE2ETest class: org.apache.dolphinscheduler.e2e.cases.ClickhouseDataSourceE2ETest - - name: PostgresDataSource + - name: PostgresDataSourceE2ETest class: 
org.apache.dolphinscheduler.e2e.cases.PostgresDataSourceE2ETest - name: ShellTaskE2ETest class: org.apache.dolphinscheduler.e2e.cases.tasks.ShellTaskE2ETest - name: PythonTaskE2ETest class: org.apache.dolphinscheduler.e2e.cases.tasks.PythonTaskE2ETest - - name: SqlServerDataSource + - name: SqlServerDataSourceE2ETest class: org.apache.dolphinscheduler.e2e.cases.SqlServerDataSourceE2ETest - - name: HiveDataSource + - name: HiveDataSourceE2ETest class: org.apache.dolphinscheduler.e2e.cases.HiveDataSourceE2ETest env: RECORDING_PATH: /tmp/recording-${{ matrix.case.name }} diff --git a/.github/workflows/schema-check/mysql/docker-compose-base.yaml b/.github/workflows/schema-check/mysql/docker-compose-base.yaml new file mode 100644 index 0000000000..d515a91762 --- /dev/null +++ b/.github/workflows/schema-check/mysql/docker-compose-base.yaml @@ -0,0 +1,35 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +version: "3" + +services: + mysql: + container_name: mysql + image: mysql:8.0.33 + command: --default-authentication-plugin=mysql_native_password + restart: always + environment: + MYSQL_ROOT_PASSWORD: mysql + MYSQL_DATABASE: dolphinscheduler_dev + ports: + - "3306:3306" + healthcheck: + test: mysqladmin ping -h 127.0.0.1 -u root --password=$$MYSQL_ROOT_PASSWORD + interval: 5s + timeout: 60s + retries: 120 diff --git a/.github/workflows/schema-check/mysql/running-test.sh b/.github/workflows/schema-check/mysql/running-test.sh new file mode 100644 index 0000000000..72e2fb3b6a --- /dev/null +++ b/.github/workflows/schema-check/mysql/running-test.sh @@ -0,0 +1,54 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +set -euox pipefail + +DS_VERSION=$1 +DATABASE_VERSION=$2 + +# Install dev schema +export DATABASE="mysql" +export SPRING_DATASOURCE_DRIVER_CLASS_NAME="com.mysql.cj.jdbc.Driver" +export SPRING_DATASOURCE_URL="jdbc:mysql://127.0.0.1:3306/dolphinscheduler_dev?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true&useSSL=false" +export SPRING_DATASOURCE_USERNAME="root" +export SPRING_DATASOURCE_PASSWORD="mysql" +bash ds_schema_check_test/dev/tools/bin/upgrade-schema.sh + +# Install the target version schema and upgrade it +export SPRING_DATASOURCE_URL="jdbc:mysql://127.0.0.1:3306/dolphinscheduler_${DATABASE_VERSION}?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true&useSSL=false" +bash ds_schema_check_test/${DS_VERSION}/tools/bin/upgrade-schema.sh +bash ds_schema_check_test/dev/tools/bin/upgrade-schema.sh + +# Compare the schema +set +x +atlas_result=$(atlas schema diff \ + --from "mysql://root:mysql@127.0.0.1:3306/dolphinscheduler_${DATABASE_VERSION}" \ + --to "mysql://root:mysql@127.0.0.1:3306/dolphinscheduler_dev") +if [[ ${atlas_result} != *"Schemas are synced"* ]]; then + echo "================================================================================================" + echo " !!!!! For Contributors !!!!!" + echo "================================================================================================" + echo "Database schema not sync, please add below change in the latest version of dolphinscheduler-dao/src/main/resources/sql/upgrade directory" + echo "${atlas_result}" + exit 1 +else + echo "================================================================================================" + echo " !!!!! For Contributors !!!!!" 
+ echo "================================================================================================" + echo "Database schema sync successfully" + exit 0 +fi diff --git a/.github/workflows/schema-check/mysql/start-job.sh b/.github/workflows/schema-check/mysql/start-job.sh new file mode 100644 index 0000000000..4ca8ee4810 --- /dev/null +++ b/.github/workflows/schema-check/mysql/start-job.sh @@ -0,0 +1,57 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +set -euox pipefail + +DS_VERSION=$1 +DATABASE_VERSION=${DS_VERSION//\./} + +# Install Atlas and Create Dir +mkdir -p ds_schema_check_test/dev ds_schema_check_test/${DS_VERSION} +curl -sSf https://atlasgo.sh | sh + +# Preparing the environment +wget https://archive.apache.org/dist/dolphinscheduler/${DS_VERSION}/apache-dolphinscheduler-${DS_VERSION}-bin.tar.gz -P ds_schema_check_test/${DS_VERSION} +tar -xzf ds_schema_check_test/${DS_VERSION}/apache-dolphinscheduler-${DS_VERSION}-bin.tar.gz -C ds_schema_check_test/${DS_VERSION} --strip-components 1 +tar -xzf ds_schema_check_test/dev/apache-dolphinscheduler-*-bin.tar.gz -C ds_schema_check_test/dev --strip-components 1 + +if [[ $DATABASE_VERSION -lt 300 ]]; then + chmod +x ds_schema_check_test/dev/tools/bin/upgrade-schema.sh ds_schema_check_test/${DS_VERSION}/script/create-dolphinscheduler.sh +else + chmod +x ds_schema_check_test/dev/tools/bin/upgrade-schema.sh ds_schema_check_test/${DS_VERSION}/tools/bin/upgrade-schema.sh +fi + +MYSQL_JDBC_URL="https://repo.maven.apache.org/maven2/mysql/mysql-connector-java/8.0.16/mysql-connector-java-8.0.16.jar" +MYSQL_JDBC_JAR="mysql-connector-java-8.0.16.jar" +wget ${MYSQL_JDBC_URL} -O ds_schema_check_test/${MYSQL_JDBC_JAR} +for base_dir in ds_schema_check_test/dev ds_schema_check_test/${DS_VERSION}; do + if [[ $base_dir == *"dolphinscheduler/2"* ]]; then + cp ds_schema_check_test/${MYSQL_JDBC_JAR} ${base_dir}/lib + else + for d in alert-server api-server master-server worker-server tools; do + cp ds_schema_check_test/${MYSQL_JDBC_JAR} ${base_dir}/${d}/libs + done + fi +done +docker compose -f .github/workflows/schema-check/mysql/docker-compose-base.yaml up -d --wait +docker exec -i mysql mysql -uroot -pmysql -e "create database dolphinscheduler_${DATABASE_VERSION}"; + +#Running schema check tests +/bin/bash .github/workflows/schema-check/mysql/running-test.sh ${DS_VERSION} ${DATABASE_VERSION} + +#Cleanup +docker compose -f 
.github/workflows/schema-check/mysql/docker-compose-base.yaml down -v --remove-orphans diff --git a/.github/workflows/schema-check/postgresql/docker-compose-base.yaml b/.github/workflows/schema-check/postgresql/docker-compose-base.yaml new file mode 100644 index 0000000000..9f09f0d326 --- /dev/null +++ b/.github/workflows/schema-check/postgresql/docker-compose-base.yaml @@ -0,0 +1,34 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +version: "3" + +services: + postgres: + container_name: postgres + image: postgres:14.1 + restart: always + environment: + POSTGRES_PASSWORD: postgres + POSTGRES_DB: dolphinscheduler_dev + ports: + - "5432:5432" + healthcheck: + test: [ "CMD-SHELL", "pg_isready -U postgres" ] + interval: 5s + timeout: 60s + retries: 120 diff --git a/.github/workflows/schema-check/postgresql/running-test.sh b/.github/workflows/schema-check/postgresql/running-test.sh new file mode 100644 index 0000000000..0118ca2477 --- /dev/null +++ b/.github/workflows/schema-check/postgresql/running-test.sh @@ -0,0 +1,54 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. 
See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +set -euox pipefail + +DS_VERSION=$1 +DATABASE_VERSION=$2 + +# Install dev schema +export DATABASE="postgresql" +export SPRING_DATASOURCE_DRIVER_CLASS_NAME="org.postgresql.Driver" +export SPRING_DATASOURCE_USERNAME="postgres" +export SPRING_DATASOURCE_PASSWORD="postgres" +export SPRING_DATASOURCE_URL="jdbc:postgresql://127.0.0.1:5432/dolphinscheduler_dev" +bash ds_schema_check_test/dev/tools/bin/upgrade-schema.sh + +# Install the target version schema and upgrade it +export SPRING_DATASOURCE_URL="jdbc:postgresql://127.0.0.1:5432/dolphinscheduler_${DATABASE_VERSION}" +bash ds_schema_check_test/${DS_VERSION}/tools/bin/upgrade-schema.sh +bash ds_schema_check_test/dev/tools/bin/upgrade-schema.sh + +# Compare the schema +set +x +atlas_result=$(atlas schema diff \ + --from "postgres://postgres:postgres@127.0.0.1:5432/dolphinscheduler_${DATABASE_VERSION}?search_path=public&sslmode=disable" \ + --to "postgres://postgres:postgres@127.0.0.1:5432/dolphinscheduler_dev?search_path=public&sslmode=disable") +if [[ ${atlas_result} != *"Schemas are synced"* ]]; then + echo "================================================================================================" + echo " !!!!! For Contributors !!!!!" 
+ echo "================================================================================================" + echo "Database schema not sync, please add below change in the latest version of dolphinscheduler-dao/src/main/resources/sql/upgrade directory" + echo "${atlas_result}" + exit 1 +else + echo "================================================================================================" + echo " !!!!! For Contributors !!!!!" + echo "================================================================================================" + echo "Database schema sync successfully" + exit 0 +fi diff --git a/.github/workflows/schema-check/postgresql/start-job.sh b/.github/workflows/schema-check/postgresql/start-job.sh new file mode 100644 index 0000000000..2d71794fe6 --- /dev/null +++ b/.github/workflows/schema-check/postgresql/start-job.sh @@ -0,0 +1,45 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +set -euox pipefail + +DS_VERSION=$1 +DATABASE_VERSION=${DS_VERSION//\./} + +# Install Atlas and Create Dir +mkdir -p ds_schema_check_test/dev ds_schema_check_test/${DS_VERSION} +curl -sSf https://atlasgo.sh | sh + +# Preparing the environment +wget https://archive.apache.org/dist/dolphinscheduler/${DS_VERSION}/apache-dolphinscheduler-${DS_VERSION}-bin.tar.gz -P ds_schema_check_test/${DS_VERSION} +tar -xzf ds_schema_check_test/${DS_VERSION}/apache-dolphinscheduler-${DS_VERSION}-bin.tar.gz -C ds_schema_check_test/${DS_VERSION} --strip-components 1 +tar -xzf ds_schema_check_test/dev/apache-dolphinscheduler-*-bin.tar.gz -C ds_schema_check_test/dev --strip-components 1 + +if [[ $DATABASE_VERSION -lt 300 ]]; then + chmod +x ds_schema_check_test/dev/tools/bin/upgrade-schema.sh ds_schema_check_test/${DS_VERSION}/script/create-dolphinscheduler.sh +else + chmod +x ds_schema_check_test/dev/tools/bin/upgrade-schema.sh ds_schema_check_test/${DS_VERSION}/tools/bin/upgrade-schema.sh +fi + +docker compose -f .github/workflows/schema-check/postgresql/docker-compose-base.yaml up -d --wait +docker exec -i postgres psql -U postgres -c "create database dolphinscheduler_${DATABASE_VERSION}"; + +#Running schema check tests +/bin/bash .github/workflows/schema-check/postgresql/running-test.sh ${DS_VERSION} ${DATABASE_VERSION} + +#Cleanup +docker compose -f .github/workflows/schema-check/postgresql/docker-compose-base.yaml down -v --remove-orphans diff --git a/.github/workflows/unit-test.yml b/.github/workflows/unit-test.yml index d5cd09d87f..c0bc86dbf0 100644 --- a/.github/workflows/unit-test.yml +++ b/.github/workflows/unit-test.yml @@ -100,8 +100,8 @@ jobs: -Dsonar.projectKey=apache-dolphinscheduler -Dsonar.login=e4058004bc6be89decf558ac819aa1ecbee57682 -Dsonar.exclusions=dolphinscheduler-ui/src/**/i18n/locale/*.js,dolphinscheduler-microbench/src/**/* - -Dhttp.keepAlive=false - -Dmaven.wagon.http.pool=false + -Dhttp.keepAlive=false + -Dmaven.wagon.http.pool=false 
-Dmaven.wagon.httpconnectionManager.ttlSeconds=120 -DskipUT=true env: diff --git a/.gitignore b/.gitignore index 174ab57242..fcf292d66e 100644 --- a/.gitignore +++ b/.gitignore @@ -54,3 +54,4 @@ dolphinscheduler-worker/logs dolphinscheduler-master/logs dolphinscheduler-api/logs __pycache__ +ds_schema_check_test diff --git a/docs/docs/en/about/glossary.md b/docs/docs/en/about/glossary.md index e3cee76f14..db4e875354 100644 --- a/docs/docs/en/about/glossary.md +++ b/docs/docs/en/about/glossary.md @@ -19,8 +19,8 @@ manual start or scheduled scheduling. Each time the process definition runs, a p **Task instance**: The task instance is the instantiation of the task node in the process definition, which identifies the specific task -**Task type**: Currently supports SHELL, SQL, SUB_PROCESS (sub-process), PROCEDURE, MR, SPARK, PYTHON, DEPENDENT ( -depends), and plans to support dynamic plug-in expansion, note: **SUB_PROCESS** need relation with another workflow definition which also a separate process +**Task type**: Currently supports SHELL, SQL, SUB_WORKFLOW, PROCEDURE, MR, SPARK, PYTHON, DEPENDENT ( +depends), and plans to support dynamic plug-in expansion, note: **SUB_WORKFLOW** need relation with another workflow definition which also a separate process definition that can be started and executed separately **Scheduling method**: The system supports scheduled scheduling and manual scheduling based on cron expressions. 
Command diff --git a/docs/docs/en/architecture/task-structure.md b/docs/docs/en/architecture/task-structure.md index 041d753403..c9430d5068 100644 --- a/docs/docs/en/architecture/task-structure.md +++ b/docs/docs/en/architecture/task-structure.md @@ -919,7 +919,7 @@ No.|parameter name||type|description |notes ```bash { - "type":"SUB_PROCESS", + "type":"SUB_WORKFLOW", "id":"tasks-14806", "name":"SubProcessTask", "params":{ diff --git a/docs/docs/en/contribute/architecture-design.md b/docs/docs/en/contribute/architecture-design.md index 837f49ea90..a7b325bd08 100644 --- a/docs/docs/en/contribute/architecture-design.md +++ b/docs/docs/en/contribute/architecture-design.md @@ -19,7 +19,7 @@ Before explaining the architecture of the schedule system, let us first understa **Task instance**: A task instance is the instantiation of a specific task node when a process instance runs, which indicates the specific task execution status -**Task type**: Currently supports SHELL, SQL, SUB_PROCESS (sub-process), PROCEDURE, MR, SPARK, PYTHON, DEPENDENT (dependency), and plans to support dynamic plug-in extension, note: the sub-**SUB_PROCESS** is also A separate process definition that can be launched separately +**Task type**: Currently supports SHELL, SQL, SUB_WORKFLOW, PROCEDURE, MR, SPARK, PYTHON, DEPENDENT (dependency), and plans to support dynamic plug-in extension, note: the sub-**SUB_WORKFLOW** is also A separate process definition that can be launched separately **Schedule mode** : The system supports timing schedule and manual schedule based on cron expressions. Command type support: start workflow, start execution from current node, resume fault-tolerant workflow, resume pause process, start execution from failed node, complement, timer, rerun, pause, stop, resume waiting thread. 
Where **recovers the fault-tolerant workflow** and **restores the waiting thread** The two command types are used by the scheduling internal control and cannot be called externally diff --git a/docs/docs/en/guide/parameter/context.md b/docs/docs/en/guide/parameter/context.md index 9d6131d92e..5291d91fa2 100644 --- a/docs/docs/en/guide/parameter/context.md +++ b/docs/docs/en/guide/parameter/context.md @@ -106,11 +106,11 @@ Save the subprocess_example1 workflow and set the global parameters var1. ![context-subprocess02](../../../../img/new_ui/dev/parameter/context-subprocess02.png) -Create a sub_process task in a new workflow, and use the subprocess_example1 workflow as the sub-node. +Create a sub_workflow task in a new workflow, and use the subprocess_example1 workflow as the sub-node. ![context-subprocess03](../../../../img/new_ui/dev/parameter/context-subprocess03.png) -Create a shell task as a downstream task of the sub_process task, and write the following script: +Create a shell task as a downstream task of the sub_workflow task, and write the following script: ![context-subprocess04](../../../../img/new_ui/dev/parameter/context-subprocess04.png) diff --git a/docs/docs/en/guide/project/project-list.md b/docs/docs/en/guide/project/project-list.md index 253930e34c..fb6b8ace8e 100644 --- a/docs/docs/en/guide/project/project-list.md +++ b/docs/docs/en/guide/project/project-list.md @@ -2,14 +2,14 @@ This page describes details regarding Project screen in Apache DolphinScheduler. Here, you will see all the functions which can be handled in this screen. 
The following table explains commonly used terms in Apache DolphinScheduler: -| Glossary | description | -|---------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| DAG | Tasks in a workflow are assembled in form of Directed Acyclic Graph (DAG). A topological traversal is performed from nodes with zero degrees of entry until there are no subsequent nodes. | -| Workflow Definition | Visualization formed by dragging task nodes and establishing task node associations (DAG). | -| Workflow Instance | Instantiation of the workflow definition, which can be generated by manual start or scheduled scheduling. Each time the process definition runs, a workflow instance is generated. | -| Workflow Relation | Shows dynamic status of all the workflows in a project. | -| Task | Task is a discrete action in a Workflow. Apache DolphinScheduler supports SHELL, SQL, SUB_PROCESS (sub-process), PROCEDURE, MR, SPARK, PYTHON, DEPENDENT ( depends), and plans to support dynamic plug-in expansion, (SUB_PROCESS). It is also a separate process definition that can be started and executed separately. | -| Task Instance | Instantiation of the task node in the process definition, which identifies the specific task execution status. | +| Glossary | description | +|---------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| DAG | Tasks in a workflow are assembled in form of Directed Acyclic Graph (DAG). 
A topological traversal is performed from nodes with zero degrees of entry until there are no subsequent nodes. | +| Workflow Definition | Visualization formed by dragging task nodes and establishing task node associations (DAG). | +| Workflow Instance | Instantiation of the workflow definition, which can be generated by manual start or scheduled scheduling. Each time the process definition runs, a workflow instance is generated. | +| Workflow Relation | Shows dynamic status of all the workflows in a project. | +| Task | Task is a discrete action in a Workflow. Apache DolphinScheduler supports SHELL, SQL, SUB_WORKFLOW, PROCEDURE, MR, SPARK, PYTHON, DEPENDENT ( depends), and plans to support dynamic plug-in expansion, (SUB_WORKFLOW). It is also a separate process definition that can be started and executed separately. | +| Task Instance | Instantiation of the task node in the process definition, which identifies the specific task execution status. | ## Project List diff --git a/docs/docs/en/guide/resource/task-group.md b/docs/docs/en/guide/resource/task-group.md index b66a4878b2..c21d02dbef 100644 --- a/docs/docs/en/guide/resource/task-group.md +++ b/docs/docs/en/guide/resource/task-group.md @@ -30,7 +30,7 @@ Click the button to view task group usage information: ### Use of Task Groups -**Note**: The use of task groups is applicable to tasks executed by workers, such as `switch` nodes, `condition` nodes, `sub_process` and other node types executed by the master are not controlled by the task group. +**Note**: The use of task groups is applicable to tasks executed by workers, such as `switch` nodes, `condition` nodes, `sub_workflow` and other node types executed by the master are not controlled by the task group. 
Let's take the shell node as an example: diff --git a/docs/docs/en/guide/task/sub-process.md b/docs/docs/en/guide/task/sub-process.md index 8284dbf2d9..5f182e770b 100644 --- a/docs/docs/en/guide/task/sub-process.md +++ b/docs/docs/en/guide/task/sub-process.md @@ -7,7 +7,7 @@ The sub-process node is to execute an external workflow definition as a task nod ## Create Task - Click `Project Management -> Project Name -> Workflow Definition`, and click the `Create Workflow` button to enter the DAG editing page. -- Drag from the toolbar task node to canvas to create a new SubProcess task. +- Drag from the toolbar task node to canvas to create a new SubProcess task. ## Task Parameter @@ -30,16 +30,16 @@ Create a shell task to print "hello" and define the workflow as `test_dag01`. ![subprocess_task01](../../../../img/tasks/demo/subprocess_task01.png) -## Create the Sub_process task +## Create the Sub_workflow task -To use the sub_process, you need to create the sub-node task, which is the workflow `test_dag01` we created in the first step. After that, as shown in the diagram below, select the corresponding sub-node in position ⑤. +To use the sub_workflow, you need to create the sub-node task, which is the workflow `test_dag01` we created in the first step. After that, as shown in the diagram below, select the corresponding sub-node in position ⑤. ![subprocess_task02](../../../../img/tasks/demo/subprocess_task02.png) -After creating the sub_process, create a corresponding shell task for printing "world" and link both together. Save the current workflow and run it to get the expected result. +After creating the sub_workflow, create a corresponding shell task for printing "world" and link both together. Save the current workflow and run it to get the expected result. 
![subprocess_task03](../../../../img/tasks/demo/subprocess_task03.png) ## Note -When using `sub_process` to recall a sub-node task, you need to ensure that the defined sub-node is online status, otherwise, the sub_process workflow will not work properly. +When using `sub_workflow` to recall a sub-node task, you don't need to ensure that the defined sub-node is online status. diff --git a/docs/docs/en/guide/upgrade/incompatible.md b/docs/docs/en/guide/upgrade/incompatible.md index d20dba51ba..62852e2463 100644 --- a/docs/docs/en/guide/upgrade/incompatible.md +++ b/docs/docs/en/guide/upgrade/incompatible.md @@ -30,4 +30,6 @@ This document records the incompatible updates between each version. You need to * Remove the `udf-manage` function from the `resource center` ([#16209]) * Remove the `Pigeon` from the `Task Plugin` ([#16218]) +* Uniformly name `process` in code as `workflow` ([#16515]) +* Deprecated upgrade code of 1.x and 2.x in 3.3.0-release ([#16543]) diff --git a/docs/docs/en/guide/upgrade/upgrade.md b/docs/docs/en/guide/upgrade/upgrade.md index 9eb2987170..074b1ca442 100644 --- a/docs/docs/en/guide/upgrade/upgrade.md +++ b/docs/docs/en/guide/upgrade/upgrade.md @@ -60,7 +60,7 @@ Execute script: `sh ./tools/bin/migrate-lineage.sh`. Execution result: -- Migrate lineage data to new table `t_ds_process_task_lineage`. +- Migrate lineage data to new table `t_ds_workflow_task_lineage`. - This script only performs upsert operations, not deletes. You can delete it manually if you need to. 
### Upgrade Service diff --git a/docs/docs/zh/about/glossary.md b/docs/docs/zh/about/glossary.md index 2b5876669e..009d412276 100644 --- a/docs/docs/zh/about/glossary.md +++ b/docs/docs/zh/about/glossary.md @@ -14,7 +14,7 @@ **任务实例**:任务实例是流程定义中任务节点的实例化,标识着某个具体的任务 -**任务类型**:目前支持有 SHELL、SQL、SUB_PROCESS(子流程)、PROCEDURE、MR、SPARK、PYTHON、DEPENDENT(依赖),同时计划支持动态插件扩展,注意:其中 **SUB_PROCESS**类型的任务需要关联另外一个流程定义,被关联的流程定义是可以单独启动执行的 +**任务类型**:目前支持有 SHELL、SQL、SUB_WORKFLOW(子工作流)、PROCEDURE、MR、SPARK、PYTHON、DEPENDENT(依赖),同时计划支持动态插件扩展,注意:其中 **SUB_WORKFLOW**类型的任务需要关联另外一个流程定义,被关联的流程定义是可以单独启动执行的 **调度方式**:系统支持基于 cron 表达式的定时调度和手动调度。命令类型支持:启动工作流、从当前节点开始执行、恢复被容错的工作流、恢复暂停流程、从失败节点开始执行、补数、定时、重跑、暂停、停止、恢复等待线程。 其中 **恢复被容错的工作流** 和 **恢复等待线程** 两种命令类型是由调度内部控制使用,外部无法调用 diff --git a/docs/docs/zh/architecture/task-structure.md b/docs/docs/zh/architecture/task-structure.md index 656e57eed7..b9177add37 100644 --- a/docs/docs/zh/architecture/task-structure.md +++ b/docs/docs/zh/architecture/task-structure.md @@ -918,7 +918,7 @@ ```bash { - "type":"SUB_PROCESS", + "type":"SUB_WORKFLOW", "id":"tasks-14806", "name":"SubProcessTask", "params":{ diff --git a/docs/docs/zh/contribute/architecture-design.md b/docs/docs/zh/contribute/architecture-design.md index 091dff8b07..9d82714327 100644 --- a/docs/docs/zh/contribute/architecture-design.md +++ b/docs/docs/zh/contribute/architecture-design.md @@ -19,7 +19,7 @@ **任务实例**:任务实例是流程定义中任务节点的实例化,标识着具体的任务执行状态 -**任务类型**: 目前支持有 SHELL、SQL、SUB_PROCESS(子流程)、PROCEDURE、MR、SPARK、PYTHON、DEPENDENT(依赖),同时计划支持动态插件扩展,注意:其中子 **SUB_PROCESS** 也是一个单独的流程定义,是可以单独启动执行的 +**任务类型**: 目前支持有 SHELL、SQL、SUB_WORKFLOW(子工作流)、PROCEDURE、MR、SPARK、PYTHON、DEPENDENT(依赖),同时计划支持动态插件扩展,注意:其中子 **SUB_WORKFLOW** 也是一个单独的流程定义,是可以单独启动执行的 **调度方式:** 系统支持基于 cron 表达式的定时调度和手动调度。命令类型支持:启动工作流、从当前节点开始执行、恢复被容错的工作流、恢复暂停流程、从失败节点开始执行、补数、定时、重跑、暂停、停止、恢复等待线程。其中 **恢复被容错的工作流** 和 **恢复等待线程** 两种命令类型是由调度内部控制使用,外部无法调用 diff --git a/docs/docs/zh/guide/parameter/context.md b/docs/docs/zh/guide/parameter/context.md index 7f5870458d..51b1cfcff4 
100644 --- a/docs/docs/zh/guide/parameter/context.md +++ b/docs/docs/zh/guide/parameter/context.md @@ -105,11 +105,11 @@ Node_mysql 运行结果如下: ![context-subprocess02](../../../../img/new_ui/dev/parameter/context-subprocess02.png) -在新的工作流中创建 sub_process 任务,使用 subprocess_example1 工作流作为子节点。 +在新的工作流中创建 sub_workflow 任务,使用 subprocess_example1 工作流作为子节点。 ![context-subprocess03](../../../../img/new_ui/dev/parameter/context-subprocess03.png) -创建一个 shell 任务作为 sub_process 任务的下游任务,并编写如下脚本: +创建一个 shell 任务作为 sub_workflow 任务的下游任务,并编写如下脚本: ![context-subprocess04](../../../../img/new_ui/dev/parameter/context-subprocess04.png) diff --git a/docs/docs/zh/guide/resource/task-group.md b/docs/docs/zh/guide/resource/task-group.md index 58b46e26d5..84833fb282 100644 --- a/docs/docs/zh/guide/resource/task-group.md +++ b/docs/docs/zh/guide/resource/task-group.md @@ -32,7 +32,7 @@ #### 任务组的使用 -注:任务组的使用适用于由 worker 执行的任务,例如【switch】节点、【condition】节点、【sub_process】等由 master 负责执行的节点类型不受任务组控制。 +注:任务组的使用适用于由 worker 执行的任务,例如【switch】节点、【condition】节点、【sub_workflow】等由 master 负责执行的节点类型不受任务组控制。 我们以 shell 节点为例: diff --git a/docs/docs/zh/guide/task/sub-process.md b/docs/docs/zh/guide/task/sub-process.md index 8095e4b4a3..2fb091c7e0 100644 --- a/docs/docs/zh/guide/task/sub-process.md +++ b/docs/docs/zh/guide/task/sub-process.md @@ -8,7 +8,7 @@ - 点击项目管理 -> 项目名称 -> 工作流定义,点击”创建工作流”按钮,进入 DAG 编辑页面: -- 拖动工具栏的 任务节点到画板中。 +- 拖动工具栏的 任务节点到画板中。 ## 任务参数 @@ -31,16 +31,16 @@ ![subprocess_task01](../../../../img/tasks/demo/subprocess_task01.png) -### 创建 sub_process 任务 +### 创建 sub_workflow 任务 -在使用 sub_process 的过程中,需要创建所需的子结点任务,也就是我们第一步所创建的 test_dag01 工作流。然后如下图所示,在 ⑤ 的位置选择对应的子结点即可。 +在使用 sub_workflow 的过程中,需要创建所需的子结点任务,也就是我们第一步所创建的 test_dag01 工作流。然后如下图所示,在 ⑤ 的位置选择对应的子结点即可。 ![subprocess_task02](../../../../img/tasks/demo/subprocess_task02.png) -创建 sub_process 完成之后,再创建一个对应的 shell 任务,用于打印 “world”,并将二者连接起来。保存当前工作流,并上线运行,即可得到想要的结果。 +创建 sub_workflow 完成之后,再创建一个对应的 shell 任务,用于打印 “world”,并将二者连接起来。保存当前工作流,并上线运行,即可得到想要的结果。 
![subprocess_task03](../../../../img/tasks/demo/subprocess_task03.png) ## 注意事项 -在使用 sub_process 调用子结点任务的时候,需要保证定义的子结点为上线状态,否则 sub_process 的工作流无法正常运行。 +在使用 sub_workflow 调用子结点任务的时候,不需要保证定义的子结点为上线状态。 diff --git a/docs/docs/zh/guide/upgrade/incompatible.md b/docs/docs/zh/guide/upgrade/incompatible.md index a5260a0695..9dd689f0ae 100644 --- a/docs/docs/zh/guide/upgrade/incompatible.md +++ b/docs/docs/zh/guide/upgrade/incompatible.md @@ -28,4 +28,6 @@ * 从 `资源中心` 中移除了 `udf-manage` 功能 ([#16209]) * 从 `任务插件` 中移除了 `Pigeon` 类型 ([#16218]) +* 统一代码中的 `process` 为 `workflow` ([#16515]) +* 在 3.3.0-release 中废弃了从 1.x 至 2.x 的升级代码 ([#16543]) diff --git a/docs/docs/zh/guide/upgrade/upgrade.md b/docs/docs/zh/guide/upgrade/upgrade.md index 4866458c04..c2cec6783a 100644 --- a/docs/docs/zh/guide/upgrade/upgrade.md +++ b/docs/docs/zh/guide/upgrade/upgrade.md @@ -59,7 +59,7 @@ jar 包 并添加到 `./tools/libs` 目录下,设置以下环境变量 执行结果: -- 原血缘数据迁移至新血缘表 `t_ds_process_task_lineage`。 +- 原血缘数据迁移至新血缘表 `t_ds_workflow_task_lineage`。 - 此脚本仅执行 upsert 操作,不执行删除操作,如果需要删除,您可以手动删除。 ### 服务升级 diff --git a/docs/img/tasks/icons/sub_process.png b/docs/img/tasks/icons/sub_workflow.png similarity index 100% rename from docs/img/tasks/icons/sub_process.png rename to docs/img/tasks/icons/sub_workflow.png diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ExecutorAPITest.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ExecutorAPITest.java index 55639bd20f..303aa8ac5a 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ExecutorAPITest.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ExecutorAPITest.java @@ -25,7 +25,7 @@ import org.apache.dolphinscheduler.api.test.entity.LoginResponseData; import 
org.apache.dolphinscheduler.api.test.pages.LoginPage; import org.apache.dolphinscheduler.api.test.pages.project.ProjectPage; import org.apache.dolphinscheduler.api.test.pages.workflow.ExecutorPage; -import org.apache.dolphinscheduler.api.test.pages.workflow.ProcessDefinitionPage; +import org.apache.dolphinscheduler.api.test.pages.workflow.WorkflowDefinitionPage; import org.apache.dolphinscheduler.api.test.utils.JSONUtils; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.ReleaseState; @@ -50,7 +50,7 @@ import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; -//TODO: Some test cases rely on ProcessInstance APIs. Should complete remaining cases after ProcessInstance related API tests done. +//TODO: Some test cases rely on WorkflowInstance APIs. Should complete remaining cases after WorkflowInstance related API tests done. @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") @Slf4j public class ExecutorAPITest { @@ -65,13 +65,13 @@ public class ExecutorAPITest { private static ExecutorPage executorPage; - private static ProcessDefinitionPage processDefinitionPage; + private static WorkflowDefinitionPage workflowDefinitionPage; private static ProjectPage projectPage; private static long projectCode; - private static long processDefinitionCode; + private static long workflowDefinitionCode; private static List workflowInstanceIds; @@ -82,7 +82,7 @@ public class ExecutorAPITest { sessionId = JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId(); executorPage = new ExecutorPage(sessionId); - processDefinitionPage = new ProcessDefinitionPage(sessionId); + workflowDefinitionPage = new WorkflowDefinitionPage(sessionId); projectPage = new ProjectPage(sessionId); loginUser = new User(); loginUser.setUserName("admin"); @@ -97,7 +97,7 @@ public class ExecutorAPITest { @Test @Order(1) - public void 
testStartProcessInstance() { + public void testStartWorkflowInstance() { try { // create test project HttpResponse createProjectResponse = projectPage.createProject(loginUser, "project-test"); @@ -109,36 +109,36 @@ public class ExecutorAPITest { // upload test workflow definition json ClassLoader classLoader = getClass().getClassLoader(); File file = new File(classLoader.getResource("workflow-json/test.json").getFile()); - CloseableHttpResponse importProcessDefinitionResponse = processDefinitionPage - .importProcessDefinition(loginUser, projectCode, file); - String data = EntityUtils.toString(importProcessDefinitionResponse.getEntity()); + CloseableHttpResponse importWorkflowDefinitionResponse = workflowDefinitionPage + .importWorkflowDefinition(loginUser, projectCode, file); + String data = EntityUtils.toString(importWorkflowDefinitionResponse.getEntity()); Assertions.assertTrue(data.contains("\"success\":true")); // get workflow definition code - HttpResponse queryAllProcessDefinitionByProjectCodeResponse = - processDefinitionPage.queryAllProcessDefinitionByProjectCode(loginUser, projectCode); - Assertions.assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getSuccess()); - Assertions.assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getData().toString() + HttpResponse queryAllWorkflowDefinitionByProjectCodeResponse = + workflowDefinitionPage.queryAllWorkflowDefinitionByProjectCode(loginUser, projectCode); + Assertions.assertTrue(queryAllWorkflowDefinitionByProjectCodeResponse.getBody().getSuccess()); + Assertions.assertTrue(queryAllWorkflowDefinitionByProjectCodeResponse.getBody().getData().toString() .contains("hello world")); - processDefinitionCode = - (long) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllProcessDefinitionByProjectCodeResponse - .getBody().getData()).get(0)).get("processDefinition")).get("code"); + workflowDefinitionCode = + (long) ((LinkedHashMap) ((LinkedHashMap) ((List) 
queryAllWorkflowDefinitionByProjectCodeResponse + .getBody().getData()).get(0)).get("workflowDefinition")).get("code"); // release test workflow - HttpResponse releaseProcessDefinitionResponse = processDefinitionPage.releaseProcessDefinition(loginUser, - projectCode, processDefinitionCode, ReleaseState.ONLINE); - Assertions.assertTrue(releaseProcessDefinitionResponse.getBody().getSuccess()); + HttpResponse releaseWorkflowDefinitionResponse = workflowDefinitionPage.releaseWorkflowDefinition(loginUser, + projectCode, workflowDefinitionCode, ReleaseState.ONLINE); + Assertions.assertTrue(releaseWorkflowDefinitionResponse.getBody().getSuccess()); // trigger workflow instance SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); Date date = new Date(); String scheduleTime = String.format("%s,%s", formatter.format(date), formatter.format(date)); log.info("use current time {} as scheduleTime", scheduleTime); - HttpResponse startProcessInstanceResponse = executorPage.startProcessInstance(loginUser, projectCode, - processDefinitionCode, scheduleTime, FailureStrategy.END, WarningType.NONE); - Assertions.assertTrue(startProcessInstanceResponse.getBody().getSuccess()); + HttpResponse startWorkflowInstanceResponse = executorPage.startWorkflowInstance(loginUser, projectCode, + workflowDefinitionCode, scheduleTime, FailureStrategy.END, WarningType.NONE); + Assertions.assertTrue(startWorkflowInstanceResponse.getBody().getSuccess()); - workflowInstanceIds = (List) startProcessInstanceResponse.getBody().getData(); + workflowInstanceIds = (List) startWorkflowInstanceResponse.getBody().getData(); } catch (Exception e) { log.error("failed", e); Assertions.fail(); diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ProjectAPITest.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ProjectAPITest.java index 
cf5621f06c..25e4d09042 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ProjectAPITest.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ProjectAPITest.java @@ -39,9 +39,11 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DisableIfTestFails; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") @Slf4j +@DisableIfTestFails // TODO: Add more detailed permission control related cases after userPage test cases completed public class ProjectAPITest { diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/SchedulerAPITest.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/SchedulerAPITest.java index 9d02acfd28..715b73e849 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/SchedulerAPITest.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/SchedulerAPITest.java @@ -24,8 +24,8 @@ import org.apache.dolphinscheduler.api.test.entity.HttpResponse; import org.apache.dolphinscheduler.api.test.entity.LoginResponseData; import org.apache.dolphinscheduler.api.test.pages.LoginPage; import org.apache.dolphinscheduler.api.test.pages.project.ProjectPage; -import org.apache.dolphinscheduler.api.test.pages.workflow.ProcessDefinitionPage; import org.apache.dolphinscheduler.api.test.pages.workflow.SchedulerPage; +import org.apache.dolphinscheduler.api.test.pages.workflow.WorkflowDefinitionPage; import org.apache.dolphinscheduler.api.test.utils.JSONUtils; import 
org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.UserType; @@ -42,9 +42,11 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DisableIfTestFails; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") @Slf4j +@DisableIfTestFails public class SchedulerAPITest { private static final String username = "admin"; @@ -57,13 +59,13 @@ public class SchedulerAPITest { private static SchedulerPage schedulerPage; - private static ProcessDefinitionPage processDefinitionPage; + private static WorkflowDefinitionPage workflowDefinitionPage; private static ProjectPage projectPage; private static long projectCode; - private static long processDefinitionCode; + private static long workflowDefinitionCode; private static int scheduleId; @@ -75,7 +77,7 @@ public class SchedulerAPITest { JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId(); projectPage = new ProjectPage(sessionId); schedulerPage = new SchedulerPage(sessionId); - processDefinitionPage = new ProcessDefinitionPage(sessionId); + workflowDefinitionPage = new WorkflowDefinitionPage(sessionId); loginUser = new User(); loginUser.setUserName("admin"); loginUser.setId(1); @@ -98,20 +100,20 @@ public class SchedulerAPITest { .getBody().getData()).get(0)).get("code"); ClassLoader classLoader = getClass().getClassLoader(); File file = new File(classLoader.getResource("workflow-json/test.json").getFile()); - processDefinitionPage.importProcessDefinition(loginUser, projectCode, file); - HttpResponse queryAllProcessDefinitionByProjectCodeResponse = - processDefinitionPage.queryAllProcessDefinitionByProjectCode(loginUser, projectCode); - Assertions.assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getSuccess()); - processDefinitionCode = - (long) ((LinkedHashMap) 
((LinkedHashMap) ((List) queryAllProcessDefinitionByProjectCodeResponse - .getBody().getData()).get(0)).get("processDefinition")).get("code"); + workflowDefinitionPage.importWorkflowDefinition(loginUser, projectCode, file); + HttpResponse queryAllWorkflowDefinitionByProjectCodeResponse = + workflowDefinitionPage.queryAllWorkflowDefinitionByProjectCode(loginUser, projectCode); + Assertions.assertTrue(queryAllWorkflowDefinitionByProjectCodeResponse.getBody().getSuccess()); + workflowDefinitionCode = + (long) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllWorkflowDefinitionByProjectCodeResponse + .getBody().getData()).get(0)).get("workflowDefinition")).get("code"); - processDefinitionPage.releaseProcessDefinition(loginUser, projectCode, processDefinitionCode, + workflowDefinitionPage.releaseWorkflowDefinition(loginUser, projectCode, workflowDefinitionCode, ReleaseState.ONLINE); final String schedule = "{\"startTime\":\"2019-08-08 00:00:00\",\"endTime\":\"2100-08-08 00:00:00\",\"timezoneId\":\"America/Phoenix\",\"crontab\":\"0 0 3/6 * * ? 
*\"}"; HttpResponse createScheduleResponse = - schedulerPage.createSchedule(loginUser, projectCode, processDefinitionCode, schedule); + schedulerPage.createSchedule(loginUser, projectCode, workflowDefinitionCode, schedule); Assertions.assertTrue(createScheduleResponse.getBody().getSuccess()); Assertions.assertTrue(createScheduleResponse.getBody().getData().toString().contains("2019-08-08")); } diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/TenantAPITest.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/TenantAPITest.java index 2f8e6aa056..9f8407a469 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/TenantAPITest.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/TenantAPITest.java @@ -35,9 +35,11 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DisableIfTestFails; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") @Slf4j +@DisableIfTestFails public class TenantAPITest { private static final String tenant = System.getProperty("user.name"); diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkerGroupAPITest.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkerGroupAPITest.java index d34f6bad2d..05e6e1e8dc 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkerGroupAPITest.java +++ 
b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkerGroupAPITest.java @@ -40,9 +40,11 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DisableIfTestFails; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") @Slf4j +@DisableIfTestFails public class WorkerGroupAPITest { private static final String username = "admin"; diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkflowDefinitionAPITest.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkflowDefinitionAPITest.java index ab0463d3ed..2408753e41 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkflowDefinitionAPITest.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkflowDefinitionAPITest.java @@ -24,7 +24,7 @@ import org.apache.dolphinscheduler.api.test.entity.HttpResponse; import org.apache.dolphinscheduler.api.test.entity.LoginResponseData; import org.apache.dolphinscheduler.api.test.pages.LoginPage; import org.apache.dolphinscheduler.api.test.pages.project.ProjectPage; -import org.apache.dolphinscheduler.api.test.pages.workflow.ProcessDefinitionPage; +import org.apache.dolphinscheduler.api.test.pages.workflow.WorkflowDefinitionPage; import org.apache.dolphinscheduler.api.test.utils.JSONUtils; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.UserType; @@ -44,9 +44,11 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import 
org.junitpioneer.jupiter.DisableIfTestFails; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") @Slf4j +@DisableIfTestFails public class WorkflowDefinitionAPITest { private static final String username = "admin"; @@ -57,15 +59,15 @@ public class WorkflowDefinitionAPITest { private static User loginUser; - private static ProcessDefinitionPage processDefinitionPage; + private static WorkflowDefinitionPage workflowDefinitionPage; private static ProjectPage projectPage; private static long projectCode; - private static long processDefinitionCode; + private static long workflowDefinitionCode; - private static String processDefinitionName; + private static String workflowDefinitionName; @BeforeAll public static void setup() { @@ -73,7 +75,7 @@ public class WorkflowDefinitionAPITest { HttpResponse loginHttpResponse = loginPage.login(username, password); sessionId = JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId(); - processDefinitionPage = new ProcessDefinitionPage(sessionId); + workflowDefinitionPage = new WorkflowDefinitionPage(sessionId); projectPage = new ProjectPage(sessionId); loginUser = new User(); loginUser.setId(123); @@ -87,7 +89,7 @@ public class WorkflowDefinitionAPITest { @Test @Order(1) - public void testImportProcessDefinition() { + public void testImportWorkflowDefinition() { try { HttpResponse createProjectResponse = projectPage.createProject(loginUser, "project-test"); HttpResponse queryAllProjectListResponse = projectPage.queryAllProjectList(loginUser); @@ -97,9 +99,9 @@ public class WorkflowDefinitionAPITest { .getBody().getData()).get(0)).get("code"); ClassLoader classLoader = getClass().getClassLoader(); File file = new File(classLoader.getResource("workflow-json/test.json").getFile()); - CloseableHttpResponse importProcessDefinitionResponse = processDefinitionPage - .importProcessDefinition(loginUser, projectCode, file); - String data = 
EntityUtils.toString(importProcessDefinitionResponse.getEntity()); + CloseableHttpResponse importWorkflowDefinitionResponse = workflowDefinitionPage + .importWorkflowDefinition(loginUser, projectCode, file); + String data = EntityUtils.toString(importWorkflowDefinitionResponse.getEntity()); Assertions.assertTrue(data.contains("\"success\":true")); } catch (Exception e) { log.error("failed", e); @@ -109,93 +111,95 @@ public class WorkflowDefinitionAPITest { @Test @Order(2) - public void testQueryAllProcessDefinitionByProjectCode() { - HttpResponse queryAllProcessDefinitionByProjectCodeResponse = - processDefinitionPage.queryAllProcessDefinitionByProjectCode(loginUser, projectCode); - Assertions.assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getSuccess()); + public void testQueryAllWorkflowDefinitionByProjectCode() { + HttpResponse queryAllWorkflowDefinitionByProjectCodeResponse = + workflowDefinitionPage.queryAllWorkflowDefinitionByProjectCode(loginUser, projectCode); + Assertions.assertTrue(queryAllWorkflowDefinitionByProjectCodeResponse.getBody().getSuccess()); Assertions.assertTrue( - queryAllProcessDefinitionByProjectCodeResponse.getBody().getData().toString().contains("hello world")); - processDefinitionCode = - (long) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllProcessDefinitionByProjectCodeResponse - .getBody().getData()).get(0)).get("processDefinition")).get("code"); - processDefinitionName = - (String) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllProcessDefinitionByProjectCodeResponse - .getBody().getData()).get(0)).get("processDefinition")).get("name"); + queryAllWorkflowDefinitionByProjectCodeResponse.getBody().getData().toString().contains("hello world")); + workflowDefinitionCode = + (long) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllWorkflowDefinitionByProjectCodeResponse + .getBody().getData()).get(0)).get("workflowDefinition")).get("code"); + workflowDefinitionName = + (String) ((LinkedHashMap) ((LinkedHashMap) 
((List) queryAllWorkflowDefinitionByProjectCodeResponse + .getBody().getData()).get(0)).get("workflowDefinition")).get("name"); } @Test @Order(3) - public void testQueryProcessDefinitionByCode() { - HttpResponse queryProcessDefinitionByCodeResponse = - processDefinitionPage.queryProcessDefinitionByCode(loginUser, projectCode, processDefinitionCode); - Assertions.assertTrue(queryProcessDefinitionByCodeResponse.getBody().getSuccess()); + public void testQueryWorkflowDefinitionByCode() { + HttpResponse queryWorkflowDefinitionByCodeResponse = + workflowDefinitionPage.queryWorkflowDefinitionByCode(loginUser, projectCode, workflowDefinitionCode); + Assertions.assertTrue(queryWorkflowDefinitionByCodeResponse.getBody().getSuccess()); Assertions.assertTrue( - queryProcessDefinitionByCodeResponse.getBody().getData().toString().contains("hello world")); + queryWorkflowDefinitionByCodeResponse.getBody().getData().toString().contains("hello world")); } @Test @Order(4) - public void testgetProcessListByProjectCode() { - HttpResponse getProcessListByProjectCodeResponse = - processDefinitionPage.getProcessListByProjectCode(loginUser, projectCode); - Assertions.assertTrue(getProcessListByProjectCodeResponse.getBody().getSuccess()); + public void testGetWorkflowListByProjectCode() { + HttpResponse getWorkflowListByProjectCodeResponse = + workflowDefinitionPage.getWorkflowListByProjectCode(loginUser, projectCode); + Assertions.assertTrue(getWorkflowListByProjectCodeResponse.getBody().getSuccess()); Assertions - .assertTrue(getProcessListByProjectCodeResponse.getBody().getData().toString().contains("test_import")); + .assertTrue( + getWorkflowListByProjectCodeResponse.getBody().getData().toString().contains("test_import")); } @Test @Order(5) - public void testQueryProcessDefinitionByName() { - HttpResponse queryProcessDefinitionByNameResponse = - processDefinitionPage.queryProcessDefinitionByName(loginUser, projectCode, processDefinitionName); - 
Assertions.assertTrue(queryProcessDefinitionByNameResponse.getBody().getSuccess()); + public void testQueryWorkflowDefinitionByName() { + HttpResponse queryWorkflowDefinitionByNameResponse = + workflowDefinitionPage.queryWorkflowDefinitionByName(loginUser, projectCode, workflowDefinitionName); + Assertions.assertTrue(queryWorkflowDefinitionByNameResponse.getBody().getSuccess()); Assertions.assertTrue( - queryProcessDefinitionByNameResponse.getBody().getData().toString().contains("hello world")); + queryWorkflowDefinitionByNameResponse.getBody().getData().toString().contains("hello world")); } @Test @Order(6) - public void testQueryProcessDefinitionList() { - HttpResponse queryProcessDefinitionListResponse = - processDefinitionPage.queryProcessDefinitionList(loginUser, projectCode); - Assertions.assertTrue(queryProcessDefinitionListResponse.getBody().getSuccess()); + public void testQueryWorkflowDefinitionList() { + HttpResponse queryWorkflowDefinitionListResponse = + workflowDefinitionPage.queryWorkflowDefinitionList(loginUser, projectCode); + Assertions.assertTrue(queryWorkflowDefinitionListResponse.getBody().getSuccess()); Assertions - .assertTrue(queryProcessDefinitionListResponse.getBody().getData().toString().contains("hello world")); + .assertTrue(queryWorkflowDefinitionListResponse.getBody().getData().toString().contains("hello world")); } @Test @Order(7) - public void testReleaseProcessDefinition() { - HttpResponse releaseProcessDefinitionResponse = processDefinitionPage.releaseProcessDefinition(loginUser, - projectCode, processDefinitionCode, ReleaseState.ONLINE); - Assertions.assertTrue(releaseProcessDefinitionResponse.getBody().getSuccess()); + public void testReleaseWorkflowDefinition() { + HttpResponse releaseWorkflowDefinitionResponse = workflowDefinitionPage.releaseWorkflowDefinition(loginUser, + projectCode, workflowDefinitionCode, ReleaseState.ONLINE); + Assertions.assertTrue(releaseWorkflowDefinitionResponse.getBody().getSuccess()); - HttpResponse 
queryProcessDefinitionByCodeResponse = - processDefinitionPage.queryProcessDefinitionByCode(loginUser, projectCode, processDefinitionCode); - Assertions.assertTrue(queryProcessDefinitionByCodeResponse.getBody().getSuccess()); + HttpResponse queryWorkflowDefinitionByCodeResponse = + workflowDefinitionPage.queryWorkflowDefinitionByCode(loginUser, projectCode, workflowDefinitionCode); + Assertions.assertTrue(queryWorkflowDefinitionByCodeResponse.getBody().getSuccess()); Assertions.assertTrue( - queryProcessDefinitionByCodeResponse.getBody().getData().toString().contains("releaseState=ONLINE")); + queryWorkflowDefinitionByCodeResponse.getBody().getData().toString().contains("releaseState=ONLINE")); } @Test @Order(8) - public void testDeleteProcessDefinitionByCode() { - HttpResponse deleteProcessDefinitionByCodeResponse = - processDefinitionPage.deleteProcessDefinitionByCode(loginUser, projectCode, processDefinitionCode); - Assertions.assertFalse(deleteProcessDefinitionByCodeResponse.getBody().getSuccess()); + public void testDeleteWorkflowDefinitionByCode() { + HttpResponse deleteWorkflowDefinitionByCodeResponse = + workflowDefinitionPage.deleteWorkflowDefinitionByCode(loginUser, projectCode, workflowDefinitionCode); + Assertions.assertFalse(deleteWorkflowDefinitionByCodeResponse.getBody().getSuccess()); - HttpResponse releaseProcessDefinitionResponse = processDefinitionPage.releaseProcessDefinition(loginUser, - projectCode, processDefinitionCode, ReleaseState.OFFLINE); - Assertions.assertTrue(releaseProcessDefinitionResponse.getBody().getSuccess()); + HttpResponse releaseWorkflowDefinitionResponse = workflowDefinitionPage.releaseWorkflowDefinition(loginUser, + projectCode, workflowDefinitionCode, ReleaseState.OFFLINE); + Assertions.assertTrue(releaseWorkflowDefinitionResponse.getBody().getSuccess()); - deleteProcessDefinitionByCodeResponse = - processDefinitionPage.deleteProcessDefinitionByCode(loginUser, projectCode, processDefinitionCode); - 
Assertions.assertTrue(deleteProcessDefinitionByCodeResponse.getBody().getSuccess()); + deleteWorkflowDefinitionByCodeResponse = + workflowDefinitionPage.deleteWorkflowDefinitionByCode(loginUser, projectCode, workflowDefinitionCode); + Assertions.assertTrue(deleteWorkflowDefinitionByCodeResponse.getBody().getSuccess()); - HttpResponse queryProcessDefinitionListResponse = - processDefinitionPage.queryProcessDefinitionList(loginUser, projectCode); - Assertions.assertTrue(queryProcessDefinitionListResponse.getBody().getSuccess()); + HttpResponse queryWorkflowDefinitionListResponse = + workflowDefinitionPage.queryWorkflowDefinitionList(loginUser, projectCode); + Assertions.assertTrue(queryWorkflowDefinitionListResponse.getBody().getSuccess()); Assertions - .assertFalse(queryProcessDefinitionListResponse.getBody().getData().toString().contains("hello world")); + .assertFalse( + queryWorkflowDefinitionListResponse.getBody().getData().toString().contains("hello world")); } } diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkflowInstanceAPITest.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkflowInstanceAPITest.java index 35523f5e44..1025292b81 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkflowInstanceAPITest.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkflowInstanceAPITest.java @@ -28,8 +28,8 @@ import org.apache.dolphinscheduler.api.test.entity.LoginResponseData; import org.apache.dolphinscheduler.api.test.pages.LoginPage; import org.apache.dolphinscheduler.api.test.pages.project.ProjectPage; import org.apache.dolphinscheduler.api.test.pages.workflow.ExecutorPage; -import org.apache.dolphinscheduler.api.test.pages.workflow.ProcessDefinitionPage; 
-import org.apache.dolphinscheduler.api.test.pages.workflow.ProcessInstancePage; +import org.apache.dolphinscheduler.api.test.pages.workflow.WorkflowDefinitionPage; +import org.apache.dolphinscheduler.api.test.pages.workflow.WorkflowInstancePage; import org.apache.dolphinscheduler.api.test.utils.JSONUtils; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.ReleaseState; @@ -55,10 +55,12 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DisableIfTestFails; import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") @Slf4j +@DisableIfTestFails public class WorkflowInstanceAPITest { private static final String username = "admin"; @@ -69,19 +71,19 @@ public class WorkflowInstanceAPITest { private static User loginUser; - private static ProcessInstancePage processInstancePage; + private static WorkflowInstancePage workflowInstancePage; private static ExecutorPage executorPage; - private static ProcessDefinitionPage processDefinitionPage; + private static WorkflowDefinitionPage workflowDefinitionPage; private static ProjectPage projectPage; private static long projectCode; - private static long processDefinitionCode; + private static long workflowDefinitionCode; - private static int processInstanceId; + private static int workflowInstanceId; @BeforeAll public static void setup() { @@ -89,9 +91,9 @@ public class WorkflowInstanceAPITest { HttpResponse loginHttpResponse = loginPage.login(username, password); sessionId = JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId(); - processInstancePage = new ProcessInstancePage(sessionId); + workflowInstancePage = new WorkflowInstancePage(sessionId); executorPage = new ExecutorPage(sessionId); - processDefinitionPage = 
new ProcessDefinitionPage(sessionId); + workflowDefinitionPage = new WorkflowDefinitionPage(sessionId); projectPage = new ProjectPage(sessionId); loginUser = new User(); loginUser.setUserName("admin"); @@ -106,7 +108,7 @@ public class WorkflowInstanceAPITest { @Test @Order(1) - public void testQueryProcessInstancesByWorkflowInstanceId() { + public void testQueryWorkflowInstancesByWorkflowInstanceId() { try { // create test project HttpResponse createProjectResponse = projectPage.createProject(loginUser, "project-test"); @@ -118,49 +120,50 @@ public class WorkflowInstanceAPITest { // upload test workflow definition json ClassLoader classLoader = getClass().getClassLoader(); File file = new File(classLoader.getResource("workflow-json/test.json").getFile()); - CloseableHttpResponse importProcessDefinitionResponse = processDefinitionPage - .importProcessDefinition(loginUser, projectCode, file); - String data = EntityUtils.toString(importProcessDefinitionResponse.getEntity()); + CloseableHttpResponse importWorkflowDefinitionResponse = workflowDefinitionPage + .importWorkflowDefinition(loginUser, projectCode, file); + String data = EntityUtils.toString(importWorkflowDefinitionResponse.getEntity()); assertTrue(data.contains("\"success\":true")); // get workflow definition code - HttpResponse queryAllProcessDefinitionByProjectCodeResponse = - processDefinitionPage.queryAllProcessDefinitionByProjectCode(loginUser, projectCode); - assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getSuccess()); - assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getData().toString() + HttpResponse queryAllWorkflowDefinitionByProjectCodeResponse = + workflowDefinitionPage.queryAllWorkflowDefinitionByProjectCode(loginUser, projectCode); + assertTrue(queryAllWorkflowDefinitionByProjectCodeResponse.getBody().getSuccess()); + assertTrue(queryAllWorkflowDefinitionByProjectCodeResponse.getBody().getData().toString() .contains("hello world")); - 
processDefinitionCode = - (long) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllProcessDefinitionByProjectCodeResponse - .getBody().getData()).get(0)).get("processDefinition")).get("code"); + workflowDefinitionCode = + (long) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllWorkflowDefinitionByProjectCodeResponse + .getBody().getData()).get(0)).get("workflowDefinition")).get("code"); // release test workflow - HttpResponse releaseProcessDefinitionResponse = processDefinitionPage.releaseProcessDefinition(loginUser, - projectCode, processDefinitionCode, ReleaseState.ONLINE); - assertTrue(releaseProcessDefinitionResponse.getBody().getSuccess()); + HttpResponse releaseWorkflowDefinitionResponse = workflowDefinitionPage.releaseWorkflowDefinition(loginUser, + projectCode, workflowDefinitionCode, ReleaseState.ONLINE); + assertTrue(releaseWorkflowDefinitionResponse.getBody().getSuccess()); // trigger workflow instance SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); Date date = new Date(); String scheduleTime = String.format("%s,%s", formatter.format(date), formatter.format(date)); log.info("use current time {} as scheduleTime", scheduleTime); - HttpResponse startProcessInstanceResponse = executorPage.startProcessInstance(loginUser, projectCode, - processDefinitionCode, scheduleTime, FailureStrategy.END, WarningType.NONE); - assertTrue(startProcessInstanceResponse.getBody().getSuccess()); - final List workflowInstanceIds = (List) startProcessInstanceResponse.getBody().getData(); + HttpResponse startWorkflowInstanceResponse = executorPage.startWorkflowInstance(loginUser, projectCode, + workflowDefinitionCode, scheduleTime, FailureStrategy.END, WarningType.NONE); + assertTrue(startWorkflowInstanceResponse.getBody().getSuccess()); + final List workflowInstanceIds = (List) startWorkflowInstanceResponse.getBody().getData(); assertEquals(1, workflowInstanceIds.size()); - processInstanceId = workflowInstanceIds.get(0); + workflowInstanceId = 
workflowInstanceIds.get(0); - // make sure process instance has completed and successfully persisted into db + // make sure workflow instance has completed and successfully persisted into db Awaitility.await() .atMost(30, TimeUnit.SECONDS) .untilAsserted(() -> { // query workflow instance by trigger code - HttpResponse queryProcessInstanceListResponse = - processInstancePage.queryProcessInstanceById(loginUser, projectCode, processInstanceId); - assertTrue(queryProcessInstanceListResponse.getBody().getSuccess()); + HttpResponse queryWorkflowInstanceListResponse = + workflowInstancePage.queryWorkflowInstanceById(loginUser, projectCode, + workflowInstanceId); + assertTrue(queryWorkflowInstanceListResponse.getBody().getSuccess()); final Map workflowInstance = - (Map) queryProcessInstanceListResponse.getBody().getData(); + (Map) queryWorkflowInstanceListResponse.getBody().getData(); assertEquals("SUCCESS", workflowInstance.get("state")); }); } catch (Exception e) { @@ -171,42 +174,43 @@ public class WorkflowInstanceAPITest { @Test @Order(2) - public void testQueryProcessInstanceList() { - HttpResponse queryProcessInstanceListResponse = - processInstancePage.queryProcessInstanceList(loginUser, projectCode, 1, 10); - assertTrue(queryProcessInstanceListResponse.getBody().getSuccess()); - assertTrue(queryProcessInstanceListResponse.getBody().getData().toString().contains("test_import")); + public void testQueryWorkflowInstanceList() { + HttpResponse queryWorkflowInstanceListResponse = + workflowInstancePage.queryWorkflowInstanceList(loginUser, projectCode, 1, 10); + assertTrue(queryWorkflowInstanceListResponse.getBody().getSuccess()); + assertTrue(queryWorkflowInstanceListResponse.getBody().getData().toString().contains("test_import")); } @Test @Order(3) - public void testQueryTaskListByProcessId() { - HttpResponse queryTaskListByProcessIdResponse = - processInstancePage.queryTaskListByProcessId(loginUser, projectCode, processInstanceId); - 
assertTrue(queryTaskListByProcessIdResponse.getBody().getSuccess()); - assertTrue(queryTaskListByProcessIdResponse.getBody().getData().toString().contains("test_import")); + public void testQueryTaskListByWorkflowInstanceId() { + HttpResponse queryTaskListByWorkflowInstanceIdResponse = + workflowInstancePage.queryTaskListByWorkflowInstanceId(loginUser, projectCode, workflowInstanceId); + assertTrue(queryTaskListByWorkflowInstanceIdResponse.getBody().getSuccess()); + assertTrue(queryTaskListByWorkflowInstanceIdResponse.getBody().getData().toString().contains("test_import")); } @Test @Order(4) - public void testQueryProcessInstanceById() { - HttpResponse queryProcessInstanceByIdResponse = - processInstancePage.queryProcessInstanceById(loginUser, projectCode, processInstanceId); - assertTrue(queryProcessInstanceByIdResponse.getBody().getSuccess()); - assertTrue(queryProcessInstanceByIdResponse.getBody().getData().toString().contains("test_import")); + public void testQueryWorkflowInstanceById() { + HttpResponse queryWorkflowInstanceByIdResponse = + workflowInstancePage.queryWorkflowInstanceById(loginUser, projectCode, workflowInstanceId); + assertTrue(queryWorkflowInstanceByIdResponse.getBody().getSuccess()); + assertTrue(queryWorkflowInstanceByIdResponse.getBody().getData().toString().contains("test_import")); } @Test @Order(5) - public void testDeleteProcessInstanceById() { - HttpResponse deleteProcessInstanceByIdResponse = - processInstancePage.deleteProcessInstanceById(loginUser, projectCode, processInstanceId); - assertTrue(deleteProcessInstanceByIdResponse.getBody().getSuccess()); + public void testDeleteWorkflowInstanceById() { + HttpResponse deleteWorkflowInstanceByIdResponse = + workflowInstancePage.deleteWorkflowInstanceById(loginUser, projectCode, workflowInstanceId); + assertTrue(deleteWorkflowInstanceByIdResponse.getBody().getSuccess()); - HttpResponse queryProcessInstanceListResponse = - processInstancePage.queryProcessInstanceList(loginUser, 
projectCode, 1, 10); - assertTrue(queryProcessInstanceListResponse.getBody().getSuccess()); - Assertions.assertFalse(queryProcessInstanceListResponse.getBody().getData().toString().contains("test_import")); + HttpResponse queryWorkflowInstanceListResponse = + workflowInstancePage.queryWorkflowInstanceList(loginUser, projectCode, 1, 10); + assertTrue(queryWorkflowInstanceListResponse.getBody().getSuccess()); + Assertions + .assertFalse(queryWorkflowInstanceListResponse.getBody().getData().toString().contains("test_import")); } } diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ExecutorPage.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ExecutorPage.java index 320457ab85..3afcdbef45 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ExecutorPage.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ExecutorPage.java @@ -40,15 +40,15 @@ public class ExecutorPage { private String sessionId; - public HttpResponse startProcessInstance(User loginUser, - long projectCode, - long processDefinitionCode, - String scheduleTime, - FailureStrategy failureStrategy, - WarningType warningType) { + public HttpResponse startWorkflowInstance(User loginUser, + long projectCode, + long workflowDefinitionCode, + String scheduleTime, + FailureStrategy failureStrategy, + WarningType warningType) { Map params = new HashMap<>(); params.put("loginUser", loginUser); - params.put("processDefinitionCode", processDefinitionCode); + params.put("workflowDefinitionCode", workflowDefinitionCode); params.put("scheduleTime", scheduleTime); params.put("failureStrategy", failureStrategy); params.put("warningType", warningType); @@ -56,14 +56,14 @@ public class ExecutorPage { 
headers.put(Constants.SESSION_ID_KEY, sessionId); RequestClient requestClient = new RequestClient(); - String url = String.format("/projects/%s/executors/start-process-instance", projectCode); + String url = String.format("/projects/%s/executors/start-workflow-instance", projectCode); return requestClient.post(url, headers, params); } - public HttpResponse queryExecutingWorkflow(User loginUser, long projectCode, long processInstanceCode) { + public HttpResponse queryExecutingWorkflow(User loginUser, long projectCode, long workflowInstanceCode) { Map params = new HashMap<>(); params.put("loginUser", loginUser); - params.put("id", processInstanceCode); + params.put("id", workflowInstanceCode); Map headers = new HashMap<>(); headers.put(Constants.SESSION_ID_KEY, sessionId); RequestClient requestClient = new RequestClient(); @@ -71,11 +71,11 @@ public class ExecutorPage { return requestClient.get(url, headers, params); } - public HttpResponse execute(User loginUser, long projectCode, int processInstanceId, ExecuteType executeType) { + public HttpResponse execute(User loginUser, long projectCode, int workflowInstanceId, ExecuteType executeType) { Map params = new HashMap<>(); params.put("loginUser", loginUser); params.put("projectCode", projectCode); - params.put("processInstanceId", processInstanceId); + params.put("workflowInstanceId", workflowInstanceId); params.put("executeType", executeType); Map headers = new HashMap<>(); headers.put(Constants.SESSION_ID_KEY, sessionId); @@ -85,11 +85,11 @@ public class ExecutorPage { return requestClient.post(url, headers, params); } - public HttpResponse executeTask(User loginUser, long projectCode, int processInstanceId, String startNodeList, + public HttpResponse executeTask(User loginUser, long projectCode, int workflowInstanceId, String startNodeList, TaskDependType taskDependType) { Map params = new HashMap<>(); params.put("loginUser", loginUser); - params.put("processInstanceId", processInstanceId); + 
params.put("workflowInstanceId", workflowInstanceId); params.put("startNodeList", startNodeList); params.put("taskDependType", taskDependType); Map headers = new HashMap<>(); diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/SchedulerPage.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/SchedulerPage.java index d6b3b9a743..6fbc5aac07 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/SchedulerPage.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/SchedulerPage.java @@ -36,11 +36,11 @@ public class SchedulerPage { private String sessionId; - public HttpResponse createSchedule(User loginUser, long projectCode, long processDefinitionCode, String schedule) { + public HttpResponse createSchedule(User loginUser, long projectCode, long workflowDefinitionCode, String schedule) { Map params = new HashMap<>(); params.put("loginUser", loginUser); params.put("projectCode", projectCode); - params.put("processDefinitionCode", processDefinitionCode); + params.put("workflowDefinitionCode", workflowDefinitionCode); params.put("schedule", schedule); Map headers = new HashMap<>(); headers.put(Constants.SESSION_ID_KEY, sessionId); diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ProcessDefinitionPage.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/WorkflowDefinitionPage.java similarity index 71% rename from dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ProcessDefinitionPage.java rename to 
dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/WorkflowDefinitionPage.java index 3f3b715c39..ea6664b377 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ProcessDefinitionPage.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/WorkflowDefinitionPage.java @@ -36,54 +36,54 @@ import lombok.extern.slf4j.Slf4j; @Slf4j @AllArgsConstructor -public class ProcessDefinitionPage { +public class WorkflowDefinitionPage { private String sessionId; - public CloseableHttpResponse importProcessDefinition(User loginUser, long projectCode, File file) { + public CloseableHttpResponse importWorkflowDefinition(User loginUser, long projectCode, File file) { Map params = new HashMap<>(); params.put("loginUser", loginUser); Map headers = new HashMap<>(); headers.put(Constants.SESSION_ID_KEY, sessionId); RequestClient requestClient = new RequestClient(); - String url = String.format("/projects/%s/process-definition/import", projectCode); + String url = String.format("/projects/%s/workflow-definition/import", projectCode); return requestClient.postWithFile(url, headers, params, file); } - public HttpResponse queryAllProcessDefinitionByProjectCode(User loginUser, long projectCode) { + public HttpResponse queryAllWorkflowDefinitionByProjectCode(User loginUser, long projectCode) { Map params = new HashMap<>(); params.put("loginUser", loginUser); Map headers = new HashMap<>(); headers.put(Constants.SESSION_ID_KEY, sessionId); RequestClient requestClient = new RequestClient(); - String url = String.format("/projects/%s/process-definition/all", projectCode); + String url = String.format("/projects/%s/workflow-definition/all", projectCode); return requestClient.get(url, headers, params); } - public HttpResponse queryProcessDefinitionByCode(User loginUser, 
long projectCode, long processDefinitionCode) { + public HttpResponse queryWorkflowDefinitionByCode(User loginUser, long projectCode, long workflowDefinitionCode) { Map params = new HashMap<>(); params.put("loginUser", loginUser); Map headers = new HashMap<>(); headers.put(Constants.SESSION_ID_KEY, sessionId); RequestClient requestClient = new RequestClient(); - String url = String.format("/projects/%s/process-definition/%s", projectCode, processDefinitionCode); + String url = String.format("/projects/%s/workflow-definition/%s", projectCode, workflowDefinitionCode); return requestClient.get(url, headers, params); } - public HttpResponse getProcessListByProjectCode(User loginUser, long projectCode) { + public HttpResponse getWorkflowListByProjectCode(User loginUser, long projectCode) { Map params = new HashMap<>(); params.put("loginUser", loginUser); Map headers = new HashMap<>(); headers.put(Constants.SESSION_ID_KEY, sessionId); RequestClient requestClient = new RequestClient(); - String url = String.format("/projects/%s/process-definition/query-process-definition-list", projectCode); + String url = String.format("/projects/%s/workflow-definition/query-workflow-definition-list", projectCode); return requestClient.get(url, headers, params); } - public HttpResponse queryProcessDefinitionByName(User loginUser, long projectCode, String name) { + public HttpResponse queryWorkflowDefinitionByName(User loginUser, long projectCode, String name) { Map params = new HashMap<>(); params.put("loginUser", loginUser); params.put("name", name); @@ -91,23 +91,23 @@ public class ProcessDefinitionPage { headers.put(Constants.SESSION_ID_KEY, sessionId); RequestClient requestClient = new RequestClient(); - String url = String.format("/projects/%s/process-definition/query-by-name", projectCode); + String url = String.format("/projects/%s/workflow-definition/query-by-name", projectCode); return requestClient.get(url, headers, params); } - public HttpResponse 
queryProcessDefinitionList(User loginUser, long projectCode) { + public HttpResponse queryWorkflowDefinitionList(User loginUser, long projectCode) { Map params = new HashMap<>(); params.put("loginUser", loginUser); Map headers = new HashMap<>(); headers.put(Constants.SESSION_ID_KEY, sessionId); RequestClient requestClient = new RequestClient(); - String url = String.format("/projects/%s/process-definition/list", projectCode); + String url = String.format("/projects/%s/workflow-definition/list", projectCode); return requestClient.get(url, headers, params); } - public HttpResponse releaseProcessDefinition(User loginUser, long projectCode, long code, - ReleaseState releaseState) { + public HttpResponse releaseWorkflowDefinition(User loginUser, long projectCode, long code, + ReleaseState releaseState) { Map params = new HashMap<>(); params.put("loginUser", loginUser); params.put("code", code); @@ -116,11 +116,11 @@ public class ProcessDefinitionPage { headers.put(Constants.SESSION_ID_KEY, sessionId); RequestClient requestClient = new RequestClient(); - String url = String.format("/projects/%s/process-definition/%s/release", projectCode, code); + String url = String.format("/projects/%s/workflow-definition/%s/release", projectCode, code); return requestClient.post(url, headers, params); } - public HttpResponse deleteProcessDefinitionByCode(User loginUser, long projectCode, long code) { + public HttpResponse deleteWorkflowDefinitionByCode(User loginUser, long projectCode, long code) { Map params = new HashMap<>(); params.put("loginUser", loginUser); params.put("code", code); @@ -128,7 +128,7 @@ public class ProcessDefinitionPage { headers.put(Constants.SESSION_ID_KEY, sessionId); RequestClient requestClient = new RequestClient(); - String url = String.format("/projects/%s/process-definition/%s", projectCode, code); + String url = String.format("/projects/%s/workflow-definition/%s", projectCode, code); return requestClient.delete(url, headers, params); } } diff --git 
a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ProcessInstancePage.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/WorkflowInstancePage.java similarity index 73% rename from dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ProcessInstancePage.java rename to dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/WorkflowInstancePage.java index eba4e63036..433aa6080b 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ProcessInstancePage.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/WorkflowInstancePage.java @@ -32,11 +32,11 @@ import lombok.extern.slf4j.Slf4j; @Slf4j @AllArgsConstructor -public class ProcessInstancePage { +public class WorkflowInstancePage { private String sessionId; - public HttpResponse queryProcessInstancesByTriggerCode(User loginUser, long projectCode, long triggerCode) { + public HttpResponse queryWorkflowInstancesByTriggerCode(User loginUser, long projectCode, long triggerCode) { Map params = new HashMap<>(); params.put("loginUser", loginUser); params.put("triggerCode", triggerCode); @@ -44,11 +44,11 @@ public class ProcessInstancePage { headers.put(Constants.SESSION_ID_KEY, sessionId); RequestClient requestClient = new RequestClient(); - String url = String.format("/projects/%s/process-instances/trigger", projectCode); + String url = String.format("/projects/%s/workflow-instances/trigger", projectCode); return requestClient.get(url, headers, params); } - public HttpResponse queryProcessInstanceList(User loginUser, long projectCode, int pageNo, int pageSize) { + public 
HttpResponse queryWorkflowInstanceList(User loginUser, long projectCode, int pageNo, int pageSize) { Map params = new HashMap<>(); params.put("loginUser", loginUser); params.put("pageNo", pageNo); @@ -57,39 +57,39 @@ public class ProcessInstancePage { headers.put(Constants.SESSION_ID_KEY, sessionId); RequestClient requestClient = new RequestClient(); - String url = String.format("/projects/%s/process-instances", projectCode); + String url = String.format("/projects/%s/workflow-instances", projectCode); return requestClient.get(url, headers, params); } - public HttpResponse queryTaskListByProcessId(User loginUser, long projectCode, long processInstanceId) { + public HttpResponse queryTaskListByWorkflowInstanceId(User loginUser, long projectCode, long workflowInstanceId) { Map params = new HashMap<>(); params.put("loginUser", loginUser); Map headers = new HashMap<>(); headers.put(Constants.SESSION_ID_KEY, sessionId); RequestClient requestClient = new RequestClient(); - String url = String.format("/projects/%s/process-instances/%s/tasks", projectCode, processInstanceId); + String url = String.format("/projects/%s/workflow-instances/%s/tasks", projectCode, workflowInstanceId); return requestClient.get(url, headers, params); } - public HttpResponse queryProcessInstanceById(User loginUser, long projectCode, long processInstanceId) { + public HttpResponse queryWorkflowInstanceById(User loginUser, long projectCode, long workflowInstanceId) { Map params = new HashMap<>(); params.put("loginUser", loginUser); Map headers = new HashMap<>(); headers.put(Constants.SESSION_ID_KEY, sessionId); RequestClient requestClient = new RequestClient(); - String url = String.format("/projects/%s/process-instances/%s", projectCode, processInstanceId); + String url = String.format("/projects/%s/workflow-instances/%s", projectCode, workflowInstanceId); return requestClient.get(url, headers, params); } - public HttpResponse deleteProcessInstanceById(User loginUser, long projectCode, long 
processInstanceId) { + public HttpResponse deleteWorkflowInstanceById(User loginUser, long projectCode, long workflowInstanceId) { Map params = new HashMap<>(); params.put("loginUser", loginUser); Map headers = new HashMap<>(); headers.put(Constants.SESSION_ID_KEY, sessionId); RequestClient requestClient = new RequestClient(); - String url = String.format("/projects/%s/process-instances/%s", projectCode, processInstanceId); + String url = String.format("/projects/%s/workflow-instances/%s", projectCode, workflowInstanceId); return requestClient.delete(url, headers, params); } diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/resources/workflow-json/test.json b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/resources/workflow-json/test.json index ae11e05f38..9601a93a9b 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/resources/workflow-json/test.json +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/resources/workflow-json/test.json @@ -1,5 +1,5 @@ [ { - "processDefinition" : { + "workflowDefinition" : { "id" : 1, "code" : 9752686452032, "name" : "test", @@ -23,12 +23,12 @@ "warningGroupId" : null, "executionType" : "PARALLEL" }, - "processTaskRelationList" : [ { + "workflowTaskRelationList" : [ { "id" : 1, "name" : "", - "processDefinitionVersion" : 1, + "workflowDefinitionVersion" : 1, "projectCode" : 9752680865600, - "processDefinitionCode" : 9752686452032, + "workflowDefinitionCode" : 9752686452032, "preTaskCode" : 0, "preTaskVersion" : 0, "postTaskCode" : 9752684723008, @@ -78,4 +78,4 @@ "operateTime" : "2023-06-01 20:41:02" } ], "schedule" : null -} ] \ No newline at end of file +} ] diff --git a/dolphinscheduler-api-test/pom.xml b/dolphinscheduler-api-test/pom.xml index 94c7603680..095cefd64c 100644 --- a/dolphinscheduler-api-test/pom.xml +++ b/dolphinscheduler-api-test/pom.xml @@ -31,8 +31,8 @@ - 8 - 8 + 11 + 11 UTF-8 5.7.2 @@ -46,6 +46,7 @@ 31.0.1-jre 2.13.2 
3.1.2 + 2.2.0 @@ -65,6 +66,14 @@ junit-jupiter + + org.junit-pioneer + junit-pioneer + ${junit-pioneer.version} + test + + + org.testcontainers testcontainers diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/constants/AuditLogConstants.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/constants/AuditLogConstants.java index f0f749aea7..b7200adc13 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/constants/AuditLogConstants.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/constants/AuditLogConstants.java @@ -26,10 +26,9 @@ public final class AuditLogConstants { public static final String CODE = "code"; public static final String CODES = "codes"; public static final String VERSION = "version"; - public static final String PROCESS_DEFINITION_CODE = "processDefinitionCode"; - public static final String PROCESS_DEFINITION_CODES = "processDefinitionCodes"; - public static final String PROCESS_INSTANCE_IDS = "processInstanceIds"; - public static final String PROCESS_INSTANCE_ID = "processInstanceId"; + public static final String WORKFLOW_DEFINITION_CODES = "workflowDefinitionCodes"; + public static final String WORKFLOW_INSTANCE_IDS = "workflowInstanceIds"; + public static final String WORKFLOW_INSTANCE_ID = "workflowInstanceId"; public static final String WORKFLOW_DEFINITION_CODE = "workflowDefinitionCode"; public static final String TYPE = "type"; public static final String NAME = "name"; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/enums/AuditType.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/enums/AuditType.java index 8c41bf14cc..0a1ace075b 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/enums/AuditType.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/enums/AuditType.java @@ -27,15 +27,14 
@@ import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants. import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.ID; import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.NAME; import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.PRIORITY; -import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.PROCESS_DEFINITION_CODE; -import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.PROCESS_DEFINITION_CODES; -import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.PROCESS_INSTANCE_ID; -import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.PROCESS_INSTANCE_IDS; import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.QUEUE_ID; import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.TYPE; import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.USER_ID; import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.VERSION; import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.WORKFLOW_DEFINITION_CODE; +import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.WORKFLOW_DEFINITION_CODES; +import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.WORKFLOW_INSTANCE_ID; +import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.WORKFLOW_INSTANCE_IDS; import static org.apache.dolphinscheduler.common.enums.AuditModelType.ALARM_GROUP; import static org.apache.dolphinscheduler.common.enums.AuditModelType.ALARM_INSTANCE; import static org.apache.dolphinscheduler.common.enums.AuditModelType.CLUSTER; @@ -76,6 +75,7 @@ import static org.apache.dolphinscheduler.common.enums.AuditOperationType.START; import static org.apache.dolphinscheduler.common.enums.AuditOperationType.SWITCH_VERSION; import static 
org.apache.dolphinscheduler.common.enums.AuditOperationType.UPDATE; +import org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants; import org.apache.dolphinscheduler.api.audit.operator.AuditOperator; import org.apache.dolphinscheduler.api.audit.operator.impl.AlertGroupAuditOperatorImpl; import org.apache.dolphinscheduler.api.audit.operator.impl.AlertInstanceAuditOperatorImpl; @@ -83,7 +83,6 @@ import org.apache.dolphinscheduler.api.audit.operator.impl.ClusterAuditOperatorI import org.apache.dolphinscheduler.api.audit.operator.impl.DatasourceAuditOperatorImpl; import org.apache.dolphinscheduler.api.audit.operator.impl.EnvironmentAuditOperatorImpl; import org.apache.dolphinscheduler.api.audit.operator.impl.K8SNamespaceAuditOperatorImpl; -import org.apache.dolphinscheduler.api.audit.operator.impl.ProcessInstanceAuditOperatorImpl; import org.apache.dolphinscheduler.api.audit.operator.impl.ProjectAuditOperatorImpl; import org.apache.dolphinscheduler.api.audit.operator.impl.ResourceAuditOperatorImpl; import org.apache.dolphinscheduler.api.audit.operator.impl.ScheduleAuditOperatorImpl; @@ -95,6 +94,7 @@ import org.apache.dolphinscheduler.api.audit.operator.impl.TokenAuditOperatorImp import org.apache.dolphinscheduler.api.audit.operator.impl.UserAuditOperatorImpl; import org.apache.dolphinscheduler.api.audit.operator.impl.WorkerGroupAuditOperatorImpl; import org.apache.dolphinscheduler.api.audit.operator.impl.WorkflowAuditOperatorImpl; +import org.apache.dolphinscheduler.api.audit.operator.impl.WorkflowInstanceAuditOperatorImpl; import org.apache.dolphinscheduler.api.audit.operator.impl.YarnQueueAuditOperatorImpl; import org.apache.dolphinscheduler.common.enums.AuditModelType; import org.apache.dolphinscheduler.common.enums.AuditOperationType; @@ -120,22 +120,24 @@ public enum AuditType { WORKFLOW_EXPORT(WORKFLOW, EXPORT, WorkflowAuditOperatorImpl.class, new String[]{CODES}, new String[]{}), WORKFLOW_DELETE(WORKFLOW, DELETE, WorkflowAuditOperatorImpl.class, new 
String[]{CODE}, new String[]{}), WORKFLOW_BATCH_DELETE(WORKFLOW, BATCH_DELETE, WorkflowAuditOperatorImpl.class, new String[]{CODES}, new String[]{}), - WORKFLOW_START(WORKFLOW, START, WorkflowAuditOperatorImpl.class, new String[]{PROCESS_DEFINITION_CODE}, + WORKFLOW_START(WORKFLOW, START, WorkflowAuditOperatorImpl.class, + new String[]{AuditLogConstants.WORKFLOW_DEFINITION_CODE}, new String[]{}), - WORKFLOW_BATCH_START(WORKFLOW, BATCH_START, WorkflowAuditOperatorImpl.class, new String[]{PROCESS_DEFINITION_CODES}, + WORKFLOW_BATCH_START(WORKFLOW, BATCH_START, WorkflowAuditOperatorImpl.class, + new String[]{WORKFLOW_DEFINITION_CODES}, new String[]{}), - WORKFLOW_BATCH_RERUN(WORKFLOW, BATCH_RERUN, ProcessInstanceAuditOperatorImpl.class, - new String[]{PROCESS_INSTANCE_IDS}, + WORKFLOW_BATCH_RERUN(WORKFLOW, BATCH_RERUN, WorkflowInstanceAuditOperatorImpl.class, + new String[]{WORKFLOW_INSTANCE_IDS}, new String[]{}), - WORKFLOW_EXECUTE(WORKFLOW, EXECUTE, ProcessInstanceAuditOperatorImpl.class, new String[]{PROCESS_INSTANCE_ID}, + WORKFLOW_EXECUTE(WORKFLOW, EXECUTE, WorkflowInstanceAuditOperatorImpl.class, new String[]{WORKFLOW_INSTANCE_ID}, new String[]{}), WORKFLOW_IMPORT(WORKFLOW, IMPORT, WorkflowAuditOperatorImpl.class, new String[]{}, new String[]{CODE}), - WORKFLOW_INSTANCE_UPDATE(WORKFLOW_INSTANCE, UPDATE, ProcessInstanceAuditOperatorImpl.class, new String[]{ID}, + WORKFLOW_INSTANCE_UPDATE(WORKFLOW_INSTANCE, UPDATE, WorkflowInstanceAuditOperatorImpl.class, new String[]{ID}, new String[]{}), - WORKFLOW_INSTANCE_DELETE(WORKFLOW_INSTANCE, DELETE, ProcessInstanceAuditOperatorImpl.class, new String[]{ID}, + WORKFLOW_INSTANCE_DELETE(WORKFLOW_INSTANCE, DELETE, WorkflowInstanceAuditOperatorImpl.class, new String[]{ID}, new String[]{}), - WORKFLOW_INSTANCE_BATCH_DELETE(WORKFLOW_INSTANCE, BATCH_DELETE, ProcessInstanceAuditOperatorImpl.class, - new String[]{PROCESS_INSTANCE_IDS}, new String[]{}), + WORKFLOW_INSTANCE_BATCH_DELETE(WORKFLOW_INSTANCE, BATCH_DELETE, 
WorkflowInstanceAuditOperatorImpl.class, + new String[]{WORKFLOW_INSTANCE_IDS}, new String[]{}), TASK_CREATE(TASK, CREATE, TaskAuditOperatorImpl.class, new String[]{}, new String[]{CODE}), TASK_UPDATE(TASK, UPDATE, TaskAuditOperatorImpl.class, new String[]{}, new String[]{CODE}), @@ -147,7 +149,8 @@ public enum AuditType { TASK_INSTANCE_FORCE_SUCCESS(TASK_INSTANCE, FORCE_SUCCESS, TaskInstancesAuditOperatorImpl.class, new String[]{ID}, new String[]{}), - SCHEDULE_CREATE(SCHEDULE, CREATE, ScheduleAuditOperatorImpl.class, new String[]{PROCESS_DEFINITION_CODE}, + SCHEDULE_CREATE(SCHEDULE, CREATE, ScheduleAuditOperatorImpl.class, + new String[]{AuditLogConstants.WORKFLOW_DEFINITION_CODE}, new String[]{ID}), SCHEDULE_UPDATE(SCHEDULE, UPDATE, ScheduleAuditOperatorImpl.class, new String[]{ID}, new String[]{}), SCHEDULE_ONLINE(SCHEDULE, ONLINE, ScheduleAuditOperatorImpl.class, new String[]{ID}, new String[]{}), diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ScheduleAuditOperatorImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ScheduleAuditOperatorImpl.java index be8d774f8a..8365a61c20 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ScheduleAuditOperatorImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ScheduleAuditOperatorImpl.java @@ -51,7 +51,7 @@ public class ScheduleAuditOperatorImpl extends BaseAuditOperator { int id = (int) paramsMap.get(paramNameArr[0]); Schedule schedule = scheduleMapper.selectById(id); if (schedule != null) { - paramsMap.put(AuditLogConstants.CODE, schedule.getProcessDefinitionCode()); + paramsMap.put(AuditLogConstants.CODE, schedule.getWorkflowDefinitionCode()); paramNameArr[0] = AuditLogConstants.CODE; auditLogList.forEach(auditLog -> auditLog.setDetail(String.valueOf(id))); } diff --git 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/WorkflowAuditOperatorImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/WorkflowAuditOperatorImpl.java index b0fb86faa6..5369fe24eb 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/WorkflowAuditOperatorImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/WorkflowAuditOperatorImpl.java @@ -49,8 +49,8 @@ public class WorkflowAuditOperatorImpl extends BaseAuditOperator { protected void setObjectByParam(String[] paramNameArr, Map paramsMap, List auditLogList) { if (paramNameArr[0].equals(AuditLogConstants.CODES) - || paramNameArr[0].equals(AuditLogConstants.PROCESS_DEFINITION_CODES) - || paramNameArr[0].equals(AuditLogConstants.PROCESS_INSTANCE_IDS)) { + || paramNameArr[0].equals(AuditLogConstants.WORKFLOW_DEFINITION_CODES) + || paramNameArr[0].equals(AuditLogConstants.WORKFLOW_INSTANCE_IDS)) { super.setObjectByParamArr(paramNameArr, paramsMap, auditLogList); } else { super.setObjectByParam(paramNameArr, paramsMap, auditLogList); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ProcessInstanceAuditOperatorImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/WorkflowInstanceAuditOperatorImpl.java similarity index 94% rename from dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ProcessInstanceAuditOperatorImpl.java rename to dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/WorkflowInstanceAuditOperatorImpl.java index 2b57da007e..fec46ac149 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ProcessInstanceAuditOperatorImpl.java +++ 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/WorkflowInstanceAuditOperatorImpl.java @@ -35,7 +35,7 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @Service -public class ProcessInstanceAuditOperatorImpl extends BaseAuditOperator { +public class WorkflowInstanceAuditOperatorImpl extends BaseAuditOperator { @Autowired private WorkflowInstanceMapper workflowInstanceMapper; @@ -50,7 +50,7 @@ public class ProcessInstanceAuditOperatorImpl extends BaseAuditOperator { @Override protected void setObjectByParam(String[] paramNameArr, Map paramsMap, List auditLogList) { - if (paramNameArr[0].equals(AuditLogConstants.PROCESS_INSTANCE_IDS)) { + if (paramNameArr[0].equals(AuditLogConstants.WORKFLOW_INSTANCE_IDS)) { super.setObjectByParamArr(paramNameArr, paramsMap, auditLogList); } else { super.setObjectByParam(paramNameArr, paramsMap, auditLogList); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java index 98a871bf9e..ee263b865f 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java @@ -86,13 +86,13 @@ public class DataAnalysisController extends BaseController { dataAnalysisService.getTaskInstanceStateCountByProject(loginUser, projectCode, startDate, endDate)); } - @Operation(summary = "countProcessInstanceState", description = "COUNT_WORKFLOW_INSTANCE_NOTES") + @Operation(summary = "countWorkflowInstanceState", description = "COUNT_WORKFLOW_INSTANCE_NOTES") @Parameters({ @Parameter(name = "startDate", description = "START_DATE", schema = @Schema(implementation = String.class)), @Parameter(name = "endDate", description = 
"END_DATE", schema = @Schema(implementation = String.class)), @Parameter(name = "projectCode", description = "PROJECT_CODE", schema = @Schema(implementation = long.class, example = "100")) }) - @GetMapping(value = "/process-state-count") + @GetMapping(value = "/workflow-state-count") @ResponseStatus(HttpStatus.OK) @ApiException(COUNT_WORKFLOW_INSTANCE_STATE_ERROR) public Result getWorkflowInstanceStateCount(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java index 5866e2e25d..ad7254f004 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java @@ -85,10 +85,10 @@ public class ExecutorController extends BaseController { private ExecutorService execService; /** - * execute process instance + * execute workflow instance * * @param loginUser login user - * @param processDefinitionCode process definition code + * @param workflowDefinitionCode workflow definition code * @param scheduleTime schedule time when CommandType is COMPLEMENT_DATA there are two ways to transfer parameters 1.date range, for example:{"complementStartDate":"2022-01-01 12:12:12","complementEndDate":"2022-01-6 12:12:12"} 2.manual input, for example:{"complementScheduleDateList":"2022-01-01 00:00:00,2022-01-02 12:12:12,2022-01-03 12:12:12"} * @param failureStrategy failure strategy * @param startNodeList start nodes list @@ -97,16 +97,16 @@ public class ExecutorController extends BaseController { * @param warningType warning type * @param warningGroupId warning group id * @param runMode run mode - * @param processInstancePriority process instance priority + * @param 
workflowInstancePriority workflow instance priority * @param workerGroup worker group * @param expectedParallelismNumber the expected parallelism number when execute complement in parallel mode * @param testFlag testFlag * @param executionOrder complement data in some kind of order - * @return start process result code + * @return start workflow result code */ - @Operation(summary = "startProcessInstance", description = "RUN_WORKFLOW_INSTANCE_NOTES") + @Operation(summary = "startWorkflowInstance", description = "RUN_WORKFLOW_INSTANCE_NOTES") @Parameters({ - @Parameter(name = "processDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = Long.class), example = "100"), + @Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = Long.class), example = "100"), @Parameter(name = "scheduleTime", description = "SCHEDULE_TIME", required = true, schema = @Schema(implementation = String.class), example = "2022-04-06 00:00:00,2022-04-06 00:00:00"), @Parameter(name = "failureStrategy", description = "FAILURE_STRATEGY", required = true, schema = @Schema(implementation = FailureStrategy.class)), @Parameter(name = "startNodeList", description = "START_NODE_LIST", schema = @Schema(implementation = String.class)), @@ -115,7 +115,7 @@ public class ExecutorController extends BaseController { @Parameter(name = "warningType", description = "WARNING_TYPE", required = true, schema = @Schema(implementation = WarningType.class)), @Parameter(name = "warningGroupId", description = "WARNING_GROUP_ID", schema = @Schema(implementation = int.class, example = "100")), @Parameter(name = "runMode", description = "RUN_MODE", schema = @Schema(implementation = RunMode.class)), - @Parameter(name = "processInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", required = true, schema = @Schema(implementation = Priority.class)), + @Parameter(name = 
"workflowInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", required = true, schema = @Schema(implementation = Priority.class)), @Parameter(name = "workerGroup", description = "WORKER_GROUP", schema = @Schema(implementation = String.class, example = "default")), @Parameter(name = "tenantCode", description = "TENANT_CODE", schema = @Schema(implementation = String.class, example = "default")), @Parameter(name = "environmentCode", description = "ENVIRONMENT_CODE", schema = @Schema(implementation = Long.class, example = "-1")), @@ -127,12 +127,12 @@ public class ExecutorController extends BaseController { @Parameter(name = "allLevelDependent", description = "ALL_LEVEL_DEPENDENT", schema = @Schema(implementation = boolean.class, example = "false")), @Parameter(name = "executionOrder", description = "EXECUTION_ORDER", schema = @Schema(implementation = ExecutionOrder.class)) }) - @PostMapping(value = "start-process-instance") + @PostMapping(value = "start-workflow-instance") @ResponseStatus(HttpStatus.OK) @ApiException(START_WORKFLOW_INSTANCE_ERROR) @OperatorLog(auditType = AuditType.WORKFLOW_START) public Result> triggerWorkflowDefinition(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "processDefinitionCode") long processDefinitionCode, + @RequestParam(value = "workflowDefinitionCode") long workflowDefinitionCode, @RequestParam(value = "scheduleTime") String scheduleTime, @RequestParam(value = "failureStrategy") FailureStrategy failureStrategy, @RequestParam(value = "startNodeList", required = false) String startNodeList, @@ -141,7 +141,7 @@ public class ExecutorController extends BaseController { @RequestParam(value = "warningType") WarningType warningType, @RequestParam(value = "warningGroupId", required = false) Integer warningGroupId, @RequestParam(value = "runMode", required = false) RunMode runMode, - @RequestParam(value = "processInstancePriority", required = false) Priority 
processInstancePriority, + @RequestParam(value = "workflowInstancePriority", required = false) Priority workflowInstancePriority, @RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup, @RequestParam(value = "tenantCode", required = false, defaultValue = "default") String tenantCode, @RequestParam(value = "environmentCode", required = false, defaultValue = "-1") Long environmentCode, @@ -156,14 +156,14 @@ public class ExecutorController extends BaseController { case START_PROCESS: final WorkflowTriggerRequest workflowTriggerRequest = WorkflowTriggerRequest.builder() .loginUser(loginUser) - .workflowDefinitionCode(processDefinitionCode) + .workflowDefinitionCode(workflowDefinitionCode) .startNodes(startNodeList) .failureStrategy(failureStrategy) .taskDependType(taskDependType) .execType(execType) .warningType(warningType) .warningGroupId(warningGroupId) - .workflowInstancePriority(processInstancePriority) + .workflowInstancePriority(workflowInstancePriority) .workerGroup(workerGroup) .tenantCode(tenantCode) .environmentCode(environmentCode) @@ -176,7 +176,7 @@ public class ExecutorController extends BaseController { case COMPLEMENT_DATA: final WorkflowBackFillRequest workflowBackFillRequest = WorkflowBackFillRequest.builder() .loginUser(loginUser) - .workflowDefinitionCode(processDefinitionCode) + .workflowDefinitionCode(workflowDefinitionCode) .startNodes(startNodeList) .failureStrategy(failureStrategy) .taskDependType(taskDependType) @@ -184,7 +184,7 @@ public class ExecutorController extends BaseController { .warningType(warningType) .warningGroupId(warningGroupId) .backfillRunMode(runMode) - .workflowInstancePriority(processInstancePriority) + .workflowInstancePriority(workflowInstancePriority) .workerGroup(workerGroup) .tenantCode(tenantCode) .environmentCode(environmentCode) @@ -204,12 +204,12 @@ public class ExecutorController extends BaseController { } /** - * batch execute process instance - * If any 
processDefinitionCode cannot be found, the failure information is returned and the status is set to + * batch execute workflow instance + * If any workflowDefinitionCode cannot be found, the failure information is returned and the status is set to * failed. The successful task will run normally and will not stop * * @param loginUser login user - * @param processDefinitionCodes process definition codes + * @param workflowDefinitionCodes workflow definition codes * @param scheduleTime schedule time * @param failureStrategy failure strategy * @param startNodeList start nodes list @@ -218,17 +218,17 @@ public class ExecutorController extends BaseController { * @param warningType warning type * @param warningGroupId warning group id * @param runMode run mode - * @param processInstancePriority process instance priority + * @param workflowInstancePriority workflow instance priority * @param workerGroup worker group * @param tenantCode tenant code * @param expectedParallelismNumber the expected parallelism number when execute complement in parallel mode * @param testFlag testFlag * @param executionOrder complement data in some kind of order - * @return start process result code + * @return start workflow result code */ - @Operation(summary = "batchStartProcessInstance", description = "BATCH_RUN_WORKFLOW_INSTANCE_NOTES") + @Operation(summary = "batchStartWorkflowInstance", description = "BATCH_RUN_WORKFLOW_INSTANCE_NOTES") @Parameters({ - @Parameter(name = "processDefinitionCodes", description = "WORKFLOW_DEFINITION_CODE_LIST", required = true, schema = @Schema(implementation = String.class, example = "1,2,3")), + @Parameter(name = "workflowDefinitionCodes", description = "WORKFLOW_DEFINITION_CODE_LIST", required = true, schema = @Schema(implementation = String.class, example = "1,2,3")), @Parameter(name = "scheduleTime", description = "SCHEDULE_TIME", required = true, schema = @Schema(implementation = String.class, example = "2022-04-06 00:00:00,2022-04-06 00:00:00")), 
@Parameter(name = "failureStrategy", description = "FAILURE_STRATEGY", required = true, schema = @Schema(implementation = FailureStrategy.class)), @Parameter(name = "startNodeList", description = "START_NODE_LIST", schema = @Schema(implementation = String.class)), @@ -237,7 +237,7 @@ public class ExecutorController extends BaseController { @Parameter(name = "warningType", description = "WARNING_TYPE", required = true, schema = @Schema(implementation = WarningType.class)), @Parameter(name = "warningGroupId", description = "WARNING_GROUP_ID", required = true, schema = @Schema(implementation = int.class, example = "100")), @Parameter(name = "runMode", description = "RUN_MODE", schema = @Schema(implementation = RunMode.class)), - @Parameter(name = "processInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", required = true, schema = @Schema(implementation = Priority.class)), + @Parameter(name = "workflowInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", required = true, schema = @Schema(implementation = Priority.class)), @Parameter(name = "workerGroup", description = "WORKER_GROUP", schema = @Schema(implementation = String.class, example = "default")), @Parameter(name = "tenantCode", description = "TENANT_CODE", schema = @Schema(implementation = String.class, example = "default")), @Parameter(name = "environmentCode", description = "ENVIRONMENT_CODE", schema = @Schema(implementation = Long.class, example = "-1")), @@ -248,12 +248,12 @@ public class ExecutorController extends BaseController { @Parameter(name = "allLevelDependent", description = "ALL_LEVEL_DEPENDENT", schema = @Schema(implementation = boolean.class, example = "false")), @Parameter(name = "executionOrder", description = "EXECUTION_ORDER", schema = @Schema(implementation = ExecutionOrder.class)) }) - @PostMapping(value = "batch-start-process-instance") + @PostMapping(value = "batch-start-workflow-instance") @ResponseStatus(HttpStatus.OK) 
@ApiException(BATCH_START_WORKFLOW_INSTANCE_ERROR) @OperatorLog(auditType = AuditType.WORKFLOW_BATCH_START) public Result> batchTriggerWorkflowDefinitions(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "processDefinitionCodes") String processDefinitionCodes, + @RequestParam(value = "workflowDefinitionCodes") String workflowDefinitionCodes, @RequestParam(value = "scheduleTime") String scheduleTime, @RequestParam(value = "failureStrategy") FailureStrategy failureStrategy, @RequestParam(value = "startNodeList", required = false) String startNodeList, @@ -262,7 +262,7 @@ public class ExecutorController extends BaseController { @RequestParam(value = "warningType") WarningType warningType, @RequestParam(value = "warningGroupId", required = false) Integer warningGroupId, @RequestParam(value = "runMode", required = false) RunMode runMode, - @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority, + @RequestParam(value = "workflowInstancePriority", required = false) Priority workflowInstancePriority, @RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup, @RequestParam(value = "tenantCode", required = false, defaultValue = "default") String tenantCode, @RequestParam(value = "environmentCode", required = false, defaultValue = "-1") Long environmentCode, @@ -273,12 +273,12 @@ public class ExecutorController extends BaseController { @RequestParam(value = "complementDependentMode", required = false) ComplementDependentMode complementDependentMode, @RequestParam(value = "allLevelDependent", required = false, defaultValue = "false") boolean allLevelDependent, @RequestParam(value = "executionOrder", required = false) ExecutionOrder executionOrder) { - List workflowDefinitionCodes = Arrays.stream(processDefinitionCodes.split(Constants.COMMA)) + List workflowDefinitionCodeList = 
Arrays.stream(workflowDefinitionCodes.split(Constants.COMMA)) .map(Long::parseLong) .collect(Collectors.toList()); List result = new ArrayList<>(); - for (Long workflowDefinitionCode : workflowDefinitionCodes) { + for (Long workflowDefinitionCode : workflowDefinitionCodeList) { Result> workflowInstanceIds = triggerWorkflowDefinition(loginUser, workflowDefinitionCode, scheduleTime, @@ -289,7 +289,7 @@ public class ExecutorController extends BaseController { warningType, warningGroupId, runMode, - processInstancePriority, + workflowInstancePriority, workerGroup, tenantCode, environmentCode, @@ -306,11 +306,11 @@ public class ExecutorController extends BaseController { } /** - * do action to process instance: pause, stop, repeat, recover from pause, recover from stop + * do action to workflow instance: pause, stop, repeat, recover from pause, recover from stop */ @Operation(summary = "execute", description = "EXECUTE_ACTION_TO_WORKFLOW_INSTANCE_NOTES") @Parameters({ - @Parameter(name = "processInstanceId", description = "WORKFLOW_INSTANCE_ID", required = true, schema = @Schema(implementation = int.class, example = "100")), + @Parameter(name = "workflowInstanceId", description = "WORKFLOW_INSTANCE_ID", required = true, schema = @Schema(implementation = int.class, example = "100")), @Parameter(name = "executeType", description = "EXECUTE_TYPE", required = true, schema = @Schema(implementation = ExecuteType.class)) }) @PostMapping(value = "/execute") @@ -318,24 +318,24 @@ public class ExecutorController extends BaseController { @ApiException(EXECUTE_WORKFLOW_INSTANCE_ERROR) @OperatorLog(auditType = AuditType.WORKFLOW_EXECUTE) public Result controlWorkflowInstance(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("processInstanceId") Integer processInstanceId, + @RequestParam("workflowInstanceId") Integer workflowInstanceId, @RequestParam("executeType") ExecuteType executeType) { - 
execService.controlWorkflowInstance(loginUser, processInstanceId, executeType); + execService.controlWorkflowInstance(loginUser, workflowInstanceId, executeType); return Result.success(); } /** - * batch execute and do action to process instance + * batch execute and do action to workflow instance * * @param loginUser login user - * @param processInstanceIds process instance ids, delimiter by "," if more than one id + * @param workflowInstanceIds workflow instance ids, delimiter by "," if more than one id * @param executeType execute type * @return execute result code */ @Operation(summary = "batchExecute", description = "BATCH_EXECUTE_ACTION_TO_WORKFLOW_INSTANCE_NOTES") @Parameters({ @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true, schema = @Schema(implementation = int.class)), - @Parameter(name = "processInstanceIds", description = "PROCESS_INSTANCE_IDS", required = true, schema = @Schema(implementation = String.class)), + @Parameter(name = "workflowInstanceIds", description = "WORKFLOW_INSTANCE_IDS", required = true, schema = @Schema(implementation = String.class)), @Parameter(name = "executeType", description = "EXECUTE_TYPE", required = true, schema = @Schema(implementation = ExecuteType.class)) }) @PostMapping(value = "/batch-execute") @@ -343,20 +343,20 @@ public class ExecutorController extends BaseController { @ApiException(BATCH_EXECUTE_WORKFLOW_INSTANCE_ERROR) @OperatorLog(auditType = AuditType.WORKFLOW_BATCH_RERUN) public Result batchControlWorkflowInstance(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("processInstanceIds") String processInstanceIds, + @RequestParam("workflowInstanceIds") String workflowInstanceIds, @RequestParam("executeType") ExecuteType executeType) { - String[] processInstanceIdArray = processInstanceIds.split(Constants.COMMA); + String[] workflowInstanceIdArray = workflowInstanceIds.split(Constants.COMMA); List errorMessage = new ArrayList<>(); - for (String 
strProcessInstanceId : processInstanceIdArray) { - int processInstanceId = Integer.parseInt(strProcessInstanceId); + for (String strWorkflowInstanceId : workflowInstanceIdArray) { + int workflowInstanceId = Integer.parseInt(strWorkflowInstanceId); try { - execService.controlWorkflowInstance(loginUser, processInstanceId, executeType); - log.info("Success do action {} on workflowInstance: {}", executeType, processInstanceId); + execService.controlWorkflowInstance(loginUser, workflowInstanceId, executeType); + log.info("Success do action {} on workflowInstance: {}", executeType, workflowInstanceId); } catch (Exception e) { - errorMessage.add("Failed do action " + executeType + " on workflowInstance: " + processInstanceId + errorMessage.add("Failed do action " + executeType + " on workflowInstance: " + workflowInstanceId + "reason: " + e.getMessage()); - log.error("Failed do action {} on workflowInstance: {}, error: {}", executeType, processInstanceId, e); + log.error("Failed do action {} on workflowInstance: {}, error: {}", executeType, workflowInstanceId, e); } } if (org.apache.commons.collections4.CollectionUtils.isNotEmpty(errorMessage)) { @@ -417,18 +417,18 @@ public class ExecutorController extends BaseController { } /** - * do action to process instance: pause, stop, repeat, recover from pause, recover from stop + * do action to workflow instance: pause, stop, repeat, recover from pause, recover from stop * * @param loginUser login user * @param projectCode project code - * @param processInstanceId process instance id + * @param workflowInstanceId workflow instance id * @param startNodeList start node list * @param taskDependType task depend type * @return execute result code */ @Operation(summary = "execute-task", description = "EXECUTE_ACTION_TO_WORKFLOW_INSTANCE_NOTES") @Parameters({ - @Parameter(name = "processInstanceId", description = "WORKFLOW_INSTANCE_ID", required = true, schema = @Schema(implementation = int.class, example = "100")), + @Parameter(name 
= "workflowInstanceId", description = "WORKFLOW_INSTANCE_ID", required = true, schema = @Schema(implementation = int.class, example = "100")), @Parameter(name = "startNodeList", description = "START_NODE_LIST", required = true, schema = @Schema(implementation = String.class)), @Parameter(name = "taskDependType", description = "TASK_DEPEND_TYPE", required = true, schema = @Schema(implementation = TaskDependType.class)) }) @@ -438,13 +438,13 @@ public class ExecutorController extends BaseController { @OperatorLog(auditType = AuditType.WORKFLOW_EXECUTE) public Result executeTask(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, - @RequestParam("processInstanceId") Integer processInstanceId, + @RequestParam("workflowInstanceId") Integer workflowInstanceId, @RequestParam("startNodeList") String startNodeList, @RequestParam("taskDependType") TaskDependType taskDependType) { - log.info("Start to execute task in process instance, projectCode:{}, processInstanceId:{}.", + log.info("Start to execute task in workflow instance, projectCode:{}, workflowInstanceId:{}.", projectCode, - processInstanceId); - return execService.executeTask(loginUser, projectCode, processInstanceId, startNodeList, taskDependType); + workflowInstanceId); + return execService.executeTask(loginUser, projectCode, workflowInstanceId, startNodeList, taskDependType); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java index f95f369d07..30c489d216 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java @@ -71,7 +71,7 @@ 
public class SchedulerController extends BaseController { public static final String DEFAULT_WARNING_TYPE = "NONE"; public static final String DEFAULT_NOTIFY_GROUP_ID = "1"; public static final String DEFAULT_FAILURE_POLICY = "CONTINUE"; - public static final String DEFAULT_PROCESS_INSTANCE_PRIORITY = "MEDIUM"; + public static final String DEFAULT_WORKFLOW_INSTANCE_PRIORITY = "MEDIUM"; @Autowired private SchedulerService schedulerService; @@ -81,19 +81,19 @@ public class SchedulerController extends BaseController { * * @param loginUser login user * @param projectCode project code - * @param processDefinitionCode process definition code + * @param workflowDefinitionCode workflow definition code * @param schedule scheduler * @param warningType warning type * @param warningGroupId warning group id * @param failureStrategy failure strategy - * @param processInstancePriority process instance priority + * @param workflowInstancePriority workflow instance priority * @param workerGroup worker group * @param tenantCode tenant code * @return create result code */ @Operation(summary = "createSchedule", description = "CREATE_SCHEDULE_NOTES") @Parameters({ - @Parameter(name = "processDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class, example = "100")), + @Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class, example = "100")), @Parameter(name = "schedule", description = "SCHEDULE", schema = @Schema(implementation = String.class, example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','timezoneId':'America/Phoenix','crontab':'0 0 3/6 * * ? 
*'}")), @Parameter(name = "warningType", description = "WARNING_TYPE", schema = @Schema(implementation = WarningType.class)), @Parameter(name = "warningGroupId", description = "WARNING_GROUP_ID", schema = @Schema(implementation = int.class, example = "100")), @@ -101,7 +101,7 @@ public class SchedulerController extends BaseController { @Parameter(name = "workerGroup", description = "WORKER_GROUP", schema = @Schema(implementation = String.class, example = "default")), @Parameter(name = "tenantCode", description = "TENANT_CODE", schema = @Schema(implementation = String.class, example = "default")), @Parameter(name = "environmentCode", description = "ENVIRONMENT_CODE", schema = @Schema(implementation = long.class)), - @Parameter(name = "processInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", schema = @Schema(implementation = Priority.class)), + @Parameter(name = "workflowInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", schema = @Schema(implementation = Priority.class)), }) @PostMapping() @ResponseStatus(HttpStatus.CREATED) @@ -109,7 +109,7 @@ public class SchedulerController extends BaseController { @OperatorLog(auditType = AuditType.SCHEDULE_CREATE) public Result createSchedule(@Parameter(hidden = true) @RequestAttribute(value = SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, - @RequestParam(value = "processDefinitionCode") long processDefinitionCode, + @RequestParam(value = "workflowDefinitionCode") long workflowDefinitionCode, @RequestParam(value = "schedule") String schedule, @RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType, @RequestParam(value = "warningGroupId", required = false, defaultValue = DEFAULT_NOTIFY_GROUP_ID) int warningGroupId, @@ -117,16 +117,16 @@ public class SchedulerController extends BaseController { @RequestParam(value = "workerGroup", required = false, 
defaultValue = "default") String workerGroup, @RequestParam(value = "tenantCode", required = false, defaultValue = "default") String tenantCode, @RequestParam(value = "environmentCode", required = false, defaultValue = "-1") Long environmentCode, - @RequestParam(value = "processInstancePriority", required = false, defaultValue = DEFAULT_PROCESS_INSTANCE_PRIORITY) Priority processInstancePriority) { + @RequestParam(value = "workflowInstancePriority", required = false, defaultValue = DEFAULT_WORKFLOW_INSTANCE_PRIORITY) Priority workflowInstancePriority) { Map result = schedulerService.insertSchedule( loginUser, projectCode, - processDefinitionCode, + workflowDefinitionCode, schedule, warningType, warningGroupId, failureStrategy, - processInstancePriority, + workflowInstancePriority, workerGroup, tenantCode, environmentCode); @@ -146,7 +146,7 @@ public class SchedulerController extends BaseController { * @param failureStrategy failure strategy * @param workerGroup worker group * @param tenantCode tenant code - * @param processInstancePriority process instance priority + * @param workflowInstancePriority workflow instance priority * @return update result code */ @Operation(summary = "updateSchedule", description = "UPDATE_SCHEDULE_NOTES") @@ -158,7 +158,7 @@ public class SchedulerController extends BaseController { @Parameter(name = "failureStrategy", description = "FAILURE_STRATEGY", schema = @Schema(implementation = FailureStrategy.class)), @Parameter(name = "workerGroup", description = "WORKER_GROUP", schema = @Schema(implementation = String.class, example = "default")), @Parameter(name = "tenantCode", description = "TENANT_CODE", schema = @Schema(implementation = String.class, example = "default")), - @Parameter(name = "processInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", schema = @Schema(implementation = Priority.class)), + @Parameter(name = "workflowInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", schema = @Schema(implementation = 
Priority.class)), @Parameter(name = "environmentCode", description = "ENVIRONMENT_CODE", schema = @Schema(implementation = long.class)), }) @PutMapping("/{id}") @@ -175,10 +175,10 @@ public class SchedulerController extends BaseController { @RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup, @RequestParam(value = "tenantCode", required = false, defaultValue = "default") String tenantCode, @RequestParam(value = "environmentCode", required = false, defaultValue = "-1") Long environmentCode, - @RequestParam(value = "processInstancePriority", required = false, defaultValue = DEFAULT_PROCESS_INSTANCE_PRIORITY) Priority processInstancePriority) { + @RequestParam(value = "workflowInstancePriority", required = false, defaultValue = DEFAULT_WORKFLOW_INSTANCE_PRIORITY) Priority workflowInstancePriority) { Map result = schedulerService.updateSchedule(loginUser, projectCode, id, schedule, - warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup, tenantCode, + warningType, warningGroupId, failureStrategy, workflowInstancePriority, workerGroup, tenantCode, environmentCode); return returnDataList(result); } @@ -216,7 +216,7 @@ public class SchedulerController extends BaseController { * * @param loginUser login user * @param projectCode project code - * @param processDefinitionCode process definition code + * @param workflowDefinitionCode workflow definition code * @param pageNo page number * @param pageSize page size * @param searchVal search value @@ -224,7 +224,7 @@ public class SchedulerController extends BaseController { */ @Operation(summary = "queryScheduleListPaging", description = "QUERY_SCHEDULE_LIST_PAGING_NOTES") @Parameters({ - @Parameter(name = "processDefinitionId", description = "WORKFLOW_DEFINITION_ID", required = true, schema = @Schema(implementation = int.class, example = "100")), + @Parameter(name = "searchVal", description = "SEARCH_VAL", schema = @Schema(implementation = 
String.class)), @Parameter(name = "pageNo", description = "PAGE_NO", schema = @Schema(implementation = int.class, example = "1")), @Parameter(name = "pageSize", description = "PAGE_SIZE", schema = @Schema(implementation = int.class, example = "20")) @@ -233,13 +233,13 @@ public class SchedulerController extends BaseController { @ApiException(QUERY_SCHEDULE_LIST_PAGING_ERROR) public Result queryScheduleListPaging(@Parameter(hidden = true) @RequestAttribute(value = SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, - @RequestParam(value = "processDefinitionCode", required = false, defaultValue = "0") long processDefinitionCode, + @RequestParam(value = "workflowDefinitionCode", required = false, defaultValue = "0") long workflowDefinitionCode, @RequestParam(value = "searchVal", required = false) String searchVal, @RequestParam("pageNo") Integer pageNo, @RequestParam("pageSize") Integer pageSize) { checkPageParams(pageNo, pageSize); searchVal = ParameterUtils.handleEscapes(searchVal); - return schedulerService.querySchedule(loginUser, projectCode, processDefinitionCode, searchVal, pageNo, + return schedulerService.querySchedule(loginUser, projectCode, workflowDefinitionCode, searchVal, pageNo, pageSize); } @@ -304,49 +304,49 @@ public class SchedulerController extends BaseController { } /** - * update process definition schedule + * update workflow definition schedule * * @param loginUser login user * @param projectCode project code - * @param processDefinitionCode process definition code + * @param workflowDefinitionCode workflow definition code * @param schedule scheduler * @param warningType warning type * @param warningGroupId warning group id * @param failureStrategy failure strategy * @param workerGroup worker group - * @param processInstancePriority process instance priority + * @param workflowInstancePriority workflow instance priority * @return update result code */ - 
@Operation(summary = "updateScheduleByWorkflowDefinitionCode", description = "UPDATE_SCHEDULE_BY_PROCESS_DEFINITION_CODE_NOTES") + @Operation(summary = "updateScheduleByWorkflowDefinitionCode", description = "UPDATE_SCHEDULE_BY_WORKFLOW_DEFINITION_CODE_NOTES") @Parameters({ - @Parameter(name = "processDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class, example = "12345678")), + @Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class, example = "12345678")), @Parameter(name = "schedule", description = "SCHEDULE", schema = @Schema(implementation = String.class, example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *'}")), @Parameter(name = "warningType", description = "WARNING_TYPE", schema = @Schema(implementation = WarningType.class)), @Parameter(name = "warningGroupId", description = "WARNING_GROUP_ID", schema = @Schema(implementation = int.class, example = "100")), @Parameter(name = "failureStrategy", description = "FAILURE_STRATEGY", schema = @Schema(implementation = FailureStrategy.class)), @Parameter(name = "workerGroup", description = "WORKER_GROUP", schema = @Schema(implementation = String.class, example = "default")), @Parameter(name = "tenantCode", description = "TENANT_CODE", schema = @Schema(implementation = String.class, example = "default")), - @Parameter(name = "processInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", schema = @Schema(implementation = Priority.class)), + @Parameter(name = "workflowInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", schema = @Schema(implementation = Priority.class)), @Parameter(name = "environmentCode", description = "ENVIRONMENT_CODE", schema = @Schema(implementation = long.class)), }) @PutMapping("/update/{code}") @ResponseStatus(HttpStatus.OK) @ApiException(UPDATE_SCHEDULE_ERROR) 
@OperatorLog(auditType = AuditType.SCHEDULE_UPDATE) - public Result updateScheduleByProcessDefinitionCode(@Parameter(hidden = true) @RequestAttribute(value = SESSION_USER) User loginUser, - @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, - @PathVariable(value = "code") long processDefinitionCode, - @RequestParam(value = "schedule") String schedule, - @RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType, - @RequestParam(value = "warningGroupId", required = false) int warningGroupId, - @RequestParam(value = "failureStrategy", required = false, defaultValue = "END") FailureStrategy failureStrategy, - @RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup, - @RequestParam(value = "tenantCode", required = false, defaultValue = "default") String tenantCode, - @RequestParam(value = "environmentCode", required = false, defaultValue = "-1") long environmentCode, - @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) { + public Result updateScheduleByWorkflowDefinitionCode(@Parameter(hidden = true) @RequestAttribute(value = SESSION_USER) User loginUser, + @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, + @PathVariable(value = "code") long workflowDefinitionCode, + @RequestParam(value = "schedule") String schedule, + @RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType, + @RequestParam(value = "warningGroupId", required = false) int warningGroupId, + @RequestParam(value = "failureStrategy", required = false, defaultValue = "END") FailureStrategy failureStrategy, + @RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup, + @RequestParam(value = "tenantCode", required = false, defaultValue = 
"default") String tenantCode, + @RequestParam(value = "environmentCode", required = false, defaultValue = "-1") long environmentCode, + @RequestParam(value = "workflowInstancePriority", required = false) Priority workflowInstancePriority) { Map result = schedulerService.updateScheduleByWorkflowDefinitionCode(loginUser, projectCode, - processDefinitionCode, schedule, - warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup, tenantCode, + workflowDefinitionCode, schedule, + warningType, warningGroupId, failureStrategy, workflowInstancePriority, workerGroup, tenantCode, environmentCode); return returnDataList(result); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskGroupController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskGroupController.java index 2f99f098ed..3fb5ec4e01 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskGroupController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskGroupController.java @@ -299,7 +299,7 @@ public class TaskGroupController extends BaseController { * * @param groupId ID for task group * @param taskName Task Name - * @param processName Process instance name + * @param workflowInstanceName workflow instance name * @param status Task queue status * @param loginUser login user * @param pageNo page number @@ -310,7 +310,7 @@ public class TaskGroupController extends BaseController { @Parameters({ @Parameter(name = "groupId", description = "GROUP_ID", required = false, schema = @Schema(implementation = int.class, example = "1", defaultValue = "-1")), @Parameter(name = "taskInstanceName", description = "TASK_INSTANCE_NAME", required = false, schema = @Schema(implementation = String.class, example = "taskName")), - @Parameter(name = "processInstanceName", description = "PROCESS_INSTANCE_NAME", required = false, schema = 
@Schema(implementation = String.class, example = "processName")), + @Parameter(name = "workflowInstanceName", description = "WORKFLOW_INSTANCE_NAME", required = false, schema = @Schema(implementation = String.class, example = "workflowInstanceName")), @Parameter(name = "status", description = "TASK_GROUP_STATUS", required = false, schema = @Schema(implementation = int.class, example = "1")), @Parameter(name = "pageNo", description = "PAGE_NO", required = true, schema = @Schema(implementation = int.class, example = "1")), @Parameter(name = "pageSize", description = "PAGE_SIZE", required = true, schema = @Schema(implementation = int.class, example = "20")) @@ -321,14 +321,14 @@ public class TaskGroupController extends BaseController { public Result queryTaskGroupQueues(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "groupId", required = false, defaultValue = "-1") Integer groupId, @RequestParam(value = "taskInstanceName", required = false) String taskName, - @RequestParam(value = "processInstanceName", required = false) String processName, + @RequestParam(value = "workflowInstanceName", required = false) String workflowInstanceName, @RequestParam(value = "status", required = false) Integer status, @RequestParam("pageNo") Integer pageNo, @RequestParam("pageSize") Integer pageSize) { Map result = taskGroupQueueService.queryTasksByGroupId( loginUser, taskName, - processName, + workflowInstanceName, status, groupId, pageNo, diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java index ef9d3b4b63..777da9bc21 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java @@ 
-69,7 +69,7 @@ public class TaskInstanceController extends BaseController { * * @param loginUser login user * @param projectCode project code - * @param processInstanceId process instance id + * @param workflowInstanceId workflow instance id * @param searchVal search value * @param taskName task name * @param stateType state type @@ -83,8 +83,8 @@ public class TaskInstanceController extends BaseController { */ @Operation(summary = "queryTaskListPaging", description = "QUERY_TASK_INSTANCE_LIST_PAGING_NOTES") @Parameters({ - @Parameter(name = "processInstanceId", description = "WORKFLOW_INSTANCE_ID", schema = @Schema(implementation = int.class, example = "100")), - @Parameter(name = "processInstanceName", description = "PROCESS_INSTANCE_NAME", schema = @Schema(implementation = String.class)), + @Parameter(name = "workflowInstanceId", description = "WORKFLOW_INSTANCE_ID", schema = @Schema(implementation = int.class, example = "100")), + @Parameter(name = "workflowInstanceName", description = "WORKFLOW_INSTANCE_NAME", schema = @Schema(implementation = String.class)), @Parameter(name = "searchVal", description = "SEARCH_VAL", schema = @Schema(implementation = String.class)), @Parameter(name = "taskName", description = "TASK_NAME", schema = @Schema(implementation = String.class)), @Parameter(name = "taskCode", description = "TASK_CODE", schema = @Schema(implementation = Long.class)), @@ -102,9 +102,9 @@ public class TaskInstanceController extends BaseController { @ApiException(QUERY_TASK_LIST_PAGING_ERROR) public Result queryTaskListPaging(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, - @RequestParam(value = "processInstanceId", required = false, defaultValue = "0") Integer processInstanceId, - @RequestParam(value = "processInstanceName", required = false) String processInstanceName, - @RequestParam(value = 
"processDefinitionName", required = false) String processDefinitionName, + @RequestParam(value = "workflowInstanceId", required = false, defaultValue = "0") Integer workflowInstanceId, + @RequestParam(value = "workflowInstanceName", required = false) String workflowInstanceName, + @RequestParam(value = "workflowDefinitionName", required = false) String workflowDefinitionName, @RequestParam(value = "searchVal", required = false) String searchVal, @RequestParam(value = "taskName", required = false) String taskName, @RequestParam(value = "taskCode", required = false) Long taskCode, @@ -121,9 +121,9 @@ public class TaskInstanceController extends BaseController { return taskInstanceService.queryTaskListPaging( loginUser, projectCode, - processInstanceId, - processInstanceName, - processDefinitionName, + workflowInstanceId, + workflowInstanceName, + workflowDefinitionName, taskName, taskCode, executorName, diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowDefinitionController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowDefinitionController.java index b531f3a16c..5f203b9f15 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowDefinitionController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowDefinitionController.java @@ -82,7 +82,7 @@ import io.swagger.v3.oas.annotations.tags.Tag; */ @Tag(name = "WORKFLOW_DEFINITION_TAG") @RestController -@RequestMapping("projects/{projectCode}/process-definition") +@RequestMapping("projects/{projectCode}/workflow-definition") @Slf4j public class WorkflowDefinitionController extends BaseController { @@ -204,10 +204,10 @@ public class WorkflowDefinitionController extends BaseController { public Result verifyWorkflowDefinitionName(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = 
"projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam(value = "name", required = true) String name, - @RequestParam(value = "code", required = false, defaultValue = "0") long processDefinitionCode) { + @RequestParam(value = "workflowDefinitionCode", required = false, defaultValue = "0") long workflowDefinitionCode) { Map result = workflowDefinitionService.verifyWorkflowDefinitionName(loginUser, projectCode, name, - processDefinitionCode); + workflowDefinitionCode); return returnDataList(result); } @@ -581,7 +581,7 @@ public class WorkflowDefinitionController extends BaseController { @Parameters({ @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true, schema = @Schema(implementation = long.class, example = "100")) }) - @GetMapping(value = "/query-process-definition-list") + @GetMapping(value = "/query-workflow-definition-list") @ResponseStatus(HttpStatus.OK) @ApiException(GET_TASKS_LIST_BY_WORKFLOW_DEFINITION_CODE_ERROR) public Result getWorkflowListByProjectCode(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @@ -600,20 +600,20 @@ public class WorkflowDefinitionController extends BaseController { @Operation(summary = "getTaskListByWorkflowDefinitionCode", description = "GET_TASK_LIST_BY_WORKFLOW_CODE_NOTES") @Parameters({ @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true, schema = @Schema(implementation = long.class, example = "100")), - @Parameter(name = "processDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class, example = "100")), + @Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class, example = "100")), }) @GetMapping(value = "/query-task-definition-list") @ResponseStatus(HttpStatus.OK) @ApiException(GET_TASKS_LIST_BY_WORKFLOW_DEFINITION_CODE_ERROR) 
public Result getTaskListByWorkflowDefinitionCode(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, - @RequestParam(value = "processDefinitionCode") Long processDefinitionCode) { + @RequestParam(value = "workflowDefinitionCode") Long workflowDefinitionCode) { Map result = workflowDefinitionService - .queryTaskDefinitionListByWorkflowDefinitionCode(projectCode, processDefinitionCode); + .queryTaskDefinitionListByWorkflowDefinitionCode(projectCode, workflowDefinitionCode); return returnDataList(result); } - @Operation(summary = "deleteByCode", description = "DELETE_WORKFLOW_DEFINITION_BY_ID_NOTES") + @Operation(summary = "deleteByWorkflowDefinitionCode", description = "DELETE_WORKFLOW_DEFINITION_BY_ID_NOTES") @Parameters({ @Parameter(name = "code", description = "WORKFLOW_DEFINITION_CODE", schema = @Schema(implementation = int.class, example = "100")) }) diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowInstanceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowInstanceController.java index 6ab26c1909..bd7fb4619b 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowInstanceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowInstanceController.java @@ -67,7 +67,7 @@ import io.swagger.v3.oas.annotations.tags.Tag; */ @Tag(name = "WORKFLOW_INSTANCE_TAG") @RestController -@RequestMapping("/projects/{projectCode}/process-instances") +@RequestMapping("/projects/{projectCode}/workflow-instances") @Slf4j public class WorkflowInstanceController extends BaseController { @@ -81,7 +81,7 @@ public class WorkflowInstanceController extends BaseController { * @param projectCode project code * @param pageNo page number * @param 
pageSize page size - * @param processDefineCode process definition code + * @param workflowDefinitionCode workflow definition code * @param searchVal search value * @param stateType state type * @param host host @@ -92,7 +92,7 @@ public class WorkflowInstanceController extends BaseController { */ @Operation(summary = "queryWorkflowInstanceListPaging", description = "QUERY_WORKFLOW_INSTANCE_LIST_NOTES") @Parameters({ - @Parameter(name = "processDefineCode", description = "WORKFLOW_DEFINITION_CODE", schema = @Schema(implementation = long.class, example = "100")), + @Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", schema = @Schema(implementation = long.class, example = "100")), @Parameter(name = "searchVal", description = "SEARCH_VAL", schema = @Schema(implementation = String.class)), @Parameter(name = "executorName", description = "EXECUTOR_NAME", schema = @Schema(implementation = String.class)), @Parameter(name = "stateType", description = "EXECUTION_STATUS", schema = @Schema(implementation = WorkflowExecutionStatus.class)), @@ -107,7 +107,7 @@ public class WorkflowInstanceController extends BaseController { @ApiException(Status.QUERY_WORKFLOW_INSTANCE_LIST_PAGING_ERROR) public Result queryWorkflowInstanceList(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, - @RequestParam(value = "processDefineCode", required = false, defaultValue = "0") long processDefineCode, + @RequestParam(value = "workflowDefinitionCode", required = false, defaultValue = "0") long workflowDefinitionCode, @RequestParam(value = "searchVal", required = false) String searchVal, @RequestParam(value = "executorName", required = false) String executorName, @RequestParam(value = "stateType", required = false) WorkflowExecutionStatus stateType, @@ -120,7 +120,8 @@ public class WorkflowInstanceController extends 
BaseController { checkPageParams(pageNo, pageSize); searchVal = ParameterUtils.handleEscapes(searchVal); - return workflowInstanceService.queryWorkflowInstanceList(loginUser, projectCode, processDefineCode, startTime, + return workflowInstanceService.queryWorkflowInstanceList(loginUser, projectCode, workflowDefinitionCode, + startTime, endTime, searchVal, executorName, stateType, host, otherParamsJson, pageNo, pageSize); } @@ -153,7 +154,7 @@ public class WorkflowInstanceController extends BaseController { * * @param loginUser login user * @param projectCode project code - * @param taskRelationJson process task relation json + * @param taskRelationJson workflow task relation json * @param taskDefinitionJson taskDefinitionJson * @param id workflow instance id * @param scheduleTime schedule time @@ -168,9 +169,9 @@ public class WorkflowInstanceController extends BaseController { @Parameter(name = "id", description = "WORKFLOW_INSTANCE_ID", required = true, schema = @Schema(implementation = int.class, example = "1")), @Parameter(name = "scheduleTime", description = "SCHEDULE_TIME", schema = @Schema(implementation = String.class)), @Parameter(name = "syncDefine", description = "SYNC_DEFINE", required = true, schema = @Schema(implementation = boolean.class, example = "false")), - @Parameter(name = "globalParams", description = "PROCESS_GLOBAL_PARAMS", schema = @Schema(implementation = String.class, example = "[]")), + @Parameter(name = "globalParams", description = "WORKFLOW_GLOBAL_PARAMS", schema = @Schema(implementation = String.class, example = "[]")), @Parameter(name = "locations", description = "WORKFLOW_INSTANCE_LOCATIONS", schema = @Schema(implementation = String.class)), - @Parameter(name = "timeout", description = "PROCESS_TIMEOUT", schema = @Schema(implementation = int.class, example = "0")), + @Parameter(name = "timeout", description = "WORKFLOW_TIMEOUT", schema = @Schema(implementation = int.class, example = "0")), }) @PutMapping(value = "/{id}") 
@ResponseStatus(HttpStatus.OK) @@ -223,7 +224,7 @@ public class WorkflowInstanceController extends BaseController { * @param endTime end time * @return list of workflow instance */ - @Operation(summary = "queryTopNLongestRunningWorkflowInstance", description = "QUERY_TOPN_LONGEST_RUNNING_PROCESS_INSTANCE_NOTES") + @Operation(summary = "queryTopNLongestRunningWorkflowInstance", description = "QUERY_TOPN_LONGEST_RUNNING_WORKFLOW_INSTANCE_NOTES") @Parameters({ @Parameter(name = "size", description = "WORKFLOW_INSTANCE_SIZE", required = true, schema = @Schema(implementation = int.class, example = "10")), @Parameter(name = "startTime", description = "WORKFLOW_INSTANCE_START_TIME", required = true, schema = @Schema(implementation = String.class)), @@ -381,13 +382,13 @@ public class WorkflowInstanceController extends BaseController { * * @param loginUser login user * @param projectCode project code - * @param processInstanceIds workflow instance id + * @param workflowInstanceIds workflow instance id * @return delete result code */ @Operation(summary = "batchDeleteWorkflowInstanceByIds", description = "BATCH_DELETE_WORKFLOW_INSTANCE_BY_IDS_NOTES") @Parameters({ @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true, schema = @Schema(implementation = int.class)), - @Parameter(name = "processInstanceIds", description = "PROCESS_INSTANCE_IDS", required = true, schema = @Schema(implementation = String.class)), + @Parameter(name = "workflowInstanceIds", description = "WORKFLOW_INSTANCE_IDS", required = true, schema = @Schema(implementation = String.class)), }) @PostMapping(value = "/batch-delete") @ResponseStatus(HttpStatus.OK) @@ -395,21 +396,21 @@ public class WorkflowInstanceController extends BaseController { @OperatorLog(auditType = AuditType.WORKFLOW_INSTANCE_BATCH_DELETE) public Result batchDeleteWorkflowInstanceByIds(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, @PathVariable long projectCode, - 
@RequestParam("processInstanceIds") String processInstanceIds) { + @RequestParam("workflowInstanceIds") String workflowInstanceIds) { // task queue Map result = new HashMap<>(); List deleteFailedIdList = new ArrayList<>(); - if (!StringUtils.isEmpty(processInstanceIds)) { - String[] processInstanceIdArray = processInstanceIds.split(Constants.COMMA); + if (!StringUtils.isEmpty(workflowInstanceIds)) { + String[] workflowInstanceIdArray = workflowInstanceIds.split(Constants.COMMA); - for (String strProcessInstanceId : processInstanceIdArray) { - int processInstanceId = Integer.parseInt(strProcessInstanceId); + for (String strWorkflowInstanceId : workflowInstanceIdArray) { + int workflowInstanceId = Integer.parseInt(strWorkflowInstanceId); try { - workflowInstanceService.deleteWorkflowInstanceById(loginUser, processInstanceId); + workflowInstanceService.deleteWorkflowInstanceById(loginUser, workflowInstanceId); } catch (Exception e) { - log.error("Delete workflow instance: {} error", strProcessInstanceId, e); + log.error("Delete workflow instance: {} error", strWorkflowInstanceId, e); deleteFailedIdList - .add(MessageFormat.format(Status.WORKFLOW_INSTANCE_ERROR.getMsg(), strProcessInstanceId)); + .add(MessageFormat.format(Status.WORKFLOW_INSTANCE_ERROR.getMsg(), strWorkflowInstanceId)); } } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowLineageController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowLineageController.java index 25a0ee522e..58bb10e05b 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowLineageController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowLineageController.java @@ -75,10 +75,10 @@ public class WorkflowLineageController extends BaseController { @ApiException(QUERY_WORKFLOW_LINEAGE_ERROR) public Result> queryWorkFlowLineageByName(@Parameter(hidden = 
true) @RequestAttribute(value = SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, - @RequestParam(value = "processDefinitionName", required = false) String processDefinitionName) { - processDefinitionName = ParameterUtils.handleEscapes(processDefinitionName); + @RequestParam(value = "workflowDefinitionName", required = false) String workflowDefinitionName) { + workflowDefinitionName = ParameterUtils.handleEscapes(workflowDefinitionName); List workFlowLineages = - workflowLineageService.queryWorkFlowLineageByName(projectCode, processDefinitionName); + workflowLineageService.queryWorkFlowLineageByName(projectCode, workflowDefinitionName); return Result.success(workFlowLineages); } @@ -117,14 +117,14 @@ public class WorkflowLineageController extends BaseController { * * @param loginUser login user * @param projectCode project codes which taskCode belong - * @param processDefinitionCode project code which taskCode belong + * @param workflowDefinitionCode project code which taskCode belong * @param taskCode task definition code * @return Result of task can be deleted or not */ @Operation(summary = "verifyTaskCanDelete", description = "VERIFY_TASK_CAN_DELETE") @Parameters({ @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true, schema = @Schema(implementation = long.class)), - @Parameter(name = "processDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class)), + @Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class)), @Parameter(name = "taskCode", description = "TASK_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class, example = "123456789")), }) @PostMapping(value = "/tasks/verify-delete") @@ -132,11 +132,11 @@ public class WorkflowLineageController extends BaseController 
{ @ApiException(TASK_WITH_DEPENDENT_ERROR) public Result> verifyTaskCanDelete(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, - @RequestParam(value = "processDefinitionCode", required = true) long processDefinitionCode, - @RequestParam(value = "taskCode", required = true) long taskCode) { + @RequestParam(value = "workflowDefinitionCode") long workflowDefinitionCode, + @RequestParam(value = "taskCode") long taskCode) { Result> result = new Result<>(); Optional taskDepMsg = - workflowLineageService.taskDependentMsg(projectCode, processDefinitionCode, taskCode); + workflowLineageService.taskDependentMsg(projectCode, workflowDefinitionCode, taskCode); if (taskDepMsg.isPresent()) { throw new ServiceException(taskDepMsg.get()); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowTaskRelationController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowTaskRelationController.java index 8c32771342..5557ee17b9 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowTaskRelationController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkflowTaskRelationController.java @@ -55,7 +55,7 @@ import io.swagger.v3.oas.annotations.tags.Tag; */ @Tag(name = "WORKFLOW_TASK_RELATION_TAG") @RestController -@RequestMapping("projects/{projectCode}/process-task-relation") +@RequestMapping("projects/{projectCode}/workflow-task-relation") public class WorkflowTaskRelationController extends BaseController { @Autowired @@ -66,7 +66,7 @@ public class WorkflowTaskRelationController extends BaseController { * * @param loginUser login user * @param projectCode project code - * @param processDefinitionCode processDefinitionCode + * @param workflowDefinitionCode 
workflowDefinitionCode * @param preTaskCode preTaskCode * @param postTaskCode postTaskCode * @return create result code @@ -74,26 +74,26 @@ public class WorkflowTaskRelationController extends BaseController { @Operation(summary = "save", description = "CREATE_WORKFLOW_TASK_RELATION_NOTES") @Parameters({ @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true, schema = @Schema(implementation = long.class)), - @Parameter(name = "processDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class)), + @Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class)), @Parameter(name = "preTaskCode", description = "PRE_TASK_CODE", required = true, schema = @Schema(implementation = long.class)), @Parameter(name = "postTaskCode", description = "POST_TASK_CODE", required = true, schema = @Schema(implementation = long.class)) }) @PostMapping() @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_WORKFLOW_TASK_RELATION_ERROR) - public Result createProcessTaskRelation(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, - @RequestParam(name = "processDefinitionCode", required = true) long processDefinitionCode, - @RequestParam(name = "preTaskCode", required = true) long preTaskCode, - @RequestParam(name = "postTaskCode", required = true) long postTaskCode) { + public Result createWorkflowTaskRelation(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, + @RequestParam(name = "workflowDefinitionCode", required = true) long workflowDefinitionCode, + @RequestParam(name = "preTaskCode", required = true) long 
preTaskCode, + @RequestParam(name = "postTaskCode", required = true) long postTaskCode) { Map result = new HashMap<>(); if (postTaskCode == 0L) { putMsg(result, DATA_IS_NOT_VALID, "postTaskCode"); - } else if (processDefinitionCode == 0L) { - putMsg(result, DATA_IS_NOT_VALID, "processDefinitionCode"); + } else if (workflowDefinitionCode == 0L) { + putMsg(result, DATA_IS_NOT_VALID, "workflowDefinitionCode"); } else { result = workflowTaskRelationService.createWorkflowTaskRelation(loginUser, projectCode, - processDefinitionCode, + workflowDefinitionCode, preTaskCode, postTaskCode); } return returnDataList(result); @@ -104,25 +104,25 @@ public class WorkflowTaskRelationController extends BaseController { * * @param loginUser login user * @param projectCode project code - * @param processDefinitionCode process definition code + * @param workflowDefinitionCode workflow definition code * @param taskCode the post task code * @return delete result code */ @Operation(summary = "deleteRelation", description = "DELETE_WORKFLOW_TASK_RELATION_NOTES") @Parameters({ @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true, schema = @Schema(implementation = long.class)), - @Parameter(name = "processDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class)), + @Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class)), @Parameter(name = "taskCode", description = "TASK_CODE", required = true, schema = @Schema(implementation = long.class)) }) @DeleteMapping(value = "/{taskCode}") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_TASK_WORKFLOW_RELATION_ERROR) - public Result deleteTaskProcessRelation(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, - 
@RequestParam(name = "processDefinitionCode", required = true) long processDefinitionCode, - @PathVariable("taskCode") long taskCode) { + public Result deleteTaskWorkflowRelation(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, + @RequestParam(name = "workflowDefinitionCode") long workflowDefinitionCode, + @PathVariable("taskCode") long taskCode) { return returnDataList(workflowTaskRelationService.deleteTaskWorkflowRelation(loginUser, projectCode, - processDefinitionCode, taskCode)); + workflowDefinitionCode, taskCode)); } /** @@ -226,7 +226,7 @@ public class WorkflowTaskRelationController extends BaseController { * * @param loginUser login user * @param projectCode project code - * @param processDefinitionCode process definition code + * @param workflowDefinitionCode workflow definition code * @param preTaskCode pre task code * @param postTaskCode post task code * @return delete result code @@ -234,19 +234,19 @@ public class WorkflowTaskRelationController extends BaseController { @Operation(summary = "deleteEdge", description = "DELETE_EDGE_NOTES") @Parameters({ @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true, schema = @Schema(implementation = long.class)), - @Parameter(name = "processDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class)), + @Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class)), @Parameter(name = "preTaskCode", description = "PRE_TASK_CODE", required = true, schema = @Schema(implementation = long.class)), @Parameter(name = "postTaskCode", description = "POST_TASK_CODE", required = true, schema = @Schema(implementation = long.class)) }) - @DeleteMapping(value = 
"/{processDefinitionCode}/{preTaskCode}/{postTaskCode}") + @DeleteMapping(value = "/{workflowDefinitionCode}/{preTaskCode}/{postTaskCode}") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_EDGE_ERROR) public Result deleteEdge(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, - @PathVariable long processDefinitionCode, + @PathVariable long workflowDefinitionCode, @PathVariable long preTaskCode, @PathVariable long postTaskCode) { - return returnDataList(workflowTaskRelationService.deleteEdge(loginUser, projectCode, processDefinitionCode, + return returnDataList(workflowTaskRelationService.deleteEdge(loginUser, projectCode, workflowDefinitionCode, preTaskCode, postTaskCode)); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/StatisticsV2Controller.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/StatisticsV2Controller.java index 599405b74b..1c4f1788bf 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/StatisticsV2Controller.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/StatisticsV2Controller.java @@ -132,7 +132,7 @@ public class StatisticsV2Controller extends BaseController { * @param statisticsStateRequest statisticsStateRequest * @return workflow count in project code */ - @Operation(summary = "countDefinitionV2ByUserId", description = "COUNT_PROCESS_DEFINITION_V2_BY_USERID_NOTES") + @Operation(summary = "countDefinitionV2ByUserId", description = "COUNT_WORKFLOW_DEFINITION_V2_BY_USERID_NOTES") @GetMapping(value = "/workflows/users/count") @ResponseStatus(HttpStatus.OK) @ApiException(COUNT_WORKFLOW_DEFINITION_USER_ERROR) @@ -149,7 +149,7 @@ public class StatisticsV2Controller extends BaseController { * @param userId userId * @return workflow 
count in project code */ - @Operation(summary = "countDefinitionV2ByUser", description = "COUNT_PROCESS_DEFINITION_V2_BY_USER_NOTES") + @Operation(summary = "countDefinitionV2ByUser", description = "COUNT_WORKFLOW_DEFINITION_V2_BY_USER_NOTES") @GetMapping(value = "/workflows/users/{userId}/count") @ResponseStatus(HttpStatus.OK) @ApiException(COUNT_WORKFLOW_DEFINITION_USER_ERROR) @@ -167,7 +167,7 @@ public class StatisticsV2Controller extends BaseController { * @param releaseState releaseState * @return workflow count in project code */ - @Operation(summary = "countDefinitionV2ByUser", description = "COUNT_PROCESS_DEFINITION_V2_BY_USER_NOTES") + @Operation(summary = "countDefinitionV2ByUser", description = "COUNT_WORKFLOW_DEFINITION_V2_BY_USER_NOTES") @GetMapping(value = "/workflows/users/{userId}/{releaseState}/count") @ResponseStatus(HttpStatus.OK) @ApiException(COUNT_WORKFLOW_DEFINITION_USER_ERROR) diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/TaskInstanceV2Controller.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/TaskInstanceV2Controller.java index 18fdb4cbdf..1b3e6ec446 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/TaskInstanceV2Controller.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/TaskInstanceV2Controller.java @@ -73,8 +73,8 @@ public class TaskInstanceV2Controller extends BaseController { */ @Operation(summary = "queryTaskListPaging", description = "QUERY_TASK_INSTANCE_LIST_PAGING_NOTES") @Parameters({ - @Parameter(name = "processInstanceId", description = "WORKFLOW_INSTANCE_ID", schema = @Schema(implementation = int.class), example = "100"), - @Parameter(name = "processInstanceName", description = "PROCESS_INSTANCE_NAME", schema = @Schema(implementation = String.class)), + @Parameter(name = "workflowInstanceId", description = "WORKFLOW_INSTANCE_ID", schema = 
@Schema(implementation = int.class), example = "100"), + @Parameter(name = "workflowInstanceName", description = "WORKFLOW_INSTANCE_NAME", schema = @Schema(implementation = String.class)), @Parameter(name = "searchVal", description = "SEARCH_VAL", schema = @Schema(implementation = String.class)), @Parameter(name = "taskName", description = "TASK_NAME", schema = @Schema(implementation = String.class)), @Parameter(name = "taskCode", description = "TASK_CODE", schema = @Schema(implementation = Long.class)), @@ -97,8 +97,8 @@ public class TaskInstanceV2Controller extends BaseController { String searchVal = ParameterUtils.handleEscapes(taskInstanceQueryReq.getSearchVal()); return taskInstanceService.queryTaskListPaging(loginUser, projectCode, - taskInstanceQueryReq.getProcessInstanceId(), taskInstanceQueryReq.getProcessInstanceName(), - taskInstanceQueryReq.getProcessDefinitionName(), + taskInstanceQueryReq.getWorkflowInstanceId(), taskInstanceQueryReq.getWorkflowInstanceName(), + taskInstanceQueryReq.getWorkflowDefinitionName(), taskInstanceQueryReq.getTaskName(), taskInstanceQueryReq.getTaskCode(), taskInstanceQueryReq.getExecutorName(), taskInstanceQueryReq.getStartTime(), taskInstanceQueryReq.getEndTime(), searchVal, diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/ProcessTaskRelationV2Controller.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/WorkflowTaskRelationV2Controller.java similarity index 94% rename from dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/ProcessTaskRelationV2Controller.java rename to dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/WorkflowTaskRelationV2Controller.java index 7c111f99ef..4dfa98a0a8 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/ProcessTaskRelationV2Controller.java +++ 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/WorkflowTaskRelationV2Controller.java @@ -52,22 +52,19 @@ import io.swagger.v3.oas.annotations.Parameters; import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.tags.Tag; -/** - * process task relation controller - */ @Tag(name = "WORKFLOW_TASK_RELATION_TAG") @RestController @RequestMapping("v2/relations") -public class ProcessTaskRelationV2Controller extends BaseController { +public class WorkflowTaskRelationV2Controller extends BaseController { @Autowired private WorkflowTaskRelationService workflowTaskRelationService; /** - * create resource process task relation + * create resource workflow task relation * * @param loginUser login user - * @param TaskRelationCreateRequest process task definition json contains the object you want to create + * @param TaskRelationCreateRequest workflow task definition json contains the object you want to create * @return Result object created */ @Operation(summary = "create", description = "CREATE_WORKFLOW_TASK_RELATION_NOTES") @@ -82,7 +79,7 @@ public class ProcessTaskRelationV2Controller extends BaseController { } /** - * delete resource process task relation + * delete resource workflow task relation * * @param loginUser login user * @param codePair code pair you want to delete the task relation, use `upstream,downstream` as example, will delete exists relation upstream -> downstream, throw error if not exists @@ -111,7 +108,7 @@ public class ProcessTaskRelationV2Controller extends BaseController { * @param taskRelationUpdateUpstreamRequest workflowUpdateRequest * @return ResourceResponse object updated */ - @Operation(summary = "update", description = "UPDATE_PROCESS_TASK_RELATION_NOTES") + @Operation(summary = "update", description = "UPDATE_WORKFLOW_TASK_RELATION_NOTES") @Parameters({ @Parameter(name = "code", description = "DOWNSTREAM_TASK_DEFINITION_CODE", schema = @Schema(implementation = long.class, 
example = "123456", required = true)) }) diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/WorkflowV2Controller.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/WorkflowV2Controller.java index f70528a87d..56d15fdab4 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/WorkflowV2Controller.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/WorkflowV2Controller.java @@ -86,7 +86,7 @@ public class WorkflowV2Controller extends BaseController { * Delete workflow by code * * @param loginUser login user - * @param code process definition code + * @param code workflow definition code * @return Result result object delete */ @Operation(summary = "delete", description = "DELETE_WORKFLOWS_NOTES") @@ -152,8 +152,8 @@ public class WorkflowV2Controller extends BaseController { @ApiException(QUERY_WORKFLOW_DEFINITION_LIST) public Result> filterWorkflows(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestBody WorkflowFilterRequest workflowFilterRequest) { - PageInfo processDefinitions = + PageInfo workflowDefinitions = workflowDefinitionService.filterWorkflowDefinition(loginUser, workflowFilterRequest); - return Result.success(processDefinitions); + return Result.success(workflowDefinitions); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/ClusterDto.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/ClusterDto.java index 1722f0e150..c8bdb1612f 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/ClusterDto.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/ClusterDto.java @@ -22,21 +22,18 @@ import java.util.List; import lombok.Data; -/** - * ClusterDto - */ @Data public class ClusterDto { private int id; /** - * clluster code + * cluster code 
*/ private Long code; /** - * clluster name + * cluster name */ private String name; @@ -47,7 +44,7 @@ public class ClusterDto { private String description; - private List processDefinitions; + private List workflowDefinitions; /** * operator user id diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/DagDataSchedule.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/DagDataSchedule.java index d944ffb3d8..e3510948a4 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/DagDataSchedule.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/DagDataSchedule.java @@ -20,14 +20,13 @@ package org.apache.dolphinscheduler.api.dto; import org.apache.dolphinscheduler.dao.entity.DagData; import org.apache.dolphinscheduler.dao.entity.Schedule; -/** - * DagDataSchedule - */ +import lombok.Data; +import lombok.EqualsAndHashCode; + +@EqualsAndHashCode(callSuper = true) +@Data public class DagDataSchedule extends DagData { - /** - * schedule - */ private Schedule schedule; public DagDataSchedule() { @@ -35,16 +34,9 @@ public class DagDataSchedule extends DagData { public DagDataSchedule(DagData dagData) { super(); - this.setProcessDefinition(dagData.getProcessDefinition()); + this.setWorkflowDefinition(dagData.getWorkflowDefinition()); this.setTaskDefinitionList(dagData.getTaskDefinitionList()); - this.setProcessTaskRelationList(dagData.getProcessTaskRelationList()); + this.setWorkflowTaskRelationList(dagData.getWorkflowTaskRelationList()); } - public Schedule getSchedule() { - return schedule; - } - - public void setSchedule(Schedule schedule) { - this.schedule = schedule; - } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/DefineUserDto.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/DefineUserDto.java index 93eba75b05..0ecd85ba63 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/DefineUserDto.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/DefineUserDto.java @@ -21,9 +21,9 @@ import org.apache.dolphinscheduler.dao.model.WorkflowDefinitionCountDto; import java.util.List; -/** - * user process define dto - */ +import lombok.Data; + +@Data public class DefineUserDto { private int count; @@ -38,19 +38,4 @@ public class DefineUserDto { this.userList = defineGroupByUsers; } - public int getCount() { - return count; - } - - public void setCount(int count) { - this.count = count; - } - - public List getUserList() { - return userList; - } - - public void setUserList(List userList) { - this.userList = userList; - } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/DynamicSubWorkflowDto.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/DynamicSubWorkflowDto.java index 5a482f0a6c..42b437d1b1 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/DynamicSubWorkflowDto.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/DynamicSubWorkflowDto.java @@ -28,7 +28,7 @@ import lombok.NoArgsConstructor; @NoArgsConstructor public class DynamicSubWorkflowDto { - private long processInstanceId; + private long workflowInstanceId; private String name; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/WorkflowDefinitionDto.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/WorkflowDefinitionDto.java deleted file mode 100644 index 047e409276..0000000000 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/WorkflowDefinitionDto.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.api.dto; - -import org.apache.dolphinscheduler.dao.entity.WorkflowDefinition; - -/** - * ProcessDefinitionDto - */ -public class WorkflowDefinitionDto extends WorkflowDefinition { -} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/WorkflowInstanceDto.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/WorkflowInstanceDto.java deleted file mode 100644 index 6109e55a3d..0000000000 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/WorkflowInstanceDto.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.api.dto; - -import org.apache.dolphinscheduler.dao.entity.WorkflowInstance; - -/** - * ProcessInstanceDto - */ -public class WorkflowInstanceDto extends WorkflowInstance { -} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/schedule/ScheduleCreateRequest.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/schedule/ScheduleCreateRequest.java index d97f15e7f7..f3fce4c012 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/schedule/ScheduleCreateRequest.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/schedule/ScheduleCreateRequest.java @@ -33,14 +33,11 @@ import com.google.gson.Gson; import com.google.gson.GsonBuilder; import io.swagger.v3.oas.annotations.media.Schema; -/** - * schedule create request - */ @Data public class ScheduleCreateRequest { @Schema(example = "1234567890123", required = true) - private long processDefinitionCode; + private long workflowDefinitionCode; @Schema(example = "schedule timezone", required = true) private String crontab; @@ -67,7 +64,7 @@ public class ScheduleCreateRequest { private int warningGroupId; @Schema(allowableValues = "HIGHEST / HIGH / MEDIUM / LOW / LOWEST", example = "MEDIUM", description = "default MEDIUM if value not provide.") - private String processInstancePriority; + private String workflowInstancePriority; @Schema(example = "worker-group-name") private String workerGroup; @@ -87,7 +84,7 @@ public class ScheduleCreateRequest { public 
Schedule convert2Schedule() { Schedule schedule = new Schedule(); - schedule.setProcessDefinitionCode(this.processDefinitionCode); + schedule.setWorkflowDefinitionCode(this.workflowDefinitionCode); schedule.setCrontab(this.crontab); schedule.setStartTime(stringToDate(this.startTime)); schedule.setEndTime(stringToDate(this.endTime)); @@ -110,8 +107,9 @@ public class ScheduleCreateRequest { schedule.setWarningType(newWarningType); Priority newPriority = - this.processInstancePriority == null ? Priority.MEDIUM : Priority.valueOf(this.processInstancePriority); - schedule.setProcessInstancePriority(newPriority); + this.workflowInstancePriority == null ? Priority.MEDIUM + : Priority.valueOf(this.workflowInstancePriority); + schedule.setWorkflowInstancePriority(newPriority); Date date = new Date(); schedule.setCreateTime(date); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/schedule/ScheduleFilterRequest.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/schedule/ScheduleFilterRequest.java index b0afead0a3..dced4e54d6 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/schedule/ScheduleFilterRequest.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/schedule/ScheduleFilterRequest.java @@ -39,8 +39,8 @@ public class ScheduleFilterRequest extends PageQueryDto { @Schema(example = "project-name") private String projectName; - @Schema(example = "process-definition-name") - private String processDefinitionName; + @Schema(example = "workflow-definition-name") + private String workflowDefinitionName; @Schema(allowableValues = "ONLINE / OFFLINE", example = "OFFLINE", description = "default OFFLINE if value not provide.") private String releaseState; @@ -50,8 +50,8 @@ public class ScheduleFilterRequest extends PageQueryDto { if (this.projectName != null) { schedule.setProjectName(this.projectName); } - if (this.processDefinitionName != null) { - 
schedule.setProcessDefinitionName(this.processDefinitionName); + if (this.workflowDefinitionName != null) { + schedule.setWorkflowDefinitionName(this.workflowDefinitionName); } if (this.releaseState != null) { schedule.setReleaseState(ReleaseState.valueOf(this.releaseState)); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/schedule/ScheduleUpdateRequest.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/schedule/ScheduleUpdateRequest.java index 6effbd5eb8..93cbce5219 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/schedule/ScheduleUpdateRequest.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/schedule/ScheduleUpdateRequest.java @@ -73,7 +73,7 @@ public class ScheduleUpdateRequest { private int warningGroupId; @Schema(allowableValues = "HIGHEST / HIGH / MEDIUM / LOW / LOWEST", example = "MEDIUM", description = "default MEDIUM if value not provide.") - private String processInstancePriority; + private String workflowInstancePriority; @Schema(example = "worker-group-name") private String workerGroup; @@ -122,8 +122,8 @@ public class ScheduleUpdateRequest { if (this.warningGroupId != 0) { scheduleDeepCopy.setWarningGroupId(this.warningGroupId); } - if (this.processInstancePriority != null) { - scheduleDeepCopy.setProcessInstancePriority(Priority.valueOf(this.processInstancePriority)); + if (this.workflowInstancePriority != null) { + scheduleDeepCopy.setWorkflowInstancePriority(Priority.valueOf(this.workflowInstancePriority)); } if (this.workerGroup != null) { scheduleDeepCopy.setWorkerGroup(this.workerGroup); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/taskInstance/TaskInstanceQueryRequest.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/taskInstance/TaskInstanceQueryRequest.java index 6aadd7d732..26d5b153ec 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/taskInstance/TaskInstanceQueryRequest.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/taskInstance/TaskInstanceQueryRequest.java @@ -22,6 +22,7 @@ import org.apache.dolphinscheduler.common.enums.TaskExecuteType; import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus; import lombok.Data; +import lombok.EqualsAndHashCode; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonInclude; @@ -30,19 +31,20 @@ import io.swagger.v3.oas.annotations.media.Schema; /** * task instance request */ +@EqualsAndHashCode(callSuper = true) @JsonIgnoreProperties(ignoreUnknown = true) @JsonInclude(JsonInclude.Include.NON_NULL) @Data public class TaskInstanceQueryRequest extends PageQueryDto { - @Schema(name = "processInstanceId", example = "WORKFLOW_INSTANCE_ID", defaultValue = "0") - Integer processInstanceId; + @Schema(name = "workflowInstanceId", example = "WORKFLOW_INSTANCE_ID", defaultValue = "0") + Integer workflowInstanceId; - @Schema(name = "processInstanceName", example = "PROCESS-INSTANCE-NAME") - String processInstanceName; + @Schema(name = "workflowInstanceName", example = "WORKFLOW-INSTANCE-NAME") + String workflowInstanceName; - @Schema(name = "processDefinitionName", example = "PROCESS-DEFINITION-NAME") - String processDefinitionName; + @Schema(name = "workflowDefinitionName", example = "WORKFLOW-DEFINITION-NAME") + String workflowDefinitionName; @Schema(name = "searchVal", example = "SEARCH-VAL") String searchVal; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/taskRelation/TaskRelationCreateRequest.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/taskRelation/TaskRelationCreateRequest.java index a618128dc7..2f501341a4 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/taskRelation/TaskRelationCreateRequest.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/taskRelation/TaskRelationCreateRequest.java @@ -24,9 +24,6 @@ import java.util.Date; import lombok.Data; import io.swagger.v3.oas.annotations.media.Schema; -/** - * task relation create request - */ @Data public class TaskRelationCreateRequest { @@ -42,11 +39,11 @@ public class TaskRelationCreateRequest { @Schema(example = "54321", required = true) private long postTaskCode; - public WorkflowTaskRelation convert2ProcessTaskRelation() { + public WorkflowTaskRelation convert2WorkflowTaskRelation() { WorkflowTaskRelation workflowTaskRelation = new WorkflowTaskRelation(); workflowTaskRelation.setProjectCode(this.projectCode); - workflowTaskRelation.setProcessDefinitionCode(this.workflowCode); + workflowTaskRelation.setWorkflowDefinitionCode(this.workflowCode); workflowTaskRelation.setPreTaskCode(this.preTaskCode); workflowTaskRelation.setPostTaskCode(this.postTaskCode); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/taskRelation/TaskRelationFilterRequest.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/taskRelation/TaskRelationFilterRequest.java index f4093d9647..d198134681 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/taskRelation/TaskRelationFilterRequest.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/taskRelation/TaskRelationFilterRequest.java @@ -58,7 +58,7 @@ public class TaskRelationFilterRequest extends PageQueryDto { public WorkflowTaskRelation convert2TaskDefinition() { WorkflowTaskRelation workflowTaskRelation = new WorkflowTaskRelation(); if (this.workflowCode != 0L) { - workflowTaskRelation.setProcessDefinitionCode(this.workflowCode); + workflowTaskRelation.setWorkflowDefinitionCode(this.workflowCode); } if (this.preTaskCode != 
0L) { workflowTaskRelation.setPreTaskCode(this.preTaskCode); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/workflow/WorkflowCreateRequest.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/workflow/WorkflowCreateRequest.java index e93926f280..9cf38fe6a4 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/workflow/WorkflowCreateRequest.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/workflow/WorkflowCreateRequest.java @@ -58,7 +58,7 @@ public class WorkflowCreateRequest { @Schema(allowableValues = "PARALLEL / SERIAL_WAIT / SERIAL_DISCARD / SERIAL_PRIORITY", example = "PARALLEL", description = "default PARALLEL if not provide.") private String executionType; - public WorkflowDefinition convert2ProcessDefinition() { + public WorkflowDefinition convert2WorkflowDefinition() { WorkflowDefinition workflowDefinition = new WorkflowDefinition(); workflowDefinition.setName(this.name); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/workflow/WorkflowUpdateRequest.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/workflow/WorkflowUpdateRequest.java index 4420bd92c0..c041f6c358 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/workflow/WorkflowUpdateRequest.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/workflow/WorkflowUpdateRequest.java @@ -63,12 +63,12 @@ public class WorkflowUpdateRequest { private String location; /** - * Merge workflowUpdateRequest information into exists processDefinition object + * Merge workflowUpdateRequest information into exists workflowDefinition object * - * @param workflowDefinition exists processDefinition object - * @return process definition + * @param workflowDefinition exists workflowDefinition object + * @return workflow definition */ - public WorkflowDefinition 
mergeIntoProcessDefinition(WorkflowDefinition workflowDefinition) { + public WorkflowDefinition mergeIntoWorkflowDefinition(WorkflowDefinition workflowDefinition) { WorkflowDefinition workflowDefinitionDeepCopy = JSONUtils.parseObject(JSONUtils.toJsonString(workflowDefinition), WorkflowDefinition.class); assert workflowDefinitionDeepCopy != null; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/workflowInstance/WorkflowInstanceQueryRequest.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/workflowInstance/WorkflowInstanceQueryRequest.java index 328dcdc574..12c15a2caf 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/workflowInstance/WorkflowInstanceQueryRequest.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/workflowInstance/WorkflowInstanceQueryRequest.java @@ -52,7 +52,7 @@ public class WorkflowInstanceQueryRequest extends PageQueryDto { @Schema(name = "state", example = "STATE") Integer state; - public WorkflowInstance convert2ProcessInstance() { + public WorkflowInstance convert2WorkflowInstance() { WorkflowInstance workflowInstance = new WorkflowInstance(); if (this.workflowName != null) { workflowInstance.setName(this.workflowName); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/python/PythonGateway.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/python/PythonGateway.java index aee3d44daa..0166e34381 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/python/PythonGateway.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/python/PythonGateway.java @@ -177,7 +177,7 @@ public class PythonGateway { return taskDefinitionService.genTaskCodeList(genNum); } - public Map getCodeAndVersion(String projectName, String processDefinitionName, + public Map getCodeAndVersion(String projectName, String workflowDefinitionName, String 
taskName) throws CodeGenerateUtils.CodeGenerateException { Project project = projectMapper.queryByName(projectName); Map result = new HashMap<>(); @@ -189,7 +189,7 @@ public class PythonGateway { } WorkflowDefinition workflowDefinition = - workflowDefinitionMapper.queryByDefineName(project.getCode(), processDefinitionName); + workflowDefinitionMapper.queryByDefineName(project.getCode(), workflowDefinitionName); // In the case project exists, but current workflow still not created, we should also return the init // version of it if (workflowDefinition == null) { @@ -259,14 +259,14 @@ public class PythonGateway { WorkflowDefinition workflowDefinition = getWorkflow(user, projectCode, name); WorkflowExecutionTypeEnum executionTypeEnum = WorkflowExecutionTypeEnum.valueOf(executionType); - long processDefinitionCode; + long workflowDefinitionCode; // create or update workflow if (workflowDefinition != null) { - processDefinitionCode = workflowDefinition.getCode(); + workflowDefinitionCode = workflowDefinition.getCode(); // make sure workflow offline which could edit - workflowDefinitionService.offlineWorkflowDefinition(user, projectCode, processDefinitionCode); + workflowDefinitionService.offlineWorkflowDefinition(user, projectCode, workflowDefinitionCode); workflowDefinitionService.updateWorkflowDefinition(user, projectCode, name, - processDefinitionCode, description, globalParams, + workflowDefinitionCode, description, globalParams, null, timeout, taskRelationJson, taskDefinitionJson, executionTypeEnum); } else { @@ -279,21 +279,21 @@ public class PythonGateway { throw new ServiceException(result.get(Constants.MSG).toString()); } workflowDefinition = (WorkflowDefinition) result.get(Constants.DATA_LIST); - processDefinitionCode = workflowDefinition.getCode(); + workflowDefinitionCode = workflowDefinition.getCode(); } // Fresh workflow schedule if (schedule != null) { - createOrUpdateSchedule(user, projectCode, processDefinitionCode, schedule, onlineSchedule, 
workerGroup, + createOrUpdateSchedule(user, projectCode, workflowDefinitionCode, schedule, onlineSchedule, workerGroup, warningType, warningGroupId); } if (ReleaseState.ONLINE.equals(ReleaseState.getEnum(releaseState))) { - workflowDefinitionService.onlineWorkflowDefinition(user, projectCode, processDefinitionCode); + workflowDefinitionService.onlineWorkflowDefinition(user, projectCode, workflowDefinitionCode); } else if (ReleaseState.OFFLINE.equals(ReleaseState.getEnum(releaseState))) { - workflowDefinitionService.offlineWorkflowDefinition(user, projectCode, processDefinitionCode); + workflowDefinitionService.offlineWorkflowDefinition(user, projectCode, workflowDefinitionCode); } - return processDefinitionCode; + return workflowDefinitionCode; } /** @@ -304,9 +304,9 @@ public class PythonGateway { * @param workflowName workflow name */ private WorkflowDefinition getWorkflow(User user, long projectCode, String workflowName) { - Map verifyProcessDefinitionExists = + Map verifyWorkflowDefinitionExists = workflowDefinitionService.verifyWorkflowDefinitionName(user, projectCode, workflowName, 0); - Status verifyStatus = (Status) verifyProcessDefinitionExists.get(Constants.STATUS); + Status verifyStatus = (Status) verifyWorkflowDefinitionExists.get(Constants.STATUS); WorkflowDefinition workflowDefinition = null; if (verifyStatus == Status.WORKFLOW_DEFINITION_NAME_EXIST) { @@ -343,7 +343,7 @@ public class PythonGateway { String workerGroup, String warningType, int warningGroupId) { - Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(workflowCode); + Schedule scheduleObj = scheduleMapper.queryByWorkflowDefinitionCode(workflowCode); // create or update schedule int scheduleId; if (scheduleObj == null) { @@ -526,7 +526,7 @@ public class PythonGateway { /** * Get workflow object by given workflow name. It returns map contain workflow id, name, code. - * Useful in Python API create subProcess task which need workflow information. 
+ * Useful in Python API create sub workflow task which need workflow information. * * @param userName user who create or update schedule * @param projectName project name which workflow belongs to @@ -583,7 +583,7 @@ public class PythonGateway { log.error(msg); throw new IllegalArgumentException(msg); } - result.put("processDefinitionCode", workflowDefinition.getCode()); + result.put("workflowDefinitionCode", workflowDefinition.getCode()); if (taskName != null) { TaskDefinition taskDefinition = diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java index c67a2c2e7a..99e55bfab2 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java @@ -210,7 +210,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ public boolean checkSubWorkflowDefinitionValid(WorkflowDefinition workflowDefinition) { // query all sub workflows under the current workflow List workflowTaskRelations = - workflowTaskRelationMapper.queryDownstreamByProcessDefinitionCode(workflowDefinition.getCode()); + workflowTaskRelationMapper.queryDownstreamByWorkflowDefinitionCode(workflowDefinition.getCode()); if (workflowTaskRelations.isEmpty()) { return true; } @@ -337,11 +337,11 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ } WorkflowDefinition workflowDefinition = - processService.findProcessDefinition(workflowInstance.getProcessDefinitionCode(), - workflowInstance.getProcessDefinitionVersion()); + processService.findWorkflowDefinition(workflowInstance.getWorkflowDefinitionCode(), + workflowInstance.getWorkflowDefinitionVersion()); workflowDefinition.setReleaseState(ReleaseState.ONLINE); - 
this.checkWorkflowDefinitionValid(projectCode, workflowDefinition, workflowInstance.getProcessDefinitionCode(), - workflowInstance.getProcessDefinitionVersion()); + this.checkWorkflowDefinitionValid(projectCode, workflowDefinition, workflowInstance.getWorkflowDefinitionCode(), + workflowInstance.getWorkflowDefinitionVersion()); // get the startParams user specified at the first starting while repeat running is needed @@ -367,11 +367,11 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ Command command = new Command(); command.setCommandType(CommandType.EXECUTE_TASK); - command.setProcessDefinitionCode(workflowDefinition.getCode()); + command.setWorkflowDefinitionCode(workflowDefinition.getCode()); command.setCommandParam(JSONUtils.toJsonString(cmdParam)); command.setExecutorId(loginUser.getId()); - command.setProcessDefinitionVersion(workflowDefinition.getVersion()); - command.setProcessInstanceId(workflowInstanceId); + command.setWorkflowDefinitionVersion(workflowDefinition.getVersion()); + command.setWorkflowInstanceId(workflowInstanceId); command.setTestFlag(workflowInstance.getTestFlag()); // Add taskDependType @@ -391,13 +391,13 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ if (create > 0) { log.info("Create {} command complete, workflowDefinitionCode:{}, workflowDefinitionVersion:{}.", - command.getCommandType().getDescp(), command.getProcessDefinitionCode(), + command.getCommandType().getDescp(), command.getWorkflowDefinitionCode(), workflowDefinition.getVersion()); putMsg(response, Status.SUCCESS); } else { log.error( "Execute workflow instance failed because create {} command error, workflowDefinitionCode:{}, workflowDefinitionVersion:{}, workflowInstanceId:{}.", - command.getCommandType().getDescp(), command.getProcessDefinitionCode(), + command.getCommandType().getDescp(), command.getWorkflowDefinitionCode(), workflowDefinition.getVersion(), workflowInstanceId); putMsg(response, 
Status.EXECUTE_WORKFLOW_INSTANCE_ERROR); @@ -411,9 +411,10 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ Map result = new HashMap<>(); TaskGroupQueue taskGroupQueue = taskGroupQueueMapper.selectById(queueId); // check workflow instance exist - workflowInstanceDao.queryOptionalById(taskGroupQueue.getProcessId()) + workflowInstanceDao.queryOptionalById(taskGroupQueue.getWorkflowInstanceId()) .orElseThrow( - () -> new ServiceException(Status.WORKFLOW_INSTANCE_NOT_EXIST, taskGroupQueue.getProcessId())); + () -> new ServiceException(Status.WORKFLOW_INSTANCE_NOT_EXIST, + taskGroupQueue.getWorkflowInstanceId())); if (taskGroupQueue.getInQueue() == Flag.NO.getCode()) { throw new ServiceException(Status.TASK_GROUP_QUEUE_ALREADY_START); @@ -442,20 +443,20 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ if (createCount > 0) { log.info("Create {} command complete, workflowDefinitionCode:{}", - command.getCommandType().getDescp(), command.getProcessDefinitionCode()); + command.getCommandType().getDescp(), command.getWorkflowDefinitionCode()); } else { log.error("Create {} command error, workflowDefinitionCode:{}", - command.getCommandType().getDescp(), command.getProcessDefinitionCode()); + command.getCommandType().getDescp(), command.getWorkflowDefinitionCode()); } if (schedules.isEmpty() || complementDependentMode == ComplementDependentMode.OFF_MODE) { log.info( "Complement dependent mode is off mode or Scheduler is empty, so skip create complement dependent command, workflowDefinitionCode:{}.", - command.getProcessDefinitionCode()); + command.getWorkflowDefinitionCode()); } else { log.info( "Complement dependent mode is all dependent and Scheduler is not empty, need create complement dependent command, workflowDefinitionCode:{}.", - command.getProcessDefinitionCode()); + command.getWorkflowDefinitionCode()); createComplementDependentCommand(schedules, command, allLevelDependent); } @@ -478,8 +479,8 @@ 
public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ executionOrder = ExecutionOrder.DESC_ORDER; } - List schedules = processService.queryReleaseSchedulerListByProcessDefinitionCode( - command.getProcessDefinitionCode()); + List schedules = processService.queryReleaseSchedulerListByWorkflowDefinitionCode( + command.getWorkflowDefinitionCode()); List listDate = new ArrayList<>(); if (scheduleParam.containsKey(CMD_PARAM_COMPLEMENT_DATA_START_DATE) && scheduleParam.containsKey( @@ -518,14 +519,14 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ switch (runMode) { case RUN_MODE_SERIAL: { log.info("RunMode of {} command is serial run, workflowDefinitionCode:{}.", - command.getCommandType().getDescp(), command.getProcessDefinitionCode()); + command.getCommandType().getDescp(), command.getWorkflowDefinitionCode()); createCount = createComplementCommand(triggerCode, command, cmdParam, listDate, schedules, complementDependentMode, allLevelDependent); break; } case RUN_MODE_PARALLEL: { log.info("RunMode of {} command is parallel run, workflowDefinitionCode:{}.", - command.getCommandType().getDescp(), command.getProcessDefinitionCode()); + command.getCommandType().getDescp(), command.getWorkflowDefinitionCode()); int queueNum = 0; if (CollectionUtils.isNotEmpty(listDate)) { @@ -573,7 +574,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ } List dependentWorkflowDefinitionList = - getComplementDependentDefinitionList(dependentCommand.getProcessDefinitionCode(), + getComplementDependentDefinitionList(dependentCommand.getWorkflowDefinitionCode(), CronUtils.getMaxCycle(schedules.get(0).getCrontab()), dependentCommand.getWorkerGroup(), allLevelDependent); dependentCommand.setTaskDependType(TaskDependType.TASK_POST); @@ -581,8 +582,8 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ // If the id is Integer, the auto-increment id will be obtained by 
mybatis-plus // and causing duplicate when clone it. dependentCommand.setId(null); - dependentCommand.setProcessDefinitionCode(dependentWorkflowDefinition.getProcessDefinitionCode()); - dependentCommand.setProcessDefinitionVersion(dependentWorkflowDefinition.getProcessDefinitionVersion()); + dependentCommand.setWorkflowDefinitionCode(dependentWorkflowDefinition.getWorkflowDefinitionCode()); + dependentCommand.setWorkflowDefinitionVersion(dependentWorkflowDefinition.getWorkflowDefinitionVersion()); dependentCommand.setWorkerGroup(dependentWorkflowDefinition.getWorkerGroup()); Map cmdParam = JSONUtils.toMap(dependentCommand.getCommandParam()); cmdParam.put(CMD_PARAM_START_NODES, String.valueOf(dependentWorkflowDefinition.getTaskDefinitionCode())); @@ -618,10 +619,10 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ .stream() .flatMap(dependentWorkflowDefinition -> checkDependentWorkflowDefinitionValid( workflowLineageService.queryDownstreamDependentWorkflowDefinitions( - dependentWorkflowDefinition.getProcessDefinitionCode()), + dependentWorkflowDefinition.getWorkflowDefinitionCode()), workflowDefinitionCycle, workerGroup, - dependentWorkflowDefinition.getProcessDefinitionCode()).stream()) + dependentWorkflowDefinition.getWorkflowDefinitionCode()).stream()) .collect(Collectors.toList()); if (childDependentList.isEmpty()) { break; @@ -646,17 +647,17 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ List validDependentWorkflowDefinitionList = new ArrayList<>(); List workflowDefinitionCodeList = - dependentWorkflowDefinitionList.stream().map(DependentWorkflowDefinition::getProcessDefinitionCode) + dependentWorkflowDefinitionList.stream().map(DependentWorkflowDefinition::getWorkflowDefinitionCode) .collect(Collectors.toList()); - Map processDefinitionWorkerGroupMap = + Map workflowDefinitionWorkerGroupMap = workerGroupService.queryWorkerGroupByWorkflowDefinitionCodes(workflowDefinitionCodeList); for 
(DependentWorkflowDefinition dependentWorkflowDefinition : dependentWorkflowDefinitionList) { if (dependentWorkflowDefinition .getDependentCycle(upstreamWorkflowDefinitionCode) == workflowDefinitionCycle) { - if (processDefinitionWorkerGroupMap - .get(dependentWorkflowDefinition.getProcessDefinitionCode()) == null) { + if (workflowDefinitionWorkerGroupMap + .get(dependentWorkflowDefinition.getWorkflowDefinitionCode()) == null) { dependentWorkflowDefinition.setWorkerGroup(workerGroup); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java index 975dea0bc1..3cab05aa77 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java @@ -395,7 +395,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic .map(Project::getUserId).distinct().collect(Collectors.toList())); Map userMap = userList.stream().collect(Collectors.toMap(User::getId, User::getUserName)); List projectWorkflowDefinitionCountList = - workflowDefinitionMapper.queryProjectProcessDefinitionCountByProjectCodes( + workflowDefinitionMapper.queryProjectWorkflowDefinitionCountByProjectCodes( projectList.stream().map(Project::getCode).distinct().collect(Collectors.toList())); Map projectWorkflowDefinitionCountMap = projectWorkflowDefinitionCountList.stream() .collect(Collectors.toMap(ProjectWorkflowDefinitionCount::getProjectCode, diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java index 2feff66451..ffafa331cc 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java @@ -155,7 +155,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe workflowDefinition.getVersion()); Schedule scheduleExists = - scheduleMapper.queryByProcessDefinitionCode(workflowDefinitionCode); + scheduleMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode); if (scheduleExists != null) { log.error("Schedule already exist, scheduleId:{}, workflowDefinitionCode:{}", scheduleExists.getId(), workflowDefinitionCode); @@ -170,8 +170,8 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe scheduleObj.setTenantCode(tenantCode); scheduleObj.setProjectName(project.getName()); - scheduleObj.setProcessDefinitionCode(workflowDefinitionCode); - scheduleObj.setProcessDefinitionName(workflowDefinition.getName()); + scheduleObj.setWorkflowDefinitionCode(workflowDefinitionCode); + scheduleObj.setWorkflowDefinitionName(workflowDefinition.getName()); ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class); if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) { @@ -202,7 +202,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe scheduleObj.setUserId(loginUser.getId()); scheduleObj.setUserName(loginUser.getUserName()); scheduleObj.setReleaseState(ReleaseState.OFFLINE); - scheduleObj.setProcessInstancePriority(workflowInstancePriority); + scheduleObj.setWorkflowInstancePriority(workflowInstancePriority); scheduleObj.setWorkerGroup(workerGroup); scheduleObj.setEnvironmentCode(environmentCode); scheduleMapper.insert(scheduleObj); @@ -259,19 +259,20 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe @Transactional public Schedule createSchedulesV2(User loginUser, 
ScheduleCreateRequest scheduleCreateRequest) { - this.projectPermCheckByWorkflowCode(loginUser, scheduleCreateRequest.getProcessDefinitionCode()); + this.projectPermCheckByWorkflowCode(loginUser, scheduleCreateRequest.getWorkflowDefinitionCode()); WorkflowDefinition workflowDefinition = - workflowDefinitionMapper.queryByCode(scheduleCreateRequest.getProcessDefinitionCode()); + workflowDefinitionMapper.queryByCode(scheduleCreateRequest.getWorkflowDefinitionCode()); // check workflow define release state executorService.checkWorkflowDefinitionValid(workflowDefinition.getProjectCode(), workflowDefinition, workflowDefinition.getCode(), workflowDefinition.getVersion()); Schedule scheduleExists = - scheduleMapper.queryByProcessDefinitionCode(scheduleCreateRequest.getProcessDefinitionCode()); + scheduleMapper.queryByWorkflowDefinitionCode(scheduleCreateRequest.getWorkflowDefinitionCode()); if (scheduleExists != null) { - throw new ServiceException(Status.SCHEDULE_ALREADY_EXISTS, scheduleCreateRequest.getProcessDefinitionCode(), + throw new ServiceException(Status.SCHEDULE_ALREADY_EXISTS, + scheduleCreateRequest.getWorkflowDefinitionCode(), scheduleExists.getId()); } @@ -286,7 +287,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe schedule.setUserId(loginUser.getId()); // give more detail when return schedule object schedule.setUserName(loginUser.getUserName()); - schedule.setProcessDefinitionName(workflowDefinition.getName()); + schedule.setWorkflowDefinitionName(workflowDefinition.getName()); this.scheduleParamCheck(scheduleCreateRequest.getScheduleParam()); int create = scheduleMapper.insert(schedule); @@ -348,11 +349,11 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe } WorkflowDefinition workflowDefinition = - workflowDefinitionMapper.queryByCode(schedule.getProcessDefinitionCode()); + workflowDefinitionMapper.queryByCode(schedule.getWorkflowDefinitionCode()); if (workflowDefinition == null || 
projectCode != workflowDefinition.getProjectCode()) { log.error("workflow definition does not exist, workflowDefinitionCode:{}.", - schedule.getProcessDefinitionCode()); - putMsg(result, Status.WORKFLOW_DEFINITION_NOT_EXIST, String.valueOf(schedule.getProcessDefinitionCode())); + schedule.getWorkflowDefinitionCode()); + putMsg(result, Status.WORKFLOW_DEFINITION_NOT_EXIST, String.valueOf(schedule.getWorkflowDefinitionCode())); return result; } @@ -389,7 +390,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe throw new ServiceException(Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleUpdateRequest.toString()); } // check update params - this.projectPermCheckByWorkflowCode(loginUser, scheduleUpdate.getProcessDefinitionCode()); + this.projectPermCheckByWorkflowCode(loginUser, scheduleUpdate.getWorkflowDefinitionCode()); if (scheduleUpdate.getEnvironmentCode() != null) { Environment environment = environmentMapper.queryByEnvironmentCode(scheduleUpdate.getEnvironmentCode()); @@ -420,7 +421,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe if (schedule == null) { throw new ServiceException(Status.SCHEDULE_NOT_EXISTS, scheduleId); } - this.projectPermCheckByWorkflowCode(loginUser, schedule.getProcessDefinitionCode()); + this.projectPermCheckByWorkflowCode(loginUser, schedule.getWorkflowDefinitionCode()); return schedule; } @@ -460,7 +461,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe Page page = new Page<>(pageNo, pageSize); IPage schedulePage = - scheduleMapper.queryByProjectAndProcessDefineCodePaging(page, projectCode, workflowDefinitionCode, + scheduleMapper.queryByProjectAndWorkflowDefinitionCodePaging(page, projectCode, workflowDefinitionCode, searchVal); List scheduleList = new ArrayList<>(); @@ -480,7 +481,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe if (CollectionUtils.isEmpty(workflowDefinitionCodes)) { return 
Collections.emptyList(); } - return scheduleMapper.querySchedulesByProcessDefinitionCodes(workflowDefinitionCodes); + return scheduleMapper.querySchedulesByWorkflowDefinitionCodes(workflowDefinitionCodes); } /** @@ -578,7 +579,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe throw new ServiceException(Status.USER_NO_OPERATION_PERM); } - this.projectPermCheckByWorkflowCode(loginUser, schedule.getProcessDefinitionCode()); + this.projectPermCheckByWorkflowCode(loginUser, schedule.getWorkflowDefinitionCode()); int delete = scheduleMapper.deleteById(scheduleId); if (delete <= 0) { throw new ServiceException(Status.DELETE_SCHEDULE_BY_ID_ERROR); @@ -655,7 +656,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe return result; } // check schedule exists - Schedule schedule = scheduleMapper.queryByProcessDefinitionCode(workflowDefinitionCode); + Schedule schedule = scheduleMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode); if (schedule == null) { log.error("Schedule of workflow definition does not exist, workflowDefinitionCode:{}.", workflowDefinitionCode); @@ -686,7 +687,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe @Transactional @Override public void onlineSchedulerByWorkflowCode(Long workflowDefinitionCode) { - Schedule schedule = scheduleMapper.queryByProcessDefinitionCode(workflowDefinitionCode); + Schedule schedule = scheduleMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode); doOnlineScheduler(schedule); } @@ -699,7 +700,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe return; } WorkflowDefinition workflowDefinition = - workflowDefinitionMapper.queryByCode(schedule.getProcessDefinitionCode()); + workflowDefinitionMapper.queryByCode(schedule.getWorkflowDefinitionCode()); if (!ReleaseState.ONLINE.equals(workflowDefinition.getReleaseState())) { throw new 
ServiceException(Status.WORKFLOW_DEFINITION_NOT_RELEASE, workflowDefinition.getName()); } @@ -722,7 +723,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe @Transactional @Override public void offlineSchedulerByWorkflowCode(Long workflowDefinitionCode) { - Schedule schedule = scheduleMapper.queryByProcessDefinitionCode(workflowDefinitionCode); + Schedule schedule = scheduleMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode); doOfflineScheduler(schedule); } @@ -737,7 +738,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe schedule.setReleaseState(ReleaseState.OFFLINE); scheduleMapper.updateById(schedule); WorkflowDefinition workflowDefinition = - workflowDefinitionMapper.queryByCode(schedule.getProcessDefinitionCode()); + workflowDefinitionMapper.queryByCode(schedule.getWorkflowDefinitionCode()); Project project = projectMapper.queryByCode(workflowDefinition.getProjectCode()); schedulerApi.deleteScheduleTask(project.getId(), schedule.getId()); } @@ -802,7 +803,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe schedule.setWorkerGroup(workerGroup); schedule.setEnvironmentCode(environmentCode); schedule.setUpdateTime(now); - schedule.setProcessInstancePriority(workflowInstancePriority); + schedule.setWorkflowInstancePriority(workflowInstancePriority); scheduleMapper.updateById(schedule); workflowDefinition.setWarningGroupId(warningGroupId); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskDefinitionServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskDefinitionServiceImpl.java index 433a4ea395..5f7ccfcf76 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskDefinitionServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskDefinitionServiceImpl.java @@ -419,11 +419,11 
@@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe projectCode, taskCode, taskDefinitionToUpdate.getVersion()); // update workflow task relation List workflowTaskRelations = workflowTaskRelationMapper - .queryProcessTaskRelationByTaskCodeAndTaskVersion(taskDefinitionToUpdate.getCode(), + .queryWorkflowTaskRelationByTaskCodeAndTaskVersion(taskDefinitionToUpdate.getCode(), taskDefinition.getVersion()); if (CollectionUtils.isNotEmpty(workflowTaskRelations)) { Map> workflowTaskRelationGroupList = workflowTaskRelations.stream() - .collect(Collectors.groupingBy(WorkflowTaskRelation::getProcessDefinitionCode)); + .collect(Collectors.groupingBy(WorkflowTaskRelation::getWorkflowDefinitionCode)); for (Map.Entry> workflowTaskRelationMap : workflowTaskRelationGroupList .entrySet()) { Long workflowDefinitionCode = workflowTaskRelationMap.getKey(); @@ -437,9 +437,9 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe } else if (taskCode == workflowTaskRelation.getPostTaskCode()) { workflowTaskRelation.setPostTaskVersion(version); } - workflowTaskRelation.setProcessDefinitionVersion(workflowDefinitionVersion); + workflowTaskRelation.setWorkflowDefinitionVersion(workflowDefinitionVersion); int updateWorkflowDefinitionVersionCount = - workflowTaskRelationMapper.updateProcessTaskRelationTaskVersion(workflowTaskRelation); + workflowTaskRelationMapper.updateWorkflowTaskRelationTaskVersion(workflowTaskRelation); if (updateWorkflowDefinitionVersionCount != 1) { log.error("batch update workflow task relation error, projectCode:{}, taskDefinitionCode:{}.", projectCode, taskCode); @@ -527,11 +527,11 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe if (MapUtils.isNotEmpty(queryUpStreamTaskCodeMap)) { WorkflowTaskRelation taskRelation = upstreamTaskRelations.get(0); List workflowTaskRelations = - workflowTaskRelationMapper.queryByProcessCode(taskRelation.getProcessDefinitionCode()); + 
workflowTaskRelationMapper.queryByWorkflowDefinitionCode(taskRelation.getWorkflowDefinitionCode()); // set upstream code list updateUpstreamTask(new HashSet<>(queryUpStreamTaskCodeMap.keySet()), - taskCode, projectCode, taskRelation.getProcessDefinitionCode(), loginUser); + taskCode, projectCode, taskRelation.getWorkflowDefinitionCode(), loginUser); List workflowTaskRelationList = Lists.newArrayList(workflowTaskRelations); List relationList = Lists.newArrayList(); @@ -695,9 +695,9 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe log.info( "Task definition has upstream tasks, start handle them after switch task, taskDefinitionCode:{}.", taskCode); - long workflowDefinitionCode = taskRelationList.get(0).getProcessDefinitionCode(); + long workflowDefinitionCode = taskRelationList.get(0).getWorkflowDefinitionCode(); List workflowTaskRelations = - workflowTaskRelationMapper.queryByProcessCode(workflowDefinitionCode); + workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode); updateDag(loginUser, workflowDefinitionCode, workflowTaskRelations, Lists.newArrayList(taskDefinitionUpdate)); } else { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java index cb8d24eb1d..a3137d3f7c 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java @@ -229,9 +229,9 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst throw new ServiceException("The task instance is not under the project: " + projectCode); } - WorkflowInstance workflowInstance = workflowInstanceDao.queryOptionalById(task.getProcessInstanceId()) + WorkflowInstance workflowInstance = 
workflowInstanceDao.queryOptionalById(task.getWorkflowInstanceId()) .orElseThrow( - () -> new ServiceException(Status.WORKFLOW_INSTANCE_NOT_EXIST, task.getProcessInstanceId())); + () -> new ServiceException(Status.WORKFLOW_INSTANCE_NOT_EXIST, task.getWorkflowInstanceId())); if (!workflowInstance.getState().isFinished()) { throw new ServiceException("The workflow instance is not finished: " + workflowInstance.getState() + " cannot force start task instance"); @@ -249,7 +249,7 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst if (changedNum <= 0) { throw new ServiceException(Status.FORCE_TASK_SUCCESS_ERROR); } - processService.forceProcessInstanceSuccessByTaskInstanceId(task); + processService.forceWorkflowInstanceSuccessByTaskInstanceId(task); log.info("Force success task instance:{} success", taskInstanceId); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java index a1aa39f81d..02141c3fad 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java @@ -251,7 +251,7 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService throw new ServiceException(Status.DELETE_TENANT_BY_ID_ERROR); } - workflowInstanceMapper.updateProcessInstanceByTenantCode(tenant.getTenantCode(), Constants.DEFAULT); + workflowInstanceMapper.updateWorkflowInstanceByTenantCode(tenant.getTenantCode(), Constants.DEFAULT); } private List getWorkflowInstancesByTenant(Tenant tenant) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkerGroupServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkerGroupServiceImpl.java 
index 1deb1da00d..0ceca0d887 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkerGroupServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkerGroupServiceImpl.java @@ -217,7 +217,7 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro if (CollectionUtils.isNotEmpty(schedules)) { List workflowDefinitionNames = schedules.stream().limit(3) - .map(schedule -> workflowDefinitionMapper.queryByCode(schedule.getProcessDefinitionCode()) + .map(schedule -> workflowDefinitionMapper.queryByCode(schedule.getWorkflowDefinitionCode()) .getName()) .collect(Collectors.toList()); @@ -432,8 +432,8 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro @Override public Map queryWorkerGroupByWorkflowDefinitionCodes(List workflowDefinitionCodeList) { List workflowDefinitionScheduleList = - scheduleMapper.querySchedulesByProcessDefinitionCodes(workflowDefinitionCodeList); - return workflowDefinitionScheduleList.stream().collect(Collectors.toMap(Schedule::getProcessDefinitionCode, + scheduleMapper.querySchedulesByWorkflowDefinitionCodes(workflowDefinitionCodeList); + return workflowDefinitionScheduleList.stream().collect(Collectors.toMap(Schedule::getWorkflowDefinitionCode, Schedule::getWorkerGroup)); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkflowDefinitionServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkflowDefinitionServiceImpl.java index 2ef2a5becb..caf62ef1cf 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkflowDefinitionServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkflowDefinitionServiceImpl.java @@ -346,7 +346,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo @Transactional 
public WorkflowDefinition createSingleWorkflowDefinition(User loginUser, WorkflowCreateRequest workflowCreateRequest) { - WorkflowDefinition workflowDefinition = workflowCreateRequest.convert2ProcessDefinition(); + WorkflowDefinition workflowDefinition = workflowCreateRequest.convert2WorkflowDefinition(); this.createWorkflowValid(loginUser, workflowDefinition); long workflowDefinitionCode; @@ -626,13 +626,13 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo Map scheduleMap = schedulerService.queryScheduleByWorkflowDefinitionCodes(workflowDefinitionCodes) .stream() - .collect(Collectors.toMap(Schedule::getProcessDefinitionCode, Function.identity())); - List userWithCodes = userMapper.queryUserWithProcessDefinitionCode( + .collect(Collectors.toMap(Schedule::getWorkflowDefinitionCode, Function.identity())); + List userWithCodes = userMapper.queryUserWithWorkflowDefinitionCode( workflowDefinitionCodes); for (WorkflowDefinition pd : workflowDefinitions) { userWithCodes.stream() - .filter(userWithCode -> userWithCode.getProcessDefinitionCode() == pd.getCode() - && userWithCode.getProcessDefinitionVersion() == pd.getVersion()) + .filter(userWithCode -> userWithCode.getWorkflowDefinitionCode() == pd.getCode() + && userWithCode.getWorkflowDefinitionVersion() == pd.getVersion()) .findAny().ifPresent(userWithCode -> { pd.setModifyBy(userWithCode.getModifierName()); pd.setUserName(userWithCode.getCreatorName()); @@ -670,7 +670,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo Page page = new Page<>(workflowFilterRequest.getPageNo(), workflowFilterRequest.getPageSize()); IPage workflowDefinitionIPage = - workflowDefinitionMapper.filterProcessDefinition(page, workflowDefinition); + workflowDefinitionMapper.filterWorkflowDefinition(page, workflowDefinition); List records = workflowDefinitionIPage.getRecords(); for (WorkflowDefinition pd : records) { @@ -869,7 +869,7 @@ public class WorkflowDefinitionServiceImpl 
extends BaseServiceImpl implements Wo private void taskUsedInOtherTaskValid(WorkflowDefinition workflowDefinition, List taskRelationList) { List oldWorkflowTaskRelationList = - workflowTaskRelationMapper.queryByProcessCode(workflowDefinition.getCode()); + workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinition.getCode()); Set oldWorkflowTaskRelationSet = oldWorkflowTaskRelationList.stream().map(WorkflowTaskRelationLog::new).collect(Collectors.toSet()); StringBuilder sb = new StringBuilder(); @@ -878,7 +878,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo .anyMatch(relation -> oldWorkflowTaskRelation.getPostTaskCode() == relation.getPostTaskCode()); if (!oldTaskExists) { Optional taskDepMsg = workflowLineageService.taskDependentMsg( - workflowDefinition.getProjectCode(), oldWorkflowTaskRelation.getProcessDefinitionCode(), + workflowDefinition.getProjectCode(), oldWorkflowTaskRelation.getWorkflowDefinitionCode(), oldWorkflowTaskRelation.getPostTaskCode()); taskDepMsg.ifPresent(sb::append); } @@ -909,7 +909,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo boolean isChange = false; if (workflowDefinition.equals(workflowDefinitionDeepCopy) && saveTaskResult == Constants.EXIT_CODE_SUCCESS) { List workflowTaskRelationLogList = workflowTaskRelationLogMapper - .queryByProcessCodeAndVersion(workflowDefinition.getCode(), workflowDefinition.getVersion()); + .queryByWorkflowCodeAndVersion(workflowDefinition.getCode(), workflowDefinition.getVersion()); if (taskRelationList.size() == workflowTaskRelationLogList.size()) { Set taskRelationSet = new HashSet<>(taskRelationList); Set workflowTaskRelationLogSet = new HashSet<>(workflowTaskRelationLogList); @@ -1095,7 +1095,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo workflowDefinitionUsedInOtherTaskValid(workflowDefinition); // get the timing according to the workflow definition - Schedule scheduleObj 
= scheduleMapper.queryByProcessDefinitionCode(code); + Schedule scheduleObj = scheduleMapper.queryByWorkflowDefinitionCode(code); if (scheduleObj != null) { if (scheduleObj.getReleaseState() == ReleaseState.OFFLINE) { int delete = scheduleMapper.deleteById(scheduleObj.getId()); @@ -1203,7 +1203,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo * @return DagDataSchedule */ public DagDataSchedule exportWorkflowDagData(WorkflowDefinition workflowDefinition) { - Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(workflowDefinition.getCode()); + Schedule scheduleObj = scheduleMapper.queryByWorkflowDefinitionCode(workflowDefinition.getCode()); DagDataSchedule dagDataSchedule = new DagDataSchedule(processService.genDagData(workflowDefinition)); if (scheduleObj != null) { scheduleObj.setReleaseState(ReleaseState.OFFLINE); @@ -1463,7 +1463,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo if (!checkImportanceParams(dagDataSchedule, result)) { return false; } - WorkflowDefinition workflowDefinition = dagDataSchedule.getProcessDefinition(); + WorkflowDefinition workflowDefinition = dagDataSchedule.getWorkflowDefinition(); // generate import workflowDefinitionName String workflowDefinitionName = recursionWorkflowDefinitionName(projectCode, workflowDefinition.getName(), 1); @@ -1525,7 +1525,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo throw new ServiceException(Status.CREATE_TASK_DEFINITION_ERROR); } - List taskRelationList = dagDataSchedule.getProcessTaskRelationList(); + List taskRelationList = dagDataSchedule.getWorkflowTaskRelationList(); List taskRelationLogList = new ArrayList<>(); for (WorkflowTaskRelation workflowTaskRelation : taskRelationList) { WorkflowTaskRelationLog workflowTaskRelationLog = new WorkflowTaskRelationLog(workflowTaskRelation); @@ -1572,7 +1572,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl 
implements Wo if (null != schedule) { WorkflowDefinition newWorkflowDefinition = workflowDefinitionMapper.queryByCode(workflowDefinition.getCode()); - schedule.setProcessDefinitionCode(newWorkflowDefinition.getCode()); + schedule.setWorkflowDefinitionCode(newWorkflowDefinition.getCode()); schedule.setId(null); schedule.setUserId(loginUser.getId()); schedule.setCreateTime(now); @@ -1597,7 +1597,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo * check importance params */ private boolean checkImportanceParams(DagDataSchedule dagDataSchedule, Map result) { - if (dagDataSchedule.getProcessDefinition() == null) { + if (dagDataSchedule.getWorkflowDefinition() == null) { log.warn("workflow definition is null."); putMsg(result, Status.DATA_IS_NULL, "WorkflowDefinition"); return false; @@ -1607,7 +1607,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo putMsg(result, Status.DATA_IS_NULL, "TaskDefinitionList"); return false; } - if (CollectionUtils.isEmpty(dagDataSchedule.getProcessTaskRelationList())) { + if (CollectionUtils.isEmpty(dagDataSchedule.getWorkflowTaskRelationList())) { log.warn("workflow task relation list is null."); putMsg(result, Status.DATA_IS_NULL, "WorkflowTaskRelationList"); return false; @@ -1801,7 +1801,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo public Map queryWorkflowDefinitionListByProjectCode(long projectCode) { Map result = new HashMap<>(); List workflowDefinitions = - workflowDefinitionMapper.queryDefinitionListByProjectCodeAndProcessDefinitionCodes(projectCode, null); + workflowDefinitionMapper.queryDefinitionListByProjectCodeAndWorkflowDefinitionCodes(projectCode, null); result.put(Constants.DATA_LIST, workflowDefinitions); putMsg(result, Status.SUCCESS); return result; @@ -1822,7 +1822,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo Set definitionCodesSet = new HashSet<>(); 
definitionCodesSet.add(workflowDefinitionCode); List workflowDefinitions = workflowDefinitionMapper - .queryDefinitionListByProjectCodeAndProcessDefinitionCodes(projectCode, definitionCodesSet); + .queryDefinitionListByProjectCodeAndWorkflowDefinitionCodes(projectCode, definitionCodesSet); // query task definition log List taskDefinitionLogsList = taskDefinitionLogDao.queryByWorkflowDefinitionCodeAndVersion( @@ -1901,7 +1901,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo Date endTime = workflowInstance.getEndTime() == null ? new Date() : workflowInstance.getEndTime(); parentTreeViewDto.getInstances() .add(new Instance(workflowInstance.getId(), workflowInstance.getName(), - workflowInstance.getProcessDefinitionCode(), + workflowInstance.getWorkflowDefinitionCode(), "", workflowInstance.getState().name(), workflowInstance.getStartTime(), endTime, workflowInstance.getHost(), DateUtils.format2Readable(endTime.getTime() - workflowInstance.getStartTime().getTime()))); @@ -2110,7 +2110,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo diffCode.forEach(code -> failedWorkflowList.add(code + "[null]")); for (WorkflowDefinition workflowDefinition : workflowDefinitionList) { List workflowTaskRelations = - workflowTaskRelationMapper.queryByProcessCode(workflowDefinition.getCode()); + workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinition.getCode()); List taskRelationList = workflowTaskRelations.stream().map(WorkflowTaskRelationLog::new).collect(Collectors.toList()); workflowDefinition.setProjectCode(targetProjectCode); @@ -2168,11 +2168,11 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo workflowDefinition.setLocations(JSONUtils.toJsonString(jsonNodes)); } // copy timing configuration - Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(oldWorkflowDefinitionCode); + Schedule scheduleObj = 
scheduleMapper.queryByWorkflowDefinitionCode(oldWorkflowDefinitionCode); if (scheduleObj != null) { scheduleObj.setId(null); scheduleObj.setUserId(loginUser.getId()); - scheduleObj.setProcessDefinitionCode(workflowDefinition.getCode()); + scheduleObj.setWorkflowDefinitionCode(workflowDefinition.getCode()); scheduleObj.setReleaseState(ReleaseState.OFFLINE); scheduleObj.setCreateTime(date); scheduleObj.setUpdateTime(date); @@ -2282,7 +2282,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo } List workflowTaskRelationList = workflowTaskRelationMapper - .queryProcessTaskRelationsByProcessDefinitionCode(workflowDefinitionLog.getCode(), + .queryWorkflowTaskRelationsByWorkflowDefinitionCode(workflowDefinitionLog.getCode(), workflowDefinitionLog.getVersion()); List taskDefinitionList = getTaskCodeVersionDtos(workflowTaskRelationList); List taskDefinitionLogList = @@ -2380,7 +2380,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); Page page = new Page<>(pageNo, pageSize); IPage workflowDefinitionLogIPage = - workflowDefinitionLogMapper.queryProcessDefinitionVersionsPaging(page, code, projectCode); + workflowDefinitionLogMapper.queryWorkflowDefinitionVersionsPaging(page, code, projectCode); List workflowDefinitionLogs = workflowDefinitionLogIPage.getRecords(); pageInfo.setTotalList(workflowDefinitionLogs); @@ -2423,7 +2423,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo throw new ServiceException(Status.DELETE_WORKFLOW_DEFINITION_EXECUTING_FAIL, workflowInstances.size()); } - int deleteLog = workflowDefinitionLogMapper.deleteByProcessDefinitionCodeAndVersion(code, version); + int deleteLog = workflowDefinitionLogMapper.deleteByWorkflowDefinitionCodeAndVersion(code, version); int deleteRelationLog = workflowTaskRelationLogMapper.deleteByCode(code, version); if (deleteLog == 0 || deleteRelationLog == 0) { throw new 
ServiceException(Status.DELETE_WORKFLOW_DEFINE_BY_CODE_ERROR); @@ -2483,7 +2483,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo } WorkflowDefinition workflowDefinitionUpdate = - workflowUpdateRequest.mergeIntoProcessDefinition(workflowDefinition); + workflowUpdateRequest.mergeIntoWorkflowDefinition(workflowDefinition); this.updateWorkflowValid(loginUser, workflowDefinition, workflowDefinitionUpdate); int insertVersion = this.saveWorkflowDefine(loginUser, workflowDefinitionUpdate); @@ -2532,7 +2532,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo long projectCode = workflowDefinition.getProjectCode(); long workflowDefinitionCode = workflowDefinition.getCode(); List taskRelations = - workflowTaskRelationMapper.queryByProcessCode(workflowDefinitionCode); + workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode); List taskRelationList = taskRelations.stream().map(WorkflowTaskRelationLog::new).collect(Collectors.toList()); @@ -2554,8 +2554,8 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo Date now = new Date(); for (WorkflowTaskRelationLog workflowTaskRelationLog : taskRelationList) { workflowTaskRelationLog.setProjectCode(projectCode); - workflowTaskRelationLog.setProcessDefinitionCode(workflowDefinitionCode); - workflowTaskRelationLog.setProcessDefinitionVersion(workflowDefinitionVersion); + workflowTaskRelationLog.setWorkflowDefinitionCode(workflowDefinitionCode); + workflowTaskRelationLog.setWorkflowDefinitionVersion(workflowDefinitionVersion); if (taskDefinitionLogMap != null) { TaskDefinitionLog preTaskDefinitionLog = taskDefinitionLogMap.get(workflowTaskRelationLog.getPreTaskCode()); @@ -2585,7 +2585,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo workflowDefinition.getProjectCode(), workflowDefinition.getCode()); return Constants.EXIT_CODE_SUCCESS; } - 
workflowTaskRelationMapper.deleteByCode(projectCode, workflowDefinitionCode); + workflowTaskRelationMapper.deleteByWorkflowDefinitionCode(projectCode, workflowDefinitionCode); } List workflowTaskRelations = taskRelationList.stream().map(WorkflowTaskRelation::new).collect(Collectors.toList()); @@ -2690,7 +2690,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo Set taskCodeSet = new TreeSet<>(); - workflowTaskRelationMapper.queryByProcessCode(workflowDefinition.getCode()) + workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinition.getCode()) .forEach(processTaskRelation -> { if (processTaskRelation.getPreTaskCode() > 0) { taskCodeSet.add(processTaskRelation.getPreTaskCode()); @@ -2720,7 +2720,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo private void checkWorkflowDefinitionIsValidated(Long workflowDefinitionCode) { // todo: build dag check if the dag is validated List workflowTaskRelations = - workflowTaskRelationMapper.queryByProcessCode(workflowDefinitionCode); + workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode); if (CollectionUtils.isEmpty(workflowTaskRelations)) { throw new ServiceException(Status.WORKFLOW_DAG_IS_EMPTY); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkflowInstanceServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkflowInstanceServiceImpl.java index f67c13d208..85753558fa 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkflowInstanceServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkflowInstanceServiceImpl.java @@ -23,8 +23,8 @@ import static org.apache.dolphinscheduler.api.enums.Status.WORKFLOW_INSTANCE_STA import static org.apache.dolphinscheduler.common.constants.Constants.DATA_LIST; import static 
org.apache.dolphinscheduler.common.constants.Constants.GLOBAL_PARAMS; import static org.apache.dolphinscheduler.common.constants.Constants.LOCAL_PARAMS; -import static org.apache.dolphinscheduler.common.constants.Constants.PROCESS_INSTANCE_STATE; import static org.apache.dolphinscheduler.common.constants.Constants.TASK_LIST; +import static org.apache.dolphinscheduler.common.constants.Constants.WORKFLOW_INSTANCE_STATE; import static org.apache.dolphinscheduler.plugin.task.api.TaskPluginManager.checkTaskParameters; import org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant; @@ -212,7 +212,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work return result; } - List workflowInstances = workflowInstanceMapper.queryTopNProcessInstance(size, start, end, + List workflowInstances = workflowInstanceMapper.queryTopNWorkflowInstance(size, start, end, WorkflowExecutionStatus.SUCCESS, projectCode); result.put(DATA_LIST, workflowInstances); putMsg(result, Status.SUCCESS); @@ -241,8 +241,8 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work .orElseThrow(() -> new ServiceException(WORKFLOW_INSTANCE_NOT_EXIST, workflowInstanceId)); WorkflowDefinition workflowDefinition = - processService.findProcessDefinition(workflowInstance.getProcessDefinitionCode(), - workflowInstance.getProcessDefinitionVersion()); + processService.findWorkflowDefinition(workflowInstance.getWorkflowDefinitionCode(), + workflowInstance.getWorkflowDefinitionVersion()); if (workflowDefinition == null || projectCode != workflowDefinition.getProjectCode()) { log.error("workflow definition does not exist, projectCode:{}.", projectCode); @@ -277,7 +277,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work public Map queryWorkflowInstanceById(User loginUser, Integer workflowInstanceId) { WorkflowInstance workflowInstance = workflowInstanceMapper.selectById(workflowInstanceId); WorkflowDefinition 
workflowDefinition = - workflowDefinitionMapper.queryByCode(workflowInstance.getProcessDefinitionCode()); + workflowDefinitionMapper.queryByCode(workflowInstance.getWorkflowDefinitionCode()); return queryWorkflowInstanceById(loginUser, workflowDefinition.getProjectCode(), workflowInstanceId); } @@ -328,7 +328,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work Page page = new Page<>(pageNo, pageSize); PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); - IPage workflowInstanceList = workflowInstanceMapper.queryProcessInstanceListPaging( + IPage workflowInstanceList = workflowInstanceMapper.queryWorkflowInstanceListPaging( page, projectCode, workflowDefinitionCode, @@ -375,7 +375,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work @Override public Result queryWorkflowInstanceList(User loginUser, WorkflowInstanceQueryRequest workflowInstanceQueryRequest) { Result result = new Result(); - WorkflowInstance workflowInstance = workflowInstanceQueryRequest.convert2ProcessInstance(); + WorkflowInstance workflowInstance = workflowInstanceQueryRequest.convert2WorkflowInstance(); String projectName = workflowInstanceQueryRequest.getProjectName(); if (!StringUtils.isBlank(projectName)) { Project project = projectMapper.queryByName(projectName); @@ -383,7 +383,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work ApiFuncIdentificationConstant.WORKFLOW_DEFINITION); WorkflowDefinition workflowDefinition = workflowDefinitionMapper.queryByDefineName(project.getCode(), workflowInstance.getName()); - workflowInstance.setProcessDefinitionCode(workflowDefinition.getCode()); + workflowInstance.setWorkflowDefinitionCode(workflowDefinition.getCode()); workflowInstance.setProjectCode(project.getCode()); } @@ -392,10 +392,10 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work PageInfo pageInfo = new PageInfo<>(workflowInstanceQueryRequest.getPageNo(), 
workflowInstanceQueryRequest.getPageSize()); - IPage workflowInstanceList = workflowInstanceMapper.queryProcessInstanceListV2Paging( + IPage workflowInstanceList = workflowInstanceMapper.queryWorkflowInstanceListV2Paging( page, workflowInstance.getProjectCode(), - workflowInstance.getProcessDefinitionCode(), + workflowInstance.getWorkflowDefinitionCode(), workflowInstance.getName(), workflowInstanceQueryRequest.getStartTime(), workflowInstanceQueryRequest.getEndTime(), @@ -450,7 +450,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work WorkflowInstance workflowInstance = processService.findWorkflowInstanceDetailById(workflowInstanceId) .orElseThrow(() -> new ServiceException(WORKFLOW_INSTANCE_NOT_EXIST, workflowInstanceId)); WorkflowDefinition workflowDefinition = - workflowDefinitionMapper.queryByCode(workflowInstance.getProcessDefinitionCode()); + workflowDefinitionMapper.queryByCode(workflowInstance.getWorkflowDefinitionCode()); if (workflowDefinition != null && projectCode != workflowDefinition.getProjectCode()) { log.error("workflow definition does not exist, projectCode:{}, workflowInstanceId:{}.", projectCode, workflowInstanceId); @@ -461,7 +461,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work taskInstanceDao.queryValidTaskListByWorkflowInstanceId(workflowInstanceId, workflowInstance.getTestFlag()); Map resultMap = new HashMap<>(); - resultMap.put(PROCESS_INSTANCE_STATE, workflowInstance.getState().toString()); + resultMap.put(WORKFLOW_INSTANCE_STATE, workflowInstance.getState().toString()); resultMap.put(TASK_LIST, taskInstanceList); result.put(DATA_LIST, resultMap); @@ -489,20 +489,20 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work throw new ServiceException(Status.TASK_INSTANCE_NOT_EXISTS, taskId); } List relationSubWorkflows = relationSubWorkflowMapper - .queryAllSubProcessInstance((long) taskInstance.getProcessInstanceId(), + 
.queryAllSubWorkflowInstance((long) taskInstance.getWorkflowInstanceId(), taskInstance.getTaskCode()); - List allSubProcessInstanceId = relationSubWorkflows.stream() + List allSubWorkflowInstanceId = relationSubWorkflows.stream() .map(RelationSubWorkflow::getSubWorkflowInstanceId).collect(java.util.stream.Collectors.toList()); - List allSubWorkflows = workflowInstanceDao.queryByIds(allSubProcessInstanceId); + List allSubWorkflows = workflowInstanceDao.queryByIds(allSubWorkflowInstanceId); if (allSubWorkflows == null || allSubWorkflows.isEmpty()) { putMsg(result, Status.SUB_WORKFLOW_INSTANCE_NOT_EXIST, taskId); throw new ServiceException(Status.SUB_WORKFLOW_INSTANCE_NOT_EXIST, taskId); } - Long subWorkflowCode = allSubWorkflows.get(0).getProcessDefinitionCode(); - int subWorkflowVersion = allSubWorkflows.get(0).getProcessDefinitionVersion(); + Long subWorkflowCode = allSubWorkflows.get(0).getWorkflowDefinitionCode(); + int subWorkflowVersion = allSubWorkflows.get(0).getWorkflowDefinitionVersion(); WorkflowDefinition subWorkflowDefinition = - processService.findProcessDefinition(subWorkflowCode, subWorkflowVersion); + processService.findWorkflowDefinition(subWorkflowCode, subWorkflowVersion); if (subWorkflowDefinition == null) { putMsg(result, Status.WORKFLOW_DEFINITION_NOT_EXIST, subWorkflowCode); throw new ServiceException(Status.WORKFLOW_DEFINITION_NOT_EXIST, subWorkflowCode); @@ -514,7 +514,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work int index = 1; for (WorkflowInstance workflowInstance : allSubWorkflows) { DynamicSubWorkflowDto dynamicSubWorkflowDto = new DynamicSubWorkflowDto(); - dynamicSubWorkflowDto.setProcessInstanceId(workflowInstance.getId()); + dynamicSubWorkflowDto.setWorkflowInstanceId(workflowInstance.getId()); dynamicSubWorkflowDto.setIndex(index); dynamicSubWorkflowDto.setState(workflowInstance.getState()); dynamicSubWorkflowDto.setName(subWorkflowDefinition.getName()); @@ -569,7 +569,7 @@ public class 
WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work } WorkflowInstance subWorkflowInstance = processService.findSubWorkflowInstance( - taskInstance.getProcessInstanceId(), taskInstance.getId()); + taskInstance.getWorkflowInstanceId(), taskInstance.getId()); if (subWorkflowInstance == null) { log.error("Sub workflow instance does not exist, projectCode:{}, taskInstanceId:{}.", projectCode, taskInstance.getId()); @@ -577,7 +577,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work return result; } Map dataMap = new HashMap<>(); - dataMap.put(Constants.SUBPROCESS_INSTANCE_ID, subWorkflowInstance.getId()); + dataMap.put(Constants.SUBWORKFLOW_INSTANCE_ID, subWorkflowInstance.getId()); result.put(DATA_LIST, dataMap); putMsg(result, Status.SUCCESS); return result; @@ -615,10 +615,10 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work .orElseThrow(() -> new ServiceException(WORKFLOW_INSTANCE_NOT_EXIST, workflowInstanceId)); // check workflow instance exists in project WorkflowDefinition workflowDefinition0 = - workflowDefinitionMapper.queryByCode(workflowInstance.getProcessDefinitionCode()); + workflowDefinitionMapper.queryByCode(workflowInstance.getWorkflowDefinitionCode()); if (workflowDefinition0 != null && projectCode != workflowDefinition0.getProjectCode()) { log.error("workflow definition does not exist, projectCode:{}, workflowDefinitionCode:{}.", projectCode, - workflowInstance.getProcessDefinitionCode()); + workflowInstance.getWorkflowDefinitionCode()); putMsg(result, WORKFLOW_INSTANCE_NOT_EXIST, workflowInstanceId); return result; } @@ -662,7 +662,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work throw new ServiceException(Status.UPDATE_TASK_DEFINITION_ERROR); } WorkflowDefinition workflowDefinition = - workflowDefinitionMapper.queryByCode(workflowInstance.getProcessDefinitionCode()); + 
workflowDefinitionMapper.queryByCode(workflowInstance.getWorkflowDefinitionCode()); List taskRelationList = JSONUtils.toList(taskRelationJson, WorkflowTaskRelationLog.class); // check workflow json is valid @@ -705,7 +705,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work putMsg(result, Status.UPDATE_WORKFLOW_DEFINITION_ERROR); throw new ServiceException(Status.UPDATE_WORKFLOW_DEFINITION_ERROR); } - workflowInstance.setProcessDefinitionVersion(insertVersion); + workflowInstance.setWorkflowDefinitionVersion(insertVersion); boolean update = workflowInstanceDao.updateById(workflowInstance); if (!update) { log.error( @@ -761,7 +761,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work WorkflowInstance subInstance = processService.findWorkflowInstanceDetailById(subId) .orElseThrow(() -> new ServiceException(WORKFLOW_INSTANCE_NOT_EXIST, subId)); - if (subInstance.getIsSubProcess() == Flag.NO) { + if (subInstance.getIsSubWorkflow() == Flag.NO) { log.warn( "workflow instance is not sub workflow instance type, workflowInstanceId:{}, workflowInstanceName:{}.", subId, subInstance.getName()); @@ -796,7 +796,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work WorkflowInstance workflowInstance = processService.findWorkflowInstanceDetailById(workflowInstanceId) .orElseThrow(() -> new ServiceException(WORKFLOW_INSTANCE_NOT_EXIST, workflowInstanceId)); WorkflowDefinition workflowDefinition = workflowDefinitionLogMapper.queryByDefinitionCodeAndVersion( - workflowInstance.getProcessDefinitionCode(), workflowInstance.getProcessDefinitionVersion()); + workflowInstance.getWorkflowDefinitionCode(), workflowInstance.getWorkflowDefinitionVersion()); Project project = projectMapper.queryByCode(workflowDefinition.getProjectCode()); // check user access for project @@ -833,10 +833,10 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work } WorkflowDefinition 
workflowDefinition = - workflowDefinitionMapper.queryByCode(workflowInstance.getProcessDefinitionCode()); + workflowDefinitionMapper.queryByCode(workflowInstance.getWorkflowDefinitionCode()); if (workflowDefinition != null && projectCode != workflowDefinition.getProjectCode()) { log.error("workflow definition does not exist, projectCode:{}, workflowDefinitionCode:{}.", projectCode, - workflowInstance.getProcessDefinitionCode()); + workflowInstance.getWorkflowDefinitionCode()); putMsg(result, WORKFLOW_INSTANCE_NOT_EXIST, workflowInstanceId); return result; } @@ -884,7 +884,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work Map timeParams) { Map> localUserDefParams = new HashMap<>(); List taskInstanceList = - taskInstanceMapper.findValidTaskListByProcessId(workflowInstance.getId(), Flag.YES, + taskInstanceMapper.findValidTaskListByWorkflowInstanceId(workflowInstance.getId(), Flag.YES, workflowInstance.getTestFlag()); for (TaskInstance taskInstance : taskInstanceList) { TaskDefinitionLog taskDefinitionLog = taskDefinitionLogMapper.queryByDefinitionCodeAndVersion( @@ -927,11 +927,11 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work } WorkflowDefinition workflowDefinition = workflowDefinitionLogMapper.queryByDefinitionCodeAndVersion( - workflowInstance.getProcessDefinitionCode(), - workflowInstance.getProcessDefinitionVersion()); + workflowInstance.getWorkflowDefinitionCode(), + workflowInstance.getWorkflowDefinitionVersion()); if (workflowDefinition == null || projectCode != workflowDefinition.getProjectCode()) { log.error("workflow definition does not exist, projectCode:{}, workflowDefinitionCode:{}.", projectCode, - workflowInstance.getProcessDefinitionCode()); + workflowInstance.getWorkflowDefinitionCode()); putMsg(result, WORKFLOW_INSTANCE_NOT_EXIST, workflowInstanceId); return result; } @@ -944,12 +944,12 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work List 
taskList = new ArrayList<>(); if (CollectionUtils.isNotEmpty(nodeList)) { - List taskInstances = taskInstanceMapper.queryByProcessInstanceIdsAndTaskCodes( + List taskInstances = taskInstanceMapper.queryByWorkflowInstanceIdsAndTaskCodes( Collections.singletonList(workflowInstanceId), nodeList); for (Long node : nodeList) { TaskInstance taskInstance = null; for (TaskInstance instance : taskInstances) { - if (instance.getProcessInstanceId() == workflowInstanceId && instance.getTaskCode() == node) { + if (instance.getWorkflowInstanceId() == workflowInstanceId && instance.getTaskCode() == node) { taskInstance = instance; break; } @@ -987,7 +987,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work */ @Override public List queryByWorkflowDefinitionCodeAndStatus(Long workflowDefinitionCode, int[] states) { - return workflowInstanceMapper.queryByProcessDefineCodeAndStatus(workflowDefinitionCode, states); + return workflowInstanceMapper.queryByWorkflowDefinitionCodeAndStatus(workflowDefinitionCode, states); } @Override @@ -1006,7 +1006,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work */ @Override public List queryByWorkflowDefinitionCode(Long workflowDefinitionCode, int size) { - return workflowInstanceMapper.queryByProcessDefineCode(workflowDefinitionCode, size); + return workflowInstanceMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode, size); } /** @@ -1039,7 +1039,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work public void deleteWorkflowInstanceByWorkflowDefinitionCode(long workflowDefinitionCode) { while (true) { List workflowInstances = - workflowInstanceMapper.queryByProcessDefineCode(workflowDefinitionCode, 100); + workflowInstanceMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode, 100); if (CollectionUtils.isEmpty(workflowInstances)) { break; } diff --git 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkflowLineageServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkflowLineageServiceImpl.java index 4a2ea2ee27..c32be6a061 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkflowLineageServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkflowLineageServiceImpl.java @@ -136,35 +136,35 @@ public class WorkflowLineageServiceImpl extends BaseServiceImpl implements Workf private List getWorkFlowRelations(List workflowTaskLineageList) { List workFlowRelations = new ArrayList<>(); - List processDefinitionCodes = workflowTaskLineageList.stream() + List workflowDefinitionCodes = workflowTaskLineageList.stream() .map(WorkflowTaskLineage::getWorkflowDefinitionCode).distinct().collect(Collectors.toList()); for (WorkflowTaskLineage workflowTaskLineage : workflowTaskLineageList) { workFlowRelations.add(new WorkFlowRelation(workflowTaskLineage.getDeptWorkflowDefinitionCode(), workflowTaskLineage.getWorkflowDefinitionCode())); - if (!processDefinitionCodes.contains(workflowTaskLineage.getDeptWorkflowDefinitionCode())) { + if (!workflowDefinitionCodes.contains(workflowTaskLineage.getDeptWorkflowDefinitionCode())) { workFlowRelations.add(new WorkFlowRelation(0, workflowTaskLineage.getWorkflowDefinitionCode())); } } return workFlowRelations; } - private List getWorkflowRelationDetails(List processDefinitionCodes) { + private List getWorkflowRelationDetails(List workflowDefinitionCodes) { List workFlowRelationDetails = new ArrayList<>(); - for (Long processDefinitionCode : processDefinitionCodes) { + for (Long workflowDefinitionCode : workflowDefinitionCodes) { List workFlowRelationDetailList = - workflowTaskLineageDao.queryWorkFlowLineageByCode(processDefinitionCode); + workflowTaskLineageDao.queryWorkFlowLineageByCode(workflowDefinitionCode); 
workFlowRelationDetails.addAll(workFlowRelationDetailList); } return workFlowRelationDetails; } /** - * Query tasks depend on process definition, include upstream or downstream + * Query tasks depend on workflow definition, include upstream or downstream * and return tasks dependence with string format. * * @param projectCode Project code want to query tasks dependence - * @param workflowDefinitionCode Process definition code want to query tasks dependence + * @param workflowDefinitionCode workflow definition code want to query tasks dependence * @param taskCode Task code want to query tasks dependence * @return Optional of formatter message */ @@ -174,15 +174,15 @@ public class WorkflowLineageServiceImpl extends BaseServiceImpl implements Workf if (taskCode != 0) { queryTaskCode = taskCode; } - List dependentProcessList = + List dependentWorkflowList = workflowTaskLineageDao.queryWorkFlowLineageByDept(projectCode, workflowDefinitionCode, queryTaskCode); - if (CollectionUtils.isEmpty(dependentProcessList)) { + if (CollectionUtils.isEmpty(dependentWorkflowList)) { return Optional.empty(); } List taskDepStrList = new ArrayList<>(); - for (WorkflowTaskLineage workflowTaskLineage : dependentProcessList) { + for (WorkflowTaskLineage workflowTaskLineage : dependentWorkflowList) { WorkflowDefinition workflowDefinition = workflowDefinitionMapper.queryByCode(workflowTaskLineage.getDeptWorkflowDefinitionCode()); String taskName = ""; @@ -207,10 +207,10 @@ public class WorkflowLineageServiceImpl extends BaseServiceImpl implements Workf } /** - * Query downstream tasks depend on a process definition or a task + * Query downstream tasks depend on a workflow definition or a task * - * @param workflowDefinitionCode Process definition code want to query tasks dependence - * @return downstream dependent process definition list + * @param workflowDefinitionCode workflow definition code want to query tasks dependence + * @return downstream dependent workflow definition list */ 
@Override public List queryDownstreamDependentWorkflowDefinitions(Long workflowDefinitionCode) { @@ -232,21 +232,21 @@ public class WorkflowLineageServiceImpl extends BaseServiceImpl implements Workf for (TaskDefinition taskDefinition : taskDefinitionList) { DependentWorkflowDefinition dependentWorkflowDefinition = new DependentWorkflowDefinition(); workflowTaskLineageList.stream() - .filter(processLineage -> processLineage.getDeptTaskDefinitionCode() == taskDefinition.getCode()) + .filter(workflowLineage -> workflowLineage.getDeptTaskDefinitionCode() == taskDefinition.getCode()) .findFirst() - .ifPresent(processLineage -> { + .ifPresent(workflowLineage -> { dependentWorkflowDefinition - .setProcessDefinitionCode(processLineage.getDeptWorkflowDefinitionCode()); + .setWorkflowDefinitionCode(workflowLineage.getDeptWorkflowDefinitionCode()); dependentWorkflowDefinition.setTaskDefinitionCode(taskDefinition.getCode()); dependentWorkflowDefinition.setTaskParams(taskDefinition.getTaskParams()); dependentWorkflowDefinition.setWorkerGroup(taskDefinition.getWorkerGroup()); }); workflowDefinitionList.stream() - .filter(processDefinition -> processDefinition.getCode() == dependentWorkflowDefinition - .getProcessDefinitionCode()) + .filter(workflowDefinition -> workflowDefinition.getCode() == dependentWorkflowDefinition + .getWorkflowDefinitionCode()) .findFirst() - .ifPresent(processDefinition -> { - dependentWorkflowDefinition.setProcessDefinitionVersion(processDefinition.getVersion()); + .ifPresent(workflowDefinition -> { + dependentWorkflowDefinition.setWorkflowDefinitionVersion(workflowDefinition.getVersion()); }); } @@ -280,13 +280,13 @@ public class WorkflowLineageServiceImpl extends BaseServiceImpl implements Workf dependentLineageTask.setTaskDefinitionName(taskDefinition.getName()); }); workflowDefinitionList.stream() - .filter(processDefinition -> processDefinition.getCode() == workflowTaskLineage + .filter(workflowDefinition -> workflowDefinition.getCode() == 
workflowTaskLineage .getWorkflowDefinitionCode()) .findFirst() - .ifPresent(processDefinition -> { - dependentLineageTask.setProcessDefinitionCode(processDefinition.getCode()); - dependentLineageTask.setProcessDefinitionName(processDefinition.getName()); - dependentLineageTask.setProjectCode(processDefinition.getProjectCode()); + .ifPresent(workflowDefinition -> { + dependentLineageTask.setWorkflowDefinitionCode(workflowDefinition.getCode()); + dependentLineageTask.setWorkflowDefinitionName(workflowDefinition.getName()); + dependentLineageTask.setProjectCode(workflowDefinition.getProjectCode()); }); dependentLineageTaskList.add(dependentLineageTask); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkflowTaskRelationServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkflowTaskRelationServiceImpl.java index 9fb8f29883..2b46d12681 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkflowTaskRelationServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkflowTaskRelationServiceImpl.java @@ -132,7 +132,7 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements } updateWorkflowDefiniteVersion(loginUser, result, workflowDefinition); List workflowTaskRelationList = - workflowTaskRelationMapper.queryByProcessCode(workflowDefinitionCode); + workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode); List workflowTaskRelations = Lists.newArrayList(workflowTaskRelationList); if (!workflowTaskRelations.isEmpty()) { Map preTaskCodeMap = @@ -190,8 +190,8 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements private void updateVersions(WorkflowTaskRelation workflowTaskRelation) { // workflow WorkflowDefinition workflowDefinition = - 
workflowDefinitionMapper.queryByCode(workflowTaskRelation.getProcessDefinitionCode()); - workflowTaskRelation.setProcessDefinitionVersion(workflowDefinition.getVersion()); + workflowDefinitionMapper.queryByCode(workflowTaskRelation.getWorkflowDefinitionCode()); + workflowTaskRelation.setWorkflowDefinitionVersion(workflowDefinition.getVersion()); // tasks TaskDefinition preTaskDefinition = taskDefinitionMapper.queryByCode(workflowTaskRelation.getPreTaskCode()); @@ -211,12 +211,12 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements @Transactional public WorkflowTaskRelation createWorkflowTaskRelationV2(User loginUser, TaskRelationCreateRequest taskRelationCreateRequest) { - WorkflowTaskRelation workflowTaskRelation = taskRelationCreateRequest.convert2ProcessTaskRelation(); + WorkflowTaskRelation workflowTaskRelation = taskRelationCreateRequest.convert2WorkflowTaskRelation(); WorkflowDefinition workflowDefinition = - workflowDefinitionMapper.queryByCode(workflowTaskRelation.getProcessDefinitionCode()); + workflowDefinitionMapper.queryByCode(workflowTaskRelation.getWorkflowDefinitionCode()); if (workflowDefinition == null) { throw new ServiceException(Status.WORKFLOW_DEFINITION_NOT_EXIST, - String.valueOf(workflowTaskRelation.getProcessDefinitionCode())); + String.valueOf(workflowTaskRelation.getWorkflowDefinitionCode())); } if (workflowTaskRelation.getProjectCode() == 0) { workflowTaskRelation.setProjectCode(workflowDefinition.getProjectCode()); @@ -241,8 +241,8 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements Date now = new Date(); WorkflowTaskRelation workflowTaskRelation = new WorkflowTaskRelation(); workflowTaskRelation.setProjectCode(workflowDefinition.getProjectCode()); - workflowTaskRelation.setProcessDefinitionCode(workflowDefinition.getCode()); - workflowTaskRelation.setProcessDefinitionVersion(workflowDefinition.getVersion()); + 
workflowTaskRelation.setWorkflowDefinitionCode(workflowDefinition.getCode()); + workflowTaskRelation.setWorkflowDefinitionVersion(workflowDefinition.getVersion()); workflowTaskRelation.setPostTaskCode(taskDefinition.getCode()); workflowTaskRelation.setPostTaskVersion(taskDefinition.getVersion()); workflowTaskRelation.setConditionType(ConditionType.NONE); @@ -307,7 +307,7 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements return result; } List workflowTaskRelations = - workflowTaskRelationMapper.queryByProcessCode(workflowDefinitionCode); + workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode); List workflowTaskRelationList = Lists.newArrayList(workflowTaskRelations); if (CollectionUtils.isEmpty(workflowTaskRelationList)) { log.error("workflow task relations are empty, projectCode:{}, workflowDefinitionCode:{}.", projectCode, @@ -369,7 +369,7 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements new Page<>(new TaskRelationFilterRequest(preTaskCode, postTaskCode).getPageNo(), new TaskRelationFilterRequest(preTaskCode, postTaskCode).getPageSize()); IPage workflowTaskRelationIPage = - workflowTaskRelationMapper.filterProcessTaskRelation(page, workflowTaskRelation); + workflowTaskRelationMapper.filterWorkflowTaskRelation(page, workflowTaskRelation); List workflowTaskRelations = workflowTaskRelationIPage.getRecords(); if (workflowTaskRelations.size() != 1) { @@ -408,13 +408,13 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements Page page = new Page<>(taskRelationUpdateUpstreamRequest.getPageNo(), taskRelationUpdateUpstreamRequest.getPageSize()); IPage workflowTaskRelationExistsIPage = - workflowTaskRelationMapper.filterProcessTaskRelation(page, workflowTaskRelation); + workflowTaskRelationMapper.filterWorkflowTaskRelation(page, workflowTaskRelation); List workflowTaskRelationExists = workflowTaskRelationExistsIPage.getRecords(); WorkflowDefinition 
workflowDefinition = null; if (CollectionUtils.isNotEmpty(workflowTaskRelationExists)) { workflowDefinition = - workflowDefinitionMapper.queryByCode(workflowTaskRelationExists.get(0).getProcessDefinitionCode()); + workflowDefinitionMapper.queryByCode(workflowTaskRelationExists.get(0).getWorkflowDefinitionCode()); } else if (taskRelationUpdateUpstreamRequest.getWorkflowCode() != 0L) { workflowDefinition = workflowDefinitionMapper.queryByCode(taskRelationUpdateUpstreamRequest.getWorkflowCode()); @@ -487,7 +487,7 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements log.info( "Save workflow task relations complete, projectCode:{}, workflowDefinitionCode:{}, workflowDefinitionVersion:{}.", workflowDefinition.getProjectCode(), workflowDefinition.getCode(), insertVersion); - workflowTaskRelations.get(0).setProcessDefinitionVersion(insertVersion); + workflowTaskRelations.get(0).setWorkflowDefinitionVersion(insertVersion); return workflowTaskRelations; } @@ -496,7 +496,7 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements long projectCode = workflowDefinition.getProjectCode(); long workflowDefinitionCode = workflowDefinition.getCode(); List taskRelations = - workflowTaskRelationMapper.queryByProcessCode(workflowDefinitionCode); + workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode); List taskRelationList = taskRelations.stream().map(WorkflowTaskRelationLog::new).collect(Collectors.toList()); @@ -518,8 +518,8 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements Date now = new Date(); for (WorkflowTaskRelationLog workflowTaskRelationLog : taskRelationList) { workflowTaskRelationLog.setProjectCode(projectCode); - workflowTaskRelationLog.setProcessDefinitionCode(workflowDefinitionCode); - workflowTaskRelationLog.setProcessDefinitionVersion(workflowDefinitionVersion); + workflowTaskRelationLog.setWorkflowDefinitionCode(workflowDefinitionCode); + 
workflowTaskRelationLog.setWorkflowDefinitionVersion(workflowDefinitionVersion); if (taskDefinitionLogMap != null) { TaskDefinitionLog preTaskDefinitionLog = taskDefinitionLogMap.get(workflowTaskRelationLog.getPreTaskCode()); @@ -547,7 +547,7 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements if (isSame) { return Constants.EXIT_CODE_SUCCESS; } - workflowTaskRelationMapper.deleteByCode(projectCode, workflowDefinitionCode); + workflowTaskRelationMapper.deleteByWorkflowDefinitionCode(projectCode, workflowDefinitionCode); } List workflowTaskRelations = taskRelationList.stream().map(WorkflowTaskRelation::new).collect(Collectors.toList()); @@ -648,16 +648,16 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements return result; } WorkflowDefinition workflowDefinition = - workflowDefinitionMapper.queryByCode(upstreamList.get(0).getProcessDefinitionCode()); + workflowDefinitionMapper.queryByCode(upstreamList.get(0).getWorkflowDefinitionCode()); if (workflowDefinition == null) { log.error("workflow definition does not exist, workflowDefinitionCode:{}.", - upstreamList.get(0).getProcessDefinitionCode()); + upstreamList.get(0).getWorkflowDefinitionCode()); putMsg(result, Status.WORKFLOW_DEFINITION_NOT_EXIST, - String.valueOf(upstreamList.get(0).getProcessDefinitionCode())); + String.valueOf(upstreamList.get(0).getWorkflowDefinitionCode())); return result; } List workflowTaskRelations = - workflowTaskRelationMapper.queryByProcessCode(workflowDefinition.getCode()); + workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinition.getCode()); List workflowTaskRelationList = Lists.newArrayList(workflowTaskRelations); List workflowTaskRelationWaitRemove = Lists.newArrayList(); for (WorkflowTaskRelation workflowTaskRelation : workflowTaskRelationList) { @@ -719,16 +719,16 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements return result; } WorkflowDefinition workflowDefinition = - 
workflowDefinitionMapper.queryByCode(downstreamList.get(0).getProcessDefinitionCode()); + workflowDefinitionMapper.queryByCode(downstreamList.get(0).getWorkflowDefinitionCode()); if (workflowDefinition == null) { log.error("workflow definition does not exist, workflowDefinitionCode:{}.", - downstreamList.get(0).getProcessDefinitionCode()); + downstreamList.get(0).getWorkflowDefinitionCode()); putMsg(result, Status.WORKFLOW_DEFINITION_NOT_EXIST, - String.valueOf(downstreamList.get(0).getProcessDefinitionCode())); + String.valueOf(downstreamList.get(0).getWorkflowDefinitionCode())); return result; } List workflowTaskRelations = - workflowTaskRelationMapper.queryByProcessCode(workflowDefinition.getCode()); + workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinition.getCode()); List workflowTaskRelationList = Lists.newArrayList(workflowTaskRelations); workflowTaskRelationList .removeIf(workflowTaskRelation -> postTaskCodeList.contains(workflowTaskRelation.getPostTaskCode()) @@ -840,7 +840,7 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements return result; } List workflowTaskRelations = - workflowTaskRelationMapper.queryByProcessCode(workflowDefinitionCode); + workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode); List workflowTaskRelationList = Lists.newArrayList(workflowTaskRelations); if (CollectionUtils.isEmpty(workflowTaskRelationList)) { log.error("workflow task relations are empty, projectCode:{}, workflowDefinitionCode:{}.", projectCode, @@ -897,13 +897,14 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements @Override public List queryByWorkflowDefinitionCode(long workflowDefinitionCode, int workflowDefinitionVersion) { - return workflowTaskRelationMapper.queryProcessTaskRelationsByProcessDefinitionCode(workflowDefinitionCode, + return workflowTaskRelationMapper.queryWorkflowTaskRelationsByWorkflowDefinitionCode(workflowDefinitionCode, 
workflowDefinitionVersion); } @Override public void deleteByWorkflowDefinitionCode(long workflowDefinitionCode, int workflowDefinitionVersion) { - workflowTaskRelationMapper.deleteByWorkflowDefinitionCode(workflowDefinitionCode, workflowDefinitionVersion); + workflowTaskRelationMapper.deleteByWorkflowDefinitionCodeAndVersion(workflowDefinitionCode, + workflowDefinitionVersion); } /** diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/workflow/BackfillWorkflowRequestTransformer.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/workflow/BackfillWorkflowRequestTransformer.java index 35a440c2a3..e78cab5952 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/workflow/BackfillWorkflowRequestTransformer.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/workflow/BackfillWorkflowRequestTransformer.java @@ -105,7 +105,7 @@ public class BackfillWorkflowRequestTransformer implements ITransformer parseBackfillDateList(WorkflowBackFillRequest workflowBackFillRequest) { final WorkflowBackFillRequest.BackfillTime backfillTime = workflowBackFillRequest.getBackfillTime(); - List schedules = processService.queryReleaseSchedulerListByProcessDefinitionCode( + List schedules = processService.queryReleaseSchedulerListByWorkflowDefinitionCode( workflowBackFillRequest.getWorkflowDefinitionCode()); if (StringUtils.isNotEmpty(backfillTime.getComplementStartDate()) diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/vo/ScheduleVO.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/vo/ScheduleVO.java index 2b5dd55adb..fc4b1d9668 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/vo/ScheduleVO.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/vo/ScheduleVO.java @@ -34,34 +34,16 @@ public class ScheduleVO { private int id; - /** - * process 
definition code - */ - private long processDefinitionCode; + private long workflowDefinitionCode; - /** - * process definition name - */ - private String processDefinitionName; + private String workflowDefinitionName; - /** - * project name - */ private String projectName; - /** - * schedule description - */ private String definitionDescription; - /** - * schedule start time - */ private String startTime; - /** - * schedule end time - */ private String endTime; /** @@ -70,74 +52,32 @@ public class ScheduleVO { */ private String timezoneId; - /** - * crontab expression - */ private String crontab; - /** - * failure strategy - */ private FailureStrategy failureStrategy; - /** - * warning type - */ private WarningType warningType; - /** - * create time - */ private Date createTime; - /** - * update time - */ private Date updateTime; - /** - * created user id - */ private int userId; - /** - * created user name - */ private String userName; - /** - * release state - */ private ReleaseState releaseState; - /** - * warning group id - */ private int warningGroupId; - /** - * process instance priority - */ - private Priority processInstancePriority; + private Priority workflowInstancePriority; - /** - * worker group - */ private String workerGroup; - /** - * tenantCode - */ private String tenantCode; - /** - * environment code - */ private Long environmentCode; - /** - * environment name - */ private String environmentName; public ScheduleVO(Schedule schedule) { @@ -153,9 +93,9 @@ public class ScheduleVO { this.setUpdateTime(schedule.getUpdateTime()); this.setTimezoneId(schedule.getTimezoneId()); this.setReleaseState(schedule.getReleaseState()); - this.setProcessInstancePriority(schedule.getProcessInstancePriority()); - this.setProcessDefinitionName(schedule.getProcessDefinitionName()); - this.setProcessDefinitionCode(schedule.getProcessDefinitionCode()); + this.setWorkflowInstancePriority(schedule.getWorkflowInstancePriority()); + 
this.setWorkflowDefinitionName(schedule.getWorkflowDefinitionName()); + this.setWorkflowDefinitionCode(schedule.getWorkflowDefinitionCode()); this.setFailureStrategy(schedule.getFailureStrategy()); this.setEnvironmentCode(schedule.getEnvironmentCode()); this.setStartTime(DateUtils.dateToString(schedule.getStartTime(), ZoneId.systemDefault().getId())); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/vo/TaskDefinitionVO.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/vo/TaskDefinitionVO.java index c3f3d0e19e..1f57220920 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/vo/TaskDefinitionVO.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/vo/TaskDefinitionVO.java @@ -24,18 +24,14 @@ import org.apache.dolphinscheduler.dao.entity.WorkflowTaskRelation; import java.util.List; import lombok.Data; +import lombok.EqualsAndHashCode; import org.springframework.beans.BeanUtils; -/** - * @author fanwanlong - */ +@EqualsAndHashCode(callSuper = true) @Data public class TaskDefinitionVO extends TaskDefinition { - /** - * process task related list - */ private List workflowTaskRelationList; public TaskDefinitionVO() { diff --git a/dolphinscheduler-api/src/main/resources/dynamic-task-type-config.yaml b/dolphinscheduler-api/src/main/resources/dynamic-task-type-config.yaml index 74a2504d66..3fd74c3812 100644 --- a/dolphinscheduler-api/src/main/resources/dynamic-task-type-config.yaml +++ b/dolphinscheduler-api/src/main/resources/dynamic-task-type-config.yaml @@ -21,7 +21,7 @@ dynamic-task: cloud: - {name: EMR,icon: shell-icon.png,hover: shell-hover.png} logic: - - {name: SUB_PROCESS,icon: shell-icon.png,hover: shell-hover.png} + - {name: SUB_WORKFLOW,icon: shell-icon.png,hover: shell-hover.png} dataIntegration: - {name: SEATUNNEL,icon: shell-icon.png,hover: shell-hover.png} dataQuality: diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties 
b/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties index 3c361166d4..aed7f4bf94 100644 --- a/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties +++ b/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties @@ -43,7 +43,7 @@ UPDATE_ALERT_PLUGIN_INSTANCE_NOTES=update alert plugin instance operation CREATE_ALERT_PLUGIN_INSTANCE_NOTES=create alert plugin instance operation DELETE_ALERT_PLUGIN_INSTANCE_NOTES=delete alert plugin instance operation QUERY_ALERT_PLUGIN_INSTANCE_LIST_PAGING_NOTES=query alert plugin instance paging -QUERY_TOPN_LONGEST_RUNNING_PROCESS_INSTANCE_NOTES=query topN longest running process instance +QUERY_TOPN_LONGEST_RUNNING_WORKFLOW_INSTANCE_NOTES=query topN longest running workflow instance ALERT_PLUGIN_INSTANCE_NAME=alert plugin instance name ALERT_PLUGIN_DEFINE_ID=alert plugin define id ALERT_PLUGIN_ID=alert plugin id @@ -175,7 +175,7 @@ SEARCH_VAL=search val USER_ID=user id FORCE_TASK_SUCCESS=force task success QUERY_TASK_INSTANCE_LIST_PAGING_NOTES=query task instance list paging -PROCESS_INSTANCE_NAME=process instance name +WORKFLOW_INSTANCE_NAME=workflow instance name TASK_INSTANCE_ID=task instance id VERIFY_TENANT_CODE_NOTES=verify tenant code QUERY_UI_PLUGIN_DETAIL_BY_ID=query ui plugin detail by id @@ -201,7 +201,7 @@ BATCH_DELETE_WORKFLOW_INSTANCE_BY_IDS_NOTES=batch delete workflow instance by wo QUERY_WORKFLOW_INSTANCE_BY_ID_NOTES=query process instance by process instance id DELETE_WORKFLOW_INSTANCE_BY_ID_NOTES=delete process instance by process instance id TASK_ID=task instance id -PROCESS_INSTANCE_IDS=process_instance ids, delimiter by "," if more than one id +WORKFLOW_INSTANCE_IDS=workflow instance ids, delimiter by "," if more than one id SKIP_LINE_NUM=skip line num QUERY_TASK_INSTANCE_LOG_NOTES=query task instance log DOWNLOAD_TASK_INSTANCE_LOG_NOTES=download task instance log diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties 
b/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties index bae5dde77f..6529121fc6 100644 --- a/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties +++ b/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties @@ -60,7 +60,7 @@ CREATE_DATA_SOURCE_NOTES=\u521B\u5EFA\u6570\u636E\u6E90 DATA_SOURCE_NAME=\u6570\u636E\u6E90\u540D\u79F0 DB_TYPE=\u6570\u636E\u6E90\u7C7B\u578B QUEUE_TAG=\u961F\u5217\u76F8\u5173\u64CD\u4F5C -QUERY_TOPN_LONGEST_RUNNING_PROCESS_INSTANCE_NOTES=\u67E5\u8BE2topN\u6700\u957F\u8FD0\u884C\u6D41\u7A0B\u5B9E\u4F8B +QUERY_TOPN_LONGEST_RUNNING_WORKFLOW_INSTANCE_NOTES=\u67E5\u8BE2topN\u6700\u957F\u8FD0\u884C\u5DE5\u4F5C\u6D41\u5B9E\u4F8B QUERY_QUEUE_LIST_NOTES=\u67E5\u8BE2\u961F\u5217\u5217\u8868 QUERY_QUEUE_LIST_PAGING_NOTES=\u5206\u9875\u67E5\u8BE2\u961F\u5217\u5217\u8868 CREATE_QUEUE_NOTES=\u521B\u5EFA\u961F\u5217 @@ -150,15 +150,15 @@ QUERY_WORKFLOW_DEFINITION_LIST_NOTES=\u67E5\u8BE2\u6D41\u7A0B\u5B9A\u4E49\u5217\ QUERY_WORKFLOW_DEFINITION_LIST_PAGING_NOTES=\u5206\u9875\u67E5\u8BE2\u6D41\u7A0B\u5B9A\u4E49\u5217\u8868 QUERY_WORKFLOW_DEFINITION_BY_CODE_NOTES=\u901A\u8FC7\u5DE5\u4F5C\u6D41\u5B9A\u4E49code\u67E5\u8BE2\u5DE5\u4F5C\u6D41\u5B9A\u4E49 PAGE_NO=\u9875\u7801\u53F7 -WORKFLOW_INSTANCE_ID=\u6D41\u7A0B\u5B9E\u4F8BID -PROCESS_INSTANCE_IDS=\u6D41\u7A0B\u5B9E\u4F8BID\u96C6\u5408\uFF0C\u5982\u679C\u6709\u591A\u4E2A\u6D41\u7A0B\u5B9E\u4F8B\u5219\u7528 "," \u5206\u9694 +WORKFLOW_INSTANCE_ID=\u5DE5\u4F5C\u6D41\u5B9E\u4F8BID +WORKFLOW_INSTANCE_IDS=\u5DE5\u4F5C\u6D41\u5B9E\u4F8BID\u96C6\u5408\uFF0C\u5982\u679C\u6709\u591A\u4E2A\u5DE5\u4F5C\u6D41\u5B9E\u4F8B\u5219\u7528 "," \u5206\u9694 PREVIEW_SCHEDULE_NOTES=\u5B9A\u65F6\u8C03\u5EA6\u9884\u89C8 SCHEDULE_TIME=\u5B9A\u65F6\u65F6\u95F4,\u7A7A\u5B57\u7B26\u4E32\u8868\u793A\u5F53\u524D\u5929 SYNC_DEFINE=\u66F4\u65B0\u6D41\u7A0B\u5B9E\u4F8B\u7684\u4FE1\u606F\u662F\u5426\u540C\u6B65\u5230\u6D41\u7A0B\u5B9A\u4E49 SEARCH_VAL=\u641C\u7D22\u503C 
FORCE_TASK_SUCCESS=\u5F3A\u5236TASK\u6210\u529F QUERY_TASK_INSTANCE_LIST_PAGING_NOTES=\u5206\u9875\u67E5\u8BE2\u4EFB\u52A1\u5B9E\u4F8B\u5217\u8868 -PROCESS_INSTANCE_NAME=\u6D41\u7A0B\u5B9E\u4F8B\u540D\u79F0 +WORKFLOW_INSTANCE_NAME=\u6D41\u7A0B\u5B9E\u4F8B\u540D\u79F0 TASK_INSTANCE_ID=\u4EFB\u52A1\u5B9E\u4F8BID VERIFY_TENANT_CODE_NOTES=\u9A8C\u8BC1\u79DF\u6237 QUERY_UI_PLUGIN_DETAIL_BY_ID=\u901A\u8FC7ID\u67E5\u8BE2UI\u63D2\u4EF6\u8BE6\u60C5 diff --git a/dolphinscheduler-api/src/main/resources/task-type-config.yaml b/dolphinscheduler-api/src/main/resources/task-type-config.yaml index d92f41f3c5..761faf1f0c 100644 --- a/dolphinscheduler-api/src/main/resources/task-type-config.yaml +++ b/dolphinscheduler-api/src/main/resources/task-type-config.yaml @@ -37,7 +37,7 @@ task: - 'DATA_FACTORY' - 'ALIYUN_SERVERLESS_SPARK' logic: - - 'SUB_PROCESS' + - 'SUB_WORKFLOW' - 'DEPENDENT' - 'CONDITIONS' - 'SWITCH' diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataAnalysisControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataAnalysisControllerTest.java index 36622b8b98..24a61ce7d5 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataAnalysisControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataAnalysisControllerTest.java @@ -43,9 +43,6 @@ import org.springframework.util.MultiValueMap; import com.fasterxml.jackson.core.type.TypeReference; -/** - * data analysis controller test - */ public class DataAnalysisControllerTest extends AbstractControllerTest { private static final Logger logger = LoggerFactory.getLogger(DataAnalysisControllerTest.class); @@ -95,7 +92,7 @@ public class DataAnalysisControllerTest extends AbstractControllerTest { paramsMap.add("endDate", "2019-12-28 00:00:00"); paramsMap.add("projectCode", "16"); - MvcResult mvcResult = 
mockMvc.perform(get("/projects/analysis/process-state-count") + MvcResult mvcResult = mockMvc.perform(get("/projects/analysis/workflow-state-count") .header("sessionId", sessionId) .params(paramsMap)) .andExpect(status().isOk()) diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java index 1be311fbaf..36a1163898 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java @@ -72,7 +72,7 @@ public class SchedulerControllerTest extends AbstractControllerTest { @Test public void testCreateSchedule() throws Exception { MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("processDefinitionCode", "40"); + paramsMap.add("workflowDefinitionCode", "40"); paramsMap.add("schedule", "{'startTime':'2019-12-16 00:00:00','endTime':'2019-12-17 00:00:00','crontab':'0 0 6 * * ? 
*'}"); paramsMap.add("warningType", String.valueOf(WarningType.NONE)); @@ -82,7 +82,7 @@ public class SchedulerControllerTest extends AbstractControllerTest { paramsMap.add("receiversCc", ""); paramsMap.add("workerGroupId", "1"); paramsMap.add("tenantCode", "root"); - paramsMap.add("processInstancePriority", String.valueOf(Priority.HIGH)); + paramsMap.add("workflowInstancePriority", String.valueOf(Priority.HIGH)); Mockito.when(schedulerService.insertSchedule(isA(User.class), isA(Long.class), isA(Long.class), isA(String.class), isA(WarningType.class), isA(int.class), isA(FailureStrategy.class), @@ -113,7 +113,7 @@ public class SchedulerControllerTest extends AbstractControllerTest { paramsMap.add("receiversCc", ""); paramsMap.add("workerGroupId", "1"); paramsMap.add("tenantCode", "root"); - paramsMap.add("processInstancePriority", String.valueOf(Priority.HIGH)); + paramsMap.add("workflowInstancePriority", String.valueOf(Priority.HIGH)); Mockito.when(schedulerService.updateSchedule(isA(User.class), isA(Long.class), isA(Integer.class), isA(String.class), isA(WarningType.class), isA(Integer.class), isA(FailureStrategy.class), @@ -172,7 +172,7 @@ public class SchedulerControllerTest extends AbstractControllerTest { @Test public void testQueryScheduleListPaging() throws Exception { MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("processDefinitionCode", "40"); + paramsMap.add("workflowDefinitionCode", "40"); paramsMap.add("searchVal", "test"); paramsMap.add("pageNo", "1"); paramsMap.add("pageSize", "30"); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkerGroupControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkerGroupControllerTest.java index 48c12e5a68..691157fd16 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkerGroupControllerTest.java +++ 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkerGroupControllerTest.java @@ -137,7 +137,7 @@ public class WorkerGroupControllerTest extends AbstractControllerTest { org.apache.dolphinscheduler.service.utils.Constants.NOT_TERMINATED_STATES)) .thenReturn(null); Mockito.when(workerGroupMapper.deleteById(12)).thenReturn(1); - Mockito.when(workflowInstanceMapper.updateProcessInstanceByWorkerGroupName("测试", "")).thenReturn(1); + Mockito.when(workflowInstanceMapper.updateWorkflowInstanceByWorkerGroupName("测试", "")).thenReturn(1); MvcResult mvcResult = mockMvc.perform(delete("/worker-groups/{id}", "12") .header("sessionId", sessionId)) diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkflowInstanceControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkflowInstanceControllerTest.java index a44cdde14b..9007446dd1 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkflowInstanceControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkflowInstanceControllerTest.java @@ -43,9 +43,6 @@ import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; -/** - * process instance controller test - */ public class WorkflowInstanceControllerTest extends AbstractControllerTest { @MockBean @@ -62,7 +59,7 @@ public class WorkflowInstanceControllerTest extends AbstractControllerTest { .thenReturn(mockResult); MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("processDefineCode", "91"); + paramsMap.add("workflowDefinitionCode", "91"); paramsMap.add("searchVal", "cxc"); paramsMap.add("stateType", WorkflowExecutionStatus.SUCCESS.name()); paramsMap.add("host", "192.168.1.13"); @@ -71,7 +68,7 @@ public class WorkflowInstanceControllerTest extends 
AbstractControllerTest { paramsMap.add("pageNo", "2"); paramsMap.add("pageSize", "2"); - MvcResult mvcResult = mockMvc.perform(get("/projects/1113/process-instances") + MvcResult mvcResult = mockMvc.perform(get("/projects/1113/workflow-instances") .header("sessionId", sessionId) .params(paramsMap)) .andExpect(status().isOk()) @@ -91,8 +88,9 @@ public class WorkflowInstanceControllerTest extends AbstractControllerTest { Mockito.any())) .thenReturn(mockResult); - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectCode}/process-instances/{id}/tasks", "1113", "123") - .header(SESSION_ID, sessionId)) + MvcResult mvcResult = mockMvc + .perform(get("/projects/{projectCode}/workflow-instances/{id}/tasks", "1113", "123") + .header(SESSION_ID, sessionId)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON)) .andReturn(); @@ -122,13 +120,13 @@ public class WorkflowInstanceControllerTest extends AbstractControllerTest { MultiValueMap paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("taskRelationJson", json); paramsMap.add("taskDefinitionJson", ""); - paramsMap.add("processInstanceId", "91"); + paramsMap.add("workflowInstanceId", "91"); paramsMap.add("scheduleTime", "2019-12-15 00:00:00"); paramsMap.add("syncDefine", "false"); paramsMap.add("locations", locations); paramsMap.add("tenantCode", "123"); - MvcResult mvcResult = mockMvc.perform(put("/projects/{projectCode}/process-instances/{id}", "1113", "123") + MvcResult mvcResult = mockMvc.perform(put("/projects/{projectCode}/workflow-instances/{id}", "1113", "123") .header("sessionId", sessionId) .params(paramsMap)) .andExpect(status().isOk()) @@ -146,7 +144,7 @@ public class WorkflowInstanceControllerTest extends AbstractControllerTest { Mockito.when( workflowInstanceService.queryWorkflowInstanceById(Mockito.any(), Mockito.anyLong(), Mockito.anyInt())) .thenReturn(mockResult); - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectCode}/process-instances/{id}", 
"1113", "123") + MvcResult mvcResult = mockMvc.perform(get("/projects/{projectCode}/workflow-instances/{id}", "1113", "123") .header(SESSION_ID, sessionId)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON)) @@ -165,7 +163,7 @@ public class WorkflowInstanceControllerTest extends AbstractControllerTest { Mockito.anyInt())).thenReturn(mockResult); MvcResult mvcResult = mockMvc - .perform(get("/projects/{projectCode}/process-instances/query-sub-by-parent", "1113") + .perform(get("/projects/{projectCode}/workflow-instances/query-sub-by-parent", "1113") .header(SESSION_ID, sessionId) .param("taskId", "1203")) .andExpect(status().isOk()) @@ -186,7 +184,7 @@ public class WorkflowInstanceControllerTest extends AbstractControllerTest { .thenReturn(mockResult); MvcResult mvcResult = mockMvc - .perform(get("/projects/{projectCode}/process-instances/query-parent-by-sub", "1113") + .perform(get("/projects/{projectCode}/workflow-instances/query-parent-by-sub", "1113") .header(SESSION_ID, sessionId) .param("subId", "1204")) .andExpect(status().isOk()) @@ -205,7 +203,7 @@ public class WorkflowInstanceControllerTest extends AbstractControllerTest { mockResult.put(Constants.STATUS, Status.SUCCESS); Mockito.when(workflowInstanceService.viewVariables(1113L, 123)).thenReturn(mockResult); MvcResult mvcResult = mockMvc - .perform(get("/projects/{projectCode}/process-instances/{id}/view-variables", "1113", "123") + .perform(get("/projects/{projectCode}/workflow-instances/{id}/view-variables", "1113", "123") .header(SESSION_ID, sessionId)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON)) @@ -221,7 +219,7 @@ public class WorkflowInstanceControllerTest extends AbstractControllerTest { mockResult.put(Constants.STATUS, Status.SUCCESS); Mockito.doNothing().when(workflowInstanceService).deleteWorkflowInstanceById(Mockito.any(), Mockito.anyInt()); - MvcResult mvcResult = 
mockMvc.perform(delete("/projects/{projectCode}/process-instances/{id}", "1113", "123") + MvcResult mvcResult = mockMvc.perform(delete("/projects/{projectCode}/workflow-instances/{id}", "1113", "123") .header(SESSION_ID, sessionId)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON)) @@ -238,9 +236,9 @@ public class WorkflowInstanceControllerTest extends AbstractControllerTest { mockResult.put(Constants.STATUS, Status.WORKFLOW_INSTANCE_NOT_EXIST); Mockito.doNothing().when(workflowInstanceService).deleteWorkflowInstanceById(Mockito.any(), Mockito.anyInt()); - MvcResult mvcResult = mockMvc.perform(post("/projects/{projectCode}/process-instances/batch-delete", "1113") + MvcResult mvcResult = mockMvc.perform(post("/projects/{projectCode}/workflow-instances/batch-delete", "1113") .header(SESSION_ID, sessionId) - .param("processInstanceIds", "1205,1206")) + .param("workflowInstanceIds", "1205,1206")) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON)) .andReturn(); @@ -259,7 +257,7 @@ public class WorkflowInstanceControllerTest extends AbstractControllerTest { .queryByTriggerCode(Mockito.any(), Mockito.anyLong(), Mockito.anyLong())) .thenReturn(mockResult); - MvcResult mvcResult = mockMvc.perform(get("/projects/1113/process-instances/trigger") + MvcResult mvcResult = mockMvc.perform(get("/projects/1113/workflow-instances/trigger") .header("sessionId", sessionId) .param("triggerCode", "12051206")) .andExpect(status().isOk()) diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkflowInstanceExecuteFunctionControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkflowInstanceExecuteFunctionControllerTest.java index 7f3324f365..3f25c86e9b 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkflowInstanceExecuteFunctionControllerTest.java +++ 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkflowInstanceExecuteFunctionControllerTest.java @@ -58,14 +58,11 @@ import com.google.common.collect.ImmutableMap; import com.google.gson.Gson; import com.google.gson.JsonObject; -/** - * executor controller test - */ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractControllerTest { final Gson gson = new Gson(); final long projectCode = 1L; - final long processDefinitionCode = 2L; + final long workflowDefinitionCode = 2L; final String scheduleTime = "scheduleTime"; final FailureStrategy failureStrategy = FailureStrategy.END; final String startNodeList = "startNodeList"; @@ -75,7 +72,7 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr final int warningGroupId = 3; final RunMode runMode = RunMode.RUN_MODE_SERIAL; final ExecutionOrder executionOrder = ExecutionOrder.DESC_ORDER; - final Priority processInstancePriority = Priority.HIGH; + final Priority workflowInstancePriority = Priority.HIGH; final String workerGroup = "workerGroup"; final String tenantCode = "root"; final Long environmentCode = 4L; @@ -99,10 +96,10 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr private ExecutorService executorService; @Test - public void testStartProcessInstanceWithFullParams() throws Exception { + public void testStartWorkflowInstanceWithFullParams() throws Exception { // Given final MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("processDefinitionCode", String.valueOf(processDefinitionCode)); + paramsMap.add("workflowDefinitionCode", String.valueOf(workflowDefinitionCode)); paramsMap.add("scheduleTime", scheduleTime); paramsMap.add("failureStrategy", String.valueOf(failureStrategy)); paramsMap.add("startNodeList", startNodeList); @@ -111,7 +108,7 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr paramsMap.add("warningType", 
String.valueOf(warningType)); paramsMap.add("warningGroupId", String.valueOf(warningGroupId)); paramsMap.add("runMode", String.valueOf(runMode)); - paramsMap.add("processInstancePriority", String.valueOf(processInstancePriority)); + paramsMap.add("workflowInstancePriority", String.valueOf(workflowInstancePriority)); paramsMap.add("workerGroup", workerGroup); paramsMap.add("tenantCode", tenantCode); paramsMap.add("environmentCode", String.valueOf(environmentCode)); @@ -127,7 +124,7 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr // When final MvcResult mvcResult = mockMvc - .perform(post("/projects/{projectCode}/executors/start-process-instance", projectCode) + .perform(post("/projects/{projectCode}/executors/start-workflow-instance", projectCode) .header("sessionId", sessionId) .params(paramsMap)) .andExpect(status().isOk()) @@ -140,10 +137,10 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr } @Test - public void testStartProcessInstanceWithoutTimeout() throws Exception { + public void testStartWorkflowInstanceWithoutTimeout() throws Exception { // Given final MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("processDefinitionCode", String.valueOf(processDefinitionCode)); + paramsMap.add("workflowDefinitionCode", String.valueOf(workflowDefinitionCode)); paramsMap.add("scheduleTime", scheduleTime); paramsMap.add("failureStrategy", String.valueOf(failureStrategy)); paramsMap.add("startNodeList", startNodeList); @@ -152,7 +149,7 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr paramsMap.add("warningType", String.valueOf(warningType)); paramsMap.add("warningGroupId", String.valueOf(warningGroupId)); paramsMap.add("runMode", String.valueOf(runMode)); - paramsMap.add("processInstancePriority", String.valueOf(processInstancePriority)); + paramsMap.add("workflowInstancePriority", String.valueOf(workflowInstancePriority)); 
paramsMap.add("workerGroup", workerGroup); paramsMap.add("tenantCode", tenantCode); paramsMap.add("environmentCode", String.valueOf(environmentCode)); @@ -167,7 +164,7 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr // When final MvcResult mvcResult = mockMvc - .perform(post("/projects/{projectCode}/executors/start-process-instance", projectCode) + .perform(post("/projects/{projectCode}/executors/start-workflow-instance", projectCode) .header("sessionId", sessionId) .params(paramsMap)) .andExpect(status().isOk()) @@ -180,10 +177,10 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr } @Test - public void testStartProcessInstanceWithoutStartParams() throws Exception { + public void testStartWorkflowInstanceWithoutStartParams() throws Exception { // Given final MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("processDefinitionCode", String.valueOf(processDefinitionCode)); + paramsMap.add("workflowDefinitionCode", String.valueOf(workflowDefinitionCode)); paramsMap.add("scheduleTime", scheduleTime); paramsMap.add("failureStrategy", String.valueOf(failureStrategy)); paramsMap.add("startNodeList", startNodeList); @@ -192,7 +189,7 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr paramsMap.add("warningType", String.valueOf(warningType)); paramsMap.add("warningGroupId", String.valueOf(warningGroupId)); paramsMap.add("runMode", String.valueOf(runMode)); - paramsMap.add("processInstancePriority", String.valueOf(processInstancePriority)); + paramsMap.add("workflowInstancePriority", String.valueOf(workflowInstancePriority)); paramsMap.add("workerGroup", workerGroup); paramsMap.add("tenantCode", tenantCode); paramsMap.add("environmentCode", String.valueOf(environmentCode)); @@ -207,7 +204,7 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr // When final MvcResult mvcResult = mockMvc - 
.perform(post("/projects/{projectCode}/executors/start-process-instance", projectCode) + .perform(post("/projects/{projectCode}/executors/start-workflow-instance", projectCode) .header("sessionId", sessionId) .params(paramsMap)) .andExpect(status().isOk()) @@ -220,10 +217,10 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr } @Test - public void testStartProcessInstanceWithRequiredParams() throws Exception { + public void testStartWorkflowInstanceWithRequiredParams() throws Exception { // Given final MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("processDefinitionCode", String.valueOf(processDefinitionCode)); + paramsMap.add("workflowDefinitionCode", String.valueOf(workflowDefinitionCode)); paramsMap.add("failureStrategy", String.valueOf(failureStrategy)); paramsMap.add("warningType", String.valueOf(warningType)); paramsMap.add("scheduleTime", scheduleTime); @@ -232,7 +229,7 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr when(executorService.triggerWorkflowDefinition(Mockito.any())).thenReturn(1); final MvcResult mvcResult = mockMvc - .perform(post("/projects/{projectCode}/executors/start-process-instance", projectCode) + .perform(post("/projects/{projectCode}/executors/start-workflow-instance", projectCode) .header("sessionId", sessionId) .params(paramsMap)) .andExpect(status().isOk()) @@ -247,17 +244,17 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr public void testExecuteWithSuccessStatus() throws Exception { // Given final ExecuteType executeType = ExecuteType.NONE; - final int processInstanceId = 40; + final int workflowInstanceId = 40; final long projectCode = 1113; final MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("processInstanceId", Integer.toString(processInstanceId)); + paramsMap.add("workflowInstanceId", Integer.toString(workflowInstanceId)); paramsMap.add("executeType", 
String.valueOf(executeType)); final JsonObject expectResponseContent = gson .fromJson("{\"code\":0,\"msg\":\"success\",\"data\":null,\"success\":true,\"failed\":false}", JsonObject.class); - doNothing().when(executorService).controlWorkflowInstance(any(User.class), eq(processInstanceId), + doNothing().when(executorService).controlWorkflowInstance(any(User.class), eq(workflowInstanceId), eq(ExecuteType.NONE)); // When diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkflowTaskRelationControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkflowTaskRelationControllerTest.java index 9f554e3195..994fc340c6 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkflowTaskRelationControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkflowTaskRelationControllerTest.java @@ -37,9 +37,6 @@ import org.springframework.boot.test.mock.mockito.MockBean; import org.springframework.http.MediaType; import org.springframework.test.web.servlet.MvcResult; -/** - * process task relation controller test - */ public class WorkflowTaskRelationControllerTest extends AbstractControllerTest { @MockBean @@ -55,7 +52,7 @@ public class WorkflowTaskRelationControllerTest extends AbstractControllerTest { .thenReturn(mockResult); MvcResult mvcResult = mockMvc - .perform(get("/projects/{projectCode}/process-task-relation/{taskCode}/downstream", "1113", "123") + .perform(get("/projects/{projectCode}/workflow-task-relation/{taskCode}/downstream", "1113", "123") .header(SESSION_ID, sessionId)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON)) @@ -75,7 +72,7 @@ public class WorkflowTaskRelationControllerTest extends AbstractControllerTest { .thenReturn(mockResult); MvcResult mvcResult = mockMvc - .perform(get("/projects/{projectCode}/process-task-relation/{taskCode}/upstream", "1113", 
"123") + .perform(get("/projects/{projectCode}/workflow-task-relation/{taskCode}/upstream", "1113", "123") .header(SESSION_ID, sessionId)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON)) diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/v2/TaskInstanceV2ControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/v2/TaskInstanceV2ControllerTest.java index 8e76ec1694..447d45a5d3 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/v2/TaskInstanceV2ControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/v2/TaskInstanceV2ControllerTest.java @@ -52,9 +52,9 @@ public class TaskInstanceV2ControllerTest extends AbstractControllerTest { public void testQueryTaskListPaging() { TaskInstanceQueryRequest taskInstanceQueryReq = new TaskInstanceQueryRequest(); - taskInstanceQueryReq.setProcessInstanceId(1); - taskInstanceQueryReq.setProcessInstanceName(""); - taskInstanceQueryReq.setProcessDefinitionName(""); + taskInstanceQueryReq.setWorkflowInstanceId(1); + taskInstanceQueryReq.setWorkflowInstanceName(""); + taskInstanceQueryReq.setWorkflowDefinitionName(""); taskInstanceQueryReq.setTaskName(""); taskInstanceQueryReq.setExecutorName(""); taskInstanceQueryReq.setStartTime("2022-06-01 00:00:00"); @@ -73,8 +73,8 @@ public class TaskInstanceV2ControllerTest extends AbstractControllerTest { result.setData(pageInfo); putMsg(result, Status.SUCCESS); - when(taskInstanceService.queryTaskListPaging(any(), eq(1L), eq(taskInstanceQueryReq.getProcessInstanceId()), - eq(taskInstanceQueryReq.getProcessInstanceName()), eq(taskInstanceQueryReq.getProcessInstanceName()), + when(taskInstanceService.queryTaskListPaging(any(), eq(1L), eq(taskInstanceQueryReq.getWorkflowInstanceId()), + eq(taskInstanceQueryReq.getWorkflowInstanceName()), eq(taskInstanceQueryReq.getWorkflowInstanceName()), 
eq(taskInstanceQueryReq.getTaskName()), Mockito.any(), eq(taskInstanceQueryReq.getExecutorName()), any(), any(), eq(taskInstanceQueryReq.getSearchVal()), Mockito.any(), eq(taskInstanceQueryReq.getHost()), diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java index 4c68cdcae8..bb49fa4e7a 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java @@ -125,7 +125,7 @@ public class SchedulerServiceTest extends BaseServiceTestTool { Schedule schedule = this.getSchedule(); ScheduleCreateRequest scheduleCreateRequest = new ScheduleCreateRequest(); - scheduleCreateRequest.setProcessDefinitionCode(processDefinitionCode); + scheduleCreateRequest.setWorkflowDefinitionCode(processDefinitionCode); scheduleCreateRequest.setEnvironmentCode(environmentCode); scheduleCreateRequest.setTenantCode(Constants.DEFAULT); @@ -149,13 +149,13 @@ public class SchedulerServiceTest extends BaseServiceTestTool { // executorServiceTest // error process definition already exists schedule Mockito.doNothing().when(projectService).checkProjectAndAuthThrowException(user, project, null); - Mockito.when(scheduleMapper.queryByProcessDefinitionCode(processDefinitionCode)).thenReturn(schedule); + Mockito.when(scheduleMapper.queryByWorkflowDefinitionCode(processDefinitionCode)).thenReturn(schedule); exception = Assertions.assertThrows(ServiceException.class, () -> schedulerService.createSchedulesV2(user, scheduleCreateRequest)); Assertions.assertEquals(Status.SCHEDULE_ALREADY_EXISTS.getCode(), ((ServiceException) exception).getCode()); // error environment do not exists - Mockito.when(scheduleMapper.queryByProcessDefinitionCode(processDefinitionCode)).thenReturn(null); + 
Mockito.when(scheduleMapper.queryByWorkflowDefinitionCode(processDefinitionCode)).thenReturn(null); Mockito.when(environmentMapper.queryByEnvironmentCode(environmentCode)).thenReturn(null); exception = Assertions.assertThrows(ServiceException.class, () -> schedulerService.createSchedulesV2(user, scheduleCreateRequest)); @@ -205,8 +205,8 @@ public class SchedulerServiceTest extends BaseServiceTestTool { scheduleCreateRequest.setCrontab(crontab); Mockito.when(scheduleMapper.insert(isA(Schedule.class))).thenReturn(1); Schedule scheduleCreated = schedulerService.createSchedulesV2(user, scheduleCreateRequest); - Assertions.assertEquals(scheduleCreateRequest.getProcessDefinitionCode(), - scheduleCreated.getProcessDefinitionCode()); + Assertions.assertEquals(scheduleCreateRequest.getWorkflowDefinitionCode(), + scheduleCreated.getWorkflowDefinitionCode()); Assertions.assertEquals(scheduleCreateRequest.getEnvironmentCode(), scheduleCreated.getEnvironmentCode()); Assertions.assertEquals(stringToDate(scheduleCreateRequest.getStartTime()), scheduleCreated.getStartTime()); Assertions.assertEquals(stringToDate(scheduleCreateRequest.getEndTime()), scheduleCreated.getEndTime()); @@ -413,7 +413,7 @@ public class SchedulerServiceTest extends BaseServiceTestTool { private Schedule getSchedule() { Schedule schedule = new Schedule(); schedule.setId(scheduleId); - schedule.setProcessDefinitionCode(processDefinitionCode); + schedule.setWorkflowDefinitionCode(processDefinitionCode); schedule.setEnvironmentCode(environmentCode); schedule.setUserId(userId); return schedule; diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskDefinitionServiceImplTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskDefinitionServiceImplTest.java index 918866995c..d3c6cdb541 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskDefinitionServiceImplTest.java +++ 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskDefinitionServiceImplTest.java @@ -100,7 +100,7 @@ public class TaskDefinitionServiceImplTest { private ProcessService processService; @Mock - private WorkflowDefinitionLogMapper processDefineLogMapper; + private WorkflowDefinitionLogMapper workflowDefinitionLogMapper; @Mock private WorkflowTaskRelationLogMapper workflowTaskRelationLogMapper; @@ -120,9 +120,6 @@ public class TaskDefinitionServiceImplTest { @Mock private WorkflowTaskRelationLogDao workflowTaskRelationLogDao; - @Mock - private WorkflowDefinitionLogMapper workflowDefinitionLogMapper; - private static final String TASK_PARAMETER = "{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"echo 1\",\"conditionResult\":{\"successNode\":[\"\"],\"failedNode\":[\"\"]},\"dependence\":{}}";; private static final long PROJECT_CODE = 1L; @@ -289,12 +286,12 @@ public class TaskDefinitionServiceImplTest { TaskDefinitionLog taskDefinitionLog = getTaskDefinitionLog(); ArrayList taskDefinitionLogs = new ArrayList<>(); taskDefinitionLogs.add(taskDefinitionLog); - Integer version = 1; + int version = 1; when(workflowDefinitionMapper.queryByCode(isA(long.class))).thenReturn(workflowDefinition); // saveWorkflowDefine - when(processDefineLogMapper.queryMaxVersionForDefinition(isA(long.class))).thenReturn(version); - when(processDefineLogMapper.insert(isA(WorkflowDefinitionLog.class))).thenReturn(1); + when(workflowDefinitionLogMapper.queryMaxVersionForDefinition(isA(long.class))).thenReturn(version); + when(workflowDefinitionLogMapper.insert(isA(WorkflowDefinitionLog.class))).thenReturn(1); when(workflowDefinitionMapper.insert(isA(WorkflowDefinitionLog.class))).thenReturn(1); int insertVersion = processServiceImpl.saveWorkflowDefine(loginUser, workflowDefinition, Boolean.TRUE, Boolean.TRUE); @@ -304,7 +301,7 @@ public class TaskDefinitionServiceImplTest { // saveTaskRelation List processTaskRelationLogList = getProcessTaskRelationLogList(); - 
when(workflowTaskRelationMapper.queryByProcessCode(eq(workflowDefinition.getCode()))) + when(workflowTaskRelationMapper.queryByWorkflowDefinitionCode(eq(workflowDefinition.getCode()))) .thenReturn(workflowTaskRelationList); when(workflowTaskRelationMapper.batchInsert(isA(List.class))).thenReturn(1); when(workflowTaskRelationLogMapper.batchInsert(isA(List.class))).thenReturn(1); @@ -455,7 +452,7 @@ public class TaskDefinitionServiceImplTest { WorkflowTaskRelation workflowTaskRelation = new WorkflowTaskRelation(); workflowTaskRelation.setProjectCode(PROJECT_CODE); - workflowTaskRelation.setProcessDefinitionCode(PROCESS_DEFINITION_CODE); + workflowTaskRelation.setWorkflowDefinitionCode(PROCESS_DEFINITION_CODE); workflowTaskRelation.setPreTaskCode(TASK_CODE); workflowTaskRelation.setPostTaskCode(TASK_CODE + 1L); @@ -479,7 +476,7 @@ public class TaskDefinitionServiceImplTest { private void fillProcessTaskRelation(WorkflowTaskRelation workflowTaskRelation) { workflowTaskRelation.setProjectCode(PROJECT_CODE); - workflowTaskRelation.setProcessDefinitionCode(PROCESS_DEFINITION_CODE); + workflowTaskRelation.setWorkflowDefinitionCode(PROCESS_DEFINITION_CODE); workflowTaskRelation.setPreTaskCode(TASK_CODE); workflowTaskRelation.setPostTaskCode(TASK_CODE + 1L); } @@ -489,7 +486,7 @@ public class TaskDefinitionServiceImplTest { WorkflowTaskRelationLog processTaskRelationLog = new WorkflowTaskRelationLog(); processTaskRelationLog.setProjectCode(PROJECT_CODE); - processTaskRelationLog.setProcessDefinitionCode(PROCESS_DEFINITION_CODE); + processTaskRelationLog.setWorkflowDefinitionCode(PROCESS_DEFINITION_CODE); processTaskRelationLog.setPreTaskCode(TASK_CODE); processTaskRelationLog.setPostTaskCode(TASK_CODE + 1L); @@ -502,7 +499,7 @@ public class TaskDefinitionServiceImplTest { WorkflowTaskRelation workflowTaskRelation = new WorkflowTaskRelation(); workflowTaskRelation.setProjectCode(PROJECT_CODE); - workflowTaskRelation.setProcessDefinitionCode(PROCESS_DEFINITION_CODE); + 
workflowTaskRelation.setWorkflowDefinitionCode(PROCESS_DEFINITION_CODE); workflowTaskRelation.setPreTaskCode(TASK_CODE); workflowTaskRelation.setPostTaskCode(TASK_CODE + 1L); @@ -510,7 +507,7 @@ public class TaskDefinitionServiceImplTest { WorkflowTaskRelation workflowTaskRelation2 = new WorkflowTaskRelation(); workflowTaskRelation2.setProjectCode(PROJECT_CODE); - workflowTaskRelation2.setProcessDefinitionCode(PROCESS_DEFINITION_CODE); + workflowTaskRelation2.setWorkflowDefinitionCode(PROCESS_DEFINITION_CODE); workflowTaskRelation2.setPreTaskCode(TASK_CODE - 1); workflowTaskRelation2.setPostTaskCode(TASK_CODE); workflowTaskRelationList.add(workflowTaskRelation2); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java index e83dfd72b5..224111ba80 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java @@ -180,7 +180,7 @@ public class TaskInstanceServiceTest { Mockito.any())) .thenReturn(pageReturn); when(usersService.queryUser(workflowInstance.getExecutorId())).thenReturn(loginUser); - when(processService.findWorkflowInstanceDetailById(taskInstance.getProcessInstanceId())) + when(processService.findWorkflowInstanceDetailById(taskInstance.getWorkflowInstanceId())) .thenReturn(Optional.of(workflowInstance)); Result successRes = taskInstanceService.queryTaskListPaging(loginUser, @@ -370,7 +370,7 @@ public class TaskInstanceServiceTest { TaskInstance task = getTaskInstance(); doNothing().when(projectService).checkProjectAndAuthThrowException(user, task.getProjectCode(), FORCED_SUCCESS); when(taskInstanceDao.queryOptionalById(task.getId())).thenReturn(Optional.of(task)); - 
when(workflowInstanceDao.queryOptionalById(task.getProcessInstanceId())).thenReturn(Optional.empty()); + when(workflowInstanceDao.queryOptionalById(task.getWorkflowInstanceId())).thenReturn(Optional.empty()); assertThrowsServiceException(Status.WORKFLOW_INSTANCE_NOT_EXIST, () -> taskInstanceService.forceTaskSuccess(user, task.getProjectCode(), task.getId())); @@ -385,7 +385,7 @@ public class TaskInstanceServiceTest { workflowInstance.setState(WorkflowExecutionStatus.RUNNING_EXECUTION); doNothing().when(projectService).checkProjectAndAuthThrowException(user, projectCode, FORCED_SUCCESS); when(taskInstanceDao.queryOptionalById(task.getId())).thenReturn(Optional.of(task)); - when(workflowInstanceDao.queryOptionalById(task.getProcessInstanceId())) + when(workflowInstanceDao.queryOptionalById(task.getWorkflowInstanceId())) .thenReturn(Optional.of(workflowInstance)); assertThrowsServiceException( @@ -402,7 +402,7 @@ public class TaskInstanceServiceTest { workflowInstance.setState(WorkflowExecutionStatus.FAILURE); doNothing().when(projectService).checkProjectAndAuthThrowException(user, task.getProjectCode(), FORCED_SUCCESS); when(taskInstanceDao.queryOptionalById(task.getId())).thenReturn(Optional.of(task)); - when(workflowInstanceDao.queryOptionalById(task.getProcessInstanceId())) + when(workflowInstanceDao.queryOptionalById(task.getWorkflowInstanceId())) .thenReturn(Optional.of(workflowInstance)); assertThrowsServiceException( diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkflowDefinitionServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkflowDefinitionServiceTest.java index 4cce58eda3..5fc60f9561 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkflowDefinitionServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkflowDefinitionServiceTest.java @@ -271,21 +271,21 @@ public class 
WorkflowDefinitionServiceTest extends BaseServiceTestTool { Mockito.eq(projectCode))).thenReturn(pageListingResult); String user1 = "user1"; String user2 = "user2"; - when(userMapper.queryUserWithProcessDefinitionCode(processDefinitionCodes)) + when(userMapper.queryUserWithWorkflowDefinitionCode(processDefinitionCodes)) .thenReturn(Arrays.asList( UserWithWorkflowDefinitionCode.builder() - .processDefinitionCode(processDefinitionCode1) - .processDefinitionVersion(1) + .workflowDefinitionCode(processDefinitionCode1) + .workflowDefinitionVersion(1) .modifierName(user1).build(), UserWithWorkflowDefinitionCode.builder() - .processDefinitionCode(processDefinitionCode2) - .processDefinitionVersion(1) + .workflowDefinitionCode(processDefinitionCode2) + .workflowDefinitionVersion(1) .modifierName(user2).build())); Schedule schedule1 = new Schedule(); - schedule1.setProcessDefinitionCode(processDefinitionCode1); + schedule1.setWorkflowDefinitionCode(processDefinitionCode1); schedule1.setReleaseState(ReleaseState.ONLINE); Schedule schedule2 = new Schedule(); - schedule2.setProcessDefinitionCode(processDefinitionCode2); + schedule2.setWorkflowDefinitionCode(processDefinitionCode2); schedule2.setReleaseState(ReleaseState.ONLINE); when(schedulerService.queryScheduleByWorkflowDefinitionCodes(processDefinitionCodes)) .thenReturn(Arrays.asList(schedule1, schedule2)); @@ -461,7 +461,7 @@ public class WorkflowDefinitionServiceTest extends BaseServiceTestTool { } when(workflowDefinitionMapper.queryByCodes(definitionCodes)).thenReturn(workflowDefinitionList); when(processService.saveWorkflowDefine(user, definition, Boolean.TRUE, Boolean.TRUE)).thenReturn(2); - when(workflowTaskRelationMapper.queryByProcessCode(processDefinitionCode)) + when(workflowTaskRelationMapper.queryByWorkflowDefinitionCode(processDefinitionCode)) .thenReturn(getProcessTaskRelation()); putMsg(result, Status.SUCCESS); @@ -519,7 +519,7 @@ public class WorkflowDefinitionServiceTest extends BaseServiceTestTool { // 
scheduler list elements > 1 workflowDefinition.setReleaseState(ReleaseState.OFFLINE); when(workflowDefinitionDao.queryByCode(46L)).thenReturn(Optional.of(workflowDefinition)); - when(scheduleMapper.queryByProcessDefinitionCode(46L)).thenReturn(getSchedule()); + when(scheduleMapper.queryByWorkflowDefinitionCode(46L)).thenReturn(getSchedule()); when(scheduleMapper.deleteById(46)).thenReturn(1); when(workflowLineageService.taskDependentMsg(project.getCode(), workflowDefinition.getCode(), 0)) .thenReturn(Optional.empty()); @@ -529,7 +529,7 @@ public class WorkflowDefinitionServiceTest extends BaseServiceTestTool { // scheduler online Schedule schedule = getSchedule(); schedule.setReleaseState(ReleaseState.ONLINE); - when(scheduleMapper.queryByProcessDefinitionCode(46L)).thenReturn(schedule); + when(scheduleMapper.queryByWorkflowDefinitionCode(46L)).thenReturn(schedule); exception = Assertions.assertThrows(ServiceException.class, () -> processDefinitionService.deleteWorkflowDefinitionByCode(user, 46L)); Assertions.assertEquals(Status.SCHEDULE_STATE_ONLINE.getCode(), ((ServiceException) exception).getCode()); @@ -544,7 +544,7 @@ public class WorkflowDefinitionServiceTest extends BaseServiceTestTool { // delete success schedule.setReleaseState(ReleaseState.OFFLINE); - when(scheduleMapper.queryByProcessDefinitionCode(46L)).thenReturn(getSchedule()); + when(scheduleMapper.queryByWorkflowDefinitionCode(46L)).thenReturn(getSchedule()); when(scheduleMapper.deleteById(schedule.getId())).thenReturn(1); when(workflowLineageService.taskDependentMsg(project.getCode(), workflowDefinition.getCode(), 0)) .thenReturn(Optional.empty()); @@ -1114,8 +1114,8 @@ public class WorkflowDefinitionServiceTest extends BaseServiceTestTool { List workflowTaskRelations = new ArrayList<>(); WorkflowTaskRelation workflowTaskRelation = new WorkflowTaskRelation(); workflowTaskRelation.setProjectCode(projectCode); - workflowTaskRelation.setProcessDefinitionCode(46L); - 
workflowTaskRelation.setProcessDefinitionVersion(1); + workflowTaskRelation.setWorkflowDefinitionCode(46L); + workflowTaskRelation.setWorkflowDefinitionVersion(1); workflowTaskRelation.setPreTaskCode(100); workflowTaskRelation.setPostTaskCode(200); workflowTaskRelations.add(workflowTaskRelation); @@ -1131,14 +1131,14 @@ public class WorkflowDefinitionServiceTest extends BaseServiceTestTool { Date date = new Date(); Schedule schedule = new Schedule(); schedule.setId(46); - schedule.setProcessDefinitionCode(1); + schedule.setWorkflowDefinitionCode(1); schedule.setStartTime(date); schedule.setEndTime(date); schedule.setCrontab("0 0 5 * * ? *"); schedule.setFailureStrategy(FailureStrategy.END); schedule.setUserId(1); schedule.setReleaseState(ReleaseState.OFFLINE); - schedule.setProcessInstancePriority(Priority.MEDIUM); + schedule.setWorkflowInstancePriority(Priority.MEDIUM); schedule.setWarningType(WarningType.NONE); schedule.setWarningGroupId(1); schedule.setWorkerGroup(WorkerGroupUtils.getDefaultWorkerGroup()); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkflowInstanceExecuteFunctionServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkflowInstanceExecuteFunctionServiceTest.java index cb94a87d2c..1603acd50e 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkflowInstanceExecuteFunctionServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkflowInstanceExecuteFunctionServiceTest.java @@ -239,13 +239,13 @@ public class WorkflowInstanceExecuteFunctionServiceTest { // Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode, WORKFLOW_START)) // .thenReturn(checkProjectAndAuth()); // Mockito.when(processDefinitionMapper.queryByCode(processDefinitionCode)).thenReturn(this.processDefinition); - // Mockito.when(processService.getTenantForProcess(tenantCode, 
userId)).thenReturn(tenantCode); + // Mockito.when(processService.getTenantForWorkflow(tenantCode, userId)).thenReturn(tenantCode); // doReturn(1).when(commandService).createCommand(argThat(c -> c.getId() == null)); // doReturn(0).when(commandService).createCommand(argThat(c -> c.getId() != null)); // Mockito.when(monitorService.listServer(RegistryNodeType.MASTER)).thenReturn(getMasterServersList()); // Mockito.when(processService.findWorkflowInstanceDetailById(processInstanceId)) // .thenReturn(Optional.ofNullable(processInstance)); - // Mockito.when(processService.findProcessDefinition(1L, 1)).thenReturn(this.processDefinition); + // Mockito.when(processService.findWorkflowDefinition(1L, 1)).thenReturn(this.processDefinition); // Mockito.when(taskGroupQueueMapper.selectById(1)).thenReturn(taskGroupQueue); // Mockito.when(processInstanceMapper.selectById(1)).thenReturn(processInstance); // Mockito.when(triggerRelationService.saveProcessInstanceTrigger(Mockito.any(), Mockito.any())) @@ -260,7 +260,7 @@ public class WorkflowInstanceExecuteFunctionServiceTest { // @Test // public void testNoComplement() { // - // Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionCode(processDefinitionCode)) + // Mockito.when(processService.queryReleaseSchedulerListByWorkflowDefinitionCode(processDefinitionCode)) // .thenReturn(zeroSchedulerList()); // Mockito.when(tenantMapper.queryByTenantCode(tenantCode)).thenReturn(new Tenant()); // Map result = executorService.execProcessInstance(loginUser, projectCode, @@ -287,7 +287,7 @@ public class WorkflowInstanceExecuteFunctionServiceTest { // @Test // public void testComplementWithStartNodeList() { // - // Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionCode(processDefinitionCode)) + // Mockito.when(processService.queryReleaseSchedulerListByWorkflowDefinitionCode(processDefinitionCode)) // .thenReturn(zeroSchedulerList()); // 
Mockito.when(tenantMapper.queryByTenantCode(tenantCode)).thenReturn(new Tenant()); // Map result = executorService.execProcessInstance(loginUser, projectCode, @@ -310,7 +310,7 @@ public class WorkflowInstanceExecuteFunctionServiceTest { // // @Test // public void testComplementWithOldStartNodeList() { - // Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionCode(processDefinitionCode)) + // Mockito.when(processService.queryReleaseSchedulerListByWorkflowDefinitionCode(processDefinitionCode)) // .thenReturn(zeroSchedulerList()); // Mockito.when(tenantMapper.queryByTenantCode(tenantCode)).thenReturn(new Tenant()); // Map result = new HashMap<>(); @@ -345,7 +345,7 @@ public class WorkflowInstanceExecuteFunctionServiceTest { // schedule.setCreateTime(new Date()); // schedule.setUpdateTime(new Date()); // List schedules = Lists.newArrayList(schedule); - // Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionCode( + // Mockito.when(processService.queryReleaseSchedulerListByWorkflowDefinitionCode( // processDefinitionCode)) // .thenReturn(schedules); // @@ -399,7 +399,7 @@ public class WorkflowInstanceExecuteFunctionServiceTest { // @Test // public void testDateError() { // - // Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionCode(processDefinitionCode)) + // Mockito.when(processService.queryReleaseSchedulerListByWorkflowDefinitionCode(processDefinitionCode)) // .thenReturn(zeroSchedulerList()); // Mockito.when(tenantMapper.queryByTenantCode(tenantCode)).thenReturn(new Tenant()); // Map result = executorService.execProcessInstance(loginUser, projectCode, @@ -425,7 +425,7 @@ public class WorkflowInstanceExecuteFunctionServiceTest { // @Test // public void testSerial() { // - // Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionCode(processDefinitionCode)) + // Mockito.when(processService.queryReleaseSchedulerListByWorkflowDefinitionCode(processDefinitionCode)) // 
.thenReturn(zeroSchedulerList()); // Mockito.when(tenantMapper.queryByTenantCode(tenantCode)).thenReturn(new Tenant()); // Map result = executorService.execProcessInstance(loginUser, projectCode, @@ -451,7 +451,7 @@ public class WorkflowInstanceExecuteFunctionServiceTest { // @Test // public void testParallelWithOutSchedule() { // - // Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionCode(processDefinitionCode)) + // Mockito.when(processService.queryReleaseSchedulerListByWorkflowDefinitionCode(processDefinitionCode)) // .thenReturn(zeroSchedulerList()); // Mockito.when(tenantMapper.queryByTenantCode(tenantCode)).thenReturn(new Tenant()); // Map result = executorService.execProcessInstance(loginUser, projectCode, @@ -478,7 +478,7 @@ public class WorkflowInstanceExecuteFunctionServiceTest { // @Test // public void testParallelWithSchedule() { // - // Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionCode(processDefinitionCode)) + // Mockito.when(processService.queryReleaseSchedulerListByWorkflowDefinitionCode(processDefinitionCode)) // .thenReturn(oneSchedulerList()); // Mockito.when(tenantMapper.queryByTenantCode(tenantCode)).thenReturn(new Tenant()); // Map result = executorService.execProcessInstance(loginUser, projectCode, @@ -667,10 +667,10 @@ public class WorkflowInstanceExecuteFunctionServiceTest { // // ProcessDefinition processDefinition = new ProcessDefinition(); // processDefinition.setProjectCode(projectCode); - // Mockito.when(processService.findProcessDefinition(Mockito.anyLong(), Mockito.anyInt())) + // Mockito.when(processService.findWorkflowDefinition(Mockito.anyLong(), Mockito.anyInt())) // .thenReturn(processDefinition); // - // Mockito.when(processService.getTenantForProcess(Mockito.anyString(), Mockito.anyInt())).thenReturn(tenantCode); + // Mockito.when(processService.getTenantForWorkflow(Mockito.anyString(), Mockito.anyInt())).thenReturn(tenantCode); // // 
when(processInstanceMock.getState().isFinished()).thenReturn(false); // WorkflowExecuteResponse responseInstanceIsNotFinished = diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkflowInstanceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkflowInstanceServiceTest.java index a290b8e341..5d075e09ef 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkflowInstanceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkflowInstanceServiceTest.java @@ -91,15 +91,12 @@ import org.mockito.quality.Strictness; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -/** - * process instance service test - */ @ExtendWith(MockitoExtension.class) @MockitoSettings(strictness = Strictness.LENIENT) public class WorkflowInstanceServiceTest { @InjectMocks - WorkflowInstanceServiceImpl processInstanceService; + WorkflowInstanceServiceImpl workflowInstanceService; @Mock ProjectMapper projectMapper; @@ -123,7 +120,7 @@ public class WorkflowInstanceServiceTest { WorkflowDefinitionLogMapper workflowDefinitionLogMapper; @Mock - WorkflowDefinitionMapper processDefineMapper; + WorkflowDefinitionMapper workflowDefinitionMapper; @Mock WorkflowDefinitionService workflowDefinitionService; @@ -196,7 +193,7 @@ public class WorkflowInstanceServiceTest { .when(projectService) .checkProjectAndAuthThrowException(loginUser, projectCode, WORKFLOW_INSTANCE); assertThrows(ServiceException.class, () -> { - processInstanceService.queryWorkflowInstanceList( + workflowInstanceService.queryWorkflowInstanceList( loginUser, projectCode, 46, @@ -225,12 +222,12 @@ public class WorkflowInstanceServiceTest { Mockito.doNothing().when(projectService).checkProjectAndAuthThrowException(Mockito.any(), Mockito.any(Project.class), Mockito.any()); - when(processDefineMapper.selectById(Mockito.anyInt())).thenReturn(getProcessDefinition()); 
- when(workflowInstanceMapper.queryProcessInstanceListPaging(Mockito.any(Page.class), Mockito.any(), + when(workflowDefinitionMapper.selectById(Mockito.anyInt())).thenReturn(getProcessDefinition()); + when(workflowInstanceMapper.queryWorkflowInstanceListPaging(Mockito.any(Page.class), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), eq("192.168.xx.xx"), Mockito.any(), Mockito.any())).thenReturn(pageReturn); - assertThrows(ServiceException.class, () -> processInstanceService.queryWorkflowInstanceList( + assertThrows(ServiceException.class, () -> workflowInstanceService.queryWorkflowInstanceList( loginUser, projectCode, 1, @@ -250,7 +247,7 @@ public class WorkflowInstanceServiceTest { doNothing().when(projectService).checkProjectAndAuthThrowException(loginUser, projectCode, WORKFLOW_INSTANCE); when(usersService.queryUser(loginUser.getId())).thenReturn(loginUser); when(usersService.getUserIdByName(loginUser.getUserName())).thenReturn(loginUser.getId()); - when(workflowInstanceMapper.queryProcessInstanceListPaging( + when(workflowInstanceMapper.queryWorkflowInstanceListPaging( Mockito.any(Page.class), eq(project.getCode()), eq(1L), @@ -264,16 +261,16 @@ public class WorkflowInstanceServiceTest { when(usersService.queryUser(workflowInstance.getExecutorId())).thenReturn(loginUser); Result successRes = - processInstanceService.queryWorkflowInstanceList(loginUser, projectCode, 1, "2020-01-01 00:00:00", + workflowInstanceService.queryWorkflowInstanceList(loginUser, projectCode, 1, "2020-01-01 00:00:00", "2020-01-02 00:00:00", "", loginUser.getUserName(), WorkflowExecutionStatus.RUNNING_EXECUTION, "192.168.xx.xx", "", 1, 10); Assertions.assertEquals(Status.SUCCESS.getCode(), (int) successRes.getCode()); // data parameter empty - when(workflowInstanceMapper.queryProcessInstanceListPaging(Mockito.any(Page.class), eq(project.getCode()), + when(workflowInstanceMapper.queryWorkflowInstanceListPaging(Mockito.any(Page.class), eq(project.getCode()), eq(1L), 
eq(""), eq(""), Mockito.any(), eq("192.168.xx.xx"), eq(null), eq(null))).thenReturn(pageReturn); - successRes = processInstanceService.queryWorkflowInstanceList(loginUser, projectCode, 1, "", + successRes = workflowInstanceService.queryWorkflowInstanceList(loginUser, projectCode, 1, "", "", "", loginUser.getUserName(), WorkflowExecutionStatus.RUNNING_EXECUTION, "192.168.xx.xx", "", 1, 10); Assertions.assertEquals(Status.SUCCESS.getCode(), (int) successRes.getCode()); @@ -282,18 +279,18 @@ public class WorkflowInstanceServiceTest { when(usersService.queryUser(loginUser.getId())).thenReturn(null); when(usersService.getUserIdByName(loginUser.getUserName())).thenReturn(-1); Result executorExistRes = - processInstanceService.queryWorkflowInstanceList(loginUser, projectCode, 1, "2020-01-01 00:00:00", + workflowInstanceService.queryWorkflowInstanceList(loginUser, projectCode, 1, "2020-01-01 00:00:00", "2020-01-02 00:00:00", "", "admin", WorkflowExecutionStatus.RUNNING_EXECUTION, "192.168.xx.xx", "", 1, 10); Assertions.assertEquals(Status.SUCCESS.getCode(), (int) executorExistRes.getCode()); // executor name empty - when(workflowInstanceMapper.queryProcessInstanceListPaging(Mockito.any(Page.class), eq(project.getCode()), + when(workflowInstanceMapper.queryWorkflowInstanceListPaging(Mockito.any(Page.class), eq(project.getCode()), eq(1L), eq(""), eq("admin"), Mockito.any(), eq("192.168.xx.xx"), eq(start), eq(end))).thenReturn(pageReturn); Result executorEmptyRes = - processInstanceService.queryWorkflowInstanceList(loginUser, projectCode, 1, "2020-01-01 00:00:00", + workflowInstanceService.queryWorkflowInstanceList(loginUser, projectCode, 1, "2020-01-01 00:00:00", "2020-01-02 00:00:00", "", "", WorkflowExecutionStatus.RUNNING_EXECUTION, "192.168.xx.xx", "", 1, 10); Assertions.assertEquals(Status.SUCCESS.getCode(), (int) executorEmptyRes.getCode()); @@ -312,13 +309,13 @@ public class WorkflowInstanceServiceTest { 
when(projectMapper.queryByCode(projectCode)).thenReturn(project); when(projectService.checkProjectAndAuth(loginUser, project, projectCode, WORKFLOW_INSTANCE)).thenReturn(result); Map proejctAuthFailMap = - processInstanceService.queryByTriggerCode(loginUser, projectCode, 999L); + workflowInstanceService.queryByTriggerCode(loginUser, projectCode, 999L); Assertions.assertEquals(Status.PROJECT_NOT_FOUND, proejctAuthFailMap.get(Constants.STATUS)); // project auth sucess putMsg(result, Status.SUCCESS, projectCode); when(workflowInstanceMapper.queryByTriggerCode(projectCode)).thenReturn(new ArrayList()); proejctAuthFailMap = - processInstanceService.queryByTriggerCode(loginUser, projectCode, 999L); + workflowInstanceService.queryByTriggerCode(loginUser, projectCode, 999L); Assertions.assertEquals(Status.SUCCESS, proejctAuthFailMap.get(Constants.STATUS)); } @@ -338,7 +335,7 @@ public class WorkflowInstanceServiceTest { // project auth fail when(projectMapper.queryByCode(projectCode)).thenReturn(project); when(projectService.checkProjectAndAuth(loginUser, project, projectCode, WORKFLOW_INSTANCE)).thenReturn(result); - Map projectAuthFailRes = processInstanceService + Map projectAuthFailRes = workflowInstanceService .queryTopNLongestRunningWorkflowInstance(loginUser, projectCode, size, startTime, endTime); Assertions.assertEquals(Status.PROJECT_NOT_FOUND, projectAuthFailRes.get(Constants.STATUS)); @@ -348,7 +345,7 @@ public class WorkflowInstanceServiceTest { WorkflowInstance workflowInstance = getProcessInstance(); when(projectMapper.queryByCode(projectCode)).thenReturn(project); when(projectService.checkProjectAndAuth(loginUser, project, projectCode, WORKFLOW_INSTANCE)).thenReturn(result); - projectAuthFailRes = processInstanceService + projectAuthFailRes = workflowInstanceService .queryTopNLongestRunningWorkflowInstance(loginUser, projectCode, -1, startTime, endTime); Assertions.assertEquals(Status.NEGTIVE_SIZE_NUMBER_ERROR, projectAuthFailRes.get(Constants.STATUS)); @@ 
-358,7 +355,7 @@ public class WorkflowInstanceServiceTest { when(usersService.queryUser(loginUser.getId())).thenReturn(loginUser); when(usersService.getUserIdByName(loginUser.getUserName())).thenReturn(loginUser.getId()); when(usersService.queryUser(workflowInstance.getExecutorId())).thenReturn(loginUser); - Map successRes = processInstanceService.queryTopNLongestRunningWorkflowInstance(loginUser, + Map successRes = workflowInstanceService.queryTopNLongestRunningWorkflowInstance(loginUser, projectCode, size, startTime, endTime); Assertions.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); @@ -378,7 +375,7 @@ public class WorkflowInstanceServiceTest { putMsg(result, Status.SUCCESS, projectCode); when(projectMapper.queryByCode(projectCode)).thenReturn(project); when(projectService.checkProjectAndAuth(loginUser, project, projectCode, WORKFLOW_INSTANCE)).thenReturn(result); - Map startTimeBiggerFailRes = processInstanceService + Map startTimeBiggerFailRes = workflowInstanceService .queryTopNLongestRunningWorkflowInstance(loginUser, projectCode, size, endTime, startTime); Assertions.assertEquals(Status.START_TIME_BIGGER_THAN_END_TIME_ERROR, startTimeBiggerFailRes.get(Constants.STATUS)); @@ -386,14 +383,14 @@ public class WorkflowInstanceServiceTest { putMsg(result, Status.SUCCESS, projectCode); when(projectMapper.queryByCode(projectCode)).thenReturn(project); when(projectService.checkProjectAndAuth(loginUser, project, projectCode, WORKFLOW_INSTANCE)).thenReturn(result); - Map dataNullFailRes = processInstanceService + Map dataNullFailRes = workflowInstanceService .queryTopNLongestRunningWorkflowInstance(loginUser, projectCode, size, null, endTime); Assertions.assertEquals(Status.DATA_IS_NULL, dataNullFailRes.get(Constants.STATUS)); putMsg(result, Status.SUCCESS, projectCode); when(projectMapper.queryByCode(projectCode)).thenReturn(project); when(projectService.checkProjectAndAuth(loginUser, project, projectCode, WORKFLOW_INSTANCE)).thenReturn(result); - 
dataNullFailRes = processInstanceService + dataNullFailRes = workflowInstanceService .queryTopNLongestRunningWorkflowInstance(loginUser, projectCode, size, startTime, null); Assertions.assertEquals(Status.DATA_IS_NULL, dataNullFailRes.get(Constants.STATUS)); } @@ -410,7 +407,7 @@ public class WorkflowInstanceServiceTest { when(projectMapper.queryByCode(projectCode)).thenReturn(project); when(projectService.checkProjectAndAuth(loginUser, project, projectCode, WORKFLOW_INSTANCE)).thenReturn(result); Map projectAuthFailRes = - processInstanceService.queryWorkflowInstanceById(loginUser, projectCode, 1); + workflowInstanceService.queryWorkflowInstanceById(loginUser, projectCode, 1); Assertions.assertEquals(Status.PROJECT_NOT_FOUND, projectAuthFailRes.get(Constants.STATUS)); // project auth success @@ -422,24 +419,25 @@ public class WorkflowInstanceServiceTest { when(projectService.checkProjectAndAuth(loginUser, project, projectCode, WORKFLOW_INSTANCE)).thenReturn(result); when(processService.findWorkflowInstanceDetailById(workflowInstance.getId())) .thenReturn(Optional.of(workflowInstance)); - when(processService.findProcessDefinition(workflowInstance.getProcessDefinitionCode(), - workflowInstance.getProcessDefinitionVersion())).thenReturn(workflowDefinition); - Map successRes = processInstanceService.queryWorkflowInstanceById(loginUser, projectCode, 1); + when(processService.findWorkflowDefinition(workflowInstance.getWorkflowDefinitionCode(), + workflowInstance.getWorkflowDefinitionVersion())).thenReturn(workflowDefinition); + Map successRes = workflowInstanceService.queryWorkflowInstanceById(loginUser, projectCode, 1); Assertions.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); // worker group null - Map workerNullRes = processInstanceService.queryWorkflowInstanceById(loginUser, projectCode, 1); + Map workerNullRes = + workflowInstanceService.queryWorkflowInstanceById(loginUser, projectCode, 1); Assertions.assertEquals(Status.SUCCESS, 
workerNullRes.get(Constants.STATUS)); // worker group exist WorkerGroup workerGroup = getWorkGroup(); Map workerExistRes = - processInstanceService.queryWorkflowInstanceById(loginUser, projectCode, 1); + workflowInstanceService.queryWorkflowInstanceById(loginUser, projectCode, 1); Assertions.assertEquals(Status.SUCCESS, workerExistRes.get(Constants.STATUS)); - when(processService.findProcessDefinition(workflowInstance.getProcessDefinitionCode(), - workflowInstance.getProcessDefinitionVersion())).thenReturn(null);; - workerExistRes = processInstanceService.queryWorkflowInstanceById(loginUser, projectCode, 1); + when(processService.findWorkflowDefinition(workflowInstance.getWorkflowDefinitionCode(), + workflowInstance.getWorkflowDefinitionVersion())).thenReturn(null);; + workerExistRes = workflowInstanceService.queryWorkflowInstanceById(loginUser, projectCode, 1); Assertions.assertEquals(Status.WORKFLOW_DEFINITION_NOT_EXIST, workerExistRes.get(Constants.STATUS)); } @@ -455,7 +453,7 @@ public class WorkflowInstanceServiceTest { when(projectMapper.queryByCode(projectCode)).thenReturn(project); when(projectService.checkProjectAndAuth(loginUser, project, projectCode, WORKFLOW_INSTANCE)).thenReturn(result); Map projectAuthFailRes = - processInstanceService.queryTaskListByWorkflowInstanceId(loginUser, projectCode, 1); + workflowInstanceService.queryTaskListByWorkflowInstanceId(loginUser, projectCode, 1); Assertions.assertEquals(Status.PROJECT_NOT_FOUND, projectAuthFailRes.get(Constants.STATUS)); // project auth success @@ -479,7 +477,7 @@ public class WorkflowInstanceServiceTest { .thenReturn(taskInstanceList); when(loggerService.queryLog(loginUser, taskInstance.getId(), 0, 4098)).thenReturn(res); Map successRes = - processInstanceService.queryTaskListByWorkflowInstanceId(loginUser, projectCode, 1); + workflowInstanceService.queryTaskListByWorkflowInstanceId(loginUser, projectCode, 1); Assertions.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); } @@ -495,7 
+493,7 @@ public class WorkflowInstanceServiceTest { when(projectMapper.queryByCode(projectCode)).thenReturn(project); when(projectService.checkProjectAndAuth(loginUser, project, projectCode, WORKFLOW_INSTANCE)).thenReturn(result); Map projectAuthFailRes = - processInstanceService.querySubWorkflowInstanceByTaskId(loginUser, projectCode, 1); + workflowInstanceService.querySubWorkflowInstanceByTaskId(loginUser, projectCode, 1); Assertions.assertEquals(Status.PROJECT_NOT_FOUND, projectAuthFailRes.get(Constants.STATUS)); // task null @@ -504,47 +502,47 @@ public class WorkflowInstanceServiceTest { when(projectService.checkProjectAndAuth(loginUser, project, projectCode, WORKFLOW_INSTANCE)).thenReturn(result); when(taskInstanceDao.queryById(1)).thenReturn(null); Map taskNullRes = - processInstanceService.querySubWorkflowInstanceByTaskId(loginUser, projectCode, 1); + workflowInstanceService.querySubWorkflowInstanceByTaskId(loginUser, projectCode, 1); Assertions.assertEquals(Status.TASK_INSTANCE_NOT_EXISTS, taskNullRes.get(Constants.STATUS)); // task not sub process TaskInstance taskInstance = getTaskInstance(); taskInstance.setTaskType("HTTP"); - taskInstance.setProcessInstanceId(1); + taskInstance.setWorkflowInstanceId(1); putMsg(result, Status.SUCCESS, projectCode); when(taskInstanceDao.queryById(1)).thenReturn(taskInstance); TaskDefinition taskDefinition = new TaskDefinition(); taskDefinition.setProjectCode(projectCode); when(taskDefinitionMapper.queryByCode(taskInstance.getTaskCode())).thenReturn(taskDefinition); Map notSubprocessRes = - processInstanceService.querySubWorkflowInstanceByTaskId(loginUser, projectCode, 1); + workflowInstanceService.querySubWorkflowInstanceByTaskId(loginUser, projectCode, 1); Assertions.assertEquals(Status.TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE, notSubprocessRes.get(Constants.STATUS)); putMsg(result, Status.SUCCESS, projectCode); taskDefinition.setProjectCode(0L); - notSubprocessRes = 
processInstanceService.querySubWorkflowInstanceByTaskId(loginUser, projectCode, 1); + notSubprocessRes = workflowInstanceService.querySubWorkflowInstanceByTaskId(loginUser, projectCode, 1); Assertions.assertEquals(Status.TASK_INSTANCE_NOT_EXISTS, notSubprocessRes.get(Constants.STATUS)); taskDefinition.setProjectCode(projectCode); when(taskDefinitionMapper.queryByCode(taskInstance.getTaskCode())).thenReturn(taskDefinition); // sub process not exist TaskInstance subTask = getTaskInstance(); - subTask.setTaskType("SUB_PROCESS"); - subTask.setProcessInstanceId(1); + subTask.setTaskType("SUB_WORKFLOW"); + subTask.setWorkflowInstanceId(1); putMsg(result, Status.SUCCESS, projectCode); when(taskInstanceDao.queryById(subTask.getId())).thenReturn(subTask); - when(processService.findSubWorkflowInstance(subTask.getProcessInstanceId(), subTask.getId())).thenReturn(null); + when(processService.findSubWorkflowInstance(subTask.getWorkflowInstanceId(), subTask.getId())).thenReturn(null); Map subprocessNotExistRes = - processInstanceService.querySubWorkflowInstanceByTaskId(loginUser, projectCode, 1); + workflowInstanceService.querySubWorkflowInstanceByTaskId(loginUser, projectCode, 1); Assertions.assertEquals(Status.SUB_WORKFLOW_INSTANCE_NOT_EXIST, subprocessNotExistRes.get(Constants.STATUS)); // sub process exist WorkflowInstance workflowInstance = getProcessInstance(); putMsg(result, Status.SUCCESS, projectCode); - when(processService.findSubWorkflowInstance(taskInstance.getProcessInstanceId(), taskInstance.getId())) + when(processService.findSubWorkflowInstance(taskInstance.getWorkflowInstanceId(), taskInstance.getId())) .thenReturn(workflowInstance); Map subprocessExistRes = - processInstanceService.querySubWorkflowInstanceByTaskId(loginUser, projectCode, 1); + workflowInstanceService.querySubWorkflowInstanceByTaskId(loginUser, projectCode, 1); Assertions.assertEquals(Status.SUCCESS, subprocessExistRes.get(Constants.STATUS)); } @@ -562,7 +560,7 @@ public class 
WorkflowInstanceServiceTest { .when(projectService) .checkProjectAndAuthThrowException(loginUser, projectCode, INSTANCE_UPDATE); Assertions.assertThrows(ServiceException.class, - () -> processInstanceService.updateWorkflowInstance(loginUser, projectCode, 1, + () -> workflowInstanceService.updateWorkflowInstance(loginUser, projectCode, 1, shellJson, taskJson, "2020-02-21 00:00:00", true, "", "", 0)); // process instance null @@ -574,7 +572,7 @@ public class WorkflowInstanceServiceTest { .checkProjectAndAuthThrowException(loginUser, projectCode, INSTANCE_UPDATE); when(processService.findWorkflowInstanceDetailById(1)).thenReturn(Optional.empty()); assertThrows(ServiceException.class, () -> { - processInstanceService.updateWorkflowInstance(loginUser, projectCode, 1, + workflowInstanceService.updateWorkflowInstance(loginUser, projectCode, 1, shellJson, taskJson, "2020-02-21 00:00:00", true, "", "", 0); }); // process instance not finish @@ -582,7 +580,7 @@ public class WorkflowInstanceServiceTest { workflowInstance.setState(WorkflowExecutionStatus.RUNNING_EXECUTION); putMsg(result, Status.SUCCESS, projectCode); Map processInstanceNotFinishRes = - processInstanceService.updateWorkflowInstance(loginUser, projectCode, 1, + workflowInstanceService.updateWorkflowInstance(loginUser, projectCode, 1, shellJson, taskJson, "2020-02-21 00:00:00", true, "", "", 0); Assertions.assertEquals(Status.WORKFLOW_INSTANCE_STATE_OPERATION_ERROR, processInstanceNotFinishRes.get(Constants.STATUS)); @@ -591,16 +589,16 @@ public class WorkflowInstanceServiceTest { workflowInstance.setState(WorkflowExecutionStatus.SUCCESS); workflowInstance.setTimeout(3000); workflowInstance.setCommandType(CommandType.STOP); - workflowInstance.setProcessDefinitionCode(46L); - workflowInstance.setProcessDefinitionVersion(1); + workflowInstance.setWorkflowDefinitionCode(46L); + workflowInstance.setWorkflowDefinitionVersion(1); WorkflowDefinition workflowDefinition = getProcessDefinition(); 
workflowDefinition.setId(1); workflowDefinition.setUserId(1); workflowDefinition.setProjectCode(projectCode); Tenant tenant = getTenant(); - when(processDefineMapper.queryByCode(46L)).thenReturn(workflowDefinition); + when(workflowDefinitionMapper.queryByCode(46L)).thenReturn(workflowDefinition); when(tenantMapper.queryByTenantCode("root")).thenReturn(tenant); - when(processService.getTenantForProcess(Mockito.anyString(), Mockito.anyInt())) + when(processService.getTenantForWorkflow(Mockito.anyString(), Mockito.anyInt())) .thenReturn(tenant.getTenantCode()); when(workflowInstanceDao.updateById(workflowInstance)).thenReturn(true); when(processService.saveWorkflowDefine(loginUser, workflowDefinition, Boolean.TRUE, Boolean.FALSE)) @@ -617,17 +615,17 @@ public class WorkflowInstanceServiceTest { .when(() -> TaskPluginManager.checkTaskParameters(Mockito.any(), Mockito.any())) .thenReturn(true); Map processInstanceFinishRes = - processInstanceService.updateWorkflowInstance(loginUser, projectCode, 1, + workflowInstanceService.updateWorkflowInstance(loginUser, projectCode, 1, taskRelationJson, taskDefinitionJson, "2020-02-21 00:00:00", true, "", "", 0); Assertions.assertEquals(Status.SUCCESS, processInstanceFinishRes.get(Constants.STATUS)); // success - when(processDefineMapper.queryByCode(46L)).thenReturn(workflowDefinition); + when(workflowDefinitionMapper.queryByCode(46L)).thenReturn(workflowDefinition); putMsg(result, Status.SUCCESS, projectCode); when(processService.saveWorkflowDefine(loginUser, workflowDefinition, Boolean.FALSE, Boolean.FALSE)) .thenReturn(1); - Map successRes = processInstanceService.updateWorkflowInstance(loginUser, projectCode, 1, + Map successRes = workflowInstanceService.updateWorkflowInstance(loginUser, projectCode, 1, taskRelationJson, taskDefinitionJson, "2020-02-21 00:00:00", Boolean.FALSE, "", "", 0); Assertions.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); } @@ -645,7 +643,7 @@ public class WorkflowInstanceServiceTest { 
when(projectMapper.queryByCode(projectCode)).thenReturn(project); when(projectService.checkProjectAndAuth(loginUser, project, projectCode, WORKFLOW_INSTANCE)).thenReturn(result); Map projectAuthFailRes = - processInstanceService.queryParentInstanceBySubId(loginUser, projectCode, 1); + workflowInstanceService.queryParentInstanceBySubId(loginUser, projectCode, 1); Assertions.assertEquals(Status.PROJECT_NOT_FOUND, projectAuthFailRes.get(Constants.STATUS)); // process instance null @@ -654,31 +652,31 @@ public class WorkflowInstanceServiceTest { when(projectService.checkProjectAndAuth(loginUser, project, projectCode, WORKFLOW_INSTANCE)).thenReturn(result); when(processService.findWorkflowInstanceDetailById(1)).thenReturn(Optional.empty()); assertThrows(ServiceException.class, () -> { - processInstanceService.queryParentInstanceBySubId(loginUser, projectCode, 1); + workflowInstanceService.queryParentInstanceBySubId(loginUser, projectCode, 1); }); // not sub process WorkflowInstance workflowInstance = getProcessInstance(); - workflowInstance.setIsSubProcess(Flag.NO); + workflowInstance.setIsSubWorkflow(Flag.NO); putMsg(result, Status.SUCCESS, projectCode); when(processService.findWorkflowInstanceDetailById(1)).thenReturn(Optional.ofNullable(workflowInstance)); Map notSubProcessRes = - processInstanceService.queryParentInstanceBySubId(loginUser, projectCode, 1); + workflowInstanceService.queryParentInstanceBySubId(loginUser, projectCode, 1); Assertions.assertEquals(Status.WORKFLOW_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE, notSubProcessRes.get(Constants.STATUS)); // sub process - workflowInstance.setIsSubProcess(Flag.YES); + workflowInstance.setIsSubWorkflow(Flag.YES); putMsg(result, Status.SUCCESS, projectCode); when(processService.findParentWorkflowInstance(1)).thenReturn(null); Map subProcessNullRes = - processInstanceService.queryParentInstanceBySubId(loginUser, projectCode, 1); + workflowInstanceService.queryParentInstanceBySubId(loginUser, projectCode, 1); 
Assertions.assertEquals(Status.SUB_WORKFLOW_INSTANCE_NOT_EXIST, subProcessNullRes.get(Constants.STATUS)); // success putMsg(result, Status.SUCCESS, projectCode); when(processService.findParentWorkflowInstance(1)).thenReturn(workflowInstance); - Map successRes = processInstanceService.queryParentInstanceBySubId(loginUser, projectCode, 1); + Map successRes = workflowInstanceService.queryParentInstanceBySubId(loginUser, projectCode, 1); Assertions.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); } @@ -695,41 +693,41 @@ public class WorkflowInstanceServiceTest { when(projectService.checkProjectAndAuth(loginUser, project, projectCode, INSTANCE_DELETE)).thenReturn(result); assertThrows(ServiceException.class, - () -> processInstanceService.deleteWorkflowInstanceById(loginUser, 1)); + () -> workflowInstanceService.deleteWorkflowInstanceById(loginUser, 1)); // not sub process WorkflowInstance workflowInstance = getProcessInstance(); - workflowInstance.setIsSubProcess(Flag.NO); + workflowInstance.setIsSubWorkflow(Flag.NO); workflowInstance.setState(WorkflowExecutionStatus.RUNNING_EXECUTION); putMsg(result, Status.SUCCESS, projectCode); when(processService.findWorkflowInstanceDetailById(1)).thenReturn(Optional.ofNullable(workflowInstance)); when(workflowDefinitionLogMapper.queryByDefinitionCodeAndVersion(Mockito.anyLong(), Mockito.anyInt())) .thenReturn(new WorkflowDefinitionLog()); assertThrows(ServiceException.class, - () -> processInstanceService.deleteWorkflowInstanceById(loginUser, 1)); + () -> workflowInstanceService.deleteWorkflowInstanceById(loginUser, 1)); workflowInstance.setState(WorkflowExecutionStatus.SUCCESS); workflowInstance.setState(WorkflowExecutionStatus.SUCCESS); workflowInstance.setTimeout(3000); workflowInstance.setCommandType(CommandType.STOP); - workflowInstance.setProcessDefinitionCode(46L); - workflowInstance.setProcessDefinitionVersion(1); + workflowInstance.setWorkflowDefinitionCode(46L); + 
workflowInstance.setWorkflowDefinitionVersion(1); WorkflowDefinition workflowDefinition = getProcessDefinition(); workflowDefinition.setId(1); workflowDefinition.setUserId(1); workflowDefinition.setProjectCode(0L); - when(processDefineMapper.queryByCode(46L)).thenReturn(workflowDefinition); + when(workflowDefinitionMapper.queryByCode(46L)).thenReturn(workflowDefinition); when(processService.findWorkflowInstanceDetailById(Mockito.anyInt())).thenReturn(Optional.empty()); assertThrows(ServiceException.class, - () -> processInstanceService.deleteWorkflowInstanceById(loginUser, 1)); + () -> workflowInstanceService.deleteWorkflowInstanceById(loginUser, 1)); workflowDefinition.setProjectCode(projectCode); when(processService.findWorkflowInstanceDetailById(Mockito.anyInt())).thenReturn(Optional.of(workflowInstance)); - when(processService.deleteWorkProcessInstanceById(1)).thenReturn(1); - processInstanceService.deleteWorkflowInstanceById(loginUser, 1); + when(processService.deleteWorkflowInstanceById(1)).thenReturn(1); + workflowInstanceService.deleteWorkflowInstanceById(loginUser, 1); - when(processService.deleteWorkProcessInstanceById(1)).thenReturn(0); - Assertions.assertDoesNotThrow(() -> processInstanceService.deleteWorkflowInstanceById(loginUser, 1)); + when(processService.deleteWorkflowInstanceById(1)).thenReturn(0); + Assertions.assertDoesNotThrow(() -> workflowInstanceService.deleteWorkflowInstanceById(loginUser, 1)); } @Test @@ -740,12 +738,12 @@ public class WorkflowInstanceServiceTest { workflowInstance.setScheduleTime(new Date()); workflowInstance.setGlobalParams(""); when(workflowInstanceMapper.queryDetailById(1)).thenReturn(workflowInstance); - Map successRes = processInstanceService.viewVariables(1L, 1); + Map successRes = workflowInstanceService.viewVariables(1L, 1); Assertions.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); when(workflowInstanceMapper.queryDetailById(1)).thenReturn(null); - Map processNotExist = 
processInstanceService.viewVariables(1L, 1); + Map processNotExist = workflowInstanceService.viewVariables(1L, 1); Assertions.assertEquals(Status.WORKFLOW_INSTANCE_NOT_EXIST, processNotExist.get(Constants.STATUS)); } @@ -757,8 +755,8 @@ public class WorkflowInstanceServiceTest { taskInstance.setStartTime(new Date()); when(workflowInstanceMapper.queryDetailById(1)).thenReturn(workflowInstance); when(workflowDefinitionLogMapper.queryByDefinitionCodeAndVersion( - workflowInstance.getProcessDefinitionCode(), - workflowInstance.getProcessDefinitionVersion())).thenReturn(new WorkflowDefinitionLog()); + workflowInstance.getWorkflowDefinitionCode(), + workflowInstance.getWorkflowDefinitionVersion())).thenReturn(new WorkflowDefinitionLog()); when(workflowInstanceMapper.queryDetailById(1)).thenReturn(workflowInstance); DAG graph = new DAG<>(); for (long i = 1; i <= 7; ++i) { @@ -768,11 +766,11 @@ public class WorkflowInstanceServiceTest { when(processService.genDagGraph(Mockito.any(WorkflowDefinition.class))) .thenReturn(graph); - Map successRes = processInstanceService.viewGantt(0L, 1); + Map successRes = workflowInstanceService.viewGantt(0L, 1); Assertions.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); when(workflowInstanceMapper.queryDetailById(1)).thenReturn(null); - Map processNotExist = processInstanceService.viewVariables(1L, 1); + Map processNotExist = workflowInstanceService.viewVariables(1L, 1); Assertions.assertEquals(Status.WORKFLOW_INSTANCE_NOT_EXIST, processNotExist.get(Constants.STATUS)); } @@ -813,8 +811,8 @@ public class WorkflowInstanceServiceTest { WorkflowInstance workflowInstance = new WorkflowInstance(); workflowInstance.setId(1); workflowInstance.setName("test_process_instance"); - workflowInstance.setProcessDefinitionCode(46L); - workflowInstance.setProcessDefinitionVersion(1); + workflowInstance.setWorkflowDefinitionCode(46L); + workflowInstance.setWorkflowDefinitionVersion(1); workflowInstance.setStartTime(new Date()); 
workflowInstance.setEndTime(new Date()); return workflowInstance; diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkflowTaskLineageServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkflowTaskLineageServiceTest.java index 59d0ccd85f..26eb126929 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkflowTaskLineageServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkflowTaskLineageServiceTest.java @@ -42,14 +42,11 @@ import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; -/** - * work flow lineage service test - */ @ExtendWith(MockitoExtension.class) public class WorkflowTaskLineageServiceTest { @InjectMocks - private WorkflowLineageServiceImpl processLineageService; + private WorkflowLineageServiceImpl workflowLineageService; @Mock private WorkflowTaskLineageDao workflowTaskLineageDao; @@ -82,7 +79,7 @@ public class WorkflowTaskLineageServiceTest { when(projectMapper.queryByCode(1L)).thenReturn(project); when(workflowTaskLineageDao.queryWorkFlowLineageByName(Mockito.anyLong(), Mockito.any())) .thenReturn(getWorkFlowLineages()); - List workFlowLineages = processLineageService.queryWorkFlowLineageByName(1L, name); + List workFlowLineages = workflowLineageService.queryWorkFlowLineageByName(1L, name); Assertions.assertTrue(CollectionUtils.isNotEmpty(workFlowLineages)); } @@ -112,7 +109,7 @@ public class WorkflowTaskLineageServiceTest { when(workflowTaskLineageDao.queryWorkFlowLineageByCode(workflowTaskLineage.getWorkflowDefinitionCode())) .thenReturn(workFlowRelationDetailList); - WorkFlowLineage workFlowLineage = processLineageService.queryWorkFlowLineage(1L); + WorkFlowLineage workFlowLineage = workflowLineageService.queryWorkFlowLineage(1L); List workFlowLineageList = workFlowLineage.getWorkFlowRelationDetailList(); diff --git 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkflowTaskRelationServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkflowTaskRelationServiceTest.java index 53b35c5516..fc6ed64aa1 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkflowTaskRelationServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkflowTaskRelationServiceTest.java @@ -68,15 +68,12 @@ import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import com.google.common.collect.Lists; -/** - * process task instance relation service test - */ @ExtendWith(MockitoExtension.class) @MockitoSettings(strictness = Strictness.LENIENT) public class WorkflowTaskRelationServiceTest { @InjectMocks - WorkflowTaskRelationServiceImpl processTaskRelationService; + WorkflowTaskRelationServiceImpl workflowTaskRelationService; @Mock private ProjectMapper projectMapper; @@ -103,7 +100,7 @@ public class WorkflowTaskRelationServiceTest { private ProcessService processService; private static final long PROJECT_CODE = 1L; - private static final long PROCESS_DEFINITION_CODE = 2L; + private static final long WORKFLOW_DEFINITION_CODE = 2L; private static final long UPSTREAM_TASK_CODE = 3L; private static final long DOWNSTREAM_TASK_CODE = 4L; protected User user; @@ -201,24 +198,24 @@ public class WorkflowTaskRelationServiceTest { return taskDefinition; } - private List getProcessTaskUpstreamRelationList(long projectCode, long taskCode) { + private List getWorkflowTaskUpstreamRelationList(long projectCode, long taskCode) { WorkflowTaskRelation workflowTaskRelationUpstream0 = new WorkflowTaskRelation(); workflowTaskRelationUpstream0.setPostTaskCode(taskCode); workflowTaskRelationUpstream0.setPreTaskVersion(1); workflowTaskRelationUpstream0.setProjectCode(projectCode); 
workflowTaskRelationUpstream0.setPreTaskCode(123); - workflowTaskRelationUpstream0.setProcessDefinitionCode(123); + workflowTaskRelationUpstream0.setWorkflowDefinitionCode(123); WorkflowTaskRelation workflowTaskRelationUpstream1 = new WorkflowTaskRelation(); workflowTaskRelationUpstream1.setPostTaskCode(taskCode); workflowTaskRelationUpstream1.setPreTaskVersion(1); workflowTaskRelationUpstream1.setPreTaskCode(123); - workflowTaskRelationUpstream1.setProcessDefinitionCode(124); + workflowTaskRelationUpstream1.setWorkflowDefinitionCode(124); workflowTaskRelationUpstream1.setProjectCode(projectCode); WorkflowTaskRelation workflowTaskRelationUpstream2 = new WorkflowTaskRelation(); workflowTaskRelationUpstream2.setPostTaskCode(taskCode); workflowTaskRelationUpstream2.setPreTaskVersion(2); workflowTaskRelationUpstream2.setPreTaskCode(123); - workflowTaskRelationUpstream2.setProcessDefinitionCode(125); + workflowTaskRelationUpstream2.setWorkflowDefinitionCode(125); workflowTaskRelationUpstream2.setProjectCode(projectCode); List workflowTaskRelationList = new ArrayList<>(); workflowTaskRelationList.add(workflowTaskRelationUpstream0); @@ -227,7 +224,7 @@ public class WorkflowTaskRelationServiceTest { return workflowTaskRelationList; } - private List getProcessTaskDownstreamRelationList(long projectCode, long taskCode) { + private List getWorkflowTaskDownstreamRelationList(long projectCode, long taskCode) { WorkflowTaskRelation workflowTaskRelationDownstream0 = new WorkflowTaskRelation(); workflowTaskRelationDownstream0.setPreTaskCode(taskCode); workflowTaskRelationDownstream0.setPostTaskCode(456); @@ -250,7 +247,7 @@ public class WorkflowTaskRelationServiceTest { return workflowTaskRelationList; } - private WorkflowDefinition getProcessDefinition() { + private WorkflowDefinition getWorkflowDefinition() { WorkflowDefinition workflowDefinition = new WorkflowDefinition(); workflowDefinition.setId(1); workflowDefinition.setProjectCode(1L); @@ -273,7 +270,7 @@ public class 
WorkflowTaskRelationServiceTest { @Test public void testCreateWorkflowTaskRelation() { long projectCode = 1L; - long processDefinitionCode = 1L; + long workflowDefinitionCode = 1L; long preTaskCode = 0L; long postTaskCode = 1L; @@ -283,26 +280,26 @@ public class WorkflowTaskRelationServiceTest { Map result = new HashMap<>(); putMsg(result, Status.SUCCESS, projectCode); Mockito.when(projectService.checkProjectAndAuth(user, project, projectCode, null)).thenReturn(result); - Mockito.when(workflowDefinitionMapper.queryByCode(processDefinitionCode)).thenReturn(getProcessDefinition()); + Mockito.when(workflowDefinitionMapper.queryByCode(workflowDefinitionCode)).thenReturn(getWorkflowDefinition()); Mockito.when( - workflowTaskRelationMapper.queryByCode(projectCode, processDefinitionCode, preTaskCode, postTaskCode)) + workflowTaskRelationMapper.queryByCode(projectCode, workflowDefinitionCode, preTaskCode, postTaskCode)) .thenReturn(Lists.newArrayList()); Mockito.when(taskDefinitionMapper.queryByCode(postTaskCode)).thenReturn(getTaskDefinition()); List workflowTaskRelationList = Lists.newArrayList(); - List processTaskRelationLogList = Lists.newArrayList(); + List workflowTaskRelationLogList = Lists.newArrayList(); WorkflowTaskRelation workflowTaskRelation = new WorkflowTaskRelation(); workflowTaskRelation.setProjectCode(projectCode); - workflowTaskRelation.setProcessDefinitionCode(processDefinitionCode); + workflowTaskRelation.setWorkflowDefinitionCode(workflowDefinitionCode); workflowTaskRelation.setPreTaskCode(0L); workflowTaskRelation.setPreTaskVersion(0); workflowTaskRelation.setPostTaskCode(postTaskCode); workflowTaskRelation.setPostTaskVersion(1); workflowTaskRelationList.add(workflowTaskRelation); - processTaskRelationLogList.add(new WorkflowTaskRelationLog(workflowTaskRelation)); + workflowTaskRelationLogList.add(new WorkflowTaskRelationLog(workflowTaskRelation)); Mockito.when(workflowTaskRelationMapper.batchInsert(workflowTaskRelationList)).thenReturn(1); - 
Mockito.when(workflowTaskRelationLogMapper.batchInsert(processTaskRelationLogList)).thenReturn(1); + Mockito.when(workflowTaskRelationLogMapper.batchInsert(workflowTaskRelationLogList)).thenReturn(1); Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } @@ -319,7 +316,7 @@ public class WorkflowTaskRelationServiceTest { Mockito.when(projectService.checkProjectAndAuth(user, project, projectCode, null)).thenReturn(result); List workflowTaskRelationList = - getProcessTaskDownstreamRelationList(projectCode, taskCode); + getWorkflowTaskDownstreamRelationList(projectCode, taskCode); Mockito.when(workflowTaskRelationMapper.queryDownstreamByCode(projectCode, taskCode)) .thenReturn(workflowTaskRelationList); @@ -327,30 +324,24 @@ public class WorkflowTaskRelationServiceTest { if (CollectionUtils.isNotEmpty(workflowTaskRelationList)) { Set taskDefinitions = workflowTaskRelationList .stream() - .map(processTaskRelation -> { - TaskDefinition taskDefinition = buildTaskDefinition( - processTaskRelation.getProjectCode(), - processTaskRelation.getPostTaskCode(), - processTaskRelation.getPostTaskVersion()); - return taskDefinition; - }) + .map(workflowTaskRelation -> buildTaskDefinition( + workflowTaskRelation.getProjectCode(), + workflowTaskRelation.getPostTaskCode(), + workflowTaskRelation.getPostTaskVersion())) .collect(Collectors.toSet()); Set taskDefinitionLogSet = workflowTaskRelationList .stream() - .map(processTaskRelation -> { - TaskDefinitionLog taskDefinitionLog = buildTaskDefinitionLog( - processTaskRelation.getProjectCode(), - processTaskRelation.getPostTaskCode(), - processTaskRelation.getPostTaskVersion()); - return taskDefinitionLog; - }) + .map(workflowTaskRelation -> buildTaskDefinitionLog( + workflowTaskRelation.getProjectCode(), + workflowTaskRelation.getPostTaskCode(), + workflowTaskRelation.getPostTaskVersion())) .collect(Collectors.toSet()); List taskDefinitionLogList = taskDefinitionLogSet.stream().collect(Collectors.toList()); 
Mockito.when(taskDefinitionLogMapper.queryByTaskDefinitions(taskDefinitions)) .thenReturn(taskDefinitionLogList); } - Map relation = processTaskRelationService + Map relation = workflowTaskRelationService .queryDownstreamRelation(user, projectCode, taskCode); Assertions.assertEquals(Status.SUCCESS, relation.get(Constants.STATUS)); Assertions.assertEquals(2, ((List) relation.get("data")).size()); @@ -367,7 +358,8 @@ public class WorkflowTaskRelationServiceTest { Map result = new HashMap<>(); putMsg(result, Status.SUCCESS, projectCode); Mockito.when(projectService.checkProjectAndAuth(user, project, projectCode, null)).thenReturn(result); - List workflowTaskRelationList = getProcessTaskUpstreamRelationList(projectCode, taskCode); + List workflowTaskRelationList = + getWorkflowTaskUpstreamRelationList(projectCode, taskCode); Mockito.when(workflowTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode)) .thenReturn(workflowTaskRelationList); @@ -375,30 +367,24 @@ public class WorkflowTaskRelationServiceTest { if (CollectionUtils.isNotEmpty(workflowTaskRelationList)) { Set taskDefinitions = workflowTaskRelationList .stream() - .map(processTaskRelation -> { - TaskDefinition taskDefinition = buildTaskDefinition( - processTaskRelation.getProjectCode(), - processTaskRelation.getPreTaskCode(), - processTaskRelation.getPreTaskVersion()); - return taskDefinition; - }) + .map(workflowTaskRelation -> buildTaskDefinition( + workflowTaskRelation.getProjectCode(), + workflowTaskRelation.getPreTaskCode(), + workflowTaskRelation.getPreTaskVersion())) .collect(Collectors.toSet()); Set taskDefinitionLogSet = workflowTaskRelationList .stream() - .map(processTaskRelation -> { - TaskDefinitionLog taskDefinitionLog = buildTaskDefinitionLog( - processTaskRelation.getProjectCode(), - processTaskRelation.getPreTaskCode(), - processTaskRelation.getPreTaskVersion()); - return taskDefinitionLog; - }) + .map(workflowTaskRelation -> buildTaskDefinitionLog( + 
workflowTaskRelation.getProjectCode(), + workflowTaskRelation.getPreTaskCode(), + workflowTaskRelation.getPreTaskVersion())) .collect(Collectors.toSet()); List taskDefinitionLogList = taskDefinitionLogSet.stream().collect(Collectors.toList()); Mockito.when(taskDefinitionLogMapper.queryByTaskDefinitions(taskDefinitions)) .thenReturn(taskDefinitionLogList); } - Map relation = processTaskRelationService + Map relation = workflowTaskRelationService .queryUpstreamRelation(user, projectCode, taskCode); Assertions.assertEquals(Status.SUCCESS, relation.get(Constants.STATUS)); Assertions.assertEquals(2, ((List) relation.get("data")).size()); @@ -417,21 +403,21 @@ public class WorkflowTaskRelationServiceTest { List workflowTaskRelationList = new ArrayList<>(); WorkflowTaskRelation workflowTaskRelation = new WorkflowTaskRelation(); workflowTaskRelation.setProjectCode(projectCode); - workflowTaskRelation.setProcessDefinitionCode(1L); + workflowTaskRelation.setWorkflowDefinitionCode(1L); workflowTaskRelation.setPreTaskCode(taskCode); workflowTaskRelation.setPostTaskCode(123L); workflowTaskRelationList.add(workflowTaskRelation); Mockito.when(workflowTaskRelationMapper.queryDownstreamByCode(projectCode, taskCode)) .thenReturn(workflowTaskRelationList); - WorkflowTaskRelationLog processTaskRelationLog = new WorkflowTaskRelationLog(workflowTaskRelation); - Mockito.when(workflowTaskRelationMapper.deleteRelation(processTaskRelationLog)).thenReturn(1); - Mockito.when(workflowTaskRelationLogMapper.deleteRelation(processTaskRelationLog)).thenReturn(1); - WorkflowDefinition workflowDefinition = getProcessDefinition(); + WorkflowTaskRelationLog workflowTaskRelationLog = new WorkflowTaskRelationLog(workflowTaskRelation); + Mockito.when(workflowTaskRelationMapper.deleteRelation(workflowTaskRelationLog)).thenReturn(1); + Mockito.when(workflowTaskRelationLogMapper.deleteRelation(workflowTaskRelationLog)).thenReturn(1); + WorkflowDefinition workflowDefinition = getWorkflowDefinition(); 
Mockito.when(workflowDefinitionMapper.queryByCode(1L)).thenReturn(workflowDefinition); Mockito.when(processService.saveWorkflowDefine(user, workflowDefinition, Boolean.TRUE, Boolean.TRUE)) .thenReturn(1); Map result1 = - processTaskRelationService.deleteDownstreamRelation(user, projectCode, "123", taskCode); + workflowTaskRelationService.deleteDownstreamRelation(user, projectCode, "123", taskCode); Assertions.assertEquals(Status.SUCCESS, result1.get(Constants.STATUS)); } @@ -447,7 +433,7 @@ public class WorkflowTaskRelationServiceTest { List workflowTaskRelationList = Lists.newArrayList(); WorkflowTaskRelation workflowTaskRelation = new WorkflowTaskRelation(); workflowTaskRelation.setProjectCode(projectCode); - workflowTaskRelation.setProcessDefinitionCode(1L); + workflowTaskRelation.setWorkflowDefinitionCode(1L); workflowTaskRelation.setPreTaskCode(0L); workflowTaskRelation.setPreTaskVersion(0); workflowTaskRelation.setPostTaskCode(taskCode); @@ -456,8 +442,8 @@ public class WorkflowTaskRelationServiceTest { Mockito.when(projectService.checkProjectAndAuth(user, project, projectCode, null)).thenReturn(result); Mockito.when(workflowTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode)) .thenReturn(workflowTaskRelationList); - Mockito.when(workflowDefinitionMapper.queryByCode(1L)).thenReturn(getProcessDefinition()); - Mockito.when(workflowTaskRelationMapper.queryByProcessCode(1L)).thenReturn(workflowTaskRelationList); + Mockito.when(workflowDefinitionMapper.queryByCode(1L)).thenReturn(getWorkflowDefinition()); + Mockito.when(workflowTaskRelationMapper.queryByWorkflowDefinitionCode(1L)).thenReturn(workflowTaskRelationList); List relationLogs = workflowTaskRelationList.stream().map(WorkflowTaskRelationLog::new).collect(Collectors.toList()); Mockito.when(processService.saveTaskRelation(user, 1L, 1L, @@ -469,7 +455,7 @@ public class WorkflowTaskRelationServiceTest { public void testDeleteTaskWorkflowRelation() { long projectCode = 1L; long taskCode = 1L; - long 
processDefinitionCode = 1L; + long workflowDefinitionCode = 1L; long preTaskCode = 4L; long postTaskCode = 5L; Project project = getProject(projectCode); @@ -479,9 +465,9 @@ public class WorkflowTaskRelationServiceTest { putMsg(result, Status.SUCCESS, projectCode); Mockito.when(projectService.checkProjectAndAuth(user, project, projectCode, null)).thenReturn(result); Mockito.when( - workflowTaskRelationMapper.queryByCode(projectCode, processDefinitionCode, preTaskCode, postTaskCode)) + workflowTaskRelationMapper.queryByCode(projectCode, workflowDefinitionCode, preTaskCode, postTaskCode)) .thenReturn(Lists.newArrayList()); - Mockito.when(workflowDefinitionMapper.queryByCode(processDefinitionCode)).thenReturn(getProcessDefinition()); + Mockito.when(workflowDefinitionMapper.queryByCode(workflowDefinitionCode)).thenReturn(getWorkflowDefinition()); Mockito.when(taskDefinitionMapper.queryByCode(taskCode)).thenReturn(getTaskDefinition()); TaskDefinition taskDefinition = new TaskDefinition(); taskDefinition.setTaskType("CONDITIONS"); @@ -489,13 +475,13 @@ public class WorkflowTaskRelationServiceTest { List workflowTaskRelationList = Lists.newArrayList(); WorkflowTaskRelation workflowTaskRelation = new WorkflowTaskRelation(); workflowTaskRelation.setProjectCode(projectCode); - workflowTaskRelation.setProcessDefinitionCode(1L); + workflowTaskRelation.setWorkflowDefinitionCode(1L); workflowTaskRelation.setPreTaskCode(0L); workflowTaskRelation.setPreTaskVersion(0); workflowTaskRelation.setPostTaskCode(taskCode); workflowTaskRelation.setPostTaskVersion(1); workflowTaskRelationList.add(workflowTaskRelation); - Mockito.when(workflowTaskRelationMapper.queryByProcessCode(processDefinitionCode)) + Mockito.when(workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode)) .thenReturn(workflowTaskRelationList); List relationLogs = workflowTaskRelationList.stream().map(WorkflowTaskRelationLog::new).collect(Collectors.toList()); @@ -508,7 +494,7 @@ public class 
WorkflowTaskRelationServiceTest { @Test public void testDeleteEdge() { long projectCode = 1L; - long processDefinitionCode = 3L; + long workflowDefinitionCode = 3L; long preTaskCode = 0L; long postTaskCode = 5L; Project project = getProject(projectCode); @@ -519,8 +505,8 @@ public class WorkflowTaskRelationServiceTest { Mockito.when(projectService.checkProjectAndAuth(user, project, projectCode, null)).thenReturn(result); WorkflowTaskRelation workflowTaskRelation = new WorkflowTaskRelation(); workflowTaskRelation.setProjectCode(projectCode); - workflowTaskRelation.setProcessDefinitionCode(processDefinitionCode); - workflowTaskRelation.setProcessDefinitionVersion(1); + workflowTaskRelation.setWorkflowDefinitionCode(workflowDefinitionCode); + workflowTaskRelation.setWorkflowDefinitionVersion(1); workflowTaskRelation.setPreTaskCode(preTaskCode); workflowTaskRelation.setPostTaskCode(postTaskCode); WorkflowTaskRelationLog processTaskRelationLog = new WorkflowTaskRelationLog(workflowTaskRelation); @@ -528,7 +514,7 @@ public class WorkflowTaskRelationServiceTest { List workflowTaskRelationList = new ArrayList<>(); workflowTaskRelationList.add(workflowTaskRelation); Mockito.when(projectService.checkProjectAndAuth(user, project, projectCode, null)).thenReturn(result); - Mockito.when(workflowTaskRelationMapper.queryByProcessCode(1L)).thenReturn(workflowTaskRelationList); + Mockito.when(workflowTaskRelationMapper.queryByWorkflowDefinitionCode(1L)).thenReturn(workflowTaskRelationList); List relationLogs = workflowTaskRelationList.stream().map(WorkflowTaskRelationLog::new).collect(Collectors.toList()); Mockito.when(processService.saveTaskRelation(user, 1L, 1L, @@ -539,97 +525,98 @@ public class WorkflowTaskRelationServiceTest { @Test public void testCreateWorkflowTaskRelationV2() { TaskRelationCreateRequest taskRelationCreateRequest = new TaskRelationCreateRequest(); - taskRelationCreateRequest.setWorkflowCode(PROCESS_DEFINITION_CODE); + 
taskRelationCreateRequest.setWorkflowCode(WORKFLOW_DEFINITION_CODE); - // error process definition not exists + // error workflow definition not exists exception = Assertions.assertThrows(ServiceException.class, - () -> processTaskRelationService.createWorkflowTaskRelationV2(user, taskRelationCreateRequest)); + () -> workflowTaskRelationService.createWorkflowTaskRelationV2(user, taskRelationCreateRequest)); Assertions.assertEquals(Status.WORKFLOW_DEFINITION_NOT_EXIST.getCode(), ((ServiceException) exception).getCode()); // error project without permissions - Mockito.when(workflowDefinitionMapper.queryByCode(PROCESS_DEFINITION_CODE)).thenReturn(getProcessDefinition()); + Mockito.when(workflowDefinitionMapper.queryByCode(WORKFLOW_DEFINITION_CODE)) + .thenReturn(getWorkflowDefinition()); Mockito.when(projectMapper.queryByCode(PROJECT_CODE)).thenReturn(getProject(PROJECT_CODE)); Mockito.doThrow(new ServiceException(Status.USER_NO_OPERATION_PROJECT_PERM)).when(projectService) .checkProjectAndAuthThrowException(user, getProject(PROJECT_CODE), null); exception = Assertions.assertThrows(ServiceException.class, - () -> processTaskRelationService.createWorkflowTaskRelationV2(user, taskRelationCreateRequest)); + () -> workflowTaskRelationService.createWorkflowTaskRelationV2(user, taskRelationCreateRequest)); Assertions.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM.getCode(), ((ServiceException) exception).getCode()); - // error insert process task relation + // error insert workflow task relation Mockito.doNothing().when(projectService).checkProjectAndAuthThrowException(user, getProject(PROJECT_CODE), null); Mockito.when(workflowTaskRelationMapper.insert(isA(WorkflowTaskRelation.class))).thenReturn(0); Mockito.when(taskDefinitionMapper.queryByCode(isA(Long.class))).thenReturn(getTaskDefinition()); exception = Assertions.assertThrows(ServiceException.class, - () -> processTaskRelationService.createWorkflowTaskRelationV2(user, taskRelationCreateRequest)); + () -> 
workflowTaskRelationService.createWorkflowTaskRelationV2(user, taskRelationCreateRequest)); Assertions.assertEquals(Status.CREATE_WORKFLOW_TASK_RELATION_ERROR.getCode(), ((ServiceException) exception).getCode()); - // error insert process task relation log + // error insert workflow task relation log Mockito.when(workflowTaskRelationMapper.insert(isA(WorkflowTaskRelation.class))).thenReturn(1); Mockito.when(workflowTaskRelationLogMapper.insert(isA(WorkflowTaskRelationLog.class))).thenReturn(0); exception = Assertions.assertThrows(ServiceException.class, - () -> processTaskRelationService.createWorkflowTaskRelationV2(user, taskRelationCreateRequest)); + () -> workflowTaskRelationService.createWorkflowTaskRelationV2(user, taskRelationCreateRequest)); Assertions.assertEquals(Status.CREATE_WORKFLOW_TASK_RELATION_LOG_ERROR.getCode(), ((ServiceException) exception).getCode()); // success Mockito.when(workflowTaskRelationLogMapper.insert(isA(WorkflowTaskRelationLog.class))).thenReturn(1); Assertions.assertDoesNotThrow( - () -> processTaskRelationService.createWorkflowTaskRelationV2(user, taskRelationCreateRequest)); + () -> workflowTaskRelationService.createWorkflowTaskRelationV2(user, taskRelationCreateRequest)); } @Test public void testDeleteTaskWorkflowRelationV2() { TaskRelationCreateRequest taskRelationCreateRequest = new TaskRelationCreateRequest(); - taskRelationCreateRequest.setWorkflowCode(PROCESS_DEFINITION_CODE); + taskRelationCreateRequest.setWorkflowCode(WORKFLOW_DEFINITION_CODE); // error task relation size Mockito.when( - workflowTaskRelationMapper.filterProcessTaskRelation(isA(Page.class), isA(WorkflowTaskRelation.class))) - .thenReturn(getMultiProcessTaskRelations()); - exception = Assertions.assertThrows(ServiceException.class, () -> processTaskRelationService + workflowTaskRelationMapper.filterWorkflowTaskRelation(isA(Page.class), isA(WorkflowTaskRelation.class))) + .thenReturn(getMultiWorkflowTaskRelations()); + exception = 
Assertions.assertThrows(ServiceException.class, () -> workflowTaskRelationService .deleteTaskWorkflowRelationV2(user, UPSTREAM_TASK_CODE, DOWNSTREAM_TASK_CODE)); Assertions.assertEquals(Status.WORKFLOW_TASK_RELATION_NOT_EXPECT.getCode(), ((ServiceException) exception).getCode()); // success Mockito.when( - workflowTaskRelationMapper.filterProcessTaskRelation(isA(Page.class), isA(WorkflowTaskRelation.class))) - .thenReturn(getOneProcessTaskRelation()); - Assertions.assertDoesNotThrow(() -> processTaskRelationService.deleteTaskWorkflowRelationV2(user, + workflowTaskRelationMapper.filterWorkflowTaskRelation(isA(Page.class), isA(WorkflowTaskRelation.class))) + .thenReturn(getOneWorkflowTaskRelation()); + Assertions.assertDoesNotThrow(() -> workflowTaskRelationService.deleteTaskWorkflowRelationV2(user, UPSTREAM_TASK_CODE, DOWNSTREAM_TASK_CODE)); } - private IPage getOneProcessTaskRelation() { - IPage processTaskRelationIPage = new Page<>(); + private IPage getOneWorkflowTaskRelation() { + IPage workflowTaskRelationIPage = new Page<>(); WorkflowTaskRelation workflowTaskRelation = new WorkflowTaskRelation(); - workflowTaskRelation.setProcessDefinitionCode(PROCESS_DEFINITION_CODE); + workflowTaskRelation.setWorkflowDefinitionCode(WORKFLOW_DEFINITION_CODE); workflowTaskRelation.setPreTaskCode(UPSTREAM_TASK_CODE); workflowTaskRelation.setPostTaskCode(DOWNSTREAM_TASK_CODE); - processTaskRelationIPage.setRecords(Collections.singletonList(workflowTaskRelation)); - return processTaskRelationIPage; + workflowTaskRelationIPage.setRecords(Collections.singletonList(workflowTaskRelation)); + return workflowTaskRelationIPage; } - private IPage getMultiProcessTaskRelations() { - IPage processTaskRelationIPage = new Page<>(); + private IPage getMultiWorkflowTaskRelations() { + IPage workflowTaskRelationIPage = new Page<>(); List workflowTaskRelations = new ArrayList<>(); WorkflowTaskRelation workflowTaskRelation0 = new WorkflowTaskRelation(); - 
workflowTaskRelation0.setProcessDefinitionCode(PROCESS_DEFINITION_CODE); + workflowTaskRelation0.setWorkflowDefinitionCode(WORKFLOW_DEFINITION_CODE); workflowTaskRelation0.setPreTaskCode(UPSTREAM_TASK_CODE); workflowTaskRelation0.setPostTaskCode(DOWNSTREAM_TASK_CODE); workflowTaskRelations.add(workflowTaskRelation0); WorkflowTaskRelation workflowTaskRelation1 = new WorkflowTaskRelation(); - workflowTaskRelation1.setProcessDefinitionCode(PROCESS_DEFINITION_CODE); + workflowTaskRelation1.setWorkflowDefinitionCode(WORKFLOW_DEFINITION_CODE); workflowTaskRelation1.setPreTaskCode(UPSTREAM_TASK_CODE); workflowTaskRelation1.setPostTaskCode(DOWNSTREAM_TASK_CODE); workflowTaskRelations.add(workflowTaskRelation1); - processTaskRelationIPage.setRecords(workflowTaskRelations); - return processTaskRelationIPage; + workflowTaskRelationIPage.setRecords(workflowTaskRelations); + return workflowTaskRelationIPage; } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/CommandKeyConstants.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/CommandKeyConstants.java index 7924876667..b1308272d6 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/CommandKeyConstants.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/CommandKeyConstants.java @@ -31,7 +31,7 @@ public class CommandKeyConstants { public static final String CMD_PARAM_RECOVERY_WAITING_THREAD = "WaitingThreadInstanceId"; - public static final String CMD_PARAM_SUB_PROCESS = "processInstanceId"; + public static final String CMD_PARAM_SUB_WORKFLOW = "processInstanceId"; public static final String CMD_PARAM_EMPTY_SUB_PROCESS = "0"; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/Constants.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/Constants.java index 
79892d270e..cb0b812011 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/Constants.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/Constants.java @@ -323,14 +323,14 @@ public final class Constants { public static final int EXIT_CODE_FAILURE = -1; /** - * process or task definition failure + * workflow or task definition failure */ public static final int DEFINITION_FAILURE = -1; public static final int OPPOSITE_VALUE = -1; /** - * process or task definition first version + * workflow or task definition first version */ public static final int VERSION_FIRST = 1; @@ -353,8 +353,8 @@ public final class Constants { public static final char N = 'N'; public static final String GLOBAL_PARAMS = "globalParams"; public static final String LOCAL_PARAMS = "localParams"; - public static final String SUBPROCESS_INSTANCE_ID = "subProcessInstanceId"; - public static final String PROCESS_INSTANCE_STATE = "processInstanceState"; + public static final String SUBWORKFLOW_INSTANCE_ID = "subWorkflowInstanceId"; + public static final String WORKFLOW_INSTANCE_STATE = "workflowInstanceState"; public static final String PARENT_WORKFLOW_INSTANCE = "parentWorkflowInstance"; public static final String CONDITION_RESULT = "conditionResult"; public static final String SWITCH_RESULT = "switchResult"; diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/sql/ClasspathSqlScriptParserTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/sql/ClasspathSqlScriptParserTest.java index e2a23845ad..3cc625c71f 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/sql/ClasspathSqlScriptParserTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/sql/ClasspathSqlScriptParserTest.java @@ -47,6 +47,6 @@ class ClasspathSqlScriptParserTest { void testMysqlDdlSql() throws IOException { 
ClasspathSqlScriptParser classpathSqlScriptParser = new ClasspathSqlScriptParser("sql/mysql_ddl.sql"); List allSql = classpathSqlScriptParser.getAllSql(); - Assertions.assertEquals("ALTER TABLE t_ds_process_definition DROP tenant_id;", allSql.get(0)); + Assertions.assertEquals("ALTER TABLE t_ds_workflow_definition DROP tenant_id;", allSql.get(0)); } } diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CodeGenerateUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CodeGenerateUtilsTest.java index 45a47582be..6eeed6ab73 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CodeGenerateUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CodeGenerateUtilsTest.java @@ -35,7 +35,7 @@ class CodeGenerateUtilsTest { @Test void testNoGenerateDuplicateCode() { - int codeNum = 10000000; + int codeNum = 1000000; List existsCode = new ArrayList<>(); for (int i = 0; i < codeNum; i++) { Long currentCode = CodeGenerateUtils.genCode(); @@ -49,7 +49,7 @@ class CodeGenerateUtilsTest { @Test void testNoGenerateDuplicateCodeWithDifferentAppName() throws UnknownHostException, InterruptedException { int threadNum = 10; - int codeNum = 100000; + int codeNum = 1000000; final String hostName = InetAddress.getLocalHost().getHostName(); Map> machineCodes = new ConcurrentHashMap<>(); diff --git a/dolphinscheduler-common/src/test/resources/sql/mysql_ddl.sql b/dolphinscheduler-common/src/test/resources/sql/mysql_ddl.sql index beadc63ca3..c4be0bed7f 100644 --- a/dolphinscheduler-common/src/test/resources/sql/mysql_ddl.sql +++ b/dolphinscheduler-common/src/test/resources/sql/mysql_ddl.sql @@ -15,9 +15,9 @@ * limitations under the License. 
*/ -ALTER TABLE t_ds_process_definition DROP tenant_id; -ALTER TABLE t_ds_process_definition_log DROP tenant_id; -ALTER TABLE t_ds_process_instance DROP tenant_id; +ALTER TABLE t_ds_workflow_definition DROP tenant_id; +ALTER TABLE t_ds_workflow_definition_log DROP tenant_id; +ALTER TABLE t_ds_workflow_instance DROP tenant_id; SET FOREIGN_KEY_CHECKS = 0; -- auto detect by atlas, see more detail in https://github.com/apache/dolphinscheduler/pull/14620 @@ -33,7 +33,7 @@ ALTER TABLE `QRTZ_SIMPLE_TRIGGERS` COLLATE utf8_bin, MODIFY COLUMN `SCHED_NAME` ALTER TABLE `QRTZ_SIMPROP_TRIGGERS` COLLATE utf8_bin, MODIFY COLUMN `SCHED_NAME` varchar(120) NOT NULL, MODIFY COLUMN `TRIGGER_NAME` varchar(200) NOT NULL, MODIFY COLUMN `TRIGGER_GROUP` varchar(200) NOT NULL, MODIFY COLUMN `STR_PROP_1` varchar(512) NULL, MODIFY COLUMN `STR_PROP_2` varchar(512) NULL, MODIFY COLUMN `STR_PROP_3` varchar(512) NULL, MODIFY COLUMN `BOOL_PROP_1` varchar(1) NULL, MODIFY COLUMN `BOOL_PROP_2` varchar(1) NULL; ALTER TABLE `QRTZ_TRIGGERS` COLLATE utf8_bin, MODIFY COLUMN `SCHED_NAME` varchar(120) NOT NULL, MODIFY COLUMN `TRIGGER_NAME` varchar(200) NOT NULL, MODIFY COLUMN `TRIGGER_GROUP` varchar(200) NOT NULL, MODIFY COLUMN `JOB_NAME` varchar(200) NOT NULL, MODIFY COLUMN `JOB_GROUP` varchar(200) NOT NULL, MODIFY COLUMN `DESCRIPTION` varchar(250) NULL, MODIFY COLUMN `TRIGGER_STATE` varchar(16) NOT NULL, MODIFY COLUMN `TRIGGER_TYPE` varchar(8) NOT NULL, MODIFY COLUMN `CALENDAR_NAME` varchar(200) NULL; ALTER TABLE `t_ds_plugin_define` AUTO_INCREMENT 2; -ALTER TABLE `t_ds_process_instance` MODIFY COLUMN `state_history` text NULL COMMENT 'state history desc'; +ALTER TABLE `t_ds_workflow_instance` MODIFY COLUMN `state_history` text NULL COMMENT 'state history desc'; ALTER TABLE `t_ds_project` MODIFY COLUMN `description` varchar(255) NULL; ALTER TABLE `t_ds_task_group` MODIFY COLUMN `description` varchar(255) NULL; ALTER TABLE `t_ds_task_instance` MODIFY COLUMN `app_link` text NULL COMMENT 'yarn app id', MODIFY 
COLUMN `cache_key` varchar(200) NULL COMMENT 'cache_key', MODIFY COLUMN `executor_name` varchar(64) NULL; diff --git a/dolphinscheduler-common/src/test/resources/sql/mysql_dml.sql b/dolphinscheduler-common/src/test/resources/sql/mysql_dml.sql index e5d97fab94..af1846ebf7 100644 --- a/dolphinscheduler-common/src/test/resources/sql/mysql_dml.sql +++ b/dolphinscheduler-common/src/test/resources/sql/mysql_dml.sql @@ -35,8 +35,8 @@ CALL dolphin_t_ds_tenant_insert_default(); DROP PROCEDURE dolphin_t_ds_tenant_insert_default; -- tenant improvement -UPDATE t_ds_schedules t1 JOIN t_ds_process_definition t2 ON t1.process_definition_code = t2.code LEFT JOIN t_ds_tenant t3 ON t2.tenant_id = t3.id SET t1.tenant_code = COALESCE(t3.tenant_code, 'default'); -UPDATE `t_ds_process_instance` SET `tenant_code` = 'default' WHERE `tenant_code` IS NULL; +UPDATE t_ds_schedules t1 JOIN t_ds_workflow_definition t2 ON t1.workflow_definition_code = t2.code LEFT JOIN t_ds_tenant t3 ON t2.tenant_id = t3.id SET t1.tenant_code = COALESCE(t3.tenant_code, 'default'); +UPDATE `t_ds_workflow_instance` SET `tenant_code` = 'default' WHERE `tenant_code` IS NULL; -- data quality support choose database INSERT IGNORE INTO `t_ds_dq_rule_input_entry` diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java index 7096a3c549..66dcb4aeb1 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java @@ -189,12 +189,12 @@ public class AlertDao { } /** - * process time out alert + * workflow time out alert * - * @param workflowInstance processInstance + * @param workflowInstance workflowInstance * @param projectUser projectUser */ - public void sendProcessTimeoutAlert(WorkflowInstance workflowInstance, ProjectUser projectUser) { + public void 
sendWorkflowTimeoutAlert(WorkflowInstance workflowInstance, ProjectUser projectUser) { int alertGroupId = workflowInstance.getWarningGroupId(); Alert alert = new Alert(); List workflowAlertContentList = new ArrayList<>(1); @@ -202,23 +202,23 @@ public class AlertDao { .projectCode(projectUser.getProjectCode()) .projectName(projectUser.getProjectName()) .owner(projectUser.getUserName()) - .processId(workflowInstance.getId()) - .processDefinitionCode(workflowInstance.getProcessDefinitionCode()) - .processName(workflowInstance.getName()) - .processType(workflowInstance.getCommandType()) - .processState(workflowInstance.getState()) + .workflowInstanceId(workflowInstance.getId()) + .workflowDefinitionCode(workflowInstance.getWorkflowDefinitionCode()) + .workflowInstanceName(workflowInstance.getName()) + .commandType(workflowInstance.getCommandType()) + .workflowExecutionStatus(workflowInstance.getState()) .runTimes(workflowInstance.getRunTimes()) - .processStartTime(workflowInstance.getStartTime()) - .processHost(workflowInstance.getHost()) + .workflowStartTime(workflowInstance.getStartTime()) + .workflowHost(workflowInstance.getHost()) .event(AlertEvent.TIME_OUT) .warnLevel(AlertWarnLevel.MIDDLE) .build(); workflowAlertContentList.add(workflowAlertContent); String content = JSONUtils.toJsonString(workflowAlertContentList); - alert.setTitle("Process Timeout Warn"); + alert.setTitle("Workflow Timeout Warn"); alert.setProjectCode(projectUser.getProjectCode()); - alert.setProcessDefinitionCode(workflowInstance.getProcessDefinitionCode()); - alert.setProcessInstanceId(workflowInstance.getId()); + alert.setWorkflowDefinitionCode(workflowInstance.getWorkflowDefinitionCode()); + alert.setWorkflowInstanceId(workflowInstance.getId()); alert.setAlertType(AlertType.WORKFLOW_INSTANCE_TIMEOUT); saveTaskTimeoutAlert(alert, content, alertGroupId); } @@ -237,7 +237,7 @@ public class AlertDao { /** * task timeout warn * - * @param workflowInstance processInstanceId + * @param 
workflowInstance workflowInstance * @param taskInstance taskInstance * @param projectUser projectUser */ @@ -250,9 +250,9 @@ public class AlertDao { .projectCode(projectUser.getProjectCode()) .projectName(projectUser.getProjectName()) .owner(projectUser.getUserName()) - .processId(workflowInstance.getId()) - .processDefinitionCode(workflowInstance.getProcessDefinitionCode()) - .processName(workflowInstance.getName()) + .workflowInstanceId(workflowInstance.getId()) + .workflowDefinitionCode(workflowInstance.getWorkflowDefinitionCode()) + .workflowInstanceName(workflowInstance.getName()) .taskCode(taskInstance.getTaskCode()) .taskName(taskInstance.getName()) .taskType(taskInstance.getTaskType()) @@ -265,8 +265,8 @@ public class AlertDao { String content = JSONUtils.toJsonString(workflowAlertContentList); alert.setTitle("Task Timeout Warn"); alert.setProjectCode(projectUser.getProjectCode()); - alert.setProcessDefinitionCode(workflowInstance.getProcessDefinitionCode()); - alert.setProcessInstanceId(workflowInstance.getId()); + alert.setWorkflowDefinitionCode(workflowInstance.getWorkflowDefinitionCode()); + alert.setWorkflowInstanceId(workflowInstance.getId()); alert.setAlertType(AlertType.TASK_TIMEOUT); saveTaskTimeoutAlert(alert, content, workflowInstance.getWarningGroupId()); } @@ -279,9 +279,9 @@ public class AlertDao { QUERY_ALERT_THRESHOLD); } - public List listAlerts(int processInstanceId) { + public List listAlerts(int workflowInstanceId) { LambdaQueryWrapper wrapper = new LambdaQueryWrapper() - .eq(Alert::getProcessInstanceId, processInstanceId); + .eq(Alert::getWorkflowInstanceId, workflowInstanceId); return alertMapper.selectList(wrapper); } @@ -323,15 +323,15 @@ public class AlertDao { this.crashAlarmSuppression = crashAlarmSuppression; } - public void deleteByWorkflowInstanceId(Integer processInstanceId) { - if (processInstanceId == null) { + public void deleteByWorkflowInstanceId(Integer workflowInstanceId) { + if (workflowInstanceId == null) { return; } 
- List alertList = alertMapper.selectByWorkflowInstanceId(processInstanceId); + List alertList = alertMapper.selectByWorkflowInstanceId(workflowInstanceId); if (CollectionUtils.isEmpty(alertList)) { return; } - alertMapper.deleteByWorkflowInstanceId(processInstanceId); + alertMapper.deleteByWorkflowInstanceId(workflowInstanceId); List alertIds = alertList .stream() .map(Alert::getId) diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Alert.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Alert.java index 421f3f76b7..96ad2890ba 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Alert.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Alert.java @@ -42,84 +42,45 @@ import com.baomidou.mybatisplus.annotation.TableName; @TableName("t_ds_alert") public class Alert { - /** - * primary key - */ @TableId(value = "id", type = IdType.AUTO) private Integer id; - /** - * sign - */ + @TableField(value = "sign") private String sign; - /** - * title - */ + @TableField(value = "title") private String title; - /** - * content - */ @TableField(value = "content") private String content; - /** - * alert_status - */ @TableField(value = "alert_status") private AlertStatus alertStatus; - /** - * warning_type - */ @TableField(value = "warning_type") private WarningType warningType; - /** - * log - */ @TableField(value = "log") private String log; - /** - * alertgroup_id - */ @TableField("alertgroup_id") private Integer alertGroupId; - /** - * create_time - */ @TableField("create_time") private Date createTime; - /** - * update_time - */ + @TableField("update_time") private Date updateTime; - /** - * project_code - */ @TableField("project_code") private Long projectCode; - /** - * process_definition_code - */ - @TableField("process_definition_code") - private Long processDefinitionCode; + @TableField("workflow_definition_code") + private Long 
workflowDefinitionCode; - /** - * process_instance_id - */ - @TableField("process_instance_id") - private Integer processInstanceId; + @TableField("workflow_instance_id") + private Integer workflowInstanceId; - /** - * alert_type - */ @TableField("alert_type") private AlertType alertType; diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java index 72ab356f50..c48dccfe7a 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java @@ -48,14 +48,14 @@ public class Command { @TableField("command_type") private CommandType commandType; - @TableField("process_definition_code") - private long processDefinitionCode; + @TableField("workflow_definition_code") + private long workflowDefinitionCode; - @TableField("process_definition_version") - private int processDefinitionVersion; + @TableField("workflow_definition_version") + private int workflowDefinitionVersion; - @TableField("process_instance_id") - private int processInstanceId; + @TableField("workflow_instance_id") + private int workflowInstanceId; /** * command parameter, format json @@ -63,8 +63,8 @@ public class Command { @TableField("command_param") private String commandParam; - @TableField("process_instance_priority") - private Priority processInstancePriority; + @TableField("workflow_instance_priority") + private Priority workflowInstancePriority; @Deprecated @TableField("executor_id") @@ -131,7 +131,7 @@ public class Command { TaskDependType taskDependType, FailureStrategy failureStrategy, int executorId, - long processDefinitionCode, + long workflowDefinitionCode, String commandParam, WarningType warningType, int warningGroupId, @@ -140,12 +140,12 @@ public class Command { Long environmentCode, Priority workflowInstancePriority, int dryRun, - int 
processInstanceId, - int processDefinitionVersion, + int workflowInstanceId, + int workflowDefinitionVersion, int testFlag) { this.commandType = commandType; this.executorId = executorId; - this.processDefinitionCode = processDefinitionCode; + this.workflowDefinitionCode = workflowDefinitionCode; this.commandParam = commandParam; this.warningType = warningType; this.warningGroupId = warningGroupId; @@ -156,10 +156,10 @@ public class Command { this.updateTime = new Date(); this.workerGroup = workerGroup; this.environmentCode = environmentCode; - this.processInstancePriority = workflowInstancePriority; + this.workflowInstancePriority = workflowInstancePriority; this.dryRun = dryRun; - this.processInstanceId = processInstanceId; - this.processDefinitionVersion = processDefinitionVersion; + this.workflowInstanceId = workflowInstanceId; + this.workflowDefinitionVersion = workflowDefinitionVersion; this.testFlag = testFlag; } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DagData.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DagData.java index 6f187dc1d4..9d26663e96 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DagData.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DagData.java @@ -19,24 +19,15 @@ package org.apache.dolphinscheduler.dao.entity; import java.util.List; -/** - * DagData - */ +import lombok.Data; + +@Data public class DagData { - /** - * processDefinition - */ private WorkflowDefinition workflowDefinition; - /** - * processTaskRelationList - */ private List workflowTaskRelationList; - /** - * processTaskRelationList - */ private List taskDefinitionList; public DagData(WorkflowDefinition workflowDefinition, List workflowTaskRelationList, @@ -48,28 +39,4 @@ public class DagData { public DagData() { } - - public WorkflowDefinition getProcessDefinition() { - return workflowDefinition; - } - - public void 
setProcessDefinition(WorkflowDefinition workflowDefinition) { - this.workflowDefinition = workflowDefinition; - } - - public List getProcessTaskRelationList() { - return workflowTaskRelationList; - } - - public void setProcessTaskRelationList(List workflowTaskRelationList) { - this.workflowTaskRelationList = workflowTaskRelationList; - } - - public List getTaskDefinitionList() { - return taskDefinitionList; - } - - public void setTaskDefinitionList(List taskDefinitionList) { - this.taskDefinitionList = taskDefinitionList; - } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DependentLineageTask.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DependentLineageTask.java index 2f3ffdff56..319490ecad 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DependentLineageTask.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DependentLineageTask.java @@ -27,8 +27,8 @@ import lombok.NoArgsConstructor; public class DependentLineageTask { private long projectCode; - private long processDefinitionCode; - private String processDefinitionName; + private long workflowDefinitionCode; + private String workflowDefinitionName; private long taskDefinitionCode; private String taskDefinitionName; } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DependentWorkflowDefinition.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DependentWorkflowDefinition.java index 397841478a..01e48e4669 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DependentWorkflowDefinition.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DependentWorkflowDefinition.java @@ -30,35 +30,16 @@ import lombok.Data; @Data public class DependentWorkflowDefinition { - /** - * process definition code - */ - private long processDefinitionCode; + 
private long workflowDefinitionCode; - /** - * process definition version - **/ - private int processDefinitionVersion; + private int workflowDefinitionVersion; - /** - * task definition name - */ private long taskDefinitionCode; - /** - * task definition params - */ private String taskParams; - /** - * schedule worker group - */ private String workerGroup; - /** - * get dependent cycle - * @return CycleEnum - */ public CycleEnum getDependentCycle(long upstreamProcessDefinitionCode) { DependentParameters dependentParameters = this.getDependentParameters(); List dependentTaskModelList = dependentParameters.getDependence().getDependTaskList(); diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqExecuteResult.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqExecuteResult.java index 490a70a60a..b9065d21a5 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqExecuteResult.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqExecuteResult.java @@ -31,129 +31,78 @@ import com.baomidou.mybatisplus.annotation.TableName; @TableName("t_ds_dq_execute_result") public class DqExecuteResult implements Serializable { - /** - * primary key - */ @TableId(value = "id", type = IdType.AUTO) private Integer id; - /** - * process defined id - */ - @TableField(value = "process_definition_id") - private long processDefinitionId; - /** - * process definition name - */ + + @TableField(value = "workflow_definition_id") + private long workflowDefinitionId; + @TableField(exist = false) - private String processDefinitionName; - /** - * process definition code - */ + private String workflowDefinitionName; + @TableField(exist = false) private long processDefinitionCode; - /** - * process instance id - */ + @TableField(value = "process_instance_id") private long processInstanceId; - /** - * process instance name - */ + @TableField(exist = false) private String 
processInstanceName; - /** - * project code - */ + @TableField(exist = false) private long projectCode; - /** - * task instance id - */ + @TableField(value = "task_instance_id") private long taskInstanceId; - /** - * task name - */ + @TableField(exist = false) private String taskName; - /** - * rule type - */ + @TableField(value = "rule_type") private int ruleType; - /** - * rule name - */ + @TableField(value = "rule_name") private String ruleName; - /** - * statistics value - */ + @TableField(value = "statistics_value") private double statisticsValue; - /** - * comparison value - */ + @TableField(value = "comparison_value") private double comparisonValue; - /** - * comparison type - */ + @TableField(value = "comparison_type") private int comparisonType; - /** - * comparison type name - */ + @TableField(exist = false) private String comparisonTypeName; - /** - * check type - */ + @TableField(value = "check_type") private int checkType; - /** - * threshold - */ + @TableField(value = "threshold") private double threshold; - /** - * operator - */ + @TableField(value = "operator") private int operator; - /** - * failure strategy - */ + @TableField(value = "failure_strategy") private int failureStrategy; - /** - * user id - */ + @TableField(value = "user_id") private int userId; - /** - * user name - */ + @TableField(exist = false) private String userName; - /** - * state - */ + @TableField(value = "state") private int state; - /** - * error output path - */ + @TableField(value = "error_output_path") private String errorOutputPath; - /** - * create_time - */ + @TableField(value = "create_time") private Date createTime; - /** - * update_time - */ + @TableField(value = "update_time") private Date updateTime; } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqTaskStatisticsValue.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqTaskStatisticsValue.java index 9bef9ab9a0..a86f782fb0 100644 --- 
a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqTaskStatisticsValue.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqTaskStatisticsValue.java @@ -31,69 +31,42 @@ import com.baomidou.mybatisplus.annotation.TableName; @TableName("t_ds_dq_task_statistics_value") public class DqTaskStatisticsValue implements Serializable { - /** - * primary key - */ @TableId(value = "id", type = IdType.AUTO) private Integer id; - /** - * process defined id - */ - @TableField(value = "process_definition_id") - private long processDefinitionId; - /** - * process definition name - */ + + @TableField(value = "workflow_definition_id") + private long workflowDefinitionId; + @TableField(exist = false) - private String processDefinitionName; - /** - * task instance id - */ + private String workflowDefinitionName; + @TableField(value = "task_instance_id") private long taskInstanceId; - /** - * task name - */ + @TableField(exist = false) private String taskName; - /** - * rule id - */ + @TableField(value = "rule_id") private long ruleId; - /** - * rule type - */ + @TableField(exist = false) private int ruleType; - /** - * rule name - */ + @TableField(exist = false) private String ruleName; - /** - * statistics value - */ + @TableField(value = "statistics_value") private double statisticsValue; - /** - * comparison value - */ + @TableField(value = "statistics_name") private String statisticsName; - /** - * data time - */ + @TableField(value = "data_time") private Date dataTime; - /** - * create time - */ + @TableField(value = "create_time") private Date createTime; - /** - * update time - */ + @TableField(value = "update_time") private Date updateTime; } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ErrorCommand.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ErrorCommand.java index d52984b61f..12ba5da0b6 100644 --- 
a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ErrorCommand.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ErrorCommand.java @@ -36,104 +36,47 @@ import com.baomidou.mybatisplus.annotation.TableName; @TableName("t_ds_error_command") public class ErrorCommand { - /** - * id - */ @TableId(value = "id", type = IdType.INPUT) private Integer id; - /** - * command type - */ private CommandType commandType; - /** - * process definition code - */ - private long processDefinitionCode; + private long workflowDefinitionCode; - private int processDefinitionVersion; + private int workflowDefinitionVersion; - private int processInstanceId; + private int workflowInstanceId; - /** - * executor id - */ private int executorId; - /** - * command parameter, format json - */ private String commandParam; - /** - * task depend type - */ private TaskDependType taskDependType; - /** - * failure strategy - */ private FailureStrategy failureStrategy; - /** - * warning type - */ private WarningType warningType; - /** - * warning group id - */ private Integer warningGroupId; - /** - * schedule time - */ private Date scheduleTime; - /** - * start time - */ private Date startTime; - /** - * process instance priority - */ - private Priority processInstancePriority; + private Priority workflowInstancePriority; - /** - * update time - */ private Date updateTime; - /** - * 执行信息 - */ private String message; - /** - * worker group - */ private String workerGroup; - /** - * tenant code - */ private String tenantCode; - /** - * environment code - */ private Long environmentCode; - /** - * dry run flag - */ private int dryRun; - /** - * test flag - */ @TableField("test_flag") private int testFlag; @@ -144,9 +87,9 @@ public class ErrorCommand { this.id = command.getId(); this.commandType = command.getCommandType(); this.executorId = command.getExecutorId(); - this.processDefinitionCode = command.getProcessDefinitionCode(); - 
this.processDefinitionVersion = command.getProcessDefinitionVersion(); - this.processInstanceId = command.getProcessInstanceId(); + this.workflowDefinitionCode = command.getWorkflowDefinitionCode(); + this.workflowDefinitionVersion = command.getWorkflowDefinitionVersion(); + this.workflowInstanceId = command.getWorkflowInstanceId(); this.commandParam = command.getCommandParam(); this.taskDependType = command.getTaskDependType(); this.failureStrategy = command.getFailureStrategy(); @@ -155,7 +98,7 @@ public class ErrorCommand { this.scheduleTime = command.getScheduleTime(); this.startTime = command.getStartTime(); this.updateTime = command.getUpdateTime(); - this.processInstancePriority = command.getProcessInstancePriority(); + this.workflowInstancePriority = command.getWorkflowInstancePriority(); this.workerGroup = command.getWorkerGroup(); this.tenantCode = command.getTenantCode(); this.environmentCode = command.getEnvironmentCode(); diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Schedule.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Schedule.java index 1ca6be744b..a55c8d1ad5 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Schedule.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Schedule.java @@ -44,37 +44,19 @@ public class Schedule { @TableId(value = "id", type = IdType.AUTO) private Integer id; - /** - * process definition code - */ - private long processDefinitionCode; + private long workflowDefinitionCode; - /** - * process definition name - */ @TableField(exist = false) - private String processDefinitionName; + private String workflowDefinitionName; - /** - * project name - */ @TableField(exist = false) private String projectName; - /** - * schedule description - */ @TableField(exist = false) private String definitionDescription; - /** - * schedule start time - */ private Date startTime; - /** - * schedule end 
time - */ private Date endTime; /** @@ -83,75 +65,33 @@ public class Schedule { */ private String timezoneId; - /** - * crontab expression - */ private String crontab; - /** - * failure strategy - */ private FailureStrategy failureStrategy; - /** - * warning type - */ private WarningType warningType; - /** - * create time - */ private Date createTime; - /** - * update time - */ private Date updateTime; - /** - * created user id - */ private int userId; - /** - * created user name - */ @TableField(exist = false) private String userName; - /** - * release state - */ private ReleaseState releaseState; - /** - * warning group id - */ private int warningGroupId; - /** - * process instance priority - */ - private Priority processInstancePriority; + private Priority workflowInstancePriority; - /** - * worker group - */ private String workerGroup; - /** - * tenant code - */ private String tenantCode; - /** - * environment code - */ private Long environmentCode; - /** - * environment name - */ @TableField(exist = false) private String environmentName; } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskGroupQueue.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskGroupQueue.java index 9302d141e3..fdf9769575 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskGroupQueue.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskGroupQueue.java @@ -39,66 +39,43 @@ import com.baomidou.mybatisplus.annotation.TableName; @TableName("t_ds_task_group_queue") public class TaskGroupQueue implements Serializable { - /** - * key - */ @TableId(value = "id", type = IdType.AUTO) private Integer id; - /** - * taskInstanceId - */ + private int taskId; - /** - * TaskInstance name - */ + private String taskName; - /** - * project name - */ + @TableField(exist = false) private String projectName; - /** - * project code - */ + @TableField(exist = false) 
private String projectCode; - /** - * process instance name - */ + @TableField(exist = false) - private String processInstanceName; - /** - * taskGroup id - */ + private String workflowInstanceName; + private int groupId; - /** - * processInstance id - */ - private int processId; - /** - * the priority of task instance - */ + + private Integer workflowInstanceId; + private int priority; + /** * is force start * 0 NO ,1 YES */ private int forceStart; + /** * ready to get the queue by other task finish * 0 NO ,1 YES */ private int inQueue; - /** - * -1: waiting 1: running 2: finished - */ + private TaskGroupQueueStatus status; - /** - * create time - */ + private Date createTime; - /** - * update time - */ + private Date updateTime; } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java index f87667b2e9..ca0ee90c9c 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java @@ -33,32 +33,20 @@ import com.baomidou.mybatisplus.annotation.TableField; import com.baomidou.mybatisplus.annotation.TableId; import com.baomidou.mybatisplus.annotation.TableName; -/** - * task instance - */ @Data @TableName("t_ds_task_instance") public class TaskInstance implements Serializable { - /** - * id - */ @TableId(value = "id", type = IdType.AUTO) private Integer id; - /** - * task name - */ private String name; - /** - * task type - */ private String taskType; - private int processInstanceId; + private int workflowInstanceId; - private String processInstanceName; + private String workflowInstanceName; private Long projectCode; @@ -69,199 +57,87 @@ public class TaskInstance implements Serializable { @TableField(exist = false) private String processDefinitionName; - /** - * process instance name - */ 
@TableField(exist = false) private int taskGroupPriority; - /** - * state - */ private TaskExecutionStatus state; - /** - * task first submit time. - */ private Date firstSubmitTime; - /** - * task submit time - */ private Date submitTime; - /** - * task start time - */ private Date startTime; - /** - * task end time - */ private Date endTime; - /** - * task host - */ private String host; - /** - * task shell execute path and the resource down from hdfs - * default path: $base_run_dir/processInstanceId/taskInstanceId/retryTimes - */ private String executePath; - /** - * task log path - * default path: $base_run_dir/processInstanceId/taskInstanceId/retryTimes - */ private String logPath; - /** - * retry times - */ private int retryTimes; - /** - * alert flag - */ private Flag alertFlag; - /** - * process instance - */ @TableField(exist = false) private WorkflowInstance workflowInstance; - /** - * process definition - */ @TableField(exist = false) - private WorkflowDefinition processDefine; + private WorkflowDefinition workflowDefinition; - /** - * task definition - */ @TableField(exist = false) private TaskDefinition taskDefine; - /** - * process id - */ private int pid; - /** - * appLink - */ private String appLink; - /** - * flag - */ private Flag flag; - /** - * task is cache: yes/no - */ private Flag isCache; - /** - * cache_key - */ @TableField(updateStrategy = FieldStrategy.IGNORED) private String cacheKey; - /** - * duration - */ @TableField(exist = false) private String duration; - /** - * max retry times - */ private int maxRetryTimes; - /** - * task retry interval, unit: minute - */ private int retryInterval; - /** - * task intance priority - */ private Priority taskInstancePriority; - /** - * process intance priority - */ @TableField(exist = false) - private Priority processInstancePriority; + private Priority workflowInstancePriority; - /** - * workerGroup - */ private String workerGroup; - /** - * environment code - */ private Long environmentCode; - 
/** - * environment config - */ private String environmentConfig; - /** - * executor id - */ private int executorId; - /** - * varPool string - */ private String varPool; private String executorName; - /** - * delay execution time. - */ private int delayTime; - /** - * task params - */ private String taskParams; - /** - * dry run flag - */ private int dryRun; - /** - * task group id - */ + private int taskGroupId; - /** - * cpu quota - */ private Integer cpuQuota; - /** - * max memory - */ private Integer memoryMax; - /** - * task execute type - */ private TaskExecuteType taskExecuteType; - /** - * test flag - */ private int testFlag; public void init(String host, Date startTime, String executePath) { diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UserWithWorkflowDefinitionCode.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UserWithWorkflowDefinitionCode.java index c13d9f0fd4..ae030d833a 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UserWithWorkflowDefinitionCode.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UserWithWorkflowDefinitionCode.java @@ -27,9 +27,9 @@ import lombok.Data; @Builder public class UserWithWorkflowDefinitionCode { - private long processDefinitionCode; + private long workflowDefinitionCode; - private int processDefinitionVersion; + private int workflowDefinitionVersion; private Integer modifierId; diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowAlertContent.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowAlertContent.java index e1cb1d8507..ef86ef40ec 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowAlertContent.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowAlertContent.java @@ -45,56 +45,82 @@ public class 
WorkflowAlertContent implements Serializable { @JsonProperty("projectId") private Integer projectId; + @JsonProperty("projectCode") private Long projectCode; + @JsonProperty("projectName") private String projectName; + @JsonProperty("owner") private String owner; - @JsonProperty("processId") - private Integer processId; - @JsonProperty("processDefinitionCode") - private Long processDefinitionCode; - @JsonProperty("processName") - private String processName; - @JsonProperty("processType") - private CommandType processType; - @JsonProperty("processState") - private WorkflowExecutionStatus processState; + + @JsonProperty("workflowInstanceId") + private Integer workflowInstanceId; + + @JsonProperty("workflowDefinitionCode") + private Long workflowDefinitionCode; + + @JsonProperty("workflowInstanceName") + private String workflowInstanceName; + + @JsonProperty("commandType") + private CommandType commandType; + + @JsonProperty("workflowExecutionStatus") + private WorkflowExecutionStatus workflowExecutionStatus; + @JsonProperty("modifyBy") private String modifyBy; + @JsonProperty("recovery") private Flag recovery; + @JsonProperty("runTimes") private Integer runTimes; - @JsonProperty("processStartTime") - private Date processStartTime; - @JsonProperty("processEndTime") - private Date processEndTime; - @JsonProperty("processHost") - private String processHost; + + @JsonProperty("workflowStartTime") + private Date workflowStartTime; + + @JsonProperty("workflowEndTime") + private Date workflowEndTime; + + @JsonProperty("workflowHost") + private String workflowHost; + @JsonProperty("taskCode") private Long taskCode; + @JsonProperty("taskName") private String taskName; + @JsonProperty("event") private AlertEvent event; + @JsonProperty("warnLevel") private AlertWarnLevel warnLevel; + @JsonProperty("taskType") private String taskType; + @JsonProperty("retryTimes") private Integer retryTimes; + @JsonProperty("taskState") private TaskExecutionStatus taskState; + 
@JsonProperty("taskStartTime") private Date taskStartTime; + @JsonProperty("taskEndTime") private Date taskEndTime; + @JsonProperty("taskHost") private String taskHost; + @JsonProperty("taskPriority") private String taskPriority; + @JsonProperty("logPath") private String logPath; diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowDefinition.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowDefinition.java index af5b38e203..1d7886626b 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowDefinition.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowDefinition.java @@ -44,128 +44,62 @@ import com.google.common.base.Strings; @Builder @NoArgsConstructor @AllArgsConstructor -@TableName("t_ds_process_definition") +@TableName("t_ds_workflow_definition") public class WorkflowDefinition { - /** - * id - */ @TableId(value = "id", type = IdType.AUTO) private Integer id; - /** - * code - */ private long code; - /** - * name - */ private String name; - /** - * version - */ private int version; - /** - * release state : online/offline - */ private ReleaseState releaseState; - /** - * project code - */ private long projectCode; - /** - * description - */ private String description; - /** - * user defined parameters - */ private String globalParams; - /** - * user defined parameter list - */ @TableField(exist = false) private List globalParamList; - /** - * user define parameter map - */ @TableField(exist = false) private Map globalParamMap; - /** - * create time - */ private Date createTime; - /** - * update time - */ private Date updateTime; - /** - * process is valid: yes/no - */ private Flag flag; - /** - * process user id - */ private int userId; - /** - * create user name - */ @TableField(exist = false) private String userName; - /** - * project name - */ @TableField(exist = false) private String projectName; - 
/** - * locations array for web - */ private String locations; - /** - * schedule release state : online/offline - */ @TableField(exist = false) private ReleaseState scheduleReleaseState; @TableField(exist = false) private Schedule schedule; - /** - * process warning time out. unit: minute - */ private int timeout; - /** - * modify user name - */ @TableField(exist = false) private String modifyBy; - /** - * warningGroupId - */ @TableField(exist = false) private Integer warningGroupId; - /** - * execution type - */ private WorkflowExecutionTypeEnum executionType; public WorkflowDefinition(long projectCode, diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowDefinitionLog.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowDefinitionLog.java index 6dae0a0638..3da9252508 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowDefinitionLog.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowDefinitionLog.java @@ -24,17 +24,11 @@ import lombok.Data; import com.baomidou.mybatisplus.annotation.TableName; @Data -@TableName("t_ds_process_definition_log") +@TableName("t_ds_workflow_definition_log") public class WorkflowDefinitionLog extends WorkflowDefinition { - /** - * operator - */ private int operator; - /** - * operateTime - */ private Date operateTime; public WorkflowDefinitionLog() { diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowInstance.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowInstance.java index a688b700ff..e20a506311 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowInstance.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowInstance.java @@ -45,25 +45,19 @@ import com.baomidou.mybatisplus.annotation.TableId; import 
com.baomidou.mybatisplus.annotation.TableName; import com.google.common.base.Strings; -/** - * process instance - */ @NoArgsConstructor @Data @Builder @AllArgsConstructor -@TableName("t_ds_process_instance") +@TableName("t_ds_workflow_instance") public class WorkflowInstance { - /** - * id - */ @TableId(value = "id", type = IdType.AUTO) private Integer id; - private Long processDefinitionCode; + private Long workflowDefinitionCode; - private int processDefinitionVersion; + private int workflowDefinitionVersion; private Long projectCode; @@ -71,15 +65,9 @@ public class WorkflowInstance { private String stateHistory; - /** - * state desc list from state history - */ @TableField(exist = false) private List stateDescList; - /** - * recovery flag for failover - */ private Flag recovery; private Date startTime; @@ -120,9 +108,6 @@ public class WorkflowInstance { */ private String globalParams; - /** - * dagData - */ @TableField(exist = false) private DagData dagData; @@ -132,16 +117,10 @@ public class WorkflowInstance { private String tenantCode; - /** - * queue - */ @TableField(exist = false) private String queue; - /** - * process is sub process - */ - private Flag isSubProcess; + private Flag isSubWorkflow; /** * task locations for web @@ -149,68 +128,36 @@ public class WorkflowInstance { @TableField(exist = false) private String locations; - /** - * history command - */ private String historyCmd; - /** - * depend processes schedule time - */ @TableField(exist = false) private String dependenceScheduleTimes; /** - * process duration + * workflow execution duration * * @return */ @TableField(exist = false) private String duration; - /** - * process instance priority - */ - private Priority processInstancePriority; + private Priority workflowInstancePriority; - /** - * worker group - */ private String workerGroup; - /** - * environment code - */ private Long environmentCode; - /** - * process timeout for warning - */ private int timeout; - /** - * varPool string - */ 
private String varPool; - /** - * serial queue next processInstanceId - */ - @Deprecated - private int nextProcessInstanceId; - /** - * dry run flag - */ + @Deprecated + private int nextWorkflowInstanceId; + private int dryRun; - /** - * re-start time - */ private Date restartTime; - /** - * test flag - */ private int testFlag; /** diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowInstanceRelation.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowInstanceRelation.java index eb44833f7b..e189f48ba8 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowInstanceRelation.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowInstanceRelation.java @@ -17,6 +17,8 @@ package org.apache.dolphinscheduler.dao.entity; +import java.util.Objects; + import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; @@ -30,28 +32,48 @@ import com.baomidou.mybatisplus.annotation.TableName; @Builder @NoArgsConstructor @AllArgsConstructor -@TableName("t_ds_relation_process_instance") +@TableName("t_ds_relation_workflow_instance") public class WorkflowInstanceRelation { - /** - * id - */ @TableId(value = "id", type = IdType.AUTO) private Integer id; - /** - * parent process instance id - */ - private int parentProcessInstanceId; + private int parentWorkflowInstanceId; - /** - * parent task instance id - */ private int parentTaskInstanceId; - /** - * process instance id - */ - private int processInstanceId; + private int workflowInstanceId; + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + WorkflowInstanceRelation that = (WorkflowInstanceRelation) o; + + if (!Objects.equals(id, that.id)) { + return false; + } + if (parentWorkflowInstanceId != that.parentWorkflowInstanceId) { + return false; + } + if 
(parentTaskInstanceId != that.parentTaskInstanceId) { + return false; + } + return workflowInstanceId == that.workflowInstanceId; + } + + @Override + public int hashCode() { + int result = id; + result = 31 * result + parentWorkflowInstanceId; + result = 31 * result + parentTaskInstanceId; + result = 31 * result + workflowInstanceId; + return result; + } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowTaskLineage.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowTaskLineage.java index ceb031f911..c18ac029d8 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowTaskLineage.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowTaskLineage.java @@ -30,7 +30,7 @@ import com.baomidou.mybatisplus.annotation.TableName; @Data @NoArgsConstructor @AllArgsConstructor -@TableName("t_ds_process_task_lineage") +@TableName("t_ds_workflow_task_lineage") public class WorkflowTaskLineage { @TableId(value = "id", type = IdType.AUTO) diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowTaskRelation.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowTaskRelation.java index c5d959610a..2164be31d4 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowTaskRelation.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowTaskRelation.java @@ -37,83 +37,44 @@ import com.fasterxml.jackson.databind.annotation.JsonSerialize; @Builder @AllArgsConstructor @NoArgsConstructor -@TableName("t_ds_process_task_relation") +@TableName("t_ds_workflow_task_relation") public class WorkflowTaskRelation { - /** - * id - */ @TableId(value = "id", type = IdType.AUTO) private Integer id; - /** - * name - */ private String name; - /** - * process version - */ - private int 
processDefinitionVersion; + private int workflowDefinitionVersion; - /** - * project code - */ private long projectCode; - /** - * process code - */ - private long processDefinitionCode; + private long workflowDefinitionCode; - /** - * pre task code - */ private long preTaskCode; - /** - * pre node version - */ private int preTaskVersion; - /** - * post task code - */ private long postTaskCode; - /** - * post node version - */ private int postTaskVersion; - /** - * condition type - */ @Deprecated private ConditionType conditionType; - /** - * condition parameters - */ @JsonDeserialize(using = JSONUtils.JsonDataDeserializer.class) @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) @Deprecated private String conditionParams; - /** - * create time - */ private Date createTime; - /** - * update time - */ private Date updateTime; public WorkflowTaskRelation(String name, - int processDefinitionVersion, + int workflowDefinitionVersion, long projectCode, - long processDefinitionCode, + long workflowDefinitionCode, long preTaskCode, int preTaskVersion, long postTaskCode, @@ -121,9 +82,9 @@ public class WorkflowTaskRelation { ConditionType conditionType, String conditionParams) { this.name = name; - this.processDefinitionVersion = processDefinitionVersion; + this.workflowDefinitionVersion = workflowDefinitionVersion; this.projectCode = projectCode; - this.processDefinitionCode = processDefinitionCode; + this.workflowDefinitionCode = workflowDefinitionCode; this.preTaskCode = preTaskCode; this.preTaskVersion = preTaskVersion; this.postTaskCode = postTaskCode; @@ -138,9 +99,9 @@ public class WorkflowTaskRelation { public WorkflowTaskRelation(WorkflowTaskRelationLog processTaskRelationLog) { this.name = processTaskRelationLog.getName(); - this.processDefinitionVersion = processTaskRelationLog.getProcessDefinitionVersion(); + this.workflowDefinitionVersion = processTaskRelationLog.getWorkflowDefinitionVersion(); this.projectCode = processTaskRelationLog.getProjectCode(); 
- this.processDefinitionCode = processTaskRelationLog.getProcessDefinitionCode(); + this.workflowDefinitionCode = processTaskRelationLog.getWorkflowDefinitionCode(); this.preTaskCode = processTaskRelationLog.getPreTaskCode(); this.preTaskVersion = processTaskRelationLog.getPreTaskVersion(); this.postTaskCode = processTaskRelationLog.getPostTaskCode(); diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowTaskRelationLog.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowTaskRelationLog.java index 5c434ff071..a6aadcc631 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowTaskRelationLog.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkflowTaskRelationLog.java @@ -19,22 +19,16 @@ package org.apache.dolphinscheduler.dao.entity; import java.util.Date; +import lombok.Data; + import com.baomidou.mybatisplus.annotation.TableName; -/** - * process task relation log - */ -@TableName("t_ds_process_task_relation_log") +@Data +@TableName("t_ds_workflow_task_relation_log") public class WorkflowTaskRelationLog extends WorkflowTaskRelation { - /** - * operator user id - */ private int operator; - /** - * operate time - */ private Date operateTime; public WorkflowTaskRelationLog() { @@ -44,8 +38,8 @@ public class WorkflowTaskRelationLog extends WorkflowTaskRelation { public WorkflowTaskRelationLog(WorkflowTaskRelation workflowTaskRelation) { super(); this.setName(workflowTaskRelation.getName()); - this.setProcessDefinitionCode(workflowTaskRelation.getProcessDefinitionCode()); - this.setProcessDefinitionVersion(workflowTaskRelation.getProcessDefinitionVersion()); + this.setWorkflowDefinitionCode(workflowTaskRelation.getWorkflowDefinitionCode()); + this.setWorkflowDefinitionVersion(workflowTaskRelation.getWorkflowDefinitionVersion()); this.setProjectCode(workflowTaskRelation.getProjectCode()); 
this.setPreTaskCode(workflowTaskRelation.getPreTaskCode()); this.setPreTaskVersion(workflowTaskRelation.getPreTaskVersion()); @@ -57,22 +51,6 @@ public class WorkflowTaskRelationLog extends WorkflowTaskRelation { this.setUpdateTime(workflowTaskRelation.getUpdateTime()); } - public int getOperator() { - return operator; - } - - public void setOperator(int operator) { - this.operator = operator; - } - - public Date getOperateTime() { - return operateTime; - } - - public void setOperateTime(Date operateTime) { - this.operateTime = operateTime; - } - @Override public boolean equals(Object o) { return super.equals(o); diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.java index 772593d21e..b103c4cfc8 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.java @@ -116,11 +116,11 @@ public interface ProjectMapper extends BaseMapper { List queryProjectCreatedAndAuthorizedByUserId(@Param("userId") int userId); /** - * query project name and user name by processInstanceId. - * @param processInstanceId processInstanceId + * query project name and user name by workflowInstanceId. 
+ * @param workflowInstanceId workflowInstanceId * @return projectName and userName */ - ProjectUser queryProjectWithUserByProcessInstanceId(@Param("processInstanceId") int processInstanceId); + ProjectUser queryProjectWithUserByWorkflowInstanceId(@Param("workflowInstanceId") int workflowInstanceId); /** * query all project diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/RelationSubWorkflowMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/RelationSubWorkflowMapper.java index 3c8fb2084b..78fcdd89ea 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/RelationSubWorkflowMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/RelationSubWorkflowMapper.java @@ -25,15 +25,12 @@ import java.util.List; import com.baomidou.mybatisplus.core.mapper.BaseMapper; -/** - * process instance map mapper interface - */ public interface RelationSubWorkflowMapper extends BaseMapper { int batchInsert(@Param("relationSubWorkflows") List relationSubWorkflows); - List queryAllSubProcessInstance(@Param("parentWorkflowInstanceId") Long parentWorkflowInstanceId, - @Param("parentTaskCode") Long parentTaskCode); + List queryAllSubWorkflowInstance(@Param("parentWorkflowInstanceId") Long parentWorkflowInstanceId, + @Param("parentTaskCode") Long parentTaskCode); RelationSubWorkflow queryParentWorkflowInstance(@Param("subWorkflowInstanceId") Long subWorkflowInstanceId); diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.java index 383fe1152f..82afa56553 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.java @@ -38,36 +38,36 @@ public interface ScheduleMapper 
extends BaseMapper { /** * query schedule list by process definition code * - * @param processDefinitionCode processDefinitionCode + * @param workflowDefinitionCode workflowDefinitionCode * @return schedule list */ - List queryReleaseSchedulerListByProcessDefinitionCode(@Param("processDefinitionCode") long processDefinitionCode); + List queryReleaseSchedulerListByWorkflowDefinitionCode(@Param("workflowDefinitionCode") long workflowDefinitionCode); /** * scheduler page * * @param page page - * @param processDefinitionCode processDefinitionCode + * @param workflowDefinitionCode workflowDefinitionCode * @param searchVal searchVal * @return scheduler IPage */ - IPage queryByProcessDefineCodePaging(IPage page, - @Param("processDefinitionCode") long processDefinitionCode, - @Param("searchVal") String searchVal); + IPage queryByWorkflowDefinitionCodePaging(IPage page, + @Param("workflowDefinitionCode") long workflowDefinitionCode, + @Param("searchVal") String searchVal); /** * scheduler page * * @param page page * @param projectCode projectCode - * @param processDefinitionCode processDefinitionCode + * @param workflowDefinitionCode workflowDefinitionCode * @param searchVal searchVal * @return scheduler IPage */ - IPage queryByProjectAndProcessDefineCodePaging(IPage page, - @Param("projectCode") long projectCode, - @Param("processDefinitionCode") long processDefinitionCode, - @Param("searchVal") String searchVal); + IPage queryByProjectAndWorkflowDefinitionCodePaging(IPage page, + @Param("projectCode") long projectCode, + @Param("workflowDefinitionCode") long workflowDefinitionCode, + @Param("searchVal") String searchVal); /** * Filter schedule @@ -90,26 +90,26 @@ public interface ScheduleMapper extends BaseMapper { /** * query schedule list by process definition codes * - * @param processDefineCodes processDefineCodes + * @param workflowDefinitionCodes workflowDefinitionCodes * @return schedule list */ - List selectAllByProcessDefineArray(@Param("processDefineCodes") 
long[] processDefineCodes); + List selectAllByWorkflowDefinitionArray(@Param("workflowDefinitionCodes") long[] workflowDefinitionCodes); /** * query schedule list by process definition code * - * @param processDefinitionCode processDefinitionCode + * @param workflowDefinitionCode workflowDefinitionCode * @return schedule */ - Schedule queryByProcessDefinitionCode(@Param("processDefinitionCode") long processDefinitionCode); + Schedule queryByWorkflowDefinitionCode(@Param("workflowDefinitionCode") long workflowDefinitionCode); /** * query worker group list by process definition code * - * @param processDefinitionCodeList processDefinitionCodeList + * @param workflowDefinitionCodeList workflowDefinitionCodeList * @return schedule */ - List querySchedulesByProcessDefinitionCodes(@Param("processDefinitionCodeList") List processDefinitionCodeList); + List querySchedulesByWorkflowDefinitionCodes(@Param("workflowDefinitionCodeList") List workflowDefinitionCodeList); /** * query schedule by tenant diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionMapper.java index 3f05b11f6d..4d8abd1501 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionMapper.java @@ -39,12 +39,12 @@ public interface TaskDefinitionMapper extends BaseMapper { * query task definition by name * * @param projectCode projectCode - * @param processCode processCode + * @param workflowDefinitionCode workflowDefinitionCode * @param name name * @return task definition */ TaskDefinition queryByName(@Param("projectCode") long projectCode, - @Param("processCode") long processCode, + @Param("workflowDefinitionCode") long workflowDefinitionCode, @Param("name") String name); /** @@ -108,7 +108,7 @@ public interface 
TaskDefinitionMapper extends BaseMapper { * Filter task definition * * @param page page - * @param taskDefinition process definition object + * @param taskDefinition task definition * @return task definition IPage */ IPage filterTaskDefinition(IPage page, diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskGroupQueueMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskGroupQueueMapper.java index 8b8241a2ad..c4e6495e90 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskGroupQueueMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskGroupQueueMapper.java @@ -95,7 +95,7 @@ public interface TaskGroupQueueMapper extends BaseMapper { IPage queryTaskGroupQueueByTaskGroupIdPaging(Page page, @Param("taskName") String taskName, - @Param("processName") String processName, + @Param("workflowName") String workflowName, @Param("status") Integer status, @Param("groupId") int groupId, @Param("projects") List projects); diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.java index 3c9a982742..e2979c14e0 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.java @@ -39,19 +39,19 @@ import com.baomidou.mybatisplus.core.metadata.IPage; */ public interface TaskInstanceMapper extends BaseMapper { - List findValidTaskListByProcessId(@Param("processInstanceId") Integer processInstanceId, - @Param("flag") Flag flag, - @Param("testFlag") int testFlag); + List findValidTaskListByWorkflowInstanceId(@Param("workflowInstanceId") Integer workflowInstanceId, + @Param("flag") Flag flag, + @Param("testFlag") int testFlag); - 
TaskInstance queryByInstanceIdAndCode(@Param("processInstanceId") int processInstanceId, + TaskInstance queryByInstanceIdAndCode(@Param("workflowInstanceId") int workflowInstanceId, @Param("taskCode") Long taskCode); TaskInstance queryByCacheKey(@Param("cacheKey") String cacheKey); Boolean clearCacheByCacheKey(@Param("cacheKey") String cacheKey); - List queryByProcessInstanceIdsAndTaskCodes(@Param("processInstanceIds") List processInstanceIds, - @Param("taskCodes") List taskCodes); + List queryByWorkflowInstanceIdsAndTaskCodes(@Param("workflowInstanceIds") List workflowInstanceIds, + @Param("taskCodes") List taskCodes); /** * Statistics task instance group by given project codes list by start time @@ -104,8 +104,8 @@ public interface TaskInstanceMapper extends BaseMapper { IPage queryTaskInstanceListPaging(IPage page, @Param("projectCode") Long projectCode, - @Param("processInstanceId") Integer processInstanceId, - @Param("processInstanceName") String processInstanceName, + @Param("workflowInstanceId") Integer workflowInstanceId, + @Param("workflowInstanceName") String workflowInstanceName, @Param("searchVal") String searchVal, @Param("taskName") String taskName, @Param("taskCode") Long taskCode, @@ -118,7 +118,7 @@ public interface TaskInstanceMapper extends BaseMapper { IPage queryStreamTaskInstanceListPaging(IPage page, @Param("projectCode") Long projectCode, - @Param("processDefinitionName") String processDefinitionName, + @Param("workflowDefinitionName") String workflowDefinitionName, @Param("searchVal") String searchVal, @Param("taskName") String taskName, @Param("taskCode") Long taskCode, @@ -140,11 +140,11 @@ public interface TaskInstanceMapper extends BaseMapper { * @param testFlag testFlag * @return task instance list */ - List findLastTaskInstances(@Param("processInstanceId") Integer processInstanceId, + List findLastTaskInstances(@Param("workflowInstanceId") Integer workflowInstanceId, @Param("taskCodes") Set taskCodes, @Param("testFlag") int testFlag); 
- TaskInstance findLastTaskInstance(@Param("processInstanceId") Integer processInstanceId, + TaskInstance findLastTaskInstance(@Param("workflowInstanceId") Integer workflowInstanceId, @Param("taskCode") long depTaskCode, @Param("testFlag") int testFlag); diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UserMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UserMapper.java index 01a4cb2edd..9d0e60aaa7 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UserMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UserMapper.java @@ -174,9 +174,9 @@ public interface UserMapper extends BaseMapper { /** * query User and task flow binding relationship * - * @param processDefinitionCodes processDefinitionCodes + * @param workflowDefinitionCodes workflowDefinitionCodes * @return user with process definition code */ - List queryUserWithProcessDefinitionCode(@Param("processDefinitionCodes") List processDefinitionCodes); + List queryUserWithWorkflowDefinitionCode(@Param("workflowDefinitionCodes") List workflowDefinitionCodes); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkflowDefinitionLogMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkflowDefinitionLogMapper.java index 64afb292be..f2decf9849 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkflowDefinitionLogMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkflowDefinitionLogMapper.java @@ -82,9 +82,9 @@ public interface WorkflowDefinitionLogMapper extends BaseMapper queryProcessDefinitionVersionsPaging(Page page, - @Param("code") long code, - @Param("projectCode") long projectCode); + IPage queryWorkflowDefinitionVersionsPaging(Page page, + @Param("code") long code, + @Param("projectCode") long projectCode); 
/** * delete the certain workflow definition version by workflow definition id and version number @@ -93,7 +93,7 @@ public interface WorkflowDefinitionLogMapper extends BaseMapper List queryByCodes(@Param("codes") Collection codes); /** - * verify workflow definition by name + * verify workflow definition by workflowDefinitionName * * @param projectCode projectCode - * @param name name + * @param workflowDefinitionName workflowDefinitionName * @return workflow definition */ WorkflowDefinition verifyByDefineName(@Param("projectCode") long projectCode, - @Param("processDefinitionName") String name); + @Param("workflowDefinitionName") String workflowDefinitionName); /** - * query workflow definition by name + * query workflow definition by workflowDefinitionName * * @param projectCode projectCode - * @param name name + * @param workflowDefinitionName workflowDefinitionName * @return workflow definition */ WorkflowDefinition queryByDefineName(@Param("projectCode") long projectCode, - @Param("processDefinitionName") String name); + @Param("workflowDefinitionName") String workflowDefinitionName); /** * query workflow definition by id * - * @param processDefineId processDefineId + * @param workflowDefinitionId workflowDefinitionId * @return workflow definition */ - WorkflowDefinition queryByDefineId(@Param("processDefineId") int processDefineId); + WorkflowDefinition queryByDefineId(@Param("workflowDefinitionId") int workflowDefinitionId); /** * workflow definition page @@ -113,8 +113,8 @@ public interface WorkflowDefinitionMapper extends BaseMapper * @param workflowDefinition workflow definition object * @return workflow definition IPage */ - IPage filterProcessDefinition(IPage page, - @Param("pd") WorkflowDefinition workflowDefinition); + IPage filterWorkflowDefinition(IPage page, + @Param("pd") WorkflowDefinition workflowDefinition); /** * query all workflow definition list @@ -130,8 +130,8 @@ public interface WorkflowDefinitionMapper extends BaseMapper * @param 
projectCode projectCode * @return workflow definition list */ - List queryDefinitionListByProjectCodeAndProcessDefinitionCodes(@Param("projectCode") long projectCode, - @Param("codes") Collection codes); + List queryDefinitionListByProjectCodeAndWorkflowDefinitionCodes(@Param("projectCode") long projectCode, + @Param("codes") Collection codes); /** * query workflow definition by ids @@ -174,5 +174,5 @@ public interface WorkflowDefinitionMapper extends BaseMapper List queryDefinitionCodeListByProjectCodes(@Param("projectCodes") List projectCodes); - List queryProjectProcessDefinitionCountByProjectCodes(@Param("projectCodes") List projectCodes); + List queryProjectWorkflowDefinitionCountByProjectCodes(@Param("projectCodes") List projectCodes); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkflowInstanceMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkflowInstanceMapper.java index 0f0bd14a22..65de1b7744 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkflowInstanceMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkflowInstanceMapper.java @@ -41,10 +41,10 @@ public interface WorkflowInstanceMapper extends BaseMapper { /** * query workflow instance detail info by id * - * @param processId processId + * @param id id * @return workflow instance */ - WorkflowInstance queryDetailById(@Param("processId") int processId); + WorkflowInstance queryDetailById(@Param("id") int id); /** * query workflow instance by host and stateArray @@ -62,7 +62,7 @@ public interface WorkflowInstanceMapper extends BaseMapper { * @param stateArray * @return */ - List queryNeedFailoverProcessInstanceHost(@Param("states") int[] stateArray); + List queryNeedFailoverWorkflowInstanceHost(@Param("states") int[] stateArray); /** * query workflow instance by tenantCode and stateArray @@ -101,7 +101,7 @@ public interface 
WorkflowInstanceMapper extends BaseMapper { * * @param page page * @param projectCode projectCode - * @param processDefinitionCode processDefinitionCode + * @param workflowDefinitionCode workflowDefinitionCode * @param searchVal searchVal * @param executorName executorName * @param statusArray statusArray @@ -110,15 +110,15 @@ public interface WorkflowInstanceMapper extends BaseMapper { * @param endTime endTime * @return workflow instance page */ - IPage queryProcessInstanceListPaging(Page page, - @Param("projectCode") Long projectCode, - @Param("processDefinitionCode") Long processDefinitionCode, - @Param("searchVal") String searchVal, - @Param("executorName") String executorName, - @Param("states") int[] statusArray, - @Param("host") String host, - @Param("startTime") Date startTime, - @Param("endTime") Date endTime); + IPage queryWorkflowInstanceListPaging(Page page, + @Param("projectCode") Long projectCode, + @Param("workflowDefinitionCode") Long workflowDefinitionCode, + @Param("searchVal") String searchVal, + @Param("executorName") String executorName, + @Param("states") int[] statusArray, + @Param("host") String host, + @Param("startTime") Date startTime, + @Param("endTime") Date endTime); /** * set failover by host and state array @@ -145,8 +145,8 @@ public interface WorkflowInstanceMapper extends BaseMapper { * @param destTenantCode destTenantCode * @return update result */ - int updateProcessInstanceByTenantCode(@Param("originTenantCode") String originTenantCode, - @Param("destTenantCode") String destTenantCode); + int updateWorkflowInstanceByTenantCode(@Param("originTenantCode") String originTenantCode, + @Param("destTenantCode") String destTenantCode); /** * update workflow instance by worker groupId @@ -155,8 +155,8 @@ public interface WorkflowInstanceMapper extends BaseMapper { * @param destWorkerGroupName destWorkerGroupName * @return update result */ - int updateProcessInstanceByWorkerGroupName(@Param("originWorkerGroupName") String 
originWorkerGroupName, - @Param("destWorkerGroupName") String destWorkerGroupName); + int updateWorkflowInstanceByWorkerGroupName(@Param("originWorkerGroupName") String originWorkerGroupName, + @Param("destWorkerGroupName") String destWorkerGroupName); /** * Statistics workflow instance state by given project codes list @@ -174,62 +174,62 @@ public interface WorkflowInstanceMapper extends BaseMapper { @Param("projectCodes") Collection projectCodes); /** - * query workflow instance by processDefinitionCode + * query workflow instance by workflowDefinitionCode * - * @param processDefinitionCode processDefinitionCode + * @param workflowDefinitionCode workflowDefinitionCode * @param size size * @return workflow instance list */ - List queryByProcessDefineCode(@Param("processDefinitionCode") Long processDefinitionCode, - @Param("size") int size); + List queryByWorkflowDefinitionCode(@Param("workflowDefinitionCode") Long workflowDefinitionCode, + @Param("size") int size); /** * query last scheduler workflow instance * - * @param processDefinitionCode definitionCode + * @param workflowDefinitionCode definitionCode * @param taskDefinitionCode definitionCode * @param startTime startTime * @param endTime endTime * @param testFlag testFlag * @return workflow instance */ - WorkflowInstance queryLastSchedulerProcess(@Param("processDefinitionCode") Long processDefinitionCode, - @Param("taskDefinitionCode") Long taskDefinitionCode, - @Param("startTime") Date startTime, - @Param("endTime") Date endTime, - @Param("testFlag") int testFlag); + WorkflowInstance queryLastSchedulerWorkflow(@Param("workflowDefinitionCode") Long workflowDefinitionCode, + @Param("taskDefinitionCode") Long taskDefinitionCode, + @Param("startTime") Date startTime, + @Param("endTime") Date endTime, + @Param("testFlag") int testFlag); /** * query last manual workflow instance * - * @param definitionCode definitionCode + * @param workflowDefinitionCode workflowDefinitionCode * @param taskCode taskCode * @param 
startTime startTime * @param endTime endTime * @param testFlag testFlag * @return workflow instance */ - WorkflowInstance queryLastManualProcess(@Param("processDefinitionCode") Long definitionCode, - @Param("taskCode") Long taskCode, - @Param("startTime") Date startTime, - @Param("endTime") Date endTime, - @Param("testFlag") int testFlag); + WorkflowInstance queryLastManualWorkflow(@Param("workflowDefinitionCode") Long workflowDefinitionCode, + @Param("taskCode") Long taskCode, + @Param("startTime") Date startTime, + @Param("endTime") Date endTime, + @Param("testFlag") int testFlag); /** * query first schedule workflow instance * - * @param definitionCode definitionCode + * @param workflowDefinitionCode workflowDefinitionCode * @return workflow instance */ - WorkflowInstance queryFirstScheduleProcessInstance(@Param("processDefinitionCode") Long definitionCode); + WorkflowInstance queryFirstScheduleWorkflowInstance(@Param("workflowDefinitionCode") Long workflowDefinitionCode); /** * query first manual workflow instance * - * @param definitionCode definitionCode + * @param workflowDefinitionCode workflowDefinitionCode * @return workflow instance */ - WorkflowInstance queryFirstStartProcessInstance(@Param("processDefinitionCode") Long definitionCode); + WorkflowInstance queryFirstStartWorkflowInstance(@Param("workflowDefinitionCode") Long workflowDefinitionCode); /** * query top n workflow instance order by running duration @@ -242,60 +242,46 @@ public interface WorkflowInstanceMapper extends BaseMapper { * @return ProcessInstance list */ - List queryTopNProcessInstance(@Param("size") int size, - @Param("startTime") Date startTime, - @Param("endTime") Date endTime, - @Param("status") WorkflowExecutionStatus status, - @Param("projectCode") long projectCode); + List queryTopNWorkflowInstance(@Param("size") int size, + @Param("startTime") Date startTime, + @Param("endTime") Date endTime, + @Param("status") WorkflowExecutionStatus status, + @Param("projectCode") long 
projectCode); /** - * query workflow instance by processDefinitionCode and stateArray + * query workflow instance by workflowDefinitionCode and stateArray * - * @param processDefinitionCode processDefinitionCode + * @param workflowDefinitionCode workflowDefinitionCode * @param states states array * @return workflow instance list */ - List queryByProcessDefineCodeAndStatus(@Param("processDefinitionCode") Long processDefinitionCode, - @Param("states") int[] states); + List queryByWorkflowDefinitionCodeAndStatus(@Param("workflowDefinitionCode") Long workflowDefinitionCode, + @Param("states") int[] states); List queryByWorkflowCodeVersionStatus(@Param("workflowDefinitionCode") long workflowDefinitionCode, @Param("workflowDefinitionVersion") int workflowDefinitionVersion, @Param("states") int[] states); - List queryByProcessDefineCodeAndProcessDefinitionVersionAndStatusAndNextId(@Param("processDefinitionCode") Long processDefinitionCode, - @Param("processDefinitionVersion") int processDefinitionVersion, - @Param("states") int[] states, - @Param("id") Integer id); - - int updateGlobalParamsById(@Param("globalParams") String globalParams, - @Param("id") int id); - - boolean updateNextProcessIdById(@Param("thisInstanceId") int thisInstanceId, - @Param("runningInstanceId") int runningInstanceId); - - WorkflowInstance loadNextProcess4Serial(@Param("processDefinitionCode") Long processDefinitionCode, - @Param("state") int state, @Param("id") int id); - /** * Filter workflow instance * * @param page page - * @param processDefinitionCode processDefinitionCode + * @param workflowDefinitionCode workflowDefinitionCode * @param name name * @param host host * @param startTime startTime * @param endTime endTime * @return workflow instance IPage */ - IPage queryProcessInstanceListV2Paging(Page page, - @Param("projectCode") Long projectCode, - @Param("processDefinitionCode") Long processDefinitionCode, - @Param("name") String name, - @Param("startTime") String startTime, - 
@Param("endTime") String endTime, - @Param("state") Integer state, - @Param("host") String host); + IPage queryWorkflowInstanceListV2Paging(Page page, + @Param("projectCode") Long projectCode, + @Param("workflowDefinitionCode") Long workflowDefinitionCode, + @Param("name") String name, + @Param("startTime") String startTime, + @Param("endTime") String endTime, + @Param("state") Integer state, + @Param("host") String host); /** * Statistics workflow instance state v2 diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkflowInstanceRelationMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkflowInstanceRelationMapper.java index 08d6556c9a..6b59777a42 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkflowInstanceRelationMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkflowInstanceRelationMapper.java @@ -32,26 +32,26 @@ public interface WorkflowInstanceRelationMapper extends BaseMapper workflowDefinitionCodes); - int batchInsert(@Param("processTaskLineages") List workflowTaskLineages); + int batchInsert(@Param("workflowTaskLineages") List workflowTaskLineages); List queryByProjectCode(@Param("projectCode") long projectCode); diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationLogMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationLogMapper.java index ff38ee14ad..82ea32f4b2 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationLogMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationLogMapper.java @@ -26,20 +26,17 @@ import java.util.List; import com.baomidou.mybatisplus.core.mapper.BaseMapper; -/** - * workflow task relation log mapper interface - */ public interface 
WorkflowTaskRelationLogMapper extends BaseMapper { /** * query workflow task relation log * - * @param processCode process definition code - * @param processVersion process version + * @param workflowDefinitionCode workflow definition code + * @param workflowDefinitionVersion workflow version * @return workflow task relation log */ - List queryByProcessCodeAndVersion(@Param("processCode") long processCode, - @Param("processVersion") int processVersion); + List queryByWorkflowCodeAndVersion(@Param("workflowDefinitionCode") long workflowDefinitionCode, + @Param("workflowDefinitionVersion") int workflowDefinitionVersion); /** * batch insert workflow task relation @@ -50,32 +47,32 @@ public interface WorkflowTaskRelationLogMapper extends BaseMapper taskRelationList); /** - * delete workflow task relation log by processCode and version + * delete workflow task relation log by workflowDefinitionCode and version * - * @param processCode process definition code - * @param processVersion process version + * @param workflowDefinitionCode workflow definition code + * @param workflowDefinitionVersion workflow version * @return int */ - int deleteByCode(@Param("processCode") long processCode, - @Param("processVersion") int processVersion); + int deleteByCode(@Param("workflowDefinitionCode") long workflowDefinitionCode, + @Param("workflowDefinitionVersion") int workflowDefinitionVersion); /** * delete workflow task relation * - * @param processTaskRelationLog processTaskRelationLog + * @param workflowTaskRelationLog workflowTaskRelationLog * @return int */ - int deleteRelation(@Param("processTaskRelationLog") WorkflowTaskRelationLog processTaskRelationLog); + int deleteRelation(@Param("workflowTaskRelationLog") WorkflowTaskRelationLog workflowTaskRelationLog); /** * query workflow task relation log * - * @param workflowTaskRelation processTaskRelation + * @param workflowTaskRelation workflowTaskRelation * @return workflow task relation log */ - WorkflowTaskRelationLog 
queryRelationLogByRelation(@Param("processTaskRelation") WorkflowTaskRelation workflowTaskRelation); + WorkflowTaskRelationLog queryRelationLogByRelation(@Param("workflowTaskRelation") WorkflowTaskRelation workflowTaskRelation); - List queryByProcessCode(@Param("workflowDefinitionCode") long workflowDefinitionCode); + List queryByWorkflowDefinitionCode(@Param("workflowDefinitionCode") long workflowDefinitionCode); void deleteByWorkflowDefinitionCode(@Param("workflowDefinitionCode") long workflowDefinitionCode); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationMapper.java index 9cc1f980c6..263b70629c 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationMapper.java @@ -23,7 +23,6 @@ import org.apache.dolphinscheduler.dao.entity.WorkflowTaskRelationLog; import org.apache.ibatis.annotations.Param; import java.util.List; -import java.util.Map; import com.baomidou.mybatisplus.core.mapper.BaseMapper; import com.baomidou.mybatisplus.core.metadata.IPage; @@ -34,12 +33,12 @@ import com.baomidou.mybatisplus.core.metadata.IPage; public interface WorkflowTaskRelationMapper extends BaseMapper { /** - * workflow task relation by projectCode and processCode + * workflow task relation by projectCode and workflowDefinitionCode * - * @param processCode processCode + * @param workflowDefinitionCode workflowDefinitionCode * @return ProcessTaskRelation list */ - List queryByProcessCode(@Param("processCode") long processCode); + List queryByWorkflowDefinitionCode(@Param("workflowDefinitionCode") long workflowDefinitionCode); /** * update @@ -47,13 +46,14 @@ public interface WorkflowTaskRelationMapper extends BaseMapper 
queryProcessTaskRelationsByProcessDefinitionCode(@Param("processDefinitionCode") long processDefinitionCode, - @Param("processDefinitionVersion") Integer processDefinitionVersion); - - /** - * count upstream by codes - * - * @param projectCode projectCode - * @param taskCode taskCode - * @param processDefinitionCodes processDefinitionCodes - * @return upstream count list group by process definition code - */ - List> countUpstreamByCodeGroupByProcessDefinitionCode(@Param("projectCode") long projectCode, - @Param("processDefinitionCodes") Long[] processDefinitionCodes, - @Param("taskCode") long taskCode); - - /** - * batch update workflow task relation pre task - * - * @param workflowTaskRelationList workflow task relation list - * @return update num - */ - int batchUpdateProcessTaskRelationPreTask(@Param("processTaskRelationList") List workflowTaskRelationList); + List queryWorkflowTaskRelationsByWorkflowDefinitionCode(@Param("workflowDefinitionCode") long workflowDefinitionCode, + @Param("workflowDefinitionVersion") Integer workflowDefinitionVersion); /** * query by code * * @param projectCode projectCode - * @param processDefinitionCode processDefinitionCode + * @param workflowDefinitionCode workflowDefinitionCode * @param preTaskCode preTaskCode * @param postTaskCode postTaskCode * @return ProcessTaskRelation */ List queryByCode(@Param("projectCode") long projectCode, - @Param("processDefinitionCode") long processDefinitionCode, + @Param("workflowDefinitionCode") long workflowDefinitionCode, @Param("preTaskCode") long preTaskCode, @Param("postTaskCode") long postTaskCode); /** * delete workflow task relation * - * @param processTaskRelationLog processTaskRelationLog + * @param workflowTaskRelationLog workflowTaskRelationLog * @return int */ - int deleteRelation(@Param("processTaskRelationLog") WorkflowTaskRelationLog processTaskRelationLog); + int deleteRelation(@Param("workflowTaskRelationLog") WorkflowTaskRelationLog workflowTaskRelationLog); /** - * count by 
code - * - * @param projectCode projectCode - * @param processDefinitionCode processDefinitionCode - * @param preTaskCode preTaskCode - * @param postTaskCode postTaskCode + * query downstream workflow task relation by workflowDefinitionCode + * @param workflowDefinitionCode * @return ProcessTaskRelation */ - int countByCode(@Param("projectCode") long projectCode, - @Param("processDefinitionCode") long processDefinitionCode, - @Param("preTaskCode") long preTaskCode, - @Param("postTaskCode") long postTaskCode); - - /** - * query downstream workflow task relation by processDefinitionCode - * @param processDefinitionCode - * @return ProcessTaskRelation - */ - List queryDownstreamByProcessDefinitionCode(@Param("processDefinitionCode") long processDefinitionCode); + List queryDownstreamByWorkflowDefinitionCode(@Param("workflowDefinitionCode") long workflowDefinitionCode); /** * Filter workflow task relation @@ -199,22 +165,22 @@ public interface WorkflowTaskRelationMapper extends BaseMapper filterProcessTaskRelation(IPage page, - @Param("relation") WorkflowTaskRelation workflowTaskRelation); + IPage filterWorkflowTaskRelation(IPage page, + @Param("relation") WorkflowTaskRelation workflowTaskRelation); /** * batch update workflow task relation version * - * @param workflowTaskRelationList workflow task relation list + * @param workflowTaskRelation workflow task relation list * @return update num */ - int updateProcessTaskRelationTaskVersion(@Param("processTaskRelation") WorkflowTaskRelation workflowTaskRelationList); + int updateWorkflowTaskRelationTaskVersion(@Param("workflowTaskRelation") WorkflowTaskRelation workflowTaskRelation); Long queryTaskCodeByTaskName(@Param("workflowCode") Long workflowCode, @Param("taskName") String taskName); - void deleteByWorkflowDefinitionCode(@Param("workflowDefinitionCode") long workflowDefinitionCode, - @Param("workflowDefinitionVersion") int workflowDefinitionVersion); + void 
deleteByWorkflowDefinitionCodeAndVersion(@Param("workflowDefinitionCode") long workflowDefinitionCode, + @Param("workflowDefinitionVersion") int workflowDefinitionVersion); /** * workflow task relation by taskCode and postTaskVersion @@ -223,6 +189,6 @@ public interface WorkflowTaskRelationMapper extends BaseMapper queryProcessTaskRelationByTaskCodeAndTaskVersion(@Param("taskCode") long taskCode, - @Param("postTaskVersion") long postTaskVersion); + List queryWorkflowTaskRelationByTaskCodeAndTaskVersion(@Param("taskCode") long taskCode, + @Param("postTaskVersion") long postTaskVersion); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/TaskDefinitionDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/TaskDefinitionDao.java index 652ecbfbd0..308a1d4a8d 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/TaskDefinitionDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/TaskDefinitionDao.java @@ -31,10 +31,10 @@ public interface TaskDefinitionDao extends IDao { /** * Get list of task definition by process definition code * - * @param processDefinitionCode process definition code + * @param workflowDefinitionCode process definition code * @return list of task definition */ - List getTaskDefinitionListByDefinition(long processDefinitionCode); + List getTaskDefinitionListByDefinition(long workflowDefinitionCode); /** * Query task definition by code and version diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskDefinitionDaoImpl.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskDefinitionDaoImpl.java index 41206570e8..7037e2370a 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskDefinitionDaoImpl.java +++ 
b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskDefinitionDaoImpl.java @@ -66,15 +66,16 @@ public class TaskDefinitionDaoImpl extends BaseDao getTaskDefinitionListByDefinition(long processDefinitionCode) { - WorkflowDefinition workflowDefinition = workflowDefinitionMapper.queryByCode(processDefinitionCode); + public List getTaskDefinitionListByDefinition(long workflowDefinitionCode) { + WorkflowDefinition workflowDefinition = workflowDefinitionMapper.queryByCode(workflowDefinitionCode); if (workflowDefinition == null) { - log.error("Cannot find process definition, code: {}", processDefinitionCode); + log.error("Cannot find process definition, code: {}", workflowDefinitionCode); return Lists.newArrayList(); } - List processTaskRelations = workflowTaskRelationLogMapper.queryByProcessCodeAndVersion( - workflowDefinition.getCode(), workflowDefinition.getVersion()); + List processTaskRelations = + workflowTaskRelationLogMapper.queryByWorkflowCodeAndVersion( + workflowDefinition.getCode(), workflowDefinition.getVersion()); Set taskDefinitionSet = processTaskRelations .stream() .filter(p -> p.getPostTaskCode() > 0) diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskDefinitionLogDaoImpl.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskDefinitionLogDaoImpl.java index 7e46d3df74..98eaa20bed 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskDefinitionLogDaoImpl.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskDefinitionLogDaoImpl.java @@ -57,7 +57,7 @@ public class TaskDefinitionLogDaoImpl extends BaseDao workflowTaskRelationLogs = workflowTaskRelationLogMapper - .queryByProcessCodeAndVersion(workflowDefinitionCode, workflowDefinitionVersion) + .queryByWorkflowCodeAndVersion(workflowDefinitionCode, workflowDefinitionVersion) .stream() .map(p -> 
(WorkflowTaskRelation) p) .collect(Collectors.toList()); diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskInstanceDaoImpl.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskInstanceDaoImpl.java index 6b7f932fde..8c0f9f9cca 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskInstanceDaoImpl.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskInstanceDaoImpl.java @@ -126,7 +126,7 @@ public class TaskInstanceDaoImpl extends BaseDao taskInstances = - this.queryValidTaskListByWorkflowInstanceId(taskInstance.getProcessInstanceId(), + this.queryValidTaskListByWorkflowInstanceId(taskInstance.getWorkflowInstanceId(), taskInstance.getTestFlag()); for (TaskInstance task : taskInstances) { @@ -140,7 +140,7 @@ public class TaskInstanceDaoImpl extends BaseDao queryValidTaskListByWorkflowInstanceId(Integer processInstanceId, int testFlag) { - return mybatisMapper.findValidTaskListByProcessId(processInstanceId, Flag.YES, testFlag); + return mybatisMapper.findValidTaskListByWorkflowInstanceId(processInstanceId, Flag.YES, testFlag); } @Override @@ -151,7 +151,7 @@ public class TaskInstanceDaoImpl extends BaseDao queryPreviousTaskListByWorkflowInstanceId(Integer workflowInstanceId) { WorkflowInstance workflowInstance = workflowInstanceMapper.selectById(workflowInstanceId); - return mybatisMapper.findValidTaskListByProcessId(workflowInstanceId, Flag.NO, + return mybatisMapper.findValidTaskListByWorkflowInstanceId(workflowInstanceId, Flag.NO, workflowInstance.getTestFlag()); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/WorkflowDefinitionLogDaoImpl.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/WorkflowDefinitionLogDaoImpl.java index 18bc5807cf..0c16e580dc 100644 --- 
a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/WorkflowDefinitionLogDaoImpl.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/WorkflowDefinitionLogDaoImpl.java @@ -43,6 +43,6 @@ public class WorkflowDefinitionLogDaoImpl extends BaseDao queryNeedFailoverMasters() { return mybatisMapper - .queryNeedFailoverProcessInstanceHost(WorkflowExecutionStatus.getNeedFailoverWorkflowInstanceState()); + .queryNeedFailoverWorkflowInstanceHost(WorkflowExecutionStatus.getNeedFailoverWorkflowInstanceState()); } @Override diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/WorkflowTaskRelationLogDaoImpl.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/WorkflowTaskRelationLogDaoImpl.java index 2a2eacf7d8..e27864e270 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/WorkflowTaskRelationLogDaoImpl.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/WorkflowTaskRelationLogDaoImpl.java @@ -41,7 +41,7 @@ public class WorkflowTaskRelationLogDaoImpl extends BaseDao queryByWorkflowDefinitionCode(long workflowDefinitionCode) { - return mybatisMapper.queryByProcessCode(workflowDefinitionCode); + return mybatisMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode); } @Override diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/TaskInstanceUtils.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/TaskInstanceUtils.java index 9f22c5536f..8c7039c699 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/TaskInstanceUtils.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/TaskInstanceUtils.java @@ -32,12 +32,12 @@ public class TaskInstanceUtils { target.setId(source.getId()); target.setName(source.getName()); 
target.setTaskType(source.getTaskType()); - target.setProcessInstanceId(source.getProcessInstanceId()); - target.setProcessInstanceName(source.getProcessInstanceName()); + target.setWorkflowInstanceId(source.getWorkflowInstanceId()); + target.setWorkflowInstanceName(source.getWorkflowInstanceName()); target.setProjectCode(source.getProjectCode()); target.setTaskCode(source.getTaskCode()); target.setTaskDefinitionVersion(source.getTaskDefinitionVersion()); - target.setProcessInstanceName(source.getProcessInstanceName()); + target.setWorkflowInstanceName(source.getWorkflowInstanceName()); target.setTaskGroupPriority(source.getTaskGroupPriority()); target.setState(source.getState()); target.setFirstSubmitTime(source.getFirstSubmitTime()); @@ -50,7 +50,7 @@ public class TaskInstanceUtils { target.setRetryTimes(source.getRetryTimes()); target.setAlertFlag(source.getAlertFlag()); target.setWorkflowInstance(source.getWorkflowInstance()); - target.setProcessDefine(source.getProcessDefine()); + target.setWorkflowDefinition(source.getWorkflowDefinition()); target.setTaskDefine(source.getTaskDefine()); target.setPid(source.getPid()); target.setAppLink(source.getAppLink()); diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertMapper.xml index f0c32aae78..7891dd9137 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertMapper.xml @@ -21,14 +21,14 @@ id , sign, title, content, alert_status, warning_type, log, alertgroup_id, create_time, update_time, project_code, - process_definition_code, process_instance_id, alert_type + workflow_definition_code, workflow_instance_id, alert_type @@ -64,6 +64,6 @@ delete from t_ds_alert - where process_instance_id = #{workflowInstanceId} + where 
workflow_instance_id = #{workflowInstanceId} diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/CommandMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/CommandMapper.xml index 7f587ef899..7a8dd51af8 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/CommandMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/CommandMapper.xml @@ -19,11 +19,15 @@ delete from t_ds_command - where process_instance_id in + where workflow_instance_id in #{i} diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqExecuteResultMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqExecuteResultMapper.xml index b19da7e0b3..096ddc03b6 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqExecuteResultMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqExecuteResultMapper.xml @@ -20,11 +20,11 @@ SELECT a.id, - a.process_definition_id, - a.process_instance_id, + a.workflow_definition_id, + a.workflow_instance_id, a.task_instance_id, a.rule_type, a.rule_name, @@ -89,15 +89,15 @@ a.user_id, a.comparison_type, a.error_output_path, - b.name as process_definition_name, - e.name as process_instance_name, + b.name as workflow_definition_name, + e.name as workflow_instance_name, c.name as task_name, cp.type as comparison_type_name, d.user_name FROM t_ds_dq_execute_result a - left join t_ds_process_definition b on a.process_definition_id = b.id + left join t_ds_workflow_definition b on a.workflow_definition_id = b.id left join t_ds_task_instance c on a.task_instance_id = c.id - left join t_ds_process_instance e on a.process_instance_id = e.id + left join t_ds_workflow_instance e on a.workflow_instance_id = e.id left join t_ds_user d on d.id = a.user_id left join 
t_ds_dq_comparison_type cp on cp.id = a.comparison_type where task_instance_id = #{taskInstanceId} @@ -106,6 +106,6 @@ delete from t_ds_dq_execute_result - where process_instance_id = #{workflowInstanceId} + where workflow_instance_id = #{workflowInstanceId} diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapper.xml index 217dc43de0..26d1a22f33 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapper.xml @@ -19,11 +19,15 @@ diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.xml index ddf4c0c490..939ba2ff80 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.xml @@ -141,17 +141,17 @@ union select id as project_id from t_ds_project where user_id=#{userId}) - select dp.id project_id, dp.code project_code, dp.name project_name, u.user_name user_name - from t_ds_process_instance di - join t_ds_process_definition dpd on di.process_definition_code = dpd.code + from t_ds_workflow_instance di + join t_ds_workflow_definition dpd on di.workflow_definition_code = dpd.code join t_ds_project dp on dpd.project_code = dp.code join t_ds_user u on dp.user_id = u.id - where di.id = #{processInstanceId}; + where di.id = #{workflowInstanceId}; + - select p_f.name as process_definition_name, p.name as project_name,u.user_name,e.name as environment_name, + - + select p_f.name as workflow_definition_name, p.name as project_name,u.user_name,e.name as environment_name, 
from t_ds_schedules s - join t_ds_process_definition p_f on s.process_definition_code = p_f.code + join t_ds_workflow_definition p_f on s.workflow_definition_code = p_f.code join t_ds_project as p on p_f.project_code = p.code join t_ds_user as u on s.user_id = u.id left join t_ds_environment as e on s.environment_code = e.code @@ -58,78 +58,78 @@ and p.code = #{projectCode} - - and s.process_definition_code = #{processDefinitionCode} + + and s.workflow_definition_code = #{workflowDefinitionCode} order by s.update_time desc - select from t_ds_schedules where 1= 1 - - and process_definition_code in - + + and workflow_definition_code in + #{i} and release_state = 1 - select from t_ds_schedules - where process_definition_code = #{processDefinitionCode} + where workflow_definition_code = #{workflowDefinitionCode} - select from t_ds_schedules - - where process_definition_code in - + + where workflow_definition_code in + #{code} - @@ -101,11 +101,11 @@ @@ -129,20 +129,20 @@ queue.id, queue.task_name, queue.group_id, - queue.process_id, + queue.workflow_instance_id, queue.priority, queue.in_queue, queue.status, queue.force_start, queue.create_time, queue.update_time, - process.name as processInstanceName, + workflow.name as workflowInstanceName, p.name as projectName, p.code as projectCode from t_ds_task_group_queue queue - left join t_ds_process_instance process on queue.process_id = process.id - left join t_ds_process_definition p_f on process.process_definition_code = p_f.code - and process.process_definition_version = p_f.version + left join t_ds_workflow_instance workflow on queue.workflow_instance_id = workflow.id + left join t_ds_workflow_definition p_f on workflow.workflow_definition_code = p_f.code + and workflow.workflow_definition_version = p_f.version join t_ds_project as p on p_f.project_code = p.code @@ -151,8 +151,8 @@ and task_name like concat('%', #{taskName}, '%') - - and process.name like concat('%', #{processName}, '%') + + and workflow.name like 
concat('%', #{workflowName}, '%') and queue.status =#{status} @@ -178,13 +178,13 @@ delete from t_ds_task_group_queue - where process_id = #{workflowInstanceId} + where workflow_instance_id = #{workflowInstanceId} delete from t_ds_task_group_queue - where process_id in + where workflow_instance_id in #{i} diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.xml index 9823b6fed1..04b6bec241 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.xml @@ -19,22 +19,22 @@ - id, name, task_type, process_instance_id, process_instance_name, project_code, task_code, task_definition_version, state, submit_time, + id, name, task_type, workflow_instance_id, workflow_instance_name, project_code, task_code, task_definition_version, state, submit_time, start_time, end_time, host, execute_path, log_path, alert_flag, retry_times, pid, app_link, flag, is_cache, cache_key, retry_interval, max_retry_times, task_instance_priority, worker_group,environment_code , executor_id, executor_name, first_submit_time, delay_time, task_params, var_pool, dry_run, test_flag, task_group_id, cpu_quota, memory_max, task_execute_type - ${alias}.id, ${alias}.name, ${alias}.task_type, ${alias}.task_code, ${alias}.task_definition_version, ${alias}.process_instance_id, ${alias}.state, ${alias}.submit_time, + ${alias}.id, ${alias}.name, ${alias}.task_type, ${alias}.task_code, ${alias}.task_definition_version, ${alias}.workflow_instance_id, ${alias}.state, ${alias}.submit_time, ${alias}.start_time, ${alias}.end_time, ${alias}.host, ${alias}.execute_path, ${alias}.log_path, ${alias}.alert_flag, ${alias}.retry_times, ${alias}.pid, ${alias}.app_link, ${alias}.flag, ${alias}.is_cache, 
${alias}.cache_key, ${alias}.retry_interval, ${alias}.max_retry_times, ${alias}.task_instance_priority, ${alias}.worker_group,${alias}.environment_code , ${alias}.executor_id, ${alias}.first_submit_time, ${alias}.delay_time, ${alias}.task_params, ${alias}.var_pool, ${alias}.dry_run, ${alias}.test_flag, ${alias}.task_group_id, ${alias}.task_execute_type - select from t_ds_task_instance - WHERE process_instance_id = #{processInstanceId} + WHERE workflow_instance_id = #{workflowInstanceId} and flag = #{flag} and test_flag=#{testFlag} order by start_time desc @@ -43,7 +43,7 @@ select from t_ds_task_instance - WHERE process_instance_id = #{workflowInstanceId} + WHERE workflow_instance_id = #{workflowInstanceId} + @@ -242,8 +242,8 @@ and executor_name = #{executorName} - - and process_instance_name like concat('%', #{processDefinitionName}, '%') + + and workflow_instance_name like concat('%', #{workflowDefinitionName}, '%') order by start_time desc @@ -257,7 +257,7 @@ select task_code, max(end_time) as max_end_time from t_ds_task_instance where 1=1 and test_flag = #{testFlag} - and instance.process_instance_id = #{processInstanceId} + and instance.workflow_instance_id = #{workflowInstanceId} and task_code in @@ -266,7 +266,7 @@ group by task_code ) t_max - on instance.process_instance_id = t_max.process_instance_id + on instance.workflow_instance_id = t_max.workflow_instance_id and instance.task_code = t_max.task_code and instance.end_time = t_max.max_end_time @@ -274,7 +274,7 @@ select from t_ds_task_instance - where process_instance_id = #{processInstanceId} + where workflow_instance_id = #{workflowInstanceId} and task_code = #{taskCode} order by end_time desc limit 1 @@ -288,6 +288,6 @@ delete from t_ds_task_instance - where process_instance_id = #{workflowInstanceId} + where workflow_instance_id = #{workflowInstanceId} diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UserMapper.xml 
b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UserMapper.xml index 1f298fbf95..61d2a9ca9c 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UserMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UserMapper.xml @@ -143,25 +143,25 @@ from t_ds_user where state = 1 - select - dl.code as process_definition_code, - pd.version as process_definition_version, + dl.code as workflow_definition_code, + pd.version as workflow_definition_version, u.id as modifier_id, u.user_name as modifier_name, u2.id as creator_id, u2.user_name as creator_name - from t_ds_process_definition_log dl - inner join t_ds_process_definition pd + from t_ds_workflow_definition_log dl + inner join t_ds_workflow_definition pd on pd.code = dl.code and pd.version = dl.version inner join t_ds_user u on dl.operator = u.id inner join t_ds_user u2 on pd.user_id = u2.id - + where dl.code in - + #{code} diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowDefinitionLogMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowDefinitionLogMapper.xml index 79bb1c827b..dde14bd223 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowDefinitionLogMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowDefinitionLogMapper.xml @@ -30,7 +30,7 @@ pd.release_state, pd.user_id,pd.global_params, pd.flag, pd.locations, pd.warning_group_id, pd.timeout, pd.operator, pd.operate_time, pd.create_time, pd.update_time, u.user_name,p.name as project_name ,pd.execution_type - from t_ds_process_definition_log pd + from t_ds_workflow_definition_log pd JOIN t_ds_user u ON pd.user_id = u.id JOIN t_ds_project p ON pd.project_code = p.code WHERE p.code = #{projectCode} @@ -39,20 +39,20 @@ @@ -60,15 +60,15 @@ select - from 
t_ds_process_definition_log + from t_ds_workflow_definition_log where code = #{code} order by version desc limit 1 - - + delete - from t_ds_process_definition_log + from t_ds_workflow_definition_log where code = #{code} and version = #{version} - + delete - from t_ds_process_definition_log + from t_ds_workflow_definition_log where code = #{workflowDefinitionCode} diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowDefinitionMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowDefinitionMapper.xml index f6baea4101..ceec6371e6 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowDefinitionMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowDefinitionMapper.xml @@ -26,26 +26,26 @@ - delete from t_ds_process_definition + delete from t_ds_workflow_definition where code = #{code} - select - from t_ds_process_definition + from t_ds_workflow_definition where project_code = #{projectCode} order by create_time desc - select code, name, version - from t_ds_process_definition + from t_ds_workflow_definition where 1=1 AND project_code = #{projectCode} @@ -125,7 +125,7 @@ SELECT user_id as user_id, tu.user_name as user_name, count(0) as count - FROM t_ds_process_definition td + FROM t_ds_workflow_definition td JOIN t_ds_user tu on tu.id=td.user_id where 1 = 1 @@ -148,7 +148,7 @@ select code - from t_ds_process_definition + from t_ds_workflow_definition where project_code in #{i} - select project_code as projectCode ,count(*) as count - from t_ds_process_definition + from t_ds_workflow_definition where project_code in #{i} diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowInstanceMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowInstanceMapper.xml index 3a0e4ebcd4..7587eafad9 100644 --- 
a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowInstanceMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowInstanceMapper.xml @@ -20,24 +20,24 @@ id - , name, process_definition_version, process_definition_code, project_code, state, recovery, start_time, end_time, run_times,host, + , name, workflow_definition_version, workflow_definition_code, project_code, state, recovery, start_time, end_time, run_times,host, command_type, command_param, task_depend_type, max_try_times, failure_strategy, warning_type, warning_group_id, schedule_time, command_start_time, global_params, flag, - update_time, is_sub_process, executor_id, history_cmd, - process_instance_priority, worker_group,environment_code, timeout, tenant_code, var_pool, - dry_run, test_flag, next_process_instance_id, restart_time, state_history + update_time, is_sub_workflow, executor_id, history_cmd, + workflow_instance_priority, worker_group,environment_code, timeout, tenant_code, var_pool, + dry_run, test_flag, next_workflow_instance_id, restart_time, state_history - select distinct host - from t_ds_process_instance + from t_ds_workflow_instance where state in @@ -61,10 +61,10 @@ - select - from t_ds_process_instance + from t_ds_workflow_instance where state = #{status} and project_code = #{projectCode} @@ -78,7 +78,7 @@ select - from t_ds_process_instance + from t_ds_workflow_instance where 1=1 and worker_group =#{workerGroupName} @@ -109,14 +109,14 @@ order by id asc - select - from t_ds_process_instance - where is_sub_process=0 + from t_ds_workflow_instance + where is_sub_workflow=0 and project_code = #{projectCode} - - and process_definition_code = #{processDefinitionCode} + + and workflow_definition_code = #{workflowDefinitionCode} and name like concat('%', #{searchVal}, '%') @@ -142,7 +142,7 @@ order by start_time desc, end_time desc - update t_ds_process_instance + update t_ds_workflow_instance set host=null 
where host =#{host} @@ -153,27 +153,27 @@ - update t_ds_process_instance + update t_ds_workflow_instance set state = #{targetState} where id = #{workflowInstanceId} and state = #{originState} - - update t_ds_process_instance + + update t_ds_workflow_instance set tenant_code = #{destTenantCode} where tenant_code = #{originTenantCode} - - update t_ds_process_instance + + update t_ds_workflow_instance set worker_group = #{destWorkerGroupName} where worker_group = #{originWorkerGroupName} - select - from t_ds_process_instance - where process_definition_code=#{processDefinitionCode} + from t_ds_workflow_instance + where workflow_definition_code=#{workflowDefinitionCode} order by start_time desc limit #{size} - select t1.* from (select - from t_ds_process_instance - where process_definition_code=#{processDefinitionCode} and test_flag=#{testFlag} + from t_ds_workflow_instance + where workflow_definition_code=#{workflowDefinitionCode} and test_flag=#{testFlag} and schedule_time = ]]> #{startTime} and schedule_time #{endTime} ) as t1 inner join t_ds_task_instance as t2 - on t1.id = t2.process_instance_id and t2.task_code=#{taskDefinitionCode} + on t1.id = t2.workflow_instance_id and t2.task_code=#{taskDefinitionCode} order by end_time desc limit 1 - select t1.* from ( select - from t_ds_process_instance + from t_ds_workflow_instance where - process_definition_code=#{processDefinitionCode} and test_flag=#{testFlag} + workflow_definition_code=#{workflowDefinitionCode} and test_flag=#{testFlag} and schedule_time is null and start_time = ]]> #{startTime} and start_time #{endTime} @@ -228,34 +228,34 @@ inner join t_ds_task_instance as t2 - on t1.id = t2.process_instance_id and t2.task_code=#{taskCode} + on t1.id = t2.workflow_instance_id and t2.task_code=#{taskCode} order by t1.end_time desc limit 1 - select - from t_ds_process_instance - where process_definition_code = #{processDefinitionCode} and schedule_time is not null + from t_ds_workflow_instance + where 
workflow_definition_code = #{workflowDefinitionCode} and schedule_time is not null order by schedule_time limit 1 - select - from t_ds_process_instance - where process_definition_code = #{processDefinitionCode} and start_time is not null + from t_ds_workflow_instance + where workflow_definition_code = #{workflowDefinitionCode} and start_time is not null order by start_time limit 1 - select - from t_ds_process_instance - where process_definition_code=#{processDefinitionCode} + from t_ds_workflow_instance + where workflow_definition_code=#{workflowDefinitionCode} and state in @@ -268,9 +268,9 @@ - - - SELECT - FROM t_ds_process_instance - where is_sub_process=0 + FROM t_ds_workflow_instance + where is_sub_workflow=0 and project_code = #{projectCode} - - and process_definition_code = #{processDefinitionCode} + + and workflow_definition_code = #{workflowDefinitionCode} and name like concat('%', #{name}, '%') @@ -337,11 +309,11 @@ - - update t_ds_process_instance - set global_params = #{globalParams} - where id = #{id} - - - update t_ds_process_instance - set next_process_instance_id = #{thisInstanceId} - where id = #{runningInstanceId} - and next_process_instance_id = 0 - select - from t_ds_relation_process_instance - where parent_process_instance_id = #{parentProcessId} + from t_ds_relation_workflow_instance + where parent_workflow_instance_id = #{parentWorkflowInstanceId} and parent_task_instance_id = #{parentTaskId} - select - from t_ds_relation_process_instance - where process_instance_id = #{subProcessId} + from t_ds_relation_workflow_instance + where workflow_instance_id = #{subWorkflowInstanceId} delete - from t_ds_relation_process_instance - where parent_process_instance_id = #{parentWorkflowInstanceId} + from t_ds_relation_workflow_instance + where parent_workflow_instance_id = #{parentWorkflowInstanceId} diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskLineageMapper.xml 
b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskLineageMapper.xml index 0274b1ec91..aa0bd53e32 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskLineageMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskLineageMapper.xml @@ -32,7 +32,7 @@ - delete from t_ds_process_task_lineage + delete from t_ds_workflow_task_lineage where workflow_definition_code in #{i} @@ -40,7 +40,7 @@ - insert into t_ds_process_task_lineage (workflow_definition_code, workflow_definition_version, task_definition_code, + insert into t_ds_workflow_task_lineage (workflow_definition_code, workflow_definition_version, task_definition_code, task_definition_version, dept_project_code, dept_workflow_definition_code, dept_task_definition_code) values @@ -55,8 +55,8 @@ select from - t_ds_process_task_lineage - where workflow_definition_code in (select code from t_ds_process_definition where project_code = #{projectCode}) + t_ds_workflow_task_lineage + where workflow_definition_code in (select code from t_ds_workflow_definition where project_code = #{projectCode}) @@ -82,8 +82,8 @@ ,schd.end_time as schedule_end_time ,schd.crontab as crontab ,schd.release_state as schedule_publish_status - from t_ds_process_definition pd - left join t_ds_schedules schd on pd.code = schd.process_definition_code + from t_ds_workflow_definition pd + left join t_ds_schedules schd on pd.code = schd.workflow_definition_code where 1=1 and pd.name = #{workflowDefinitionName} @@ -95,7 +95,7 @@ select from - t_ds_process_task_lineage + t_ds_workflow_task_lineage where 1=1 and dept_project_code = #{deptProjectCode} @@ -110,7 +110,7 @@ select from - t_ds_process_task_lineage + t_ds_workflow_task_lineage where workflow_definition_code = #{workflowDefinitionCode} diff --git 
a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationLogMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationLogMapper.xml index d92a36980e..ce66d9636b 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationLogMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationLogMapper.xml @@ -19,67 +19,67 @@ - id, name, process_definition_version, project_code, process_definition_code, pre_task_code, pre_task_version, + id, name, workflow_definition_version, project_code, workflow_definition_code, pre_task_code, pre_task_version, post_task_code, post_task_version, condition_type, condition_params, operator, operate_time, create_time, update_time - select - from t_ds_process_task_relation_log - WHERE process_definition_code = #{processCode} - and process_definition_version = #{processVersion} + from t_ds_workflow_task_relation_log + WHERE workflow_definition_code = #{workflowDefinitionCode} + and workflow_definition_version = #{workflowDefinitionVersion} - insert into t_ds_process_task_relation_log (name, process_definition_version, project_code, process_definition_code, + insert into t_ds_workflow_task_relation_log (name, workflow_definition_version, project_code, workflow_definition_code, pre_task_code, pre_task_version, post_task_code, post_task_version, condition_type, condition_params, operator, operate_time, create_time, update_time) values - (#{relation.name},#{relation.processDefinitionVersion},#{relation.projectCode},#{relation.processDefinitionCode}, + (#{relation.name},#{relation.workflowDefinitionVersion},#{relation.projectCode},#{relation.workflowDefinitionCode}, #{relation.preTaskCode},#{relation.preTaskVersion},#{relation.postTaskCode},#{relation.postTaskVersion}, 
#{relation.conditionType},#{relation.conditionParams},#{relation.operator},#{relation.operateTime}, #{relation.createTime},#{relation.updateTime}) - delete from t_ds_process_task_relation_log - WHERE process_definition_code = #{processCode} - and process_definition_version = #{processVersion} + delete from t_ds_workflow_task_relation_log + WHERE workflow_definition_code = #{workflowDefinitionCode} + and workflow_definition_version = #{workflowDefinitionVersion} - delete from t_ds_process_task_relation_log - WHERE project_code = #{processTaskRelationLog.projectCode} - and process_definition_code = #{processTaskRelationLog.processDefinitionCode} - and process_definition_version = #{processTaskRelationLog.processDefinitionVersion} - - and pre_task_code = #{processTaskRelationLog.preTaskCode} - and pre_task_version = #{processTaskRelationLog.preTaskVersion} + delete from t_ds_workflow_task_relation_log + WHERE project_code = #{workflowTaskRelationLog.projectCode} + and workflow_definition_code = #{workflowTaskRelationLog.workflowDefinitionCode} + and workflow_definition_version = #{workflowTaskRelationLog.workflowDefinitionVersion} + + and pre_task_code = #{workflowTaskRelationLog.preTaskCode} + and pre_task_version = #{workflowTaskRelationLog.preTaskVersion} - and post_task_code = #{processTaskRelationLog.postTaskCode} - and post_task_version = #{processTaskRelationLog.postTaskVersion} + and post_task_code = #{workflowTaskRelationLog.postTaskCode} + and post_task_version = #{workflowTaskRelationLog.postTaskVersion} - select - from t_ds_process_task_relation_log - WHERE process_definition_code = #{workflowDefinitionCode} + from t_ds_workflow_task_relation_log + WHERE workflow_definition_code = #{workflowDefinitionCode} delete - from t_ds_process_task_relation_log - WHERE process_definition_code = #{workflowDefinitionCode} + from t_ds_workflow_task_relation_log + WHERE workflow_definition_code = #{workflowDefinitionCode} diff --git 
a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationMapper.xml index 6f54a07835..7fa82d3892 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationMapper.xml @@ -19,19 +19,19 @@ - id, name, process_definition_version, project_code, process_definition_code, pre_task_code, pre_task_version, + id, name, workflow_definition_version, project_code, workflow_definition_code, pre_task_code, pre_task_version, post_task_code, post_task_version, condition_type, condition_params, create_time, update_time - select - from t_ds_process_task_relation - WHERE process_definition_code = #{processCode} + from t_ds_workflow_task_relation + WHERE workflow_definition_code = #{workflowDefinitionCode} select - from t_ds_process_task_relation + from t_ds_workflow_task_relation WHERE 1 = 1 and pre_task_code in @@ -53,39 +53,39 @@ - - delete from t_ds_process_task_relation + + delete from t_ds_workflow_task_relation WHERE project_code = #{projectCode} - and process_definition_code = #{processCode} + and workflow_definition_code = #{workflowDefinitionCode} - insert into t_ds_process_task_relation (name, process_definition_version, project_code, process_definition_code, + insert into t_ds_workflow_task_relation (name, workflow_definition_version, project_code, workflow_definition_code, pre_task_code, pre_task_version, post_task_code, post_task_version, condition_type, condition_params, create_time, update_time) values - (#{relation.name},#{relation.processDefinitionVersion},#{relation.projectCode},#{relation.processDefinitionCode}, + (#{relation.name},#{relation.workflowDefinitionVersion},#{relation.projectCode},#{relation.workflowDefinitionCode}, 
#{relation.preTaskCode},#{relation.preTaskVersion},#{relation.postTaskCode},#{relation.postTaskVersion}, #{relation.conditionType},#{relation.conditionParams},#{relation.createTime},#{relation.updateTime}) - select - from t_ds_process_task_relation - where process_definition_code = #{processDefinitionCode} + from t_ds_workflow_task_relation + where workflow_definition_code = #{workflowDefinitionCode} and pre_task_version >= 1 @@ -93,7 +93,7 @@ @@ -101,7 +101,7 @@ - - - SELECT FROM - t_ds_process_task_relation + t_ds_workflow_task_relation WHERE 1=1 - AND process_definition_code = #{processDefinitionCode} - AND process_definition_version = #{processDefinitionVersion} + AND workflow_definition_code = #{workflowDefinitionCode} + AND workflow_definition_version = #{workflowDefinitionVersion} - - - update t_ds_process_task_relation - - pre_task_code=#{workflowTaskRelation.preTaskCode}, - pre_task_version=#{workflowTaskRelation.preTaskVersion} - - WHERE id = #{workflowTaskRelation.id} - - - - delete from t_ds_process_task_relation - WHERE project_code = #{processTaskRelationLog.projectCode} - and process_definition_code = #{processTaskRelationLog.processDefinitionCode} - and process_definition_version = #{processTaskRelationLog.processDefinitionVersion} - - and pre_task_code = #{processTaskRelationLog.preTaskCode} - and pre_task_version = #{processTaskRelationLog.preTaskVersion} + delete from t_ds_workflow_task_relation + WHERE project_code = #{workflowTaskRelationLog.projectCode} + and workflow_definition_code = #{workflowTaskRelationLog.workflowDefinitionCode} + and workflow_definition_version = #{workflowTaskRelationLog.workflowDefinitionVersion} + + and pre_task_code = #{workflowTaskRelationLog.preTaskCode} + and pre_task_version = #{workflowTaskRelationLog.preTaskVersion} - and post_task_code = #{processTaskRelationLog.postTaskCode} - and post_task_version = #{processTaskRelationLog.postTaskVersion} + and post_task_code = #{workflowTaskRelationLog.postTaskCode} + 
and post_task_version = #{workflowTaskRelationLog.postTaskVersion} - - - - - update t_ds_process_task_relation + + update t_ds_workflow_task_relation set pre_task_version=#{workflowTaskRelation.preTaskVersion}, post_task_version=#{workflowTaskRelation.postTaskVersion}, - process_definition_version=#{workflowTaskRelation.processDefinitionVersion} + workflow_definition_version=#{workflowTaskRelation.workflowDefinitionVersion} where id = #{workflowTaskRelation.id} - + delete - from t_ds_process_task_relation - where process_definition_code = #{workflowDefinitionCode} and process_definition_version = #{workflowDefinitionVersion} + from t_ds_workflow_task_relation + where workflow_definition_code = #{workflowDefinitionCode} + and workflow_definition_version = #{workflowDefinitionVersion} - select - from t_ds_process_task_relation - WHERE process_definition_code in ( + from t_ds_workflow_task_relation + WHERE workflow_definition_code in ( SELECT - process_definition_code + workflow_definition_code FROM - t_ds_process_task_relation + t_ds_workflow_task_relation WHERE post_task_code = #{taskCode} and post_task_version = #{postTaskVersion} diff --git a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_h2.sql b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_h2.sql index 2d82dac5ed..0259e67c3c 100644 --- a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_h2.sql +++ b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_h2.sql @@ -281,8 +281,8 @@ CREATE TABLE t_ds_alert create_time datetime DEFAULT NULL, update_time datetime DEFAULT NULL, project_code bigint(20) DEFAULT NULL, - process_definition_code bigint(20) DEFAULT NULL, - process_instance_id int(11) DEFAULT NULL, + workflow_definition_code bigint(20) DEFAULT NULL, + workflow_instance_id int(11) DEFAULT NULL, alert_type int(11) DEFAULT NULL, PRIMARY KEY (id), KEY idx_sign (sign) @@ -317,7 +317,7 @@ CREATE TABLE t_ds_command ( id int(11) NOT NULL AUTO_INCREMENT, command_type 
tinyint(4) DEFAULT NULL, - process_definition_code bigint(20) DEFAULT NULL, + workflow_definition_code bigint(20) DEFAULT NULL, command_param text, task_depend_type tinyint(4) DEFAULT NULL, failure_strategy tinyint(4) DEFAULT '0', @@ -327,16 +327,16 @@ CREATE TABLE t_ds_command start_time datetime DEFAULT NULL, executor_id int(11) DEFAULT NULL, update_time datetime DEFAULT NULL, - process_instance_priority int(11) DEFAULT '2', + workflow_instance_priority int(11) DEFAULT '2', worker_group varchar(255), tenant_code varchar(64) DEFAULT 'default', environment_code bigint(20) DEFAULT '-1', dry_run int NULL DEFAULT 0, - process_instance_id int(11) DEFAULT 0, - process_definition_version int(11) DEFAULT 0, + workflow_instance_id int(11) DEFAULT 0, + workflow_definition_version int(11) DEFAULT 0, test_flag int NULL DEFAULT 0, PRIMARY KEY (id), - KEY priority_id_index (process_instance_priority, id) + KEY priority_id_index (workflow_instance_priority, id) ); -- ---------------------------- @@ -374,7 +374,7 @@ CREATE TABLE t_ds_error_command id int(11) NOT NULL, command_type tinyint(4) DEFAULT NULL, executor_id int(11) DEFAULT NULL, - process_definition_code bigint(20) DEFAULT NULL, + workflow_definition_code bigint(20) DEFAULT NULL, command_param text, task_depend_type tinyint(4) DEFAULT NULL, failure_strategy tinyint(4) DEFAULT '0', @@ -383,14 +383,14 @@ CREATE TABLE t_ds_error_command schedule_time datetime DEFAULT NULL, start_time datetime DEFAULT NULL, update_time datetime DEFAULT NULL, - process_instance_priority int(11) DEFAULT '2', + workflow_instance_priority int(11) DEFAULT '2', worker_group varchar(255), tenant_code varchar(64) DEFAULT 'default', environment_code bigint(20) DEFAULT '-1', message text, dry_run int NULL DEFAULT 0, - process_instance_id int(11) DEFAULT 0, - process_definition_version int(11) DEFAULT 0, + workflow_instance_id int(11) DEFAULT 0, + workflow_definition_version int(11) DEFAULT 0, test_flag int NULL DEFAULT 0, PRIMARY KEY (id) ); @@ 
-400,10 +400,10 @@ CREATE TABLE t_ds_error_command -- ---------------------------- -- ---------------------------- --- Table structure for t_ds_process_definition +-- Table structure for t_ds_workflow_definition -- ---------------------------- -DROP TABLE IF EXISTS t_ds_process_definition CASCADE; -CREATE TABLE t_ds_process_definition +DROP TABLE IF EXISTS t_ds_workflow_definition CASCADE; +CREATE TABLE t_ds_workflow_definition ( id int(11) NOT NULL AUTO_INCREMENT, code bigint(20) NOT NULL, @@ -422,19 +422,15 @@ CREATE TABLE t_ds_process_definition create_time datetime NOT NULL, update_time datetime DEFAULT NULL, PRIMARY KEY (id), - UNIQUE KEY process_unique (name,project_code) USING BTREE, + UNIQUE KEY workflow_unique (name,project_code) USING BTREE, UNIQUE KEY code_unique (code) ); -- ---------------------------- --- Records of t_ds_process_definition +-- Table structure for t_ds_workflow_definition_log -- ---------------------------- - --- ---------------------------- --- Table structure for t_ds_process_definition_log --- ---------------------------- -DROP TABLE IF EXISTS t_ds_process_definition_log CASCADE; -CREATE TABLE t_ds_process_definition_log +DROP TABLE IF EXISTS t_ds_workflow_definition_log CASCADE; +CREATE TABLE t_ds_workflow_definition_log ( id int(11) NOT NULL AUTO_INCREMENT, code bigint(20) NOT NULL, @@ -535,16 +531,16 @@ CREATE TABLE t_ds_task_definition_log ); -- ---------------------------- --- Table structure for t_ds_process_task_relation +-- Table structure for t_ds_workflow_task_relation -- ---------------------------- -DROP TABLE IF EXISTS t_ds_process_task_relation CASCADE; -CREATE TABLE t_ds_process_task_relation +DROP TABLE IF EXISTS t_ds_workflow_task_relation CASCADE; +CREATE TABLE t_ds_workflow_task_relation ( id int(11) NOT NULL AUTO_INCREMENT, name varchar(255) DEFAULT NULL, - process_definition_version int(11) DEFAULT NULL, + workflow_definition_version int(11) DEFAULT NULL, project_code bigint(20) NOT NULL, - 
process_definition_code bigint(20) NOT NULL, + workflow_definition_code bigint(20) NOT NULL, pre_task_code bigint(20) NOT NULL, pre_task_version int(11) NOT NULL, post_task_code bigint(20) NOT NULL, @@ -557,16 +553,16 @@ CREATE TABLE t_ds_process_task_relation ); -- ---------------------------- --- Table structure for t_ds_process_task_relation_log +-- Table structure for t_ds_workflow_task_relation_log -- ---------------------------- -DROP TABLE IF EXISTS t_ds_process_task_relation_log CASCADE; -CREATE TABLE t_ds_process_task_relation_log +DROP TABLE IF EXISTS t_ds_workflow_task_relation_log CASCADE; +CREATE TABLE t_ds_workflow_task_relation_log ( id int(11) NOT NULL AUTO_INCREMENT, name varchar(255) DEFAULT NULL, - process_definition_version int(11) DEFAULT NULL, + workflow_definition_version int(11) DEFAULT NULL, project_code bigint(20) NOT NULL, - process_definition_code bigint(20) NOT NULL, + workflow_definition_code bigint(20) NOT NULL, pre_task_code bigint(20) NOT NULL, pre_task_version int(11) NOT NULL, post_task_code bigint(20) NOT NULL, @@ -581,15 +577,15 @@ CREATE TABLE t_ds_process_task_relation_log ); -- ---------------------------- --- Table structure for t_ds_process_instance +-- Table structure for t_ds_workflow_instance -- ---------------------------- -DROP TABLE IF EXISTS t_ds_process_instance CASCADE; -CREATE TABLE t_ds_process_instance +DROP TABLE IF EXISTS t_ds_workflow_instance CASCADE; +CREATE TABLE t_ds_workflow_instance ( id int(11) NOT NULL AUTO_INCREMENT, name varchar(255) DEFAULT NULL, - process_definition_version int(11) NOT NULL DEFAULT '1', - process_definition_code bigint(20) not NULL, + workflow_definition_version int(11) NOT NULL DEFAULT '1', + workflow_definition_code bigint(20) not NULL, project_code bigint(20) DEFAULT NULL, state tinyint(4) DEFAULT NULL, state_history text, @@ -610,15 +606,15 @@ CREATE TABLE t_ds_process_instance global_params text, flag tinyint(4) DEFAULT '1', update_time timestamp NULL DEFAULT 
CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - is_sub_process int(11) DEFAULT '0', + is_sub_workflow int(11) DEFAULT '0', executor_id int(11) NOT NULL, executor_name varchar(64) DEFAULT NULL, history_cmd text, - process_instance_priority int(11) DEFAULT '2', + workflow_instance_priority int(11) DEFAULT '2', worker_group varchar(64) DEFAULT NULL, environment_code bigint(20) DEFAULT '-1', timeout int(11) DEFAULT '0', - next_process_instance_id int(11) DEFAULT '0', + next_workflow_instance_id int(11) DEFAULT '0', tenant_code varchar(64) DEFAULT 'default', var_pool longtext, dry_run int NULL DEFAULT 0, @@ -627,10 +623,6 @@ CREATE TABLE t_ds_process_instance PRIMARY KEY (id) ); --- ---------------------------- --- Records of t_ds_process_instance --- ---------------------------- - -- ---------------------------- -- Table structure for t_ds_project -- ---------------------------- @@ -743,22 +735,18 @@ CREATE TABLE t_ds_relation_datasource_user -- ---------------------------- -- ---------------------------- --- Table structure for t_ds_relation_process_instance +-- Table structure for t_ds_relation_workflow_instance -- ---------------------------- -DROP TABLE IF EXISTS t_ds_relation_process_instance CASCADE; -CREATE TABLE t_ds_relation_process_instance +DROP TABLE IF EXISTS t_ds_relation_workflow_instance CASCADE; +CREATE TABLE t_ds_relation_workflow_instance ( id int(11) NOT NULL AUTO_INCREMENT, - parent_process_instance_id int(11) DEFAULT NULL, + parent_workflow_instance_id int(11) DEFAULT NULL, parent_task_instance_id int(11) DEFAULT NULL, - process_instance_id int(11) DEFAULT NULL, + workflow_instance_id int(11) DEFAULT NULL, PRIMARY KEY (id) ); --- ---------------------------- --- Records of t_ds_relation_process_instance --- ---------------------------- - -- ---------------------------- -- Table structure for t_ds_relation_project_user -- ---------------------------- @@ -848,7 +836,7 @@ DROP TABLE IF EXISTS t_ds_schedules CASCADE; CREATE TABLE t_ds_schedules ( id 
int(11) NOT NULL AUTO_INCREMENT, - process_definition_code bigint(20) NOT NULL, + workflow_definition_code bigint(20) NOT NULL, start_time datetime NOT NULL, end_time datetime NOT NULL, timezone_id varchar(40) DEFAULT NULL, @@ -858,7 +846,7 @@ CREATE TABLE t_ds_schedules release_state tinyint(4) NOT NULL, warning_type tinyint(4) NOT NULL, warning_group_id int(11) DEFAULT NULL, - process_instance_priority int(11) DEFAULT '2', + workflow_instance_priority int(11) DEFAULT '2', worker_group varchar(255) DEFAULT '', tenant_code varchar(64) DEFAULT 'default', environment_code bigint(20) DEFAULT '-1', @@ -900,8 +888,8 @@ CREATE TABLE t_ds_task_instance task_execute_type int(11) DEFAULT '0', task_code bigint(20) NOT NULL, task_definition_version int(11) NOT NULL DEFAULT '1', - process_instance_id int(11) DEFAULT NULL, - process_instance_name varchar(255) DEFAULT NULL, + workflow_instance_id int(11) DEFAULT NULL, + workflow_instance_name varchar(255) DEFAULT NULL, project_code bigint(20) DEFAULT NULL, state tinyint(4) DEFAULT NULL, submit_time datetime DEFAULT NULL, @@ -1161,25 +1149,25 @@ VALUES(8, 'TargetTableTotalRows', 'SELECT COUNT(*) AS total FROM ${target_table} -- DROP TABLE IF EXISTS `t_ds_dq_execute_result`; CREATE TABLE `t_ds_dq_execute_result` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `process_definition_id` int(11) DEFAULT NULL, - `process_instance_id` int(11) DEFAULT NULL, - `task_instance_id` int(11) DEFAULT NULL, - `rule_type` int(11) DEFAULT NULL, - `rule_name` varchar(255) DEFAULT NULL, - `statistics_value` double DEFAULT NULL, - `comparison_value` double DEFAULT NULL, - `check_type` int(11) DEFAULT NULL, - `threshold` double DEFAULT NULL, - `operator` int(11) DEFAULT NULL, - `failure_strategy` int(11) DEFAULT NULL, - `state` int(11) DEFAULT NULL, - `user_id` int(11) DEFAULT NULL, - `comparison_type` int(11) DEFAULT NULL, - `error_output_path` text DEFAULT NULL, - `create_time` datetime DEFAULT NULL, - `update_time` datetime DEFAULT NULL, - PRIMARY KEY 
(`id`) + `id` int(11) NOT NULL AUTO_INCREMENT, + `workflow_definition_id` int(11) DEFAULT NULL, + `workflow_instance_id` int(11) DEFAULT NULL, + `task_instance_id` int(11) DEFAULT NULL, + `rule_type` int(11) DEFAULT NULL, + `rule_name` varchar(255) DEFAULT NULL, + `statistics_value` double DEFAULT NULL, + `comparison_value` double DEFAULT NULL, + `check_type` int(11) DEFAULT NULL, + `threshold` double DEFAULT NULL, + `operator` int(11) DEFAULT NULL, + `failure_strategy` int(11) DEFAULT NULL, + `state` int(11) DEFAULT NULL, + `user_id` int(11) DEFAULT NULL, + `comparison_type` int(11) DEFAULT NULL, + `error_output_path` text DEFAULT NULL, + `create_time` datetime DEFAULT NULL, + `update_time` datetime DEFAULT NULL, + PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- @@ -1418,17 +1406,17 @@ VALUES(31, 'target_database', 'select', '$t(target_database)', NULL, NULL, 'Plea -- DROP TABLE IF EXISTS `t_ds_dq_task_statistics_value`; CREATE TABLE `t_ds_dq_task_statistics_value` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `process_definition_id` int(11) DEFAULT NULL, - `task_instance_id` int(11) DEFAULT NULL, - `rule_id` int(11) NOT NULL, - `unique_code` varchar(255) NULL, - `statistics_name` varchar(255) NULL, - `statistics_value` double NULL, - `data_time` datetime DEFAULT NULL, - `create_time` datetime DEFAULT NULL, - `update_time` datetime DEFAULT NULL, - PRIMARY KEY (`id`) + `id` int(11) NOT NULL AUTO_INCREMENT, + `workflow_definition_id` int(11) DEFAULT NULL, + `task_instance_id` int(11) DEFAULT NULL, + `rule_id` int(11) NOT NULL, + `unique_code` varchar(255) NULL, + `statistics_name` varchar(255) NULL, + `statistics_value` double NULL, + `data_time` datetime DEFAULT NULL, + `create_time` datetime DEFAULT NULL, + `update_time` datetime DEFAULT NULL, + PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- @@ -1969,6 +1957,7 @@ CREATE TABLE t_ds_environment_worker_group_relation PRIMARY KEY (id), UNIQUE KEY environment_worker_group_unique 
(environment_code,worker_group) ); + DROP TABLE IF EXISTS t_ds_task_group_queue; CREATE TABLE t_ds_task_group_queue ( @@ -1976,7 +1965,7 @@ CREATE TABLE t_ds_task_group_queue task_id int(11) DEFAULT NULL , task_name VARCHAR(255) DEFAULT NULL , group_id int(11) DEFAULT NULL , - process_id int(11) DEFAULT NULL , + workflow_instance_id int(11) DEFAULT NULL , priority int(8) DEFAULT '0' , status int(4) DEFAULT '-1' , force_start int(4) DEFAULT '0' , @@ -2142,10 +2131,10 @@ CREATE TABLE t_ds_relation_sub_workflow ( ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- --- Table structure for t_ds_process_task_lineage +-- Table structure for t_ds_workflow_task_lineage -- ---------------------------- -DROP TABLE IF EXISTS t_ds_process_task_lineage; -CREATE TABLE t_ds_process_task_lineage +DROP TABLE IF EXISTS t_ds_workflow_task_lineage; +CREATE TABLE t_ds_workflow_task_lineage ( `id` int NOT NULL AUTO_INCREMENT, `workflow_definition_code` bigint(20) NOT NULL DEFAULT 0, @@ -2158,7 +2147,7 @@ CREATE TABLE t_ds_process_task_lineage `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, PRIMARY KEY (`id`), - KEY `idx_process_code_version` (`workflow_definition_code`,`workflow_definition_version`), + KEY `idx_workflow_code_version` (`workflow_definition_code`,`workflow_definition_version`), KEY `idx_task_code_version` (`task_definition_code`,`task_definition_version`), KEY `idx_dept_code` (`dept_project_code`,`dept_workflow_definition_code`,`dept_task_definition_code`) ); diff --git a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_mysql.sql b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_mysql.sql index 41f01e69cb..cfb4997fbe 100644 --- a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_mysql.sql +++ b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_mysql.sql @@ -280,14 +280,14 @@ CREATE TABLE 
`t_ds_alert` ( `sign` char(40) NOT NULL DEFAULT '' COMMENT 'sign=sha1(content)', `content` text COMMENT 'Message content (can be email, can be SMS. Mail is stored in JSON map, and SMS is string)', `alert_status` tinyint(4) DEFAULT '0' COMMENT '0:wait running,1:success,2:failed', - `warning_type` tinyint(4) DEFAULT '2' COMMENT '1 process is successfully, 2 process/task is failed', + `warning_type` tinyint(4) DEFAULT '2' COMMENT '1 workflow is successfully, 2 workflow/task is failed', `log` text COMMENT 'log', `alertgroup_id` int(11) DEFAULT NULL COMMENT 'alert group id', `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', `project_code` bigint(20) DEFAULT NULL COMMENT 'project_code', - `process_definition_code` bigint(20) DEFAULT NULL COMMENT 'process_definition_code', - `process_instance_id` int(11) DEFAULT NULL COMMENT 'process_instance_id', + `workflow_definition_code` bigint(20) DEFAULT NULL COMMENT 'workflow_definition_code', + `workflow_instance_id` int(11) DEFAULT NULL COMMENT 'workflow_instance_id', `alert_type` int(11) DEFAULT NULL COMMENT 'alert_type', PRIMARY KEY (`id`), KEY `idx_status` (`alert_status`) USING BTREE, @@ -324,27 +324,27 @@ CREATE TABLE `t_ds_alertgroup`( DROP TABLE IF EXISTS `t_ds_command`; CREATE TABLE `t_ds_command` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', - `command_type` tinyint(4) DEFAULT NULL COMMENT 'Command type: 0 start workflow, 1 start execution from current node, 2 resume fault-tolerant workflow, 3 resume pause process, 4 start execution from failed node, 5 complement, 6 schedule, 7 rerun, 8 pause, 9 stop, 10 resume waiting thread', - `process_definition_code` bigint(20) NOT NULL COMMENT 'process definition code', - `process_definition_version` int(11) DEFAULT '0' COMMENT 'process definition version', - `process_instance_id` int(11) DEFAULT '0' COMMENT 'process instance id', + `command_type` tinyint(4) DEFAULT NULL COMMENT 'Command type: 0 start 
workflow, 1 start execution from current node, 2 resume fault-tolerant workflow, 3 resume pause workflow, 4 start execution from failed node, 5 complement, 6 schedule, 7 rerun, 8 pause, 9 stop, 10 resume waiting thread', + `workflow_definition_code` bigint(20) NOT NULL COMMENT 'workflow definition code', + `workflow_definition_version` int(11) DEFAULT '0' COMMENT 'workflow definition version', + `workflow_instance_id` int(11) DEFAULT '0' COMMENT 'workflow instance id', `command_param` text COMMENT 'json command parameters', `task_depend_type` tinyint(4) DEFAULT NULL COMMENT 'Node dependency type: 0 current node, 1 forward, 2 backward', `failure_strategy` tinyint(4) DEFAULT '0' COMMENT 'Failed policy: 0 end, 1 continue', - `warning_type` tinyint(4) DEFAULT '0' COMMENT 'Alarm type: 0 is not sent, 1 process is sent successfully, 2 process is sent failed, 3 process is sent successfully and all failures are sent', + `warning_type` tinyint(4) DEFAULT '0' COMMENT 'Alarm type: 0 is not sent, 1 workflow is sent successfully, 2 workflow is sent failed, 3 workflow is sent successfully and all failures are sent', `warning_group_id` int(11) DEFAULT NULL COMMENT 'warning group', `schedule_time` datetime DEFAULT NULL COMMENT 'schedule time', `start_time` datetime DEFAULT NULL COMMENT 'start time', `executor_id` int(11) DEFAULT NULL COMMENT 'executor id', `update_time` datetime DEFAULT NULL COMMENT 'update time', - `process_instance_priority` int(11) DEFAULT '2' COMMENT 'process instance priority: 0 Highest,1 High,2 Medium,3 Low,4 Lowest', + `workflow_instance_priority` int(11) DEFAULT '2' COMMENT 'workflow instance priority: 0 Highest,1 High,2 Medium,3 Low,4 Lowest', `worker_group` varchar(255) COMMENT 'worker group', `tenant_code` varchar(64) DEFAULT 'default' COMMENT 'tenant code', `environment_code` bigint(20) DEFAULT '-1' COMMENT 'environment code', `dry_run` tinyint(4) DEFAULT '0' COMMENT 'dry run flag:0 normal, 1 dry run', `test_flag` tinyint(4) DEFAULT null COMMENT 'test 
flag:0 normal, 1 test run', PRIMARY KEY (`id`), - KEY `priority_id_index` (`process_instance_priority`,`id`) USING BTREE + KEY `priority_id_index` (`workflow_instance_priority`,`id`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COLLATE = utf8_bin; -- ---------------------------- @@ -380,9 +380,9 @@ CREATE TABLE `t_ds_error_command` ( `id` int(11) NOT NULL COMMENT 'key', `command_type` tinyint(4) DEFAULT NULL COMMENT 'command type', `executor_id` int(11) DEFAULT NULL COMMENT 'executor id', - `process_definition_code` bigint(20) NOT NULL COMMENT 'process definition code', - `process_definition_version` int(11) DEFAULT '0' COMMENT 'process definition version', - `process_instance_id` int(11) DEFAULT '0' COMMENT 'process instance id: 0', + `workflow_definition_code` bigint(20) NOT NULL COMMENT 'workflow definition code', + `workflow_definition_version` int(11) DEFAULT '0' COMMENT 'workflow definition version', + `workflow_instance_id` int(11) DEFAULT '0' COMMENT 'workflow instance id: 0', `command_param` text COMMENT 'json command parameters', `task_depend_type` tinyint(4) DEFAULT NULL COMMENT 'task depend type', `failure_strategy` tinyint(4) DEFAULT '0' COMMENT 'failure strategy', @@ -391,7 +391,7 @@ CREATE TABLE `t_ds_error_command` ( `schedule_time` datetime DEFAULT NULL COMMENT 'scheduler time', `start_time` datetime DEFAULT NULL COMMENT 'start time', `update_time` datetime DEFAULT NULL COMMENT 'update time', - `process_instance_priority` int(11) DEFAULT '2' COMMENT 'process instance priority, 0 Highest,1 High,2 Medium,3 Low,4 Lowest', + `workflow_instance_priority` int(11) DEFAULT '2' COMMENT 'workflow instance priority, 0 Highest,1 High,2 Medium,3 Low,4 Lowest', `worker_group` varchar(255) COMMENT 'worker group', `tenant_code` varchar(64) DEFAULT 'default' COMMENT 'tenant code', `environment_code` bigint(20) DEFAULT '-1' COMMENT 'environment code', @@ -406,18 +406,18 @@ CREATE TABLE `t_ds_error_command` ( -- ---------------------------- -- 
---------------------------- --- Table structure for t_ds_process_definition +-- Table structure for t_ds_workflow_definition -- ---------------------------- -DROP TABLE IF EXISTS `t_ds_process_definition`; -CREATE TABLE `t_ds_process_definition` ( +DROP TABLE IF EXISTS `t_ds_workflow_definition`; +CREATE TABLE `t_ds_workflow_definition` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `code` bigint(20) NOT NULL COMMENT 'encoding', - `name` varchar(255) DEFAULT NULL COMMENT 'process definition name', - `version` int(11) NOT NULL DEFAULT '1' COMMENT 'process definition version', + `name` varchar(255) DEFAULT NULL COMMENT 'workflow definition name', + `version` int(11) NOT NULL DEFAULT '1' COMMENT 'workflow definition version', `description` text COMMENT 'description', `project_code` bigint(20) NOT NULL COMMENT 'project code', - `release_state` tinyint(4) DEFAULT NULL COMMENT 'process definition release state:0:offline,1:online', - `user_id` int(11) DEFAULT NULL COMMENT 'process definition creator id', + `release_state` tinyint(4) DEFAULT NULL COMMENT 'workflow definition release state:0:offline,1:online', + `user_id` int(11) DEFAULT NULL COMMENT 'workflow definition creator id', `global_params` text COMMENT 'global parameters', `flag` tinyint(4) DEFAULT NULL COMMENT '0 not available, 1 available', `locations` text COMMENT 'Node location information', @@ -427,26 +427,22 @@ CREATE TABLE `t_ds_process_definition` ( `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime NOT NULL COMMENT 'update time', PRIMARY KEY (`id`,`code`), - UNIQUE KEY `process_unique` (`name`,`project_code`) USING BTREE + UNIQUE KEY `workflow_unique` (`name`,`project_code`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COLLATE = utf8_bin; -- ---------------------------- --- Records of t_ds_process_definition +-- Table structure for t_ds_workflow_definition_log -- ---------------------------- - --- ---------------------------- --- Table 
structure for t_ds_process_definition_log --- ---------------------------- -DROP TABLE IF EXISTS `t_ds_process_definition_log`; -CREATE TABLE `t_ds_process_definition_log` ( +DROP TABLE IF EXISTS `t_ds_workflow_definition_log`; +CREATE TABLE `t_ds_workflow_definition_log` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `code` bigint(20) NOT NULL COMMENT 'encoding', - `name` varchar(255) DEFAULT NULL COMMENT 'process definition name', - `version` int(11) NOT NULL DEFAULT '1' COMMENT 'process definition version', + `name` varchar(255) DEFAULT NULL COMMENT 'workflow definition name', + `version` int(11) NOT NULL DEFAULT '1' COMMENT 'workflow definition version', `description` text COMMENT 'description', `project_code` bigint(20) NOT NULL COMMENT 'project code', - `release_state` tinyint(4) DEFAULT NULL COMMENT 'process definition release state:0:offline,1:online', - `user_id` int(11) DEFAULT NULL COMMENT 'process definition creator id', + `release_state` tinyint(4) DEFAULT NULL COMMENT 'workflow definition release state:0:offline,1:online', + `user_id` int(11) DEFAULT NULL COMMENT 'workflow definition creator id', `global_params` text COMMENT 'global parameters', `flag` tinyint(4) DEFAULT NULL COMMENT '0 not available, 1 available', `locations` text COMMENT 'Node location information', @@ -538,15 +534,15 @@ CREATE TABLE `t_ds_task_definition_log` ( ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COLLATE = utf8_bin; -- ---------------------------- --- Table structure for t_ds_process_task_relation +-- Table structure for t_ds_workflow_task_relation -- ---------------------------- -DROP TABLE IF EXISTS `t_ds_process_task_relation`; -CREATE TABLE `t_ds_process_task_relation` ( +DROP TABLE IF EXISTS `t_ds_workflow_task_relation`; +CREATE TABLE `t_ds_workflow_task_relation` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `name` varchar(255) DEFAULT NULL COMMENT 'relation name', `project_code` bigint(20) NOT NULL COMMENT 
'project code', - `process_definition_code` bigint(20) NOT NULL COMMENT 'process code', - `process_definition_version` int(11) NOT NULL COMMENT 'process version', + `workflow_definition_code` bigint(20) NOT NULL COMMENT 'workflow code', + `workflow_definition_version` int(11) NOT NULL COMMENT 'workflow version', `pre_task_code` bigint(20) NOT NULL COMMENT 'pre task code', `pre_task_version` int(11) NOT NULL COMMENT 'pre task version', `post_task_code` bigint(20) NOT NULL COMMENT 'post task code', @@ -556,21 +552,21 @@ CREATE TABLE `t_ds_process_task_relation` ( `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime NOT NULL COMMENT 'update time', PRIMARY KEY (`id`), - KEY `idx_code` (`project_code`,`process_definition_code`), + KEY `idx_code` (`project_code`,`workflow_definition_code`), KEY `idx_pre_task_code_version` (`pre_task_code`,`pre_task_version`), KEY `idx_post_task_code_version` (`post_task_code`,`post_task_version`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COLLATE = utf8_bin; -- ---------------------------- --- Table structure for t_ds_process_task_relation_log +-- Table structure for t_ds_workflow_task_relation_log -- ---------------------------- -DROP TABLE IF EXISTS `t_ds_process_task_relation_log`; -CREATE TABLE `t_ds_process_task_relation_log` ( +DROP TABLE IF EXISTS `t_ds_workflow_task_relation_log`; +CREATE TABLE `t_ds_workflow_task_relation_log` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `name` varchar(255) DEFAULT NULL COMMENT 'relation name', `project_code` bigint(20) NOT NULL COMMENT 'project code', - `process_definition_code` bigint(20) NOT NULL COMMENT 'process code', - `process_definition_version` int(11) NOT NULL COMMENT 'process version', + `workflow_definition_code` bigint(20) NOT NULL COMMENT 'workflow code', + `workflow_definition_version` int(11) NOT NULL COMMENT 'workflow version', `pre_task_code` bigint(20) NOT NULL COMMENT 'pre task code', `pre_task_version` int(11) NOT 
NULL COMMENT 'pre task version', `post_task_code` bigint(20) NOT NULL COMMENT 'post task code', @@ -582,61 +578,57 @@ CREATE TABLE `t_ds_process_task_relation_log` ( `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime NOT NULL COMMENT 'update time', PRIMARY KEY (`id`), - KEY `idx_process_code_version` (`process_definition_code`,`process_definition_version`) + KEY `idx_workflow_code_version` (`workflow_definition_code`,`workflow_definition_version`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COLLATE = utf8_bin; -- ---------------------------- --- Table structure for t_ds_process_instance +-- Table structure for t_ds_workflow_instance -- ---------------------------- -DROP TABLE IF EXISTS `t_ds_process_instance`; -CREATE TABLE `t_ds_process_instance` ( +DROP TABLE IF EXISTS `t_ds_workflow_instance`; +CREATE TABLE `t_ds_workflow_instance` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', - `name` varchar(255) DEFAULT NULL COMMENT 'process instance name', - `process_definition_code` bigint(20) NOT NULL COMMENT 'process definition code', - `process_definition_version` int(11) NOT NULL DEFAULT '1' COMMENT 'process definition version', + `name` varchar(255) DEFAULT NULL COMMENT 'workflow instance name', + `workflow_definition_code` bigint(20) NOT NULL COMMENT 'workflow definition code', + `workflow_definition_version` int(11) NOT NULL DEFAULT '1' COMMENT 'workflow definition version', `project_code` bigint(20) DEFAULT NULL COMMENT 'project code', - `state` tinyint(4) DEFAULT NULL COMMENT 'process instance Status: 0 commit succeeded, 1 running, 2 prepare to pause, 3 pause, 4 prepare to stop, 5 stop, 6 fail, 7 succeed, 8 need fault tolerance, 9 kill, 10 wait for thread, 11 wait for dependency to complete', + `state` tinyint(4) DEFAULT NULL COMMENT 'workflow instance Status: 0 commit succeeded, 1 running, 2 prepare to pause, 3 pause, 4 prepare to stop, 5 stop, 6 fail, 7 succeed, 8 need fault tolerance, 9 kill, 10 wait for thread, 11 
wait for dependency to complete', `state_history` text DEFAULT NULL COMMENT 'state history desc', - `recovery` tinyint(4) DEFAULT NULL COMMENT 'process instance failover flag:0:normal,1:failover instance', - `start_time` datetime DEFAULT NULL COMMENT 'process instance start time', - `end_time` datetime DEFAULT NULL COMMENT 'process instance end time', - `run_times` int(11) DEFAULT NULL COMMENT 'process instance run times', - `host` varchar(135) DEFAULT NULL COMMENT 'process instance host', + `recovery` tinyint(4) DEFAULT NULL COMMENT 'workflow instance failover flag:0:normal,1:failover instance', + `start_time` datetime DEFAULT NULL COMMENT 'workflow instance start time', + `end_time` datetime DEFAULT NULL COMMENT 'workflow instance end time', + `run_times` int(11) DEFAULT NULL COMMENT 'workflow instance run times', + `host` varchar(135) DEFAULT NULL COMMENT 'workflow instance host', `command_type` tinyint(4) DEFAULT NULL COMMENT 'command type', `command_param` text COMMENT 'json command parameters', `task_depend_type` tinyint(4) DEFAULT NULL COMMENT 'task depend type. 0: only current node,1:before the node,2:later nodes', `max_try_times` tinyint(4) DEFAULT '0' COMMENT 'max try times', - `failure_strategy` tinyint(4) DEFAULT '0' COMMENT 'failure strategy. 0:end the process when node failed,1:continue running the other nodes when node failed', - `warning_type` tinyint(4) DEFAULT '0' COMMENT 'warning type. 0:no warning,1:warning if process success,2:warning if process failed,3:warning if success', + `failure_strategy` tinyint(4) DEFAULT '0' COMMENT 'failure strategy. 0:end the workflow when node failed,1:continue running the other nodes when node failed', + `warning_type` tinyint(4) DEFAULT '0' COMMENT 'warning type. 
0:no warning,1:warning if workflow success,2:warning if workflow failed,3:warning if success', `warning_group_id` int(11) DEFAULT NULL COMMENT 'warning group id', `schedule_time` datetime DEFAULT NULL COMMENT 'schedule time', `command_start_time` datetime DEFAULT NULL COMMENT 'command start time', `global_params` text COMMENT 'global parameters', `flag` tinyint(4) DEFAULT '1' COMMENT 'flag', `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - `is_sub_process` int(11) DEFAULT '0' COMMENT 'flag, whether the process is sub process', + `is_sub_workflow` int(11) DEFAULT '0' COMMENT 'flag, whether the workflow is sub workflow', `executor_id` int(11) NOT NULL COMMENT 'executor id', `executor_name` varchar(64) DEFAULT NULL COMMENT 'execute user name', - `history_cmd` text COMMENT 'history commands of process instance operation', - `process_instance_priority` int(11) DEFAULT '2' COMMENT 'process instance priority. 0 Highest,1 High,2 Medium,3 Low,4 Lowest', + `history_cmd` text COMMENT 'history commands of workflow instance operation', + `workflow_instance_priority` int(11) DEFAULT '2' COMMENT 'workflow instance priority. 
0 Highest,1 High,2 Medium,3 Low,4 Lowest', `worker_group` varchar(255) DEFAULT NULL COMMENT 'worker group id', `environment_code` bigint(20) DEFAULT '-1' COMMENT 'environment code', `timeout` int(11) DEFAULT '0' COMMENT 'time out', `tenant_code` varchar(64) DEFAULT 'default' COMMENT 'tenant code', `var_pool` longtext COMMENT 'var_pool', `dry_run` tinyint(4) DEFAULT '0' COMMENT 'dry run flag:0 normal, 1 dry run', - `next_process_instance_id` int(11) DEFAULT '0' COMMENT 'serial queue next processInstanceId', - `restart_time` datetime DEFAULT NULL COMMENT 'process instance restart time', + `next_workflow_instance_id` int(11) DEFAULT '0' COMMENT 'serial queue next workflowInstanceId', + `restart_time` datetime DEFAULT NULL COMMENT 'workflow instance restart time', `test_flag` tinyint(4) DEFAULT null COMMENT 'test flag:0 normal, 1 test run', PRIMARY KEY (`id`), - KEY `process_instance_index` (`process_definition_code`,`id`) USING BTREE, + KEY `workflow_instance_index` (`workflow_definition_code`,`id`) USING BTREE, KEY `start_time_index` (`start_time`,`end_time`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COLLATE = utf8_bin; --- ---------------------------- --- Records of t_ds_process_instance --- ---------------------------- - -- ---------------------------- -- Table structure for t_ds_project -- ---------------------------- @@ -744,23 +736,19 @@ CREATE TABLE `t_ds_relation_datasource_user` ( -- ---------------------------- -- ---------------------------- --- Table structure for t_ds_relation_process_instance +-- Table structure for t_ds_relation_workflow_instance -- ---------------------------- -DROP TABLE IF EXISTS `t_ds_relation_process_instance`; -CREATE TABLE `t_ds_relation_process_instance` ( +DROP TABLE IF EXISTS `t_ds_relation_workflow_instance`; +CREATE TABLE `t_ds_relation_workflow_instance` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', - `parent_process_instance_id` int(11) DEFAULT NULL COMMENT 'parent process instance id', - 
`parent_task_instance_id` int(11) DEFAULT NULL COMMENT 'parent process instance id', - `process_instance_id` int(11) DEFAULT NULL COMMENT 'child process instance id', + `parent_workflow_instance_id` int(11) DEFAULT NULL COMMENT 'parent workflow instance id', + `parent_task_instance_id` int(11) DEFAULT NULL COMMENT 'parent task instance id', + `workflow_instance_id` int(11) DEFAULT NULL COMMENT 'child workflow instance id', PRIMARY KEY (`id`), - KEY `idx_parent_process_task` (`parent_process_instance_id`,`parent_task_instance_id`) , - KEY `idx_process_instance_id` (`process_instance_id`) + KEY `idx_parent_workflow_task` (`parent_workflow_instance_id`,`parent_task_instance_id`) , + KEY `idx_workflow_instance_id` (`workflow_instance_id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COLLATE = utf8_bin; --- ---------------------------- --- Records of t_ds_relation_process_instance --- ---------------------------- - -- ---------------------------- -- Table structure for t_ds_relation_project_user -- ---------------------------- @@ -845,7 +833,7 @@ CREATE TABLE `t_ds_resources` ( DROP TABLE IF EXISTS `t_ds_schedules`; CREATE TABLE `t_ds_schedules` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', - `process_definition_code` bigint(20) NOT NULL COMMENT 'process definition code', + `workflow_definition_code` bigint(20) NOT NULL COMMENT 'workflow definition code', `start_time` datetime NOT NULL COMMENT 'start time', `end_time` datetime NOT NULL COMMENT 'end time', `timezone_id` varchar(40) DEFAULT NULL COMMENT 'schedule timezone id', @@ -853,9 +841,9 @@ CREATE TABLE `t_ds_schedules` ( `failure_strategy` tinyint(4) NOT NULL COMMENT 'failure strategy. 0:end,1:continue', `user_id` int(11) NOT NULL COMMENT 'user id', `release_state` tinyint(4) NOT NULL COMMENT 'release state. 
0:offline,1:online ', - `warning_type` tinyint(4) NOT NULL COMMENT 'Alarm type: 0 is not sent, 1 process is sent successfully, 2 process is sent failed, 3 process is sent successfully and all failures are sent', + `warning_type` tinyint(4) NOT NULL COMMENT 'Alarm type: 0 is not sent, 1 workflow is sent successfully, 2 workflow is sent failed, 3 workflow is sent successfully and all failures are sent', `warning_group_id` int(11) DEFAULT NULL COMMENT 'alert group id', - `process_instance_priority` int(11) DEFAULT '2' COMMENT 'process instance priority:0 Highest,1 High,2 Medium,3 Low,4 Lowest', + `workflow_instance_priority` int(11) DEFAULT '2' COMMENT 'workflow instance priority:0 Highest,1 High,2 Medium,3 Low,4 Lowest', `worker_group` varchar(255) DEFAULT '' COMMENT 'worker group id', `tenant_code` varchar(64) DEFAULT 'default' COMMENT 'tenant code', `environment_code` bigint(20) DEFAULT '-1' COMMENT 'environment code', @@ -895,8 +883,8 @@ CREATE TABLE `t_ds_task_instance` ( `task_execute_type` int(11) DEFAULT '0' COMMENT 'task execute type: 0-batch, 1-stream', `task_code` bigint(20) NOT NULL COMMENT 'task definition code', `task_definition_version` int(11) NOT NULL DEFAULT '1' COMMENT 'task definition version', - `process_instance_id` int(11) DEFAULT NULL COMMENT 'process instance id', - `process_instance_name` varchar(255) DEFAULT NULL COMMENT 'process instance name', + `workflow_instance_id` int(11) DEFAULT NULL COMMENT 'workflow instance id', + `workflow_instance_name` varchar(255) DEFAULT NULL COMMENT 'workflow instance name', `project_code` bigint(20) DEFAULT NULL COMMENT 'project code', `state` tinyint(4) DEFAULT NULL COMMENT 'Status: 0 commit succeeded, 1 running, 2 prepare to pause, 3 pause, 4 prepare to stop, 5 stop, 6 fail, 7 succeed, 8 need fault tolerance, 9 kill, 10 wait for thread, 11 wait for dependency to complete', `submit_time` datetime DEFAULT NULL COMMENT 'task submit time', @@ -930,7 +918,7 @@ CREATE TABLE `t_ds_task_instance` ( `memory_max` 
int(11) DEFAULT '-1' NOT NULL COMMENT 'MemoryMax(MB): -1:Infinity', `test_flag` tinyint(4) DEFAULT null COMMENT 'test flag:0 normal, 1 test run', PRIMARY KEY (`id`), - KEY `process_instance_id` (`process_instance_id`) USING BTREE, + KEY `workflow_instance_id` (`workflow_instance_id`) USING BTREE, KEY `idx_code_version` (`task_code`, `task_definition_version`) USING BTREE, KEY `idx_cache_key` (`cache_key`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COLLATE = utf8_bin; @@ -1153,8 +1141,8 @@ VALUES(8, 'TargetTableTotalRows', 'SELECT COUNT(*) AS total FROM ${target_table} DROP TABLE IF EXISTS `t_ds_dq_execute_result`; CREATE TABLE `t_ds_dq_execute_result` ( `id` int(11) NOT NULL AUTO_INCREMENT, - `process_definition_id` int(11) DEFAULT NULL, - `process_instance_id` int(11) DEFAULT NULL, + `workflow_definition_id` int(11) DEFAULT NULL, + `workflow_instance_id` int(11) DEFAULT NULL, `task_instance_id` int(11) DEFAULT NULL, `rule_type` int(11) DEFAULT NULL, `rule_name` varchar(255) DEFAULT NULL, @@ -1410,7 +1398,7 @@ VALUES(31, 'target_database', 'select', '$t(target_database)', NULL, NULL, 'Plea DROP TABLE IF EXISTS `t_ds_dq_task_statistics_value`; CREATE TABLE `t_ds_dq_task_statistics_value` ( `id` int(11) NOT NULL AUTO_INCREMENT, - `process_definition_id` int(11) DEFAULT NULL, + `workflow_definition_id` int(11) DEFAULT NULL, `task_instance_id` int(11) DEFAULT NULL, `rule_id` int(11) NOT NULL, `unique_code` varchar(255) NULL, @@ -1968,7 +1956,7 @@ CREATE TABLE `t_ds_task_group_queue` ( `task_id` int(11) DEFAULT NULL COMMENT 'taskintanceid', `task_name` varchar(255) DEFAULT NULL COMMENT 'TaskInstance name', `group_id` int(11) DEFAULT NULL COMMENT 'taskGroup id', - `process_id` int(11) DEFAULT NULL COMMENT 'processInstace id', + `workflow_instance_id` int(11) DEFAULT NULL COMMENT 'workflow instance id', `priority` int(8) DEFAULT '0' COMMENT 'priority', `status` tinyint(4) DEFAULT '-1' COMMENT '-1: waiting 1: running 2: finished', `force_start` 
tinyint(4) DEFAULT '0' COMMENT 'is force start 0 NO ,1 YES', @@ -2080,17 +2068,17 @@ CREATE TABLE t_ds_alert_send_status ( -- ---------------------------- DROP TABLE IF EXISTS `t_ds_cluster`; CREATE TABLE `t_ds_cluster`( - `id` bigint(11) NOT NULL AUTO_INCREMENT COMMENT 'id', - `code` bigint(20) DEFAULT NULL COMMENT 'encoding', - `name` varchar(255) NOT NULL COMMENT 'cluster name', - `config` text NULL DEFAULT NULL COMMENT 'this config contains many cluster variables config', - `description` text NULL DEFAULT NULL COMMENT 'the details', - `operator` int(11) DEFAULT NULL COMMENT 'operator user id', - `create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP, - `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - PRIMARY KEY (`id`), - UNIQUE KEY `cluster_name_unique` (`name`), - UNIQUE KEY `cluster_code_unique` (`code`) + `id` bigint(11) NOT NULL AUTO_INCREMENT COMMENT 'id', + `code` bigint(20) DEFAULT NULL COMMENT 'encoding', + `name` varchar(255) NOT NULL COMMENT 'cluster name', + `config` text NULL DEFAULT NULL COMMENT 'this config contains many cluster variables config', + `description` text NULL DEFAULT NULL COMMENT 'the details', + `operator` int(11) DEFAULT NULL COMMENT 'operator user id', + `create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP, + `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `cluster_name_unique` (`name`), + UNIQUE KEY `cluster_code_unique` (`code`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COLLATE = utf8_bin; -- ---------------------------- @@ -2121,10 +2109,10 @@ CREATE TABLE `t_ds_relation_sub_workflow` ( ); -- ---------------------------- --- Table structure for t_ds_process_task_lineage +-- Table structure for t_ds_workflow_task_lineage -- ---------------------------- -DROP TABLE IF EXISTS `t_ds_process_task_lineage`; -CREATE TABLE `t_ds_process_task_lineage` ( +DROP TABLE IF EXISTS `t_ds_workflow_task_lineage`; +CREATE 
TABLE `t_ds_workflow_task_lineage` ( `id` int NOT NULL AUTO_INCREMENT, `workflow_definition_code` bigint NOT NULL DEFAULT 0, `workflow_definition_version` int NOT NULL DEFAULT 0, @@ -2136,7 +2124,7 @@ CREATE TABLE `t_ds_process_task_lineage` ( `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time', PRIMARY KEY (`id`), - KEY `idx_process_code_version` (`workflow_definition_code`,`workflow_definition_version`), + KEY `idx_workflow_code_version` (`workflow_definition_code`,`workflow_definition_version`), KEY `idx_task_code_version` (`task_definition_code`,`task_definition_version`), KEY `idx_dept_code` (`dept_project_code`,`dept_workflow_definition_code`,`dept_task_definition_code`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; diff --git a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_postgresql.sql b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_postgresql.sql index 7449826503..6243772bf2 100644 --- a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_postgresql.sql +++ b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_postgresql.sql @@ -217,8 +217,8 @@ CREATE TABLE t_ds_alert ( create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , project_code bigint DEFAULT NULL, - process_definition_code bigint DEFAULT NULL, - process_instance_id int DEFAULT NULL , + workflow_definition_code bigint DEFAULT NULL, + workflow_instance_id int DEFAULT NULL , alert_type int DEFAULT NULL , PRIMARY KEY (id) ); @@ -252,7 +252,7 @@ DROP TABLE IF EXISTS t_ds_command; CREATE TABLE t_ds_command ( id int NOT NULL , command_type int DEFAULT NULL , - process_definition_code bigint NOT NULL , + workflow_definition_code bigint NOT NULL , command_param text , task_depend_type int DEFAULT NULL , failure_strategy int DEFAULT '0' , @@ -262,18 +262,18 @@ CREATE TABLE t_ds_command ( 
start_time timestamp DEFAULT NULL , executor_id int DEFAULT NULL , update_time timestamp DEFAULT NULL , - process_instance_priority int DEFAULT '2' , + workflow_instance_priority int DEFAULT '2' , worker_group varchar(255), tenant_code varchar(64) DEFAULT 'default', environment_code bigint DEFAULT '-1', dry_run int DEFAULT '0' , - process_instance_id int DEFAULT 0, - process_definition_version int DEFAULT 0, + workflow_instance_id int DEFAULT 0, + workflow_definition_version int DEFAULT 0, test_flag int DEFAULT NULL , PRIMARY KEY (id) ) ; -create index priority_id_index on t_ds_command (process_instance_priority,id); +create index priority_id_index on t_ds_command (workflow_instance_priority,id); -- -- Table structure for table t_ds_datasource @@ -301,7 +301,7 @@ DROP TABLE IF EXISTS t_ds_error_command; CREATE TABLE t_ds_error_command ( id int NOT NULL , command_type int DEFAULT NULL , - process_definition_code bigint NOT NULL , + workflow_definition_code bigint NOT NULL , command_param text , task_depend_type int DEFAULT NULL , failure_strategy int DEFAULT '0' , @@ -311,24 +311,24 @@ CREATE TABLE t_ds_error_command ( start_time timestamp DEFAULT NULL , executor_id int DEFAULT NULL , update_time timestamp DEFAULT NULL , - process_instance_priority int DEFAULT '2' , + workflow_instance_priority int DEFAULT '2' , worker_group varchar(255), tenant_code varchar(64) DEFAULT 'default', environment_code bigint DEFAULT '-1', dry_run int DEFAULT '0' , message text , - process_instance_id int DEFAULT 0, - process_definition_version int DEFAULT 0, + workflow_instance_id int DEFAULT 0, + workflow_definition_version int DEFAULT 0, test_flag int DEFAULT NULL , PRIMARY KEY (id) ); -- --- Table structure for table t_ds_process_definition +-- Table structure for table t_ds_workflow_definition -- -DROP TABLE IF EXISTS t_ds_process_definition; -CREATE TABLE t_ds_process_definition ( +DROP TABLE IF EXISTS t_ds_workflow_definition; +CREATE TABLE t_ds_workflow_definition ( id int NOT 
NULL , code bigint NOT NULL, name varchar(255) DEFAULT NULL , @@ -346,17 +346,17 @@ CREATE TABLE t_ds_process_definition ( create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) , - CONSTRAINT process_definition_unique UNIQUE (name, project_code) + CONSTRAINT workflow_definition_unique UNIQUE (name, project_code) ) ; -create index process_definition_index on t_ds_process_definition (code,id); +create index workflow_definition_index on t_ds_workflow_definition (code,id); -- --- Table structure for table t_ds_process_definition_log +-- Table structure for table t_ds_workflow_definition_log -- -DROP TABLE IF EXISTS t_ds_process_definition_log; -CREATE TABLE t_ds_process_definition_log ( +DROP TABLE IF EXISTS t_ds_workflow_definition_log; +CREATE TABLE t_ds_workflow_definition_log ( id int NOT NULL , code bigint NOT NULL, name varchar(255) DEFAULT NULL , @@ -378,7 +378,7 @@ CREATE TABLE t_ds_process_definition_log ( PRIMARY KEY (id) ) ; -create UNIQUE index uniq_idx_code_version on t_ds_process_definition_log (code,version); +create UNIQUE index uniq_idx_code_version on t_ds_workflow_definition_log (code,version); -- -- Table structure for table t_ds_task_definition @@ -462,16 +462,16 @@ create index idx_task_definition_log_code_version on t_ds_task_definition_log (c create index idx_task_definition_log_project_code on t_ds_task_definition_log (project_code); -- --- Table structure for table t_ds_process_task_relation +-- Table structure for table t_ds_workflow_task_relation -- -DROP TABLE IF EXISTS t_ds_process_task_relation; -CREATE TABLE t_ds_process_task_relation ( +DROP TABLE IF EXISTS t_ds_workflow_task_relation; +CREATE TABLE t_ds_workflow_task_relation ( id int NOT NULL , name varchar(255) DEFAULT NULL , project_code bigint DEFAULT NULL , - process_definition_code bigint DEFAULT NULL , - process_definition_version int DEFAULT NULL , + workflow_definition_code bigint DEFAULT NULL , + workflow_definition_version int DEFAULT 
NULL , pre_task_code bigint DEFAULT NULL , pre_task_version int DEFAULT '0' , post_task_code bigint DEFAULT NULL , @@ -483,21 +483,21 @@ CREATE TABLE t_ds_process_task_relation ( PRIMARY KEY (id) ) ; -create index process_task_relation_idx_project_code_process_definition_code on t_ds_process_task_relation (project_code,process_definition_code); -create index process_task_relation_idx_pre_task_code_version on t_ds_process_task_relation (pre_task_code, pre_task_version); -create index process_task_relation_idx_post_task_code_version on t_ds_process_task_relation (post_task_code, post_task_version); +create index workflow_task_relation_idx_project_code_workflow_definition_code on t_ds_workflow_task_relation (project_code,workflow_definition_code); +create index workflow_task_relation_idx_pre_task_code_version on t_ds_workflow_task_relation (pre_task_code, pre_task_version); +create index workflow_task_relation_idx_post_task_code_version on t_ds_workflow_task_relation (post_task_code, post_task_version); -- --- Table structure for table t_ds_process_task_relation_log +-- Table structure for table t_ds_workflow_task_relation_log -- -DROP TABLE IF EXISTS t_ds_process_task_relation_log; -CREATE TABLE t_ds_process_task_relation_log ( +DROP TABLE IF EXISTS t_ds_workflow_task_relation_log; +CREATE TABLE t_ds_workflow_task_relation_log ( id int NOT NULL , name varchar(255) DEFAULT NULL , project_code bigint DEFAULT NULL , - process_definition_code bigint DEFAULT NULL , - process_definition_version int DEFAULT NULL , + workflow_definition_code bigint DEFAULT NULL , + workflow_definition_version int DEFAULT NULL , pre_task_code bigint DEFAULT NULL , pre_task_version int DEFAULT '0' , post_task_code bigint DEFAULT NULL , @@ -511,18 +511,18 @@ CREATE TABLE t_ds_process_task_relation_log ( PRIMARY KEY (id) ) ; -create index process_task_relation_log_idx_project_code_process_definition_code on t_ds_process_task_relation_log (project_code,process_definition_code); +create index 
workflow_task_relation_log_idx_project_code_workflow_definition_code on t_ds_workflow_task_relation_log (project_code,workflow_definition_code); -- --- Table structure for table t_ds_process_instance +-- Table structure for table t_ds_workflow_instance -- -DROP TABLE IF EXISTS t_ds_process_instance; -CREATE TABLE t_ds_process_instance ( +DROP TABLE IF EXISTS t_ds_workflow_instance; +CREATE TABLE t_ds_workflow_instance ( id int NOT NULL , name varchar(255) DEFAULT NULL , - process_definition_code bigint DEFAULT NULL , - process_definition_version int NOT NULL DEFAULT 1 , + workflow_definition_code bigint DEFAULT NULL , + workflow_definition_version int NOT NULL DEFAULT 1 , project_code bigint DEFAULT NULL , state int DEFAULT NULL , state_history text, @@ -541,29 +541,29 @@ CREATE TABLE t_ds_process_instance ( schedule_time timestamp DEFAULT NULL , command_start_time timestamp DEFAULT NULL , global_params text , - process_instance_json text , + workflow_instance_json text , flag int DEFAULT '1' , update_time timestamp NULL , - is_sub_process int DEFAULT '0' , + is_sub_workflow int DEFAULT '0' , executor_id int NOT NULL , executor_name varchar(64) DEFAULT NULL, history_cmd text , dependence_schedule_times text , - process_instance_priority int DEFAULT '2' , + workflow_instance_priority int DEFAULT '2' , worker_group varchar(255) , environment_code bigint DEFAULT '-1', timeout int DEFAULT '0' , tenant_code varchar(64) DEFAULT 'default', var_pool text , dry_run int DEFAULT '0' , - next_process_instance_id int DEFAULT '0', + next_workflow_instance_id int DEFAULT '0', restart_time timestamp DEFAULT NULL , test_flag int DEFAULT NULL , PRIMARY KEY (id) ) ; -create index process_instance_index on t_ds_process_instance (process_definition_code,id); -create index start_time_index on t_ds_process_instance (start_time,end_time); +create index workflow_instance_index on t_ds_workflow_instance (workflow_definition_code,id); +create index start_time_index on t_ds_workflow_instance 
(start_time,end_time); -- -- Table structure for table t_ds_project @@ -662,19 +662,19 @@ CREATE TABLE t_ds_relation_datasource_user ( ) ; -- --- Table structure for table t_ds_relation_process_instance +-- Table structure for table t_ds_relation_workflow_instance -- -DROP TABLE IF EXISTS t_ds_relation_process_instance; -CREATE TABLE t_ds_relation_process_instance ( +DROP TABLE IF EXISTS t_ds_relation_workflow_instance; +CREATE TABLE t_ds_relation_workflow_instance ( id int NOT NULL , - parent_process_instance_id int DEFAULT NULL , + parent_workflow_instance_id int DEFAULT NULL , parent_task_instance_id int DEFAULT NULL , - process_instance_id int DEFAULT NULL , + workflow_instance_id int DEFAULT NULL , PRIMARY KEY (id) ) ; -create index idx_relation_process_instance_parent_process_task on t_ds_relation_process_instance (parent_process_instance_id, parent_task_instance_id); -create index idx_relation_process_instance_process_instance_id on t_ds_relation_process_instance (process_instance_id); +create index idx_relation_workflow_instance_parent_workflow_task on t_ds_relation_workflow_instance (parent_workflow_instance_id, parent_task_instance_id); +create index idx_relation_workflow_instance_workflow_instance_id on t_ds_relation_workflow_instance (workflow_instance_id); -- @@ -754,7 +754,7 @@ CREATE TABLE t_ds_resources ( DROP TABLE IF EXISTS t_ds_schedules; CREATE TABLE t_ds_schedules ( id int NOT NULL , - process_definition_code bigint NOT NULL , + workflow_definition_code bigint NOT NULL , start_time timestamp NOT NULL , end_time timestamp NOT NULL , timezone_id varchar(40) default NULL , @@ -764,7 +764,7 @@ CREATE TABLE t_ds_schedules ( release_state int NOT NULL , warning_type int NOT NULL , warning_group_id int DEFAULT NULL , - process_instance_priority int DEFAULT '2' , + workflow_instance_priority int DEFAULT '2' , worker_group varchar(255), tenant_code varchar(64) DEFAULT 'default', environment_code bigint DEFAULT '-1', @@ -798,8 +798,8 @@ CREATE TABLE 
t_ds_task_instance ( task_execute_type int DEFAULT '0', task_code bigint NOT NULL, task_definition_version int NOT NULL DEFAULT '1' , - process_instance_id int DEFAULT NULL , - process_instance_name varchar(255) DEFAULT NULL, + workflow_instance_id int DEFAULT NULL , + workflow_instance_name varchar(255) DEFAULT NULL, project_code bigint DEFAULT NULL, state int DEFAULT NULL , submit_time timestamp DEFAULT NULL , @@ -960,27 +960,27 @@ ALTER TABLE t_ds_command ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_command_id_se DROP SEQUENCE IF EXISTS t_ds_datasource_id_sequence; CREATE SEQUENCE t_ds_datasource_id_sequence; ALTER TABLE t_ds_datasource ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_datasource_id_sequence'); -DROP SEQUENCE IF EXISTS t_ds_process_definition_id_sequence; -CREATE SEQUENCE t_ds_process_definition_id_sequence; -ALTER TABLE t_ds_process_definition ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_definition_id_sequence'); -DROP SEQUENCE IF EXISTS t_ds_process_definition_log_id_sequence; -CREATE SEQUENCE t_ds_process_definition_log_id_sequence; -ALTER TABLE t_ds_process_definition_log ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_definition_log_id_sequence'); +DROP SEQUENCE IF EXISTS t_ds_workflow_definition_id_sequence; +CREATE SEQUENCE t_ds_workflow_definition_id_sequence; +ALTER TABLE t_ds_workflow_definition ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_workflow_definition_id_sequence'); +DROP SEQUENCE IF EXISTS t_ds_workflow_definition_log_id_sequence; +CREATE SEQUENCE t_ds_workflow_definition_log_id_sequence; +ALTER TABLE t_ds_workflow_definition_log ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_workflow_definition_log_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_task_definition_id_sequence; CREATE SEQUENCE t_ds_task_definition_id_sequence; ALTER TABLE t_ds_task_definition ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_task_definition_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_task_definition_log_id_sequence; CREATE SEQUENCE t_ds_task_definition_log_id_sequence; 
ALTER TABLE t_ds_task_definition_log ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_task_definition_log_id_sequence'); -DROP SEQUENCE IF EXISTS t_ds_process_task_relation_id_sequence; -CREATE SEQUENCE t_ds_process_task_relation_id_sequence; -ALTER TABLE t_ds_process_task_relation ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_task_relation_id_sequence'); -DROP SEQUENCE IF EXISTS t_ds_process_task_relation_log_id_sequence; -CREATE SEQUENCE t_ds_process_task_relation_log_id_sequence; -ALTER TABLE t_ds_process_task_relation_log ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_task_relation_log_id_sequence'); -DROP SEQUENCE IF EXISTS t_ds_process_instance_id_sequence; -CREATE SEQUENCE t_ds_process_instance_id_sequence; -ALTER TABLE t_ds_process_instance ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_instance_id_sequence'); +DROP SEQUENCE IF EXISTS t_ds_workflow_task_relation_id_sequence; +CREATE SEQUENCE t_ds_workflow_task_relation_id_sequence; +ALTER TABLE t_ds_workflow_task_relation ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_workflow_task_relation_id_sequence'); +DROP SEQUENCE IF EXISTS t_ds_workflow_task_relation_log_id_sequence; +CREATE SEQUENCE t_ds_workflow_task_relation_log_id_sequence; +ALTER TABLE t_ds_workflow_task_relation_log ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_workflow_task_relation_log_id_sequence'); +DROP SEQUENCE IF EXISTS t_ds_workflow_instance_id_sequence; +CREATE SEQUENCE t_ds_workflow_instance_id_sequence; +ALTER TABLE t_ds_workflow_instance ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_workflow_instance_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_project_id_sequence; CREATE SEQUENCE t_ds_project_id_sequence; @@ -993,9 +993,9 @@ ALTER TABLE t_ds_queue ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_queue_id_sequen DROP SEQUENCE IF EXISTS t_ds_relation_datasource_user_id_sequence; CREATE SEQUENCE t_ds_relation_datasource_user_id_sequence; ALTER TABLE t_ds_relation_datasource_user ALTER COLUMN id SET DEFAULT 
NEXTVAL('t_ds_relation_datasource_user_id_sequence'); -DROP SEQUENCE IF EXISTS t_ds_relation_process_instance_id_sequence; -CREATE SEQUENCE t_ds_relation_process_instance_id_sequence; -ALTER TABLE t_ds_relation_process_instance ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_process_instance_id_sequence'); +DROP SEQUENCE IF EXISTS t_ds_relation_workflow_instance_id_sequence; +CREATE SEQUENCE t_ds_relation_workflow_instance_id_sequence; +ALTER TABLE t_ds_relation_workflow_instance ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_workflow_instance_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_relation_project_user_id_sequence; CREATE SEQUENCE t_ds_relation_project_user_id_sequence; ALTER TABLE t_ds_relation_project_user ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_project_user_id_sequence'); @@ -1141,8 +1141,8 @@ VALUES(8, 'TargetTableTotalRows', 'SELECT COUNT(*) AS total FROM ${target_table} DROP TABLE IF EXISTS t_ds_dq_execute_result; CREATE TABLE t_ds_dq_execute_result ( id serial NOT NULL, - process_definition_id int4 NULL, - process_instance_id int4 NULL, + workflow_definition_id int4 NULL, + workflow_instance_id int4 NULL, task_instance_id int4 NULL, rule_type int4 NULL, rule_name varchar(255) DEFAULT NULL, @@ -1395,7 +1395,7 @@ VALUES(31, 'target_database', 'select', '$t(target_database)', NULL, NULL, 'Plea DROP TABLE IF EXISTS t_ds_dq_task_statistics_value; CREATE TABLE t_ds_dq_task_statistics_value ( id serial NOT NULL, - process_definition_id int4 NOT NULL, + workflow_definition_id int4 NOT NULL, task_instance_id int4 NULL, rule_id int4 NOT NULL, unique_code varchar NOT NULL, @@ -1953,7 +1953,7 @@ CREATE TABLE t_ds_task_group_queue ( task_id int DEFAULT NULL , task_name VARCHAR(255) DEFAULT NULL , group_id int DEFAULT NULL , - process_id int DEFAULT NULL , + workflow_instance_id int DEFAULT NULL , priority int DEFAULT '0' , status int DEFAULT '-1' , force_start int DEFAULT '0' , @@ -2111,10 +2111,10 @@ CREATE INDEX idx_parent_task_code ON 
t_ds_relation_sub_workflow (parent_task_cod CREATE INDEX idx_sub_workflow_instance_id ON t_ds_relation_sub_workflow (sub_workflow_instance_id); -- ---------------------------- --- Table structure for t_ds_process_task_lineage +-- Table structure for t_ds_workflow_task_lineage -- ---------------------------- -DROP TABLE IF EXISTS t_ds_process_task_lineage; -CREATE TABLE t_ds_process_task_lineage ( +DROP TABLE IF EXISTS t_ds_workflow_task_lineage; +CREATE TABLE t_ds_workflow_task_lineage ( id int NOT NULL, workflow_definition_code bigint NOT NULL DEFAULT 0, workflow_definition_version int NOT NULL DEFAULT 0, @@ -2128,9 +2128,9 @@ CREATE TABLE t_ds_process_task_lineage ( PRIMARY KEY (id) ); -create index idx_process_code_version on t_ds_process_task_lineage (workflow_definition_code,workflow_definition_version); -create index idx_task_code_version on t_ds_process_task_lineage (task_definition_code,task_definition_version); -create index idx_dept_code on t_ds_process_task_lineage (dept_project_code,dept_workflow_definition_code,dept_task_definition_code); +create index idx_workflow_code_version on t_ds_workflow_task_lineage (workflow_definition_code,workflow_definition_version); +create index idx_task_code_version on t_ds_workflow_task_lineage (task_definition_code,task_definition_version); +create index idx_dept_code on t_ds_workflow_task_lineage (dept_project_code,dept_workflow_definition_code,dept_task_definition_code); DROP TABLE IF EXISTS t_ds_jdbc_registry_data; create table t_ds_jdbc_registry_data diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/mysql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/mysql/dolphinscheduler_ddl.sql index b285825977..f3bb0b8884 100644 --- a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/mysql/dolphinscheduler_ddl.sql +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/mysql/dolphinscheduler_ddl.sql @@ -15,8 +15,8 @@ * 
limitations under the License. */ -DROP TABLE IF EXISTS `t_ds_process_task_lineage`; -CREATE TABLE `t_ds_process_task_lineage` ( +DROP TABLE IF EXISTS `t_ds_workflow_task_lineage`; +CREATE TABLE `t_ds_workflow_task_lineage` ( `id` int NOT NULL AUTO_INCREMENT, `workflow_definition_code` bigint NOT NULL DEFAULT 0, `workflow_definition_version` int NOT NULL DEFAULT 0, @@ -28,7 +28,7 @@ CREATE TABLE `t_ds_process_task_lineage` ( `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time', PRIMARY KEY (`id`), - KEY `idx_process_code_version` (`workflow_definition_code`,`workflow_definition_version`), + KEY `idx_workflow_code_version` (`workflow_definition_code`,`workflow_definition_version`), KEY `idx_task_code_version` (`task_definition_code`,`task_definition_version`), KEY `idx_dept_code` (`dept_project_code`,`dept_workflow_definition_code`,`dept_task_definition_code`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; @@ -126,3 +126,79 @@ CALL drop_column_t_ds_alert_plugin_instance; DROP PROCEDURE drop_column_t_ds_alert_plugin_instance; DROP TABLE IF EXISTS `t_ds_trigger_relation`; + +-- Rename tables and fields from process to workflow +DROP PROCEDURE if EXISTS rename_tables_and_fields_from_process_to_workflow; +delimiter d// +CREATE PROCEDURE rename_tables_and_fields_from_process_to_workflow() +BEGIN + +ALTER TABLE t_ds_alert change process_definition_code workflow_definition_code bigint(20); +ALTER TABLE t_ds_alert change process_instance_id workflow_instance_id int(11); + +ALTER TABLE t_ds_command change process_definition_code workflow_definition_code bigint(20); +ALTER TABLE t_ds_command change process_instance_priority workflow_instance_priority int(11); +ALTER TABLE t_ds_command change process_instance_id workflow_instance_id int(11); +ALTER TABLE t_ds_command change process_definition_version workflow_definition_version 
int(11); + +ALTER TABLE t_ds_error_command change process_definition_code workflow_definition_code bigint(20); +ALTER TABLE t_ds_error_command change process_instance_priority workflow_instance_priority int(11); +ALTER TABLE t_ds_error_command change process_instance_id workflow_instance_id int(11); +ALTER TABLE t_ds_error_command change process_definition_version workflow_definition_version int(11); + +ALTER TABLE t_ds_process_task_relation change process_definition_version workflow_definition_version int(11); +ALTER TABLE t_ds_process_task_relation change process_definition_code workflow_definition_code bigint(20); + +ALTER TABLE t_ds_process_task_relation_log change process_definition_version workflow_definition_version int(11); +ALTER TABLE t_ds_process_task_relation_log change process_definition_code workflow_definition_code bigint(20); + +ALTER TABLE t_ds_process_instance change process_definition_code workflow_definition_code bigint(20); +ALTER TABLE t_ds_process_instance change process_definition_version workflow_definition_version int(11); +ALTER TABLE t_ds_process_instance change is_sub_process is_sub_workflow int(11); +ALTER TABLE t_ds_process_instance change process_instance_priority workflow_instance_priority int(11); +ALTER TABLE t_ds_process_instance change next_process_instance_id next_workflow_instance_id int(11); + +ALTER TABLE t_ds_schedules change process_definition_code workflow_definition_code bigint(20); +ALTER TABLE t_ds_schedules change process_instance_priority workflow_instance_priority int(11); + +ALTER TABLE t_ds_task_instance change process_instance_id workflow_instance_id int(11); +ALTER TABLE t_ds_task_instance change process_instance_name workflow_instance_name varchar(255); + +ALTER TABLE t_ds_dq_execute_result change process_definition_id workflow_definition_id int(11); +ALTER TABLE t_ds_dq_execute_result change process_instance_id workflow_instance_id int(11); + +ALTER TABLE t_ds_dq_task_statistics_value change 
process_definition_id workflow_definition_id int(11); + +ALTER TABLE t_ds_task_group_queue change process_id workflow_instance_id int(11); + +ALTER TABLE t_ds_relation_process_instance change parent_process_instance_id parent_workflow_instance_id int(11); +ALTER TABLE t_ds_relation_process_instance change process_instance_id workflow_instance_id int(11); + +RENAME TABLE t_ds_process_definition TO t_ds_workflow_definition; +RENAME TABLE t_ds_process_definition_log TO t_ds_workflow_definition_log; +RENAME TABLE t_ds_process_task_relation TO t_ds_workflow_task_relation; +RENAME TABLE t_ds_process_task_relation_log TO t_ds_workflow_task_relation_log; +RENAME TABLE t_ds_process_instance TO t_ds_workflow_instance; +RENAME TABLE t_ds_relation_process_instance TO t_ds_relation_workflow_instance; + +ALTER TABLE `t_ds_alert` MODIFY COLUMN `warning_type` tinyint NULL DEFAULT 2 COMMENT "1 workflow is successfully, 2 workflow/task is failed", MODIFY COLUMN `workflow_definition_code` bigint NULL COMMENT "workflow_definition_code", MODIFY COLUMN `workflow_instance_id` int NULL COMMENT "workflow_instance_id"; +ALTER TABLE `t_ds_command` MODIFY COLUMN `command_type` tinyint NULL COMMENT "Command type: 0 start workflow, 1 start execution from current node, 2 resume fault-tolerant workflow, 3 resume pause workflow, 4 start execution from failed node, 5 complement, 6 schedule, 7 rerun, 8 pause, 9 stop, 10 resume waiting thread", MODIFY COLUMN `workflow_definition_code` bigint NOT NULL COMMENT "workflow definition code", MODIFY COLUMN `workflow_definition_version` int NULL DEFAULT 0 COMMENT "workflow definition version", MODIFY COLUMN `workflow_instance_id` int NULL DEFAULT 0 COMMENT "workflow instance id", MODIFY COLUMN `warning_type` tinyint NULL DEFAULT 0 COMMENT "Alarm type: 0 is not sent, 1 workflow is sent successfully, 2 workflow is sent failed, 3 workflow is sent successfully and all failures are sent", MODIFY COLUMN `workflow_instance_priority` int NULL DEFAULT 2 COMMENT 
"workflow instance priority: 0 Highest,1 High,2 Medium,3 Low,4 Lowest"; +ALTER TABLE `t_ds_error_command` MODIFY COLUMN `workflow_definition_code` bigint NOT NULL COMMENT "workflow definition code", MODIFY COLUMN `workflow_definition_version` int NULL DEFAULT 0 COMMENT "workflow definition version", MODIFY COLUMN `workflow_instance_id` int NULL DEFAULT 0 COMMENT "workflow instance id: 0", MODIFY COLUMN `workflow_instance_priority` int NULL DEFAULT 2 COMMENT "workflow instance priority, 0 Highest,1 High,2 Medium,3 Low,4 Lowest"; +ALTER TABLE `t_ds_relation_workflow_instance` MODIFY COLUMN `parent_task_instance_id` int NULL COMMENT "parent task instance id", MODIFY COLUMN `parent_workflow_instance_id` int NULL COMMENT "parent workflow instance id", MODIFY COLUMN `workflow_instance_id` int NULL COMMENT "child workflow instance id", DROP INDEX `idx_parent_process_task`, ADD INDEX `idx_parent_workflow_task` (`parent_workflow_instance_id`, `parent_task_instance_id`), ADD INDEX `idx_workflow_instance_id` (`workflow_instance_id`); +ALTER TABLE `t_ds_schedules` MODIFY COLUMN `workflow_definition_code` bigint NOT NULL COMMENT "workflow definition code", MODIFY COLUMN `warning_type` tinyint NOT NULL COMMENT "Alarm type: 0 is not sent, 1 workflow is sent successfully, 2 workflow is sent failed, 3 workflow is sent successfully and all failures are sent", MODIFY COLUMN `workflow_instance_priority` int NULL DEFAULT 2 COMMENT "workflow instance priority:0 Highest,1 High,2 Medium,3 Low,4 Lowest"; +ALTER TABLE `t_ds_task_group_queue` MODIFY COLUMN `workflow_instance_id` int NULL COMMENT "workflow instance id"; +ALTER TABLE `t_ds_task_instance` MODIFY COLUMN `workflow_instance_id` int NULL COMMENT "workflow instance id", MODIFY COLUMN `workflow_instance_name` varchar(255) NULL COMMENT "workflow instance name", RENAME INDEX `process_instance_id` TO `workflow_instance_id`; +ALTER TABLE `t_ds_workflow_definition` MODIFY COLUMN `name` varchar(255) NULL COMMENT "workflow definition 
name", MODIFY COLUMN `version` int NOT NULL DEFAULT 1 COMMENT "workflow definition version", MODIFY COLUMN `release_state` tinyint NULL COMMENT "workflow definition release state:0:offline,1:online", MODIFY COLUMN `user_id` int NULL COMMENT "workflow definition creator id", RENAME INDEX `process_unique` TO `workflow_unique`; +ALTER TABLE `t_ds_workflow_definition_log` MODIFY COLUMN `name` varchar(255) NULL COMMENT "workflow definition name", MODIFY COLUMN `version` int NOT NULL DEFAULT 1 COMMENT "workflow definition version", MODIFY COLUMN `release_state` tinyint NULL COMMENT "workflow definition release state:0:offline,1:online", MODIFY COLUMN `user_id` int NULL COMMENT "workflow definition creator id"; +ALTER TABLE `t_ds_workflow_instance` MODIFY COLUMN `name` varchar(255) NULL COMMENT "workflow instance name", MODIFY COLUMN `workflow_definition_code` bigint NOT NULL COMMENT "workflow definition code", MODIFY COLUMN `workflow_definition_version` int NOT NULL DEFAULT 1 COMMENT "workflow definition version", MODIFY COLUMN `state` tinyint NULL COMMENT "workflow instance Status: 0 commit succeeded, 1 running, 2 prepare to pause, 3 pause, 4 prepare to stop, 5 stop, 6 fail, 7 succeed, 8 need fault tolerance, 9 kill, 10 wait for thread, 11 wait for dependency to complete", MODIFY COLUMN `recovery` tinyint NULL COMMENT "workflow instance failover flag:0:normal,1:failover instance", MODIFY COLUMN `start_time` datetime NULL COMMENT "workflow instance start time", MODIFY COLUMN `end_time` datetime NULL COMMENT "workflow instance end time", MODIFY COLUMN `run_times` int NULL COMMENT "workflow instance run times", MODIFY COLUMN `host` varchar(135) NULL COMMENT "workflow instance host", MODIFY COLUMN `failure_strategy` tinyint NULL DEFAULT 0 COMMENT "failure strategy. 0:end the workflow when node failed,1:continue running the other nodes when node failed", MODIFY COLUMN `warning_type` tinyint NULL DEFAULT 0 COMMENT "warning type. 
0:no warning,1:warning if workflow success,2:warning if workflow failed,3:warning if success", MODIFY COLUMN `is_sub_workflow` int NULL DEFAULT 0 COMMENT "flag, whether the workflow is sub workflow", MODIFY COLUMN `history_cmd` text NULL COMMENT "history commands of workflow instance operation", MODIFY COLUMN `workflow_instance_priority` int NULL DEFAULT 2 COMMENT "workflow instance priority. 0 Highest,1 High,2 Medium,3 Low,4 Lowest", MODIFY COLUMN `next_workflow_instance_id` int NULL DEFAULT 0 COMMENT "serial queue next workflowInstanceId", MODIFY COLUMN `restart_time` datetime NULL COMMENT "workflow instance restart time", RENAME INDEX `process_instance_index` TO `workflow_instance_index`; +ALTER TABLE `t_ds_workflow_task_relation` MODIFY COLUMN `workflow_definition_code` bigint NOT NULL COMMENT "workflow code", MODIFY COLUMN `workflow_definition_version` int NOT NULL COMMENT "workflow version"; +ALTER TABLE `t_ds_workflow_task_relation_log` MODIFY COLUMN `workflow_definition_code` bigint NOT NULL COMMENT "workflow code", MODIFY COLUMN `workflow_definition_version` int NOT NULL COMMENT "workflow version", RENAME INDEX `idx_process_code_version` TO `idx_workflow_code_version`; +ALTER TABLE `t_ds_relation_workflow_instance` DROP INDEX `idx_process_instance_id`; + + +END; +d// +delimiter ; +CALL rename_tables_and_fields_from_process_to_workflow; +DROP PROCEDURE rename_tables_and_fields_from_process_to_workflow; + diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/mysql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/mysql/dolphinscheduler_dml.sql index 4a14f326b9..780325d42f 100644 --- a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/mysql/dolphinscheduler_dml.sql +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/mysql/dolphinscheduler_dml.sql @@ -14,3 +14,10 @@ * See the License for the specific language governing permissions and * limitations under the 
License. */ + +UPDATE t_ds_task_definition SET task_type = 'SUB_WORKFLOW' WHERE task_type = 'SUB_PROCESS'; +UPDATE t_ds_task_definition_log SET task_type = 'SUB_WORKFLOW' WHERE task_type = 'SUB_PROCESS'; +UPDATE t_ds_task_definition SET task_params = replace(task_params, 'processDefinitionCode', 'workflowDefinitionCode') where task_type = 'SUB_WORKFLOW'; +UPDATE t_ds_task_definition_log SET task_params = replace(task_params, 'processDefinitionCode', 'workflowDefinitionCode') where task_type = 'SUB_WORKFLOW'; +UPDATE t_ds_task_definition SET task_params = replace(task_params, 'processDefinitionCode', 'workflowDefinitionCode') where task_type = 'DYNAMIC'; +UPDATE t_ds_task_definition_log SET task_params = replace(task_params, 'processDefinitionCode', 'workflowDefinitionCode') where task_type = 'DYNAMIC'; diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/postgresql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/postgresql/dolphinscheduler_ddl.sql index 26ba6632ee..82759e1a50 100644 --- a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/postgresql/dolphinscheduler_ddl.sql +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/postgresql/dolphinscheduler_ddl.sql @@ -15,8 +15,8 @@ * limitations under the License. 
*/ -DROP TABLE IF EXISTS t_ds_process_task_lineage; -CREATE TABLE t_ds_process_task_lineage ( +DROP TABLE IF EXISTS t_ds_workflow_task_lineage; +CREATE TABLE t_ds_workflow_task_lineage ( id int NOT NULL, workflow_definition_code bigint NOT NULL DEFAULT 0, workflow_definition_version int NOT NULL DEFAULT 0, @@ -30,9 +30,9 @@ CREATE TABLE t_ds_process_task_lineage ( PRIMARY KEY (id) ); -create index idx_process_code_version on t_ds_process_task_lineage (workflow_definition_code,workflow_definition_version); -create index idx_task_code_version on t_ds_process_task_lineage (task_definition_code,task_definition_version); -create index idx_dept_code on t_ds_process_task_lineage (dept_project_code,dept_workflow_definition_code,dept_task_definition_code); +create index idx_workflow_code_version on t_ds_workflow_task_lineage (workflow_definition_code,workflow_definition_version); +create index idx_task_code_version on t_ds_workflow_task_lineage (task_definition_code,task_definition_version); +create index idx_dept_code on t_ds_workflow_task_lineage (dept_project_code,dept_workflow_definition_code,dept_task_definition_code); DROP TABLE IF EXISTS t_ds_jdbc_registry_data; create table t_ds_jdbc_registry_data @@ -125,3 +125,82 @@ select drop_column_t_ds_alert_plugin_instance(); DROP FUNCTION IF EXISTS drop_column_t_ds_alert_plugin_instance(); DROP TABLE IF EXISTS t_ds_trigger_relation; + +-- Rename tables and fields from process to workflow +delimiter d// +CREATE OR REPLACE FUNCTION rename_tables_and_fields_from_process_to_workflow() RETURNS void AS $$ +BEGIN + +ALTER TABLE t_ds_alert RENAME COLUMN process_definition_code TO workflow_definition_code; +ALTER TABLE t_ds_alert RENAME COLUMN process_instance_id TO workflow_instance_id; + +ALTER TABLE t_ds_command RENAME COLUMN process_definition_code TO workflow_definition_code; +ALTER TABLE t_ds_command RENAME COLUMN process_instance_priority TO workflow_instance_priority; +ALTER TABLE t_ds_command RENAME COLUMN 
process_instance_id TO workflow_instance_id; +ALTER TABLE t_ds_command RENAME COLUMN process_definition_version TO workflow_definition_version; + +ALTER TABLE t_ds_error_command RENAME COLUMN process_definition_code TO workflow_definition_code; +ALTER TABLE t_ds_error_command RENAME COLUMN process_instance_priority TO workflow_instance_priority; +ALTER TABLE t_ds_error_command RENAME COLUMN process_instance_id TO workflow_instance_id; +ALTER TABLE t_ds_error_command RENAME COLUMN process_definition_version TO workflow_definition_version; + +ALTER TABLE t_ds_process_task_relation RENAME COLUMN process_definition_version TO workflow_definition_version; +ALTER TABLE t_ds_process_task_relation RENAME COLUMN process_definition_code TO workflow_definition_code; + +ALTER TABLE t_ds_process_task_relation_log RENAME COLUMN process_definition_version TO workflow_definition_version; +ALTER TABLE t_ds_process_task_relation_log RENAME COLUMN process_definition_code TO workflow_definition_code; + +ALTER TABLE t_ds_process_instance RENAME COLUMN process_definition_code TO workflow_definition_code; +ALTER TABLE t_ds_process_instance RENAME COLUMN process_definition_version TO workflow_definition_version; +ALTER TABLE t_ds_process_instance RENAME COLUMN is_sub_process TO is_sub_workflow; +ALTER TABLE t_ds_process_instance RENAME COLUMN process_instance_priority TO workflow_instance_priority; +ALTER TABLE t_ds_process_instance RENAME COLUMN next_process_instance_id TO next_workflow_instance_id; + +ALTER TABLE t_ds_schedules RENAME COLUMN process_definition_code TO workflow_definition_code; +ALTER TABLE t_ds_schedules RENAME COLUMN process_instance_priority TO workflow_instance_priority; + +ALTER TABLE t_ds_task_instance RENAME COLUMN process_instance_id TO workflow_instance_id; +ALTER TABLE t_ds_task_instance RENAME COLUMN process_instance_name TO workflow_instance_name; + +ALTER TABLE t_ds_dq_execute_result RENAME COLUMN process_definition_id TO workflow_definition_id; +ALTER TABLE 
t_ds_dq_execute_result RENAME COLUMN process_instance_id TO workflow_instance_id; + +ALTER TABLE t_ds_dq_task_statistics_value RENAME COLUMN process_definition_id TO workflow_definition_id; + +ALTER TABLE t_ds_task_group_queue RENAME COLUMN process_id TO workflow_instance_id; + +ALTER TABLE t_ds_relation_process_instance RENAME COLUMN parent_process_instance_id TO parent_workflow_instance_id; +ALTER TABLE t_ds_relation_process_instance RENAME COLUMN process_instance_id TO workflow_instance_id; + +ALTER TABLE t_ds_process_definition RENAME TO t_ds_workflow_definition; +ALTER TABLE t_ds_process_definition_log RENAME TO t_ds_workflow_definition_log; +ALTER TABLE t_ds_process_task_relation RENAME TO t_ds_workflow_task_relation; +ALTER TABLE t_ds_process_task_relation_log RENAME TO t_ds_workflow_task_relation_log; +ALTER TABLE t_ds_process_instance RENAME TO t_ds_workflow_instance; +ALTER TABLE t_ds_relation_process_instance RENAME TO t_ds_relation_workflow_instance; + +ALTER SEQUENCE t_ds_relation_process_instance_id_sequence RENAME TO t_ds_relation_workflow_instance_id_sequence; +ALTER SEQUENCE t_ds_process_definition_id_sequence RENAME TO t_ds_workflow_definition_id_sequence; +ALTER SEQUENCE t_ds_process_definition_log_id_sequence RENAME TO t_ds_workflow_definition_log_id_sequence; +ALTER SEQUENCE t_ds_process_instance_id_sequence RENAME TO t_ds_workflow_instance_id_sequence; +ALTER SEQUENCE t_ds_process_task_relation_id_sequence RENAME TO t_ds_workflow_task_relation_id_sequence; +ALTER SEQUENCE t_ds_process_task_relation_log_id_sequence RENAME TO t_ds_workflow_task_relation_log_id_sequence; + +ALTER INDEX "idx_relation_process_instance_parent_process_task" RENAME TO "idx_relation_workflow_instance_parent_workflow_task"; +ALTER INDEX "idx_relation_process_instance_process_instance_id" RENAME TO "idx_relation_workflow_instance_workflow_instance_id"; +ALTER INDEX "process_definition_index" RENAME TO "workflow_definition_index"; +ALTER INDEX "process_definition_unique" 
RENAME TO "workflow_definition_unique"; +ALTER TABLE "t_ds_workflow_instance" RENAME COLUMN "process_instance_json" TO "workflow_instance_json"; +ALTER INDEX "process_instance_index" RENAME TO "workflow_instance_index"; +ALTER INDEX "process_task_relation_idx_post_task_code_version" RENAME TO "workflow_task_relation_idx_post_task_code_version"; +ALTER INDEX "process_task_relation_idx_pre_task_code_version" RENAME TO "workflow_task_relation_idx_pre_task_code_version"; +ALTER INDEX "process_task_relation_idx_project_code_process_definition_code" RENAME TO "workflow_task_relation_idx_project_code_workflow_definition_cod"; +ALTER INDEX "process_task_relation_log_idx_project_code_process_definition_c" RENAME TO "workflow_task_relation_log_idx_project_code_workflow_definition"; + +END; +$$ LANGUAGE plpgsql; +d// + +select rename_tables_and_fields_from_process_to_workflow(); +DROP FUNCTION IF EXISTS rename_tables_and_fields_from_process_to_workflow(); + diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/postgresql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/postgresql/dolphinscheduler_dml.sql index 4a14f326b9..780325d42f 100644 --- a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/postgresql/dolphinscheduler_dml.sql +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/postgresql/dolphinscheduler_dml.sql @@ -14,3 +14,10 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + +UPDATE t_ds_task_definition SET task_type = 'SUB_WORKFLOW' WHERE task_type = 'SUB_PROCESS'; +UPDATE t_ds_task_definition_log SET task_type = 'SUB_WORKFLOW' WHERE task_type = 'SUB_PROCESS'; +UPDATE t_ds_task_definition SET task_params = replace(task_params, 'processDefinitionCode', 'workflowDefinitionCode') where task_type = 'SUB_WORKFLOW'; +UPDATE t_ds_task_definition_log SET task_params = replace(task_params, 'processDefinitionCode', 'workflowDefinitionCode') where task_type = 'SUB_WORKFLOW'; +UPDATE t_ds_task_definition SET task_params = replace(task_params, 'processDefinitionCode', 'workflowDefinitionCode') where task_type = 'DYNAMIC'; +UPDATE t_ds_task_definition_log SET task_params = replace(task_params, 'processDefinitionCode', 'workflowDefinitionCode') where task_type = 'DYNAMIC'; diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/ErrorCommandTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/ErrorCommandTest.java index 057d6578e6..e782f9e670 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/ErrorCommandTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/ErrorCommandTest.java @@ -38,9 +38,9 @@ class ErrorCommandTest { command.setId(1); command.setCommandType(CommandType.PAUSE); command.setExecutorId(1); - command.setProcessDefinitionCode(123); - command.setProcessDefinitionVersion(1); - command.setProcessInstanceId(1); + command.setWorkflowDefinitionCode(123); + command.setWorkflowDefinitionVersion(1); + command.setWorkflowInstanceId(1); command.setCommandParam("param"); command.setTaskDependType(TaskDependType.TASK_POST); command.setFailureStrategy(FailureStrategy.CONTINUE); @@ -49,7 +49,7 @@ class ErrorCommandTest { command.setScheduleTime(new Date()); command.setStartTime(new Date()); command.setUpdateTime(new Date()); - command.setProcessInstancePriority(Priority.HIGHEST); + 
command.setWorkflowInstancePriority(Priority.HIGHEST); command.setWorkerGroup("default"); command.setTenantCode("root"); command.setEnvironmentCode(1L); @@ -59,9 +59,9 @@ class ErrorCommandTest { ErrorCommand errorCommand = new ErrorCommand(command, "test"); assertEquals(command.getCommandType(), errorCommand.getCommandType()); assertEquals(command.getExecutorId(), errorCommand.getExecutorId()); - assertEquals(command.getProcessDefinitionCode(), errorCommand.getProcessDefinitionCode()); - assertEquals(command.getProcessDefinitionVersion(), errorCommand.getProcessDefinitionVersion()); - assertEquals(command.getProcessInstanceId(), errorCommand.getProcessInstanceId()); + assertEquals(command.getWorkflowDefinitionCode(), errorCommand.getWorkflowDefinitionCode()); + assertEquals(command.getWorkflowDefinitionVersion(), errorCommand.getWorkflowDefinitionVersion()); + assertEquals(command.getWorkflowInstanceId(), errorCommand.getWorkflowInstanceId()); assertEquals(command.getCommandParam(), errorCommand.getCommandParam()); assertEquals(command.getTaskDependType(), errorCommand.getTaskDependType()); assertEquals(command.getFailureStrategy(), errorCommand.getFailureStrategy()); @@ -70,7 +70,7 @@ class ErrorCommandTest { assertEquals(command.getScheduleTime(), errorCommand.getScheduleTime()); assertEquals(command.getStartTime(), errorCommand.getStartTime()); assertEquals(command.getUpdateTime(), errorCommand.getUpdateTime()); - assertEquals(command.getProcessInstancePriority(), errorCommand.getProcessInstancePriority()); + assertEquals(command.getWorkflowInstancePriority(), errorCommand.getWorkflowInstancePriority()); assertEquals(command.getWorkerGroup(), errorCommand.getWorkerGroup()); assertEquals(command.getTenantCode(), errorCommand.getTenantCode()); assertEquals(command.getEnvironmentCode(), errorCommand.getEnvironmentCode()); diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/CommandMapperTest.java 
b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/CommandMapperTest.java index 1640d7051a..57a7d9cbfb 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/CommandMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/CommandMapperTest.java @@ -72,7 +72,7 @@ public class CommandMapperTest extends BaseDaoTest { Command expectedCommand = createCommand(); Page page = new Page<>(1, 10); IPage commandIPage = commandMapper.queryCommandPageByIds(page, - Lists.newArrayList(expectedCommand.getProcessDefinitionCode())); + Lists.newArrayList(expectedCommand.getWorkflowDefinitionCode())); List commandList = commandIPage.getRecords(); assertThat(commandList).isNotEmpty(); assertThat(commandIPage.getTotal()).isEqualTo(1); @@ -89,7 +89,7 @@ public class CommandMapperTest extends BaseDaoTest { Command actualCommand = commandMapper.selectById(expectedCommand.getId()); Assertions.assertNotNull(actualCommand); - Assertions.assertEquals(expectedCommand.getProcessDefinitionCode(), actualCommand.getProcessDefinitionCode()); + Assertions.assertEquals(expectedCommand.getWorkflowDefinitionCode(), actualCommand.getWorkflowDefinitionCode()); } /** @@ -194,7 +194,7 @@ public class CommandMapperTest extends BaseDaoTest { void deleteByWorkflowInstanceIds() { Command command = createCommand(); assertThat(commandMapper.selectList(null)).isNotEmpty(); - commandMapper.deleteByWorkflowInstanceIds(Lists.newArrayList(command.getProcessInstanceId())); + commandMapper.deleteByWorkflowInstanceIds(Lists.newArrayList(command.getWorkflowInstanceId())); assertThat(commandMapper.selectList(null)).isEmpty(); } @@ -292,7 +292,7 @@ public class CommandMapperTest extends BaseDaoTest { Command command = new Command(); command.setCommandType(commandType); - command.setProcessDefinitionCode(processDefinitionCode); + command.setWorkflowDefinitionCode(processDefinitionCode); command.setExecutorId(4); 
command.setCommandParam("test command param"); command.setTaskDependType(TaskDependType.TASK_ONLY); @@ -300,12 +300,12 @@ public class CommandMapperTest extends BaseDaoTest { command.setWarningType(WarningType.ALL); command.setWarningGroupId(1); command.setScheduleTime(DateUtils.stringToDate("2019-12-29 12:10:00")); - command.setProcessInstancePriority(Priority.MEDIUM); + command.setWorkflowInstancePriority(Priority.MEDIUM); command.setStartTime(DateUtils.stringToDate("2019-12-29 10:10:00")); command.setUpdateTime(DateUtils.stringToDate("2019-12-29 10:10:00")); command.setWorkerGroup(WorkerGroupUtils.getDefaultWorkerGroup()); - command.setProcessInstanceId(0); - command.setProcessDefinitionVersion(0); + command.setWorkflowInstanceId(0); + command.setWorkflowDefinitionVersion(0); commandMapper.insert(command); return command; diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapperTest.java index 4824dbe76f..460c441706 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapperTest.java @@ -64,7 +64,7 @@ public class ErrorCommandMapperTest extends BaseDaoTest { ErrorCommand expectedCommand = insertOne(); Page page = new Page<>(1, 10); IPage commandIPage = errorCommandMapper.queryErrorCommandPageByIds(page, - Lists.newArrayList(expectedCommand.getProcessDefinitionCode())); + Lists.newArrayList(expectedCommand.getWorkflowDefinitionCode())); List commandList = commandIPage.getRecords(); assertThat(commandList).isNotEmpty(); assertThat(commandIPage.getTotal()).isEqualTo(1); @@ -87,7 +87,7 @@ public class ErrorCommandMapperTest extends BaseDaoTest { workflowDefinition.setCreateTime(new Date()); workflowDefinitionMapper.insert(workflowDefinition); - 
errorCommand.setProcessDefinitionCode(workflowDefinition.getCode()); + errorCommand.setWorkflowDefinitionCode(workflowDefinition.getCode()); errorCommandMapper.updateById(errorCommand); List commandCounts = errorCommandMapper.countCommandState( diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapperTest.java index 3adc3719af..a09058c6ac 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapperTest.java @@ -132,11 +132,11 @@ public class ScheduleMapperTest extends BaseDaoTest { Schedule schedule = insertOne(); schedule.setUserId(user.getId()); - schedule.setProcessDefinitionCode(workflowDefinition.getCode()); + schedule.setWorkflowDefinitionCode(workflowDefinition.getCode()); scheduleMapper.updateById(schedule); Page page = new Page(1, 3); - IPage scheduleIPage = scheduleMapper.queryByProcessDefineCodePaging(page, + IPage scheduleIPage = scheduleMapper.queryByWorkflowDefinitionCodePaging(page, workflowDefinition.getCode(), ""); Assertions.assertNotEquals(0, scheduleIPage.getSize()); } @@ -170,12 +170,13 @@ public class ScheduleMapperTest extends BaseDaoTest { Schedule schedule = insertOne(); schedule.setUserId(user.getId()); - schedule.setProcessDefinitionCode(workflowDefinition.getCode()); + schedule.setWorkflowDefinitionCode(workflowDefinition.getCode()); scheduleMapper.updateById(schedule); Page page = new Page(1, 3); - IPage scheduleIPage = scheduleMapper.queryByProjectAndProcessDefineCodePaging(page, project.getCode(), - workflowDefinition.getCode(), ""); + IPage scheduleIPage = + scheduleMapper.queryByProjectAndWorkflowDefinitionCodePaging(page, project.getCode(), + workflowDefinition.getCode(), ""); Assertions.assertNotEquals(0, scheduleIPage.getSize()); } @@ -208,7 
+209,7 @@ public class ScheduleMapperTest extends BaseDaoTest { Schedule schedule = insertOne(); schedule.setUserId(user.getId()); - schedule.setProcessDefinitionCode(workflowDefinition.getCode()); + schedule.setWorkflowDefinitionCode(workflowDefinition.getCode()); scheduleMapper.updateById(schedule); Page page = new Page(1, 3); @@ -222,15 +223,15 @@ public class ScheduleMapperTest extends BaseDaoTest { * test query by process definition ids */ @Test - public void testSelectAllByProcessDefineArray() { + public void testSelectAllByWorkflowDefinitionArray() { Schedule schedule = insertOne(); - schedule.setProcessDefinitionCode(12345); + schedule.setWorkflowDefinitionCode(12345); schedule.setReleaseState(ReleaseState.ONLINE); scheduleMapper.updateById(schedule); List schedules = - scheduleMapper.selectAllByProcessDefineArray(new long[]{schedule.getProcessDefinitionCode()}); + scheduleMapper.selectAllByWorkflowDefinitionArray(new long[]{schedule.getWorkflowDefinitionCode()}); Assertions.assertNotEquals(0, schedules.size()); } @@ -238,12 +239,12 @@ public class ScheduleMapperTest extends BaseDaoTest { * test query by process definition id */ @Test - public void queryByProcessDefinitionCode() { + public void queryByWorkflowDefinitionCode() { Schedule schedule = insertOne(); - schedule.setProcessDefinitionCode(12345); + schedule.setWorkflowDefinitionCode(12345); scheduleMapper.updateById(schedule); - Schedule schedules = scheduleMapper.queryByProcessDefinitionCode(schedule.getProcessDefinitionCode()); + Schedule schedules = scheduleMapper.queryByWorkflowDefinitionCode(schedule.getWorkflowDefinitionCode()); Assertions.assertNotNull(schedules); } } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionMapperTest.java index 18abe62516..7533b44a22 100644 --- 
a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionMapperTest.java @@ -73,7 +73,7 @@ public class TaskDefinitionMapperTest extends BaseDaoTest { WorkflowTaskRelation workflowTaskRelation = new WorkflowTaskRelation(); workflowTaskRelation.setName("def 1"); workflowTaskRelation.setProjectCode(1L); - workflowTaskRelation.setProcessDefinitionCode(1L); + workflowTaskRelation.setWorkflowDefinitionCode(1L); workflowTaskRelation.setPostTaskCode(postTaskCode); workflowTaskRelation.setPreTaskCode(0L); workflowTaskRelation.setUpdateTime(new Date()); @@ -93,7 +93,7 @@ public class TaskDefinitionMapperTest extends BaseDaoTest { TaskDefinition taskDefinition = insertOne(); WorkflowTaskRelation workflowTaskRelation = insertTaskRelation(taskDefinition.getCode()); TaskDefinition result = taskDefinitionMapper.queryByName(taskDefinition.getProjectCode(), - workflowTaskRelation.getProcessDefinitionCode(), taskDefinition.getName()); + workflowTaskRelation.getWorkflowDefinitionCode(), taskDefinition.getName()); Assertions.assertNotNull(result); } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskGroupQueueMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskGroupQueueMapperTest.java index 62b0307fb2..b46bed5953 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskGroupQueueMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskGroupQueueMapperTest.java @@ -39,7 +39,7 @@ public class TaskGroupQueueMapperTest extends BaseDaoTest { TaskGroupQueue taskGroupQueue = new TaskGroupQueue(); taskGroupQueue.setTaskName("task1"); taskGroupQueue.setGroupId(10); - taskGroupQueue.setProcessId(11); + taskGroupQueue.setWorkflowInstanceId(11); taskGroupQueue.setPriority(10); 
taskGroupQueue.setStatus(TaskGroupQueueStatus.ACQUIRE_SUCCESS); Date date = new Date(System.currentTimeMillis()); diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java index 33514bfac8..97713fcba6 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java @@ -72,7 +72,7 @@ public class TaskInstanceMapperTest extends BaseDaoTest { workflowInstance.setState(WorkflowExecutionStatus.RUNNING_EXECUTION); workflowInstance.setStartTime(new Date()); workflowInstance.setEndTime(new Date()); - workflowInstance.setProcessDefinitionCode(1L); + workflowInstance.setWorkflowDefinitionCode(1L); workflowInstance.setProjectCode(1L); workflowInstance.setTestFlag(0); workflowInstanceMapper.insert(workflowInstance); @@ -89,7 +89,7 @@ public class TaskInstanceMapperTest extends BaseDaoTest { taskInstance.setState(TaskExecutionStatus.RUNNING_EXECUTION); taskInstance.setStartTime(new Date()); taskInstance.setEndTime(new Date()); - taskInstance.setProcessInstanceId(processInstanceId); + taskInstance.setWorkflowInstanceId(processInstanceId); taskInstance.setProjectCode(1L); taskInstance.setTaskType(taskType); taskInstanceMapper.insert(taskInstance); @@ -147,28 +147,29 @@ public class TaskInstanceMapperTest extends BaseDaoTest { * test find valid task list by process instance id */ @Test - public void testFindValidTaskListByProcessId() { + public void testFindValidTaskListByWorkflowInstanceId() { // insert ProcessInstance WorkflowInstance workflowInstance = insertProcessInstance(); // insert taskInstance TaskInstance task = insertTaskInstance(workflowInstance.getId()); TaskInstance task2 = insertTaskInstance(workflowInstance.getId()); - 
task.setProcessInstanceId(workflowInstance.getId()); - task2.setProcessInstanceId(workflowInstance.getId()); + task.setWorkflowInstanceId(workflowInstance.getId()); + task2.setWorkflowInstanceId(workflowInstance.getId()); taskInstanceMapper.updateById(task); taskInstanceMapper.updateById(task2); - List taskInstances = taskInstanceMapper.findValidTaskListByProcessId( - task.getProcessInstanceId(), + List taskInstances = taskInstanceMapper.findValidTaskListByWorkflowInstanceId( + task.getWorkflowInstanceId(), Flag.YES, workflowInstance.getTestFlag()); task2.setFlag(Flag.NO); taskInstanceMapper.updateById(task2); - List taskInstances1 = taskInstanceMapper.findValidTaskListByProcessId(task.getProcessInstanceId(), - Flag.NO, - workflowInstance.getTestFlag()); + List taskInstances1 = + taskInstanceMapper.findValidTaskListByWorkflowInstanceId(task.getWorkflowInstanceId(), + Flag.NO, + workflowInstance.getTestFlag()); taskInstanceMapper.deleteById(task2.getId()); taskInstanceMapper.deleteById(task.getId()); Assertions.assertNotEquals(0, taskInstances.size()); @@ -189,7 +190,7 @@ public class TaskInstanceMapperTest extends BaseDaoTest { taskInstanceMapper.updateById(task); TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndCode( - task.getProcessInstanceId(), + task.getWorkflowInstanceId(), task.getTaskCode()); taskInstanceMapper.deleteById(task.getId()); Assertions.assertNotEquals(null, taskInstance); @@ -199,7 +200,7 @@ public class TaskInstanceMapperTest extends BaseDaoTest { * test query by process instance ids and task codes */ @Test - public void testQueryByProcessInstanceIdsAndTaskCodes() { + public void testQueryByWorkflowInstanceIdsAndTaskCodes() { // insert ProcessInstance WorkflowInstance workflowInstance = insertProcessInstance(); @@ -208,8 +209,8 @@ public class TaskInstanceMapperTest extends BaseDaoTest { task.setHost("111.111.11.11"); taskInstanceMapper.updateById(task); - List taskInstances = 
taskInstanceMapper.queryByProcessInstanceIdsAndTaskCodes( - Collections.singletonList(task.getProcessInstanceId()), + List taskInstances = taskInstanceMapper.queryByWorkflowInstanceIdsAndTaskCodes( + Collections.singletonList(task.getWorkflowInstanceId()), Collections.singletonList(task.getTaskCode())); taskInstanceMapper.deleteById(task.getId()); Assertions.assertEquals(1, taskInstances.size()); @@ -257,7 +258,7 @@ public class TaskInstanceMapperTest extends BaseDaoTest { IPage taskInstanceIPage = taskInstanceMapper.queryTaskInstanceListPaging( page, definition.getProjectCode(), - task.getProcessInstanceId(), + task.getWorkflowInstanceId(), "", "", "", diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserMapperTest.java index a655d89c66..5716b7072b 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserMapperTest.java @@ -336,10 +336,10 @@ public class UserMapperTest extends BaseDaoTest { insertProcessDefinition(user.getId()); WorkflowDefinitionLog log = insertProcessDefinitionLog(user.getId()); long processDefinitionCode = log.getCode(); - List userWithCodes = userMapper.queryUserWithProcessDefinitionCode( + List userWithCodes = userMapper.queryUserWithWorkflowDefinitionCode( null); UserWithWorkflowDefinitionCode userWithCode = userWithCodes.stream() - .filter(code -> code.getProcessDefinitionCode() == processDefinitionCode) + .filter(code -> code.getWorkflowDefinitionCode() == processDefinitionCode) .findAny().orElse(null); assert userWithCode != null; Assertions.assertEquals(userWithCode.getCreatorId(), user.getId()); diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowDefinitionLogMapperTest.java 
b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowDefinitionLogMapperTest.java index 49f20c9cb6..977b48532a 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowDefinitionLogMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowDefinitionLogMapperTest.java @@ -140,19 +140,19 @@ public class WorkflowDefinitionLogMapperTest extends BaseDaoTest { } @Test - public void testQueryProcessDefinitionVersionsPaging() { + public void testQueryWorkflowDefinitionVersionsPaging() { insertOne(); Page page = new Page(1, 3); IPage processDefinitionLogs = - workflowDefinitionLogMapper.queryProcessDefinitionVersionsPaging(page, 1L, 1L); + workflowDefinitionLogMapper.queryWorkflowDefinitionVersionsPaging(page, 1L, 1L); Assertions.assertNotEquals(0, processDefinitionLogs.getTotal()); } @Test - public void testDeleteByProcessDefinitionCodeAndVersion() { + public void testDeleteByWorkflowDefinitionCodeAndVersion() { insertOne(); Page page = new Page(1, 3); - int processDefinitionLogs = workflowDefinitionLogMapper.deleteByProcessDefinitionCodeAndVersion(1L, 1); + int processDefinitionLogs = workflowDefinitionLogMapper.deleteByWorkflowDefinitionCodeAndVersion(1L, 1); Assertions.assertNotEquals(0, processDefinitionLogs); } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowInstanceMapMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowInstanceMapMapperTest.java index d50f1ec804..527fce4aec 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowInstanceMapMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowInstanceMapMapperTest.java @@ -39,9 +39,9 @@ public class WorkflowInstanceMapMapperTest extends BaseDaoTest { private WorkflowInstanceRelation insertOne() { // insertOne 
WorkflowInstanceRelation workflowInstanceRelation = new WorkflowInstanceRelation(); - workflowInstanceRelation.setProcessInstanceId(0); + workflowInstanceRelation.setWorkflowInstanceId(0); workflowInstanceRelation.setParentTaskInstanceId(0); - workflowInstanceRelation.setParentProcessInstanceId(0); + workflowInstanceRelation.setParentWorkflowInstanceId(0); workflowInstanceRelationMapper.insert(workflowInstanceRelation); return workflowInstanceRelation; } @@ -54,7 +54,7 @@ public class WorkflowInstanceMapMapperTest extends BaseDaoTest { // insertOne WorkflowInstanceRelation workflowInstanceRelation = insertOne(); // update - workflowInstanceRelation.setParentProcessInstanceId(1); + workflowInstanceRelation.setParentWorkflowInstanceId(1); int update = workflowInstanceRelationMapper.updateById(workflowInstanceRelation); Assertions.assertEquals(1, update); } @@ -87,7 +87,7 @@ public class WorkflowInstanceMapMapperTest extends BaseDaoTest { public void testQueryByParentId() { WorkflowInstanceRelation workflowInstanceRelation = insertOne(); - workflowInstanceRelation.setParentProcessInstanceId(100); + workflowInstanceRelation.setParentWorkflowInstanceId(100); workflowInstanceRelationMapper.updateById(workflowInstanceRelation); } @@ -99,10 +99,10 @@ public class WorkflowInstanceMapMapperTest extends BaseDaoTest { public void testDeleteByParentProcessId() { WorkflowInstanceRelation workflowInstanceRelation = insertOne(); - workflowInstanceRelation.setParentProcessInstanceId(100); + workflowInstanceRelation.setParentWorkflowInstanceId(100); workflowInstanceRelationMapper.updateById(workflowInstanceRelation); - int delete = workflowInstanceRelationMapper.deleteByParentProcessId( - workflowInstanceRelation.getParentProcessInstanceId()); + int delete = workflowInstanceRelationMapper.deleteByParentWorkflowInstanceId( + workflowInstanceRelation.getParentWorkflowInstanceId()); Assertions.assertEquals(1, delete); } @@ -113,14 +113,14 @@ public class WorkflowInstanceMapMapperTest 
extends BaseDaoTest { @Test public void querySubIdListByParentId() { WorkflowInstanceRelation workflowInstanceRelation = insertOne(); - workflowInstanceRelation.setProcessInstanceId(1); - workflowInstanceRelation.setParentProcessInstanceId(1010); + workflowInstanceRelation.setWorkflowInstanceId(1); + workflowInstanceRelation.setParentWorkflowInstanceId(1010); workflowInstanceRelationMapper.updateById(workflowInstanceRelation); List subIds = workflowInstanceRelationMapper - .querySubIdListByParentId(workflowInstanceRelation.getParentProcessInstanceId()); + .querySubIdListByParentId(workflowInstanceRelation.getParentWorkflowInstanceId()); Assertions.assertNotEquals(0, subIds.size()); diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowInstanceMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowInstanceMapperTest.java index 25b0c29ea5..ad09d6e858 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowInstanceMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowInstanceMapperTest.java @@ -55,7 +55,7 @@ public class WorkflowInstanceMapperTest extends BaseDaoTest { WorkflowInstance workflowInstance = new WorkflowInstance(); Date start = startTime; Date end = endTime; - workflowInstance.setProcessDefinitionCode(1L); + workflowInstance.setWorkflowDefinitionCode(1L); workflowInstance.setStartTime(start); workflowInstance.setEndTime(end); workflowInstance.setState(WorkflowExecutionStatus.SUCCESS); @@ -74,7 +74,7 @@ public class WorkflowInstanceMapperTest extends BaseDaoTest { WorkflowInstance workflowInstance = new WorkflowInstance(); Date start = new Date(2019 - 1900, 1 - 1, 1, 0, 10, 0); Date end = new Date(2019 - 1900, 1 - 1, 1, 1, 0, 0); - workflowInstance.setProcessDefinitionCode(1L); + workflowInstance.setWorkflowDefinitionCode(1L); workflowInstance.setProjectCode(1L); 
workflowInstance.setStartTime(start); workflowInstance.setEndTime(end); @@ -156,7 +156,7 @@ public class WorkflowInstanceMapperTest extends BaseDaoTest { * test query process instance page */ @Test - public void testQueryProcessInstanceListPaging() { + public void testQueryWorkflowInstanceListPaging() { int[] stateArray = new int[]{ WorkflowExecutionStatus.RUNNING_EXECUTION.getCode(), @@ -172,19 +172,19 @@ public class WorkflowInstanceMapperTest extends BaseDaoTest { WorkflowInstance workflowInstance = insertOne(); workflowInstance.setProjectCode(workflowDefinition.getProjectCode()); - workflowInstance.setProcessDefinitionCode(workflowDefinition.getCode()); + workflowInstance.setWorkflowDefinitionCode(workflowDefinition.getCode()); workflowInstance.setState(WorkflowExecutionStatus.RUNNING_EXECUTION); - workflowInstance.setIsSubProcess(Flag.NO); + workflowInstance.setIsSubWorkflow(Flag.NO); workflowInstance.setStartTime(new Date()); workflowInstanceMapper.updateById(workflowInstance); Page page = new Page(1, 3); - IPage processInstanceIPage = workflowInstanceMapper.queryProcessInstanceListPaging( + IPage processInstanceIPage = workflowInstanceMapper.queryWorkflowInstanceListPaging( page, workflowDefinition.getProjectCode(), - workflowInstance.getProcessDefinitionCode(), + workflowInstance.getWorkflowDefinitionCode(), workflowInstance.getName(), "", stateArray, @@ -244,11 +244,11 @@ public class WorkflowInstanceMapperTest extends BaseDaoTest { WorkflowInstance workflowInstance1 = insertOne(); List workflowInstances = - workflowInstanceMapper.queryByProcessDefineCode(workflowInstance.getProcessDefinitionCode(), 1); + workflowInstanceMapper.queryByWorkflowDefinitionCode(workflowInstance.getWorkflowDefinitionCode(), 1); Assertions.assertEquals(1, workflowInstances.size()); workflowInstances = - workflowInstanceMapper.queryByProcessDefineCode(workflowInstance.getProcessDefinitionCode(), 2); + 
workflowInstanceMapper.queryByWorkflowDefinitionCode(workflowInstance.getWorkflowDefinitionCode(), 2); Assertions.assertEquals(2, workflowInstances.size()); workflowInstanceMapper.deleteById(workflowInstance.getId()); @@ -259,13 +259,14 @@ public class WorkflowInstanceMapperTest extends BaseDaoTest { * test query last schedule process instance */ @Test - public void testQueryLastSchedulerProcess() { + public void testQueryLastSchedulerWorkflow() { WorkflowInstance workflowInstance = insertOne(); workflowInstance.setScheduleTime(new Date()); workflowInstanceMapper.updateById(workflowInstance); WorkflowInstance workflowInstance1 = - workflowInstanceMapper.queryLastSchedulerProcess(workflowInstance.getProcessDefinitionCode(), 0L, null, + workflowInstanceMapper.queryLastSchedulerWorkflow(workflowInstance.getWorkflowDefinitionCode(), 0L, + null, null, workflowInstance.getTestFlag()); Assertions.assertNotEquals(null, workflowInstance1); @@ -276,21 +277,23 @@ public class WorkflowInstanceMapperTest extends BaseDaoTest { * test query last manual process instance */ @Test - public void testQueryLastManualProcess() { + public void testQueryLastManualWorkflow() { WorkflowInstance workflowInstance = insertOne(); workflowInstanceMapper.updateById(workflowInstance); Date start = new Date(2019 - 1900, 1 - 1, 01, 0, 0, 0); Date end = new Date(2019 - 1900, 1 - 1, 01, 5, 0, 0); WorkflowInstance workflowInstance1 = - workflowInstanceMapper.queryLastManualProcess(workflowInstance.getProcessDefinitionCode(), null, start, + workflowInstanceMapper.queryLastManualWorkflow(workflowInstance.getWorkflowDefinitionCode(), null, + start, end, workflowInstance.getTestFlag()); Assertions.assertEquals(workflowInstance1.getId(), workflowInstance.getId()); start = new Date(2019 - 1900, 1 - 1, 01, 1, 0, 0); workflowInstance1 = - workflowInstanceMapper.queryLastManualProcess(workflowInstance.getProcessDefinitionCode(), null, start, + 
workflowInstanceMapper.queryLastManualWorkflow(workflowInstance.getWorkflowDefinitionCode(), null, + start, end, workflowInstance.getTestFlag()); Assertions.assertNull(workflowInstance1); @@ -319,7 +322,7 @@ public class WorkflowInstanceMapperTest extends BaseDaoTest { * test query top n process instance order by running duration */ @Test - public void testQueryTopNProcessInstance() { + public void testQueryTopNWorkflowInstance() { Date startTime1 = new Date(2019, 7, 9, 10, 9, 9); Date endTime1 = new Date(2019, 7, 9, 10, 9, 14); Date startTime2 = new Date(2020, 7, 9, 10, 9, 9); @@ -332,7 +335,7 @@ public class WorkflowInstanceMapperTest extends BaseDaoTest { Date start = new Date(2020, 1, 1, 1, 1, 1); Date end = new Date(2021, 1, 1, 1, 1, 1); List workflowInstances = - workflowInstanceMapper.queryTopNProcessInstance(2, start, end, WorkflowExecutionStatus.SUCCESS, 0L); + workflowInstanceMapper.queryTopNWorkflowInstance(2, start, end, WorkflowExecutionStatus.SUCCESS, 0L); Assertions.assertEquals(2, workflowInstances.size()); Assertions.assertTrue(isSortedByDuration(workflowInstances)); for (WorkflowInstance workflowInstance : workflowInstances) { diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskLineageMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskLineageMapperTest.java index bac6ae1d7c..daf3304b39 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskLineageMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskLineageMapperTest.java @@ -57,7 +57,7 @@ public class WorkflowTaskLineageMapperTest extends BaseDaoTest { workflowTaskRelation.setName("def 1"); workflowTaskRelation.setProjectCode(1L); - workflowTaskRelation.setProcessDefinitionCode(1L); + workflowTaskRelation.setWorkflowDefinitionCode(1L); workflowTaskRelation.setPostTaskCode(3L); 
workflowTaskRelation.setPostTaskVersion(1); workflowTaskRelation.setPreTaskCode(2L); @@ -113,7 +113,7 @@ public class WorkflowTaskLineageMapperTest extends BaseDaoTest { schedule.setWarningType(WarningType.NONE); schedule.setCreateTime(new Date()); schedule.setUpdateTime(new Date()); - schedule.setProcessDefinitionCode(id); + schedule.setWorkflowDefinitionCode(id); scheduleMapper.insert(schedule); } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationLogMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationLogMapperTest.java index 5cb0ce876c..9c00aead01 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationLogMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationLogMapperTest.java @@ -41,9 +41,9 @@ public class WorkflowTaskRelationLogMapperTest extends BaseDaoTest { // insertOne WorkflowTaskRelationLog processTaskRelationLog = new WorkflowTaskRelationLog(); processTaskRelationLog.setName("def 1"); - processTaskRelationLog.setProcessDefinitionVersion(1); + processTaskRelationLog.setWorkflowDefinitionVersion(1); processTaskRelationLog.setProjectCode(1L); - processTaskRelationLog.setProcessDefinitionCode(1L); + processTaskRelationLog.setWorkflowDefinitionCode(1L); processTaskRelationLog.setPostTaskCode(3L); processTaskRelationLog.setPreTaskCode(2L); processTaskRelationLog.setUpdateTime(new Date()); @@ -53,10 +53,10 @@ public class WorkflowTaskRelationLogMapperTest extends BaseDaoTest { } @Test - public void testQueryByProcessCodeAndVersion() { + public void testQueryByWorkflowCodeAndVersion() { WorkflowTaskRelationLog processTaskRelationLog = insertOne(); List processTaskRelationLogs = workflowTaskRelationLogMapper - .queryByProcessCodeAndVersion(1L, 1); + .queryByWorkflowCodeAndVersion(1L, 1); Assertions.assertNotEquals(0, 
processTaskRelationLogs.size()); } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationMapperTest.java index df90e14b23..e227980963 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkflowTaskRelationMapperTest.java @@ -44,7 +44,7 @@ public class WorkflowTaskRelationMapperTest extends BaseDaoTest { workflowTaskRelation.setName("def 1"); workflowTaskRelation.setProjectCode(1L); - workflowTaskRelation.setProcessDefinitionCode(1L); + workflowTaskRelation.setWorkflowDefinitionCode(1L); workflowTaskRelation.setPostTaskCode(3L); workflowTaskRelation.setPreTaskCode(2L); workflowTaskRelation.setUpdateTime(new Date()); @@ -54,9 +54,9 @@ public class WorkflowTaskRelationMapperTest extends BaseDaoTest { } @Test - public void testQueryByProcessCode() { + public void testQueryByWorkflowDefinitionCode() { WorkflowTaskRelation workflowTaskRelation = insertOne(); - List workflowTaskRelations = workflowTaskRelationMapper.queryByProcessCode(1L); + List workflowTaskRelations = workflowTaskRelationMapper.queryByWorkflowDefinitionCode(1L); Assertions.assertNotEquals(0, workflowTaskRelations.size()); } @@ -77,9 +77,9 @@ public class WorkflowTaskRelationMapperTest extends BaseDaoTest { } @Test - public void testDeleteByCode() { + public void testDeleteByWorkflowDefinitionCode() { WorkflowTaskRelation workflowTaskRelation = insertOne(); - int i = workflowTaskRelationMapper.deleteByCode(1L, 1L); + int i = workflowTaskRelationMapper.deleteByWorkflowDefinitionCode(1L, 1L); Assertions.assertNotEquals(0, i); } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/CommandDaoImplTest.java 
b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/CommandDaoImplTest.java index 9185a96551..6022f89a3b 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/CommandDaoImplTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/CommandDaoImplTest.java @@ -73,7 +73,7 @@ class CommandDaoImplTest extends BaseDaoTest { private Command generateCommand(CommandType commandType, int processDefinitionCode) { Command command = new Command(); command.setCommandType(commandType); - command.setProcessDefinitionCode(processDefinitionCode); + command.setWorkflowDefinitionCode(processDefinitionCode); command.setExecutorId(4); command.setCommandParam("test command param"); command.setTaskDependType(TaskDependType.TASK_ONLY); @@ -81,12 +81,12 @@ class CommandDaoImplTest extends BaseDaoTest { command.setWarningType(WarningType.ALL); command.setWarningGroupId(1); command.setScheduleTime(DateUtils.stringToDate("2019-12-29 12:10:00")); - command.setProcessInstancePriority(Priority.MEDIUM); + command.setWorkflowInstancePriority(Priority.MEDIUM); command.setStartTime(DateUtils.stringToDate("2019-12-29 10:10:00")); command.setUpdateTime(DateUtils.stringToDate("2019-12-29 10:10:00")); command.setWorkerGroup(WorkerGroupUtils.getDefaultWorkerGroup()); - command.setProcessInstanceId(0); - command.setProcessDefinitionVersion(0); + command.setWorkflowInstanceId(0); + command.setWorkflowDefinitionVersion(0); return command; } } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/TaskGroupQueueDaoImplTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/TaskGroupQueueDaoImplTest.java index 13dcf91f55..42de6a7017 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/TaskGroupQueueDaoImplTest.java +++ 
b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/TaskGroupQueueDaoImplTest.java @@ -172,7 +172,7 @@ class TaskGroupQueueDaoImplTest extends BaseDaoTest { .taskId(1) .taskName("test") .groupId(1) - .processId(1) + .workflowInstanceId(1) .priority(0) .forceStart(forceStart.getCode()) .inQueue(Flag.YES.getCode()) diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/WorkflowInstanceDaoImplTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/WorkflowInstanceDaoImplTest.java index f27595c19c..a12c57abea 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/WorkflowInstanceDaoImplTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/WorkflowInstanceDaoImplTest.java @@ -121,8 +121,8 @@ class WorkflowInstanceDaoImplTest extends BaseDaoTest { WorkflowExecutionStatus status) { WorkflowInstance workflowInstance = new WorkflowInstance(); workflowInstance.setName("WorkflowInstance" + System.currentTimeMillis()); - workflowInstance.setProcessDefinitionCode(workflowDefinitionCode); - workflowInstance.setProcessDefinitionVersion(workflowDefinitionVersion); + workflowInstance.setWorkflowDefinitionCode(workflowDefinitionCode); + workflowInstance.setWorkflowDefinitionVersion(workflowDefinitionVersion); workflowInstance.setState(status); return workflowInstance; } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ClickhouseDataSourceE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ClickhouseDataSourceE2ETest.java index 09d8fa4ea8..22bef54a8f 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ClickhouseDataSourceE2ETest.java +++ 
b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ClickhouseDataSourceE2ETest.java @@ -30,6 +30,7 @@ import org.apache.dolphinscheduler.e2e.pages.datasource.DataSourcePage; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DisableIfTestFails; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; @@ -37,6 +38,7 @@ import org.openqa.selenium.support.ui.ExpectedConditions; import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/datasource-clickhouse/docker-compose.yaml") +@DisableIfTestFails public class ClickhouseDataSourceE2ETest { private static RemoteWebDriver browser; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ClusterE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ClusterE2ETest.java index b75b496b5e..e9f86ebd37 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ClusterE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ClusterE2ETest.java @@ -29,12 +29,14 @@ import org.apache.dolphinscheduler.e2e.pages.security.SecurityPage; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DisableIfTestFails; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") +@DisableIfTestFails class ClusterE2ETest { private static final String clusterName = "test_cluster_name"; diff --git 
a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/EnvironmentE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/EnvironmentE2ETest.java index 626f2cc44b..135f4606ba 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/EnvironmentE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/EnvironmentE2ETest.java @@ -29,12 +29,14 @@ import org.apache.dolphinscheduler.e2e.pages.security.SecurityPage; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DisableIfTestFails; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") +@DisableIfTestFails class EnvironmentE2ETest { private static final String environmentName = "test_environment_name"; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/FileManageE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/FileManageE2ETest.java index 83d45f4531..298aa11220 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/FileManageE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/FileManageE2ETest.java @@ -44,6 +44,7 @@ import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DisableIfTestFails; import org.openqa.selenium.By; import 
org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; @@ -51,6 +52,7 @@ import org.openqa.selenium.support.ui.ExpectedConditions; import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/file-manage/docker-compose.yaml") +@DisableIfTestFails public class FileManageE2ETest { private static RemoteWebDriver browser; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/HiveDataSourceE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/HiveDataSourceE2ETest.java index fa9fb80913..27311f064d 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/HiveDataSourceE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/HiveDataSourceE2ETest.java @@ -30,6 +30,7 @@ import org.apache.dolphinscheduler.e2e.pages.datasource.DataSourcePage; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DisableIfTestFails; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; @@ -37,6 +38,7 @@ import org.openqa.selenium.support.ui.ExpectedConditions; import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/datasource-hive/docker-compose.yaml") +@DisableIfTestFails public class HiveDataSourceE2ETest { private static RemoteWebDriver browser; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/MysqlDataSourceE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/MysqlDataSourceE2ETest.java index faf033f0d0..81ac5c3e24 100644 --- 
a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/MysqlDataSourceE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/MysqlDataSourceE2ETest.java @@ -30,6 +30,7 @@ import org.apache.dolphinscheduler.e2e.pages.datasource.DataSourcePage; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DisableIfTestFails; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; @@ -37,6 +38,7 @@ import org.openqa.selenium.support.ui.ExpectedConditions; import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/datasource-mysql/docker-compose.yaml") +@DisableIfTestFails public class MysqlDataSourceE2ETest { private static RemoteWebDriver browser; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/PostgresDataSourceE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/PostgresDataSourceE2ETest.java index a8a1db9e3d..9a70e90176 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/PostgresDataSourceE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/PostgresDataSourceE2ETest.java @@ -30,6 +30,7 @@ import org.apache.dolphinscheduler.e2e.pages.datasource.DataSourcePage; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DisableIfTestFails; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; @@ -37,6 +38,7 @@ import org.openqa.selenium.support.ui.ExpectedConditions; import 
org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/datasource-postgresql/docker-compose.yaml") +@DisableIfTestFails public class PostgresDataSourceE2ETest { private static RemoteWebDriver browser; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ProjectE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ProjectE2ETest.java index e23a1000b1..fd09b46697 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ProjectE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ProjectE2ETest.java @@ -28,10 +28,12 @@ import org.apache.dolphinscheduler.e2e.pages.project.ProjectPage; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DisableIfTestFails; import org.openqa.selenium.remote.RemoteWebDriver; import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") +@DisableIfTestFails class ProjectE2ETest { private static final String project = "test-project-1"; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/QueueE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/QueueE2ETest.java index c9c73d8714..664d3db4a6 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/QueueE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/QueueE2ETest.java @@ -29,12 +29,14 @@ import org.apache.dolphinscheduler.e2e.pages.security.SecurityPage; import org.junit.jupiter.api.BeforeAll; import 
org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DisableIfTestFails; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") +@DisableIfTestFails class QueueE2ETest { private static final String queueName = "test_queue_name"; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/SqlServerDataSourceE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/SqlServerDataSourceE2ETest.java index c01452eaec..90121ecc26 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/SqlServerDataSourceE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/SqlServerDataSourceE2ETest.java @@ -30,6 +30,7 @@ import org.apache.dolphinscheduler.e2e.pages.datasource.DataSourcePage; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DisableIfTestFails; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; @@ -37,6 +38,7 @@ import org.openqa.selenium.support.ui.ExpectedConditions; import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/datasource-sqlserver/docker-compose.yaml") +@DisableIfTestFails public class SqlServerDataSourceE2ETest { private static RemoteWebDriver browser; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/TenantE2ETest.java 
b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/TenantE2ETest.java index 696c8f172f..92b7bdbeff 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/TenantE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/TenantE2ETest.java @@ -29,12 +29,14 @@ import org.apache.dolphinscheduler.e2e.pages.security.TenantPage; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DisableIfTestFails; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") +@DisableIfTestFails class TenantE2ETest { private static final String tenant = System.getProperty("user.name"); diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/TokenE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/TokenE2ETest.java index 7a23f7625c..a0a70a127b 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/TokenE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/TokenE2ETest.java @@ -29,11 +29,13 @@ import org.apache.dolphinscheduler.e2e.pages.security.TokenPage; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DisableIfTestFails; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = 
"docker/basic/docker-compose.yaml") +@DisableIfTestFails public class TokenE2ETest { private static final String userName = "admin"; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/UserE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/UserE2ETest.java index 59afb550ae..18abea6274 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/UserE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/UserE2ETest.java @@ -33,6 +33,7 @@ import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DisableIfTestFails; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; @@ -40,6 +41,7 @@ import org.openqa.selenium.support.ui.ExpectedConditions; import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") +@DisableIfTestFails class UserE2ETest { private static final String tenant = System.getProperty("user.name"); diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkerGroupE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkerGroupE2ETest.java index 5b6aae8105..9cdff5759d 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkerGroupE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkerGroupE2ETest.java @@ -30,6 +30,7 @@ import org.apache.dolphinscheduler.e2e.pages.security.WorkerGroupPage; import 
org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DisableIfTestFails; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; @@ -37,6 +38,7 @@ import org.openqa.selenium.support.ui.ExpectedConditions; import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") +@DisableIfTestFails class WorkerGroupE2ETest { private static final String workerGroupName = "test_worker_group"; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowE2ETest.java index 11543d709c..ae2c80b398 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowE2ETest.java @@ -41,12 +41,14 @@ import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DisableIfTestFails; import org.openqa.selenium.By; import org.openqa.selenium.remote.RemoteWebDriver; import org.openqa.selenium.support.ui.ExpectedConditions; import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") +@DisableIfTestFails class WorkflowE2ETest { private static final String project = "test-workflow-1"; @@ -143,9 +145,9 @@ class WorkflowE2ETest { .goToTab(WorkflowDefinitionTab.class); workflowDefinitionPage - .createSubProcessWorkflow() + .createSubWorkflowTask() - .addTask(TaskType.SUB_PROCESS) + .addTask(TaskType.SUB_WORKFLOW) .childNode("test-workflow-1") 
.name("test-sub-1") .submit() diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowHttpTaskE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowHttpTaskE2ETest.java index e29c0f1f87..5c230a8a02 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowHttpTaskE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowHttpTaskE2ETest.java @@ -39,11 +39,14 @@ import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DisableIfTestFails; import org.openqa.selenium.By; import org.openqa.selenium.remote.RemoteWebDriver; import org.openqa.selenium.support.ui.ExpectedConditions; import org.testcontainers.shaded.org.awaitility.Awaitility; + @DolphinScheduler(composeFiles = "docker/workflow-http/docker-compose.yaml") +@DisableIfTestFails public class WorkflowHttpTaskE2ETest { private static final String project = "test-workflow-1"; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowJavaTaskE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowJavaTaskE2ETest.java index 332bb4ff6f..b90f29e680 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowJavaTaskE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowJavaTaskE2ETest.java @@ -40,12 +40,14 @@ import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import 
org.junitpioneer.jupiter.DisableIfTestFails; import org.openqa.selenium.By; import org.openqa.selenium.remote.RemoteWebDriver; import org.openqa.selenium.support.ui.ExpectedConditions; import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") +@DisableIfTestFails public class WorkflowJavaTaskE2ETest { private static final String project = "test-workflow-1"; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowSwitchE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowSwitchE2ETest.java index 685d5fdc52..6d6215c8ba 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowSwitchE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowSwitchE2ETest.java @@ -43,10 +43,12 @@ import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DisableIfTestFails; import org.openqa.selenium.remote.RemoteWebDriver; import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") +@DisableIfTestFails class WorkflowSwitchE2ETest { private static final String project = "test-workflow-1"; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/tasks/PythonTaskE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/tasks/PythonTaskE2ETest.java index 239da7dfeb..cb18e58848 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/tasks/PythonTaskE2ETest.java +++ 
b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/tasks/PythonTaskE2ETest.java @@ -42,8 +42,10 @@ import java.util.Date; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DisableIfTestFails; @DolphinScheduler(composeFiles = "docker/python-task/docker-compose.yaml") +@DisableIfTestFails public class PythonTaskE2ETest extends BaseWorkflowE2ETest { private static final PythonEnvironment pythonEnvironment = new PythonEnvironment(); diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/tasks/ShellTaskE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/tasks/ShellTaskE2ETest.java index 12f8a4b14e..7a547201c9 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/tasks/ShellTaskE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/tasks/ShellTaskE2ETest.java @@ -39,9 +39,11 @@ import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.MethodOrderer; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestMethodOrder; +import org.junitpioneer.jupiter.DisableIfTestFails; @TestMethodOrder(MethodOrderer.MethodName.class) @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") +@DisableIfTestFails public class ShellTaskE2ETest extends BaseWorkflowE2ETest { @BeforeAll diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/ProjectDetailPage.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/ProjectDetailPage.java index 5087e3644c..b7cee41c43 100644 --- 
a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/ProjectDetailPage.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/ProjectDetailPage.java @@ -37,10 +37,10 @@ import org.openqa.selenium.support.ui.ExpectedConditions; public final class ProjectDetailPage extends NavBarPage { @FindBy(css = ".tab-vertical .n-submenu:nth-of-type(2) .n-menu-item:nth-of-type(2) > .n-menu-item-content") - private WebElement menuProcessDefinition; + private WebElement menuWorkflowDefinition; @FindBy(css = ".tab-vertical .n-submenu:nth-of-type(2) .n-menu-item:nth-of-type(3) > .n-menu-item-content") - private WebElement menuProcessInstances; + private WebElement menuWorkflowInstances; @FindBy(xpath = "//div[contains(@class, 'n-menu-item-content')]//div[contains(., 'Task Instance')]") private WebElement menuTaskInstances; @@ -52,13 +52,13 @@ public final class ProjectDetailPage extends NavBarPage { @SneakyThrows public T goToTab(Class tab) { if (tab == WorkflowDefinitionTab.class) { - menuProcessDefinition().click(); + menuWorkflowDefinition().click(); WebDriverWaitFactory.createWebDriverWait(driver) .until(ExpectedConditions.urlContains("/workflow-definition")); return tab.cast(new WorkflowDefinitionTab(driver)); } if (tab == WorkflowInstanceTab.class) { - menuProcessInstances().click(); + menuWorkflowInstances().click(); WebDriverWaitFactory.createWebDriverWait(driver) .until(ExpectedConditions.urlContains("/workflow/instances")); return tab.cast(new WorkflowInstanceTab(driver)); diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/TaskInstanceTab.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/TaskInstanceTab.java index 592fd0e0b2..8bd1747ab9 100644 --- 
a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/TaskInstanceTab.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/TaskInstanceTab.java @@ -62,7 +62,7 @@ public final class TaskInstanceTab extends NavBarPage implements ProjectDetailPa } public String workflowInstanceName() { - return row.findElement(By.cssSelector("td[data-col-key=processInstanceName]")).getText(); + return row.findElement(By.cssSelector("td[data-col-key=workflowInstanceName]")).getText(); } public int retryTimes() { diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowDefinitionTab.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowDefinitionTab.java index ee38a0ff7d..012ed53be6 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowDefinitionTab.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowDefinitionTab.java @@ -40,8 +40,8 @@ import org.testcontainers.shaded.org.awaitility.Awaitility; @Getter public final class WorkflowDefinitionTab extends NavBarPage implements ProjectDetailPage.Tab { - @FindBy(className = "btn-create-process") - private WebElement buttonCreateProcess; + @FindBy(className = "btn-create-workflow") + private WebElement buttonCreateWorkflow; @FindBys({ @FindBy(className = "btn-selected"), @@ -78,13 +78,13 @@ public final class WorkflowDefinitionTab extends NavBarPage implements ProjectDe } public WorkflowForm createWorkflow() { - buttonCreateProcess().click(); + buttonCreateWorkflow().click(); return new WorkflowForm(driver); } - public WorkflowForm createSubProcessWorkflow() { - 
buttonCreateProcess().click(); + public WorkflowForm createSubWorkflowTask() { + buttonCreateWorkflow().click(); subProcessList().click(); return new WorkflowForm(driver); diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowForm.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowForm.java index 319a59d37d..dcb2c419f7 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowForm.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowForm.java @@ -82,7 +82,7 @@ public final class WorkflowForm { switch (type) { case SHELL: return (T) new ShellTaskForm(this); - case SUB_PROCESS: + case SUB_WORKFLOW: return (T) new SubWorkflowTaskForm(this); case SWITCH: return (T) new SwitchTaskForm(this); @@ -127,7 +127,7 @@ public final class WorkflowForm { public enum TaskType { SHELL, - SUB_PROCESS, + SUB_WORKFLOW, SWITCH, HTTP, JAVA, diff --git a/dolphinscheduler-e2e/pom.xml b/dolphinscheduler-e2e/pom.xml index 3ee8886acb..6f63da0ea0 100644 --- a/dolphinscheduler-e2e/pom.xml +++ b/dolphinscheduler-e2e/pom.xml @@ -45,6 +45,7 @@ 31.0.1-jre 2.22.2 1.19.8 + 2.2.0 @@ -72,6 +73,12 @@ org.testcontainers junit-jupiter + + org.junit-pioneer + junit-pioneer + ${junit-pioneer.version} + test + org.testcontainers diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/TaskGroupCoordinator.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/TaskGroupCoordinator.java index 4c0da812c1..9d1ea6472d 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/TaskGroupCoordinator.java +++ 
b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/TaskGroupCoordinator.java @@ -382,7 +382,7 @@ public class TaskGroupCoordinator extends BaseDaemonThread { .taskId(taskInstance.getId()) .taskName(taskInstance.getName()) .groupId(taskInstance.getTaskGroupId()) - .processId(taskInstance.getProcessInstanceId()) + .workflowInstanceId(taskInstance.getWorkflowInstanceId()) .priority(taskInstance.getTaskGroupPriority()) .inQueue(Flag.YES.getCode()) .forceStart(Flag.NO.getCode()) @@ -448,10 +448,10 @@ public class TaskGroupCoordinator extends BaseDaemonThread { "The TaskInstance: " + taskInstance.getId() + " state is " + taskInstance.getState() + ", no need to notify"); } - WorkflowInstance workflowInstance = workflowInstanceDao.queryById(taskInstance.getProcessInstanceId()); + WorkflowInstance workflowInstance = workflowInstanceDao.queryById(taskInstance.getWorkflowInstanceId()); if (workflowInstance == null) { throw new UnsupportedOperationException( - "The WorkflowInstance: " + taskInstance.getProcessInstanceId() + "The WorkflowInstance: " + taskInstance.getWorkflowInstanceId() + " is not exist, no need to notify"); } if (workflowInstance.getState() != WorkflowExecutionStatus.RUNNING_EXECUTION) { diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/AbstractCommandHandler.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/AbstractCommandHandler.java index 8980aa66be..094ee79f18 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/AbstractCommandHandler.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/AbstractCommandHandler.java @@ -98,8 +98,8 @@ public abstract class AbstractCommandHandler implements ICommandHandler { protected void assembleWorkflowDefinition( final 
WorkflowExecuteContextBuilder workflowExecuteContextBuilder) { final Command command = workflowExecuteContextBuilder.getCommand(); - final long workflowDefinitionCode = command.getProcessDefinitionCode(); - final int workflowDefinitionVersion = command.getProcessDefinitionVersion(); + final long workflowDefinitionCode = command.getWorkflowDefinitionCode(); + final int workflowDefinitionVersion = command.getWorkflowDefinitionVersion(); final WorkflowDefinition workflowDefinition = workflowDefinitionLogDao.queryByDefinitionCodeAndVersion( workflowDefinitionCode, diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/ReRunWorkflowCommandHandler.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/ReRunWorkflowCommandHandler.java index f11a5fd4cc..c685adf7f5 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/ReRunWorkflowCommandHandler.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/ReRunWorkflowCommandHandler.java @@ -68,7 +68,7 @@ public class ReRunWorkflowCommandHandler extends RunWorkflowCommandHandler { @Override protected void assembleWorkflowInstance(final WorkflowExecuteContextBuilder workflowExecuteContextBuilder) { final Command command = workflowExecuteContextBuilder.getCommand(); - final int workflowInstanceId = command.getProcessInstanceId(); + final int workflowInstanceId = command.getWorkflowInstanceId(); final WorkflowInstance workflowInstance = workflowInstanceDao.queryOptionalById(workflowInstanceId) .orElseThrow(() -> new IllegalArgumentException("Cannot find WorkflowInstance:" + workflowInstanceId)); workflowInstance.setVarPool(null); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/RecoverFailureTaskCommandHandler.java 
b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/RecoverFailureTaskCommandHandler.java index 6b712de492..7447070f55 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/RecoverFailureTaskCommandHandler.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/RecoverFailureTaskCommandHandler.java @@ -92,7 +92,7 @@ public class RecoverFailureTaskCommandHandler extends AbstractCommandHandler { protected void assembleWorkflowInstance( final WorkflowExecuteContextBuilder workflowExecuteContextBuilder) { final Command command = workflowExecuteContextBuilder.getCommand(); - final int workflowInstanceId = command.getProcessInstanceId(); + final int workflowInstanceId = command.getWorkflowInstanceId(); final WorkflowInstance workflowInstance = workflowInstanceDao.queryOptionalById(workflowInstanceId) .orElseThrow(() -> new IllegalArgumentException("Cannot find WorkflowInstance:" + workflowInstanceId)); workflowInstance.setVarPool(null); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/RunWorkflowCommandHandler.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/RunWorkflowCommandHandler.java index 27ebeb9796..020eaeda23 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/RunWorkflowCommandHandler.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/RunWorkflowCommandHandler.java @@ -79,7 +79,7 @@ public class RunWorkflowCommandHandler extends AbstractCommandHandler { protected void assembleWorkflowInstance(final WorkflowExecuteContextBuilder workflowExecuteContextBuilder) { final WorkflowDefinition workflowDefinition = 
workflowExecuteContextBuilder.getWorkflowDefinition(); final Command command = workflowExecuteContextBuilder.getCommand(); - final WorkflowInstance workflowInstance = workflowInstanceDao.queryById(command.getProcessInstanceId()); + final WorkflowInstance workflowInstance = workflowInstanceDao.queryById(command.getWorkflowInstanceId()); workflowInstance.setStateWithDesc(WorkflowExecutionStatus.RUNNING_EXECUTION, command.getCommandType().name()); workflowInstance.setHost(masterConfig.getMasterAddress()); workflowInstance.setCommandParam(command.getCommandParam()); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/WorkflowFailoverCommandHandler.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/WorkflowFailoverCommandHandler.java index f47f43adc1..b4b14e2695 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/WorkflowFailoverCommandHandler.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/WorkflowFailoverCommandHandler.java @@ -83,7 +83,7 @@ public class WorkflowFailoverCommandHandler extends AbstractCommandHandler { protected void assembleWorkflowInstance( final WorkflowExecuteContextBuilder workflowExecuteContextBuilder) { final Command command = workflowExecuteContextBuilder.getCommand(); - final int workflowInstanceId = command.getProcessInstanceId(); + final int workflowInstanceId = command.getWorkflowInstanceId(); final WorkflowInstance workflowInstance = workflowInstanceDao.queryOptionalById(workflowInstanceId) .orElseThrow(() -> new IllegalArgumentException("Cannot find WorkflowInstance:" + workflowInstanceId)); final WorkflowFailoverCommandParam workflowFailoverCommandParam = JSONUtils.parseObject( diff --git 
a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/task/runnable/AbstractTaskInstanceFactory.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/task/runnable/AbstractTaskInstanceFactory.java index 85993b0388..163b9732da 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/task/runnable/AbstractTaskInstanceFactory.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/task/runnable/AbstractTaskInstanceFactory.java @@ -39,8 +39,8 @@ public abstract class AbstractTaskInstanceFactory private DependResult calculateConditionResult() { final List taskInstances = taskInstanceDao.queryValidTaskListByWorkflowInstanceId( - taskExecutionContext.getProcessInstanceId(), taskExecutionContext.getTestFlag()); + taskExecutionContext.getWorkflowInstanceId(), taskExecutionContext.getTestFlag()); final Map taskInstanceMap = taskInstances.stream() .collect(Collectors.toMap(TaskInstance::getTaskCode, Function.identity())); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/condition/ConditionLogicTaskPluginFactory.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/condition/ConditionLogicTaskPluginFactory.java index 7572decb16..dc797c6a68 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/condition/ConditionLogicTaskPluginFactory.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/condition/ConditionLogicTaskPluginFactory.java @@ -42,7 +42,7 @@ public class ConditionLogicTaskPluginFactory implements ILogicTaskPluginFactory< @Override public ConditionLogicTask createLogicTask(TaskExecutionContext taskExecutionContext) { IWorkflowExecutionRunnable workflowExecutionRunnable = - 
workflowExecutionRunnableMemoryRepository.get(taskExecutionContext.getProcessInstanceId()); + workflowExecutionRunnableMemoryRepository.get(taskExecutionContext.getWorkflowInstanceId()); return new ConditionLogicTask(workflowExecutionRunnable, taskExecutionContext, taskInstanceDao); } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dependent/DependentAsyncTaskExecuteFunction.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dependent/DependentAsyncTaskExecuteFunction.java index ee80b90d4b..bf8752a98d 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dependent/DependentAsyncTaskExecuteFunction.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dependent/DependentAsyncTaskExecuteFunction.java @@ -87,7 +87,7 @@ public class DependentAsyncTaskExecuteFunction implements AsyncTaskExecuteFuncti this.taskDefinitionDao = taskDefinitionDao; this.taskInstanceDao = taskInstanceDao; this.workflowInstance = - workflowInstanceDao.queryById(taskExecutionContext.getProcessInstanceId()); + workflowInstanceDao.queryById(taskExecutionContext.getWorkflowInstanceId()); this.dependentDate = calculateDependentDate(); this.dependentTaskList = initializeDependentTaskList(); log.info("Initialized dependent task list successfully"); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dependent/DependentLogicTaskPluginFactory.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dependent/DependentLogicTaskPluginFactory.java index 25825010e2..31c4f67bf6 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dependent/DependentLogicTaskPluginFactory.java +++ 
b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dependent/DependentLogicTaskPluginFactory.java @@ -53,7 +53,7 @@ public class DependentLogicTaskPluginFactory implements ILogicTaskPluginFactory< @Override public DependentLogicTask createLogicTask(TaskExecutionContext taskExecutionContext) throws LogicTaskInitializeException { - final int workflowInstanceId = taskExecutionContext.getProcessInstanceId(); + final int workflowInstanceId = taskExecutionContext.getWorkflowInstanceId(); final IWorkflowExecutionRunnable workflowExecutionRunnable = IWorkflowRepository.get(workflowInstanceId); if (workflowExecutionRunnable == null) { throw new LogicTaskInitializeException("Cannot find the WorkflowExecuteRunnable: " + workflowInstanceId); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicAsyncTaskExecuteFunction.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicAsyncTaskExecuteFunction.java index d89ac0062c..9f2b483579 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicAsyncTaskExecuteFunction.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicAsyncTaskExecuteFunction.java @@ -157,9 +157,9 @@ public class DynamicAsyncTaskExecuteFunction implements AsyncTaskExecuteFunction Map parameters = JSONUtils.toMap(DynamicCommandUtils .getDataFromCommandParam(subWorkflowInstance.getCommandParam(), CMD_DYNAMIC_START_PARAMS)); Command command = DynamicCommandUtils.createCommand(this.workflowInstance, - subWorkflowInstance.getProcessDefinitionCode(), subWorkflowInstance.getProcessDefinitionVersion(), + subWorkflowInstance.getWorkflowDefinitionCode(), subWorkflowInstance.getWorkflowDefinitionVersion(), parameters); - command.setProcessInstanceId(subWorkflowInstance.getId()); + 
command.setWorkflowInstanceId(subWorkflowInstance.getId()); commandMapper.insert(command); log.info("start sub process instance, sub process instance id: {}, command: {}", subWorkflowInstance.getId(), diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicCommandUtils.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicCommandUtils.java index 70442d65a0..567cd7ff88 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicCommandUtils.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicCommandUtils.java @@ -45,8 +45,8 @@ public class DynamicCommandUtils { } else { command.setCommandType(workflowInstance.getCommandType()); } - command.setProcessDefinitionCode(subProcessDefinitionCode); - command.setProcessDefinitionVersion(subProcessDefinitionVersion); + command.setWorkflowDefinitionCode(subProcessDefinitionCode); + command.setWorkflowDefinitionVersion(subProcessDefinitionVersion); command.setTaskDependType(TaskDependType.TASK_POST); command.setFailureStrategy(workflowInstance.getFailureStrategy()); command.setWarningType(workflowInstance.getWarningType()); @@ -62,7 +62,7 @@ public class DynamicCommandUtils { addDataToCommandParam(command, CommandKeyConstants.CMD_PARAM_START_PARAMS, JSONUtils.toJsonString(parameters)); command.setExecutorId(workflowInstance.getExecutorId()); command.setWarningGroupId(workflowInstance.getWarningGroupId()); - command.setProcessInstancePriority(workflowInstance.getProcessInstancePriority()); + command.setWorkflowInstancePriority(workflowInstance.getWorkflowInstancePriority()); command.setWorkerGroup(workflowInstance.getWorkerGroup()); command.setDryRun(workflowInstance.getDryRun()); command.setTenantCode(workflowInstance.getTenantCode()); diff --git 
a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicLogicTask.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicLogicTask.java index 547360b539..6942260d8b 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicLogicTask.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicLogicTask.java @@ -68,7 +68,7 @@ public class DynamicLogicTask extends BaseAsyncLogicTask { private final SubWorkflowService subWorkflowService; - private final WorkflowDefinitionMapper processDefineMapper; + private final WorkflowDefinitionMapper workflowDefinitionMapper; private final CommandMapper commandMapper; @@ -85,7 +85,7 @@ public class DynamicLogicTask extends BaseAsyncLogicTask { TaskInstanceDao taskInstanceDao, SubWorkflowService subWorkflowService, ProcessService processService, - WorkflowDefinitionMapper processDefineMapper, + WorkflowDefinitionMapper workflowDefinitionMapper, CommandMapper commandMapper) { super(taskExecutionContext, JSONUtils.parseObject(taskExecutionContext.getTaskParams(), new TypeReference() { @@ -93,10 +93,10 @@ public class DynamicLogicTask extends BaseAsyncLogicTask { this.workflowInstanceDao = workflowInstanceDao; this.subWorkflowService = subWorkflowService; this.processService = processService; - this.processDefineMapper = processDefineMapper; + this.workflowDefinitionMapper = workflowDefinitionMapper; this.commandMapper = commandMapper; - this.workflowInstance = workflowInstanceDao.queryById(taskExecutionContext.getProcessInstanceId()); + this.workflowInstance = workflowInstanceDao.queryById(taskExecutionContext.getWorkflowInstanceId()); this.taskInstance = taskInstanceDao.queryById(taskExecutionContext.getTaskInstanceId()); } @@ -146,7 +146,7 @@ public class DynamicLogicTask extends BaseAsyncLogicTask { 
public void generateSubWorkflowInstance(List> parameterGroup) throws MasterTaskExecuteException { List workflowInstanceList = new ArrayList<>(); WorkflowDefinition subWorkflowDefinition = - processDefineMapper.queryByCode(taskParameters.getProcessDefinitionCode()); + workflowDefinitionMapper.queryByCode(taskParameters.getWorkflowDefinitionCode()); for (Map parameters : parameterGroup) { String dynamicStartParams = JSONUtils.toJsonString(parameters); Command command = DynamicCommandUtils.createCommand(workflowInstance, subWorkflowDefinition.getCode(), @@ -160,7 +160,7 @@ public class DynamicLogicTask extends BaseAsyncLogicTask { WorkflowInstance subWorkflowInstance = createSubProcessInstance(command); subWorkflowInstance.setState(WorkflowExecutionStatus.WAIT_TO_RUN); workflowInstanceDao.insert(subWorkflowInstance); - command.setProcessInstanceId(subWorkflowInstance.getId()); + command.setWorkflowInstanceId(subWorkflowInstance.getId()); workflowInstanceList.add(subWorkflowInstance); } @@ -183,8 +183,8 @@ public class DynamicLogicTask extends BaseAsyncLogicTask { public WorkflowInstance createSubProcessInstance(Command command) throws MasterTaskExecuteException { WorkflowInstance subWorkflowInstance; try { - subWorkflowInstance = processService.constructProcessInstance(command, workflowInstance.getHost()); - subWorkflowInstance.setIsSubProcess(Flag.YES); + subWorkflowInstance = processService.constructWorkflowInstance(command, workflowInstance.getHost()); + subWorkflowInstance.setIsSubWorkflow(Flag.YES); subWorkflowInstance.setVarPool(taskExecutionContext.getVarPool()); } catch (Exception e) { log.error("create sub process instance error", e); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/fake/LogicFakeTaskPluginFactory.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/fake/LogicFakeTaskPluginFactory.java index a3f8ee149a..8fc2ea9633 100644 --- 
a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/fake/LogicFakeTaskPluginFactory.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/fake/LogicFakeTaskPluginFactory.java @@ -38,10 +38,10 @@ public class LogicFakeTaskPluginFactory implements ILogicTaskPluginFactory { +public class SubWorkflowLogicTask extends BaseAsyncLogicTask { private SubWorkflowLogicTaskRuntimeContext subWorkflowLogicTaskRuntimeContext; @@ -60,7 +60,7 @@ public class SubWorkflowLogicTask extends BaseAsyncLogicTask() { + JSONUtils.parseObject(taskExecutionContext.getTaskParams(), new TypeReference() { })); this.workflowExecutionRunnable = workflowExecutionRunnable; this.applicationContext = applicationContext; @@ -176,9 +176,9 @@ public class SubWorkflowLogicTask extends BaseAsyncLogicTask new IllegalArgumentException( - "Cannot find the sub workflow definition: " + taskParameters.getProcessDefinitionCode())); + "Cannot find the sub workflow definition: " + taskParameters.getWorkflowDefinitionCode())); final ICommandParam commandParam = JSONUtils.parseObject(workflowInstance.getCommandParam(), ICommandParam.class); @@ -190,7 +190,7 @@ public class SubWorkflowLogicTask extends BaseAsyncLogicTask workflowTaskRelations = - processService.findRelationByCode(workflowInstance.getProcessDefinitionCode(), - workflowInstance.getProcessDefinitionVersion()); + processService.findRelationByCode(workflowInstance.getWorkflowDefinitionCode(), + workflowInstance.getWorkflowDefinitionVersion()); List taskDefinitionLogs = taskDefinitionLogDao.queryTaskDefineLogList(workflowTaskRelations); Map taskDefinitionCodeMap = @@ -224,13 +224,13 @@ public class DependentExecute { if (!taskExecutionStatusMap.containsKey(taskCode)) { log.warn( "The task of the workflow is not being executed, taskCode: {}, workflowInstanceId: {}, workflowInstanceName: {}.", - taskCode, workflowInstance.getProcessDefinitionCode(), 
workflowInstance.getName()); + taskCode, workflowInstance.getWorkflowDefinitionCode(), workflowInstance.getName()); return DependResult.FAILED; } else { if (!taskExecutionStatusMap.get(taskCode).isSuccess()) { log.warn( "The task of the workflow is not being executed successfully, taskCode: {}, workflowInstanceId: {}, workflowInstanceName: {}.", - taskCode, workflowInstance.getProcessDefinitionCode(), workflowInstance.getName()); + taskCode, workflowInstance.getWorkflowDefinitionCode(), workflowInstance.getName()); return DependResult.FAILED; } } @@ -274,7 +274,7 @@ public class DependentExecute { if (!workflowInstance.getState().isFinished()) { log.info( "Wait for the dependent workflow to complete, workflowDefinitionCode: {}, workflowInstanceId: {}.", - workflowInstance.getProcessDefinitionCode(), workflowInstance.getId()); + workflowInstance.getWorkflowDefinitionCode(), workflowInstance.getId()); return DependResult.WAITING; } @@ -457,7 +457,7 @@ public class DependentExecute { * @return */ public boolean isSelfDependent(DependentItem dependentItem) { - if (workflowInstance.getProcessDefinitionCode().equals(dependentItem.getDefinitionCode())) { + if (workflowInstance.getWorkflowDefinitionCode().equals(dependentItem.getDefinitionCode())) { if (dependentItem.getDepTaskCode() == Constants.DEPENDENT_ALL_TASK_CODE) { return true; } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/utils/WorkflowInstanceUtils.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/utils/WorkflowInstanceUtils.java index 2ea6674dd4..4a5552aa29 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/utils/WorkflowInstanceUtils.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/utils/WorkflowInstanceUtils.java @@ -68,7 +68,7 @@ public class WorkflowInstanceUtils { .getAllTaskExecutionRunnable().size()) .append("\n") .append("Host: 
").append(workflowInstance.getHost()).append("\n") - .append("Is SubWorkflow: ").append(workflowInstance.getIsSubProcess().name()).append("\n") + .append("Is SubWorkflow: ").append(workflowInstance.getIsSubWorkflow().name()).append("\n") .append("Run Times: ").append(workflowInstance.getRunTimes()).append("\n") .append("Tenant: ").append(workflowInstance.getTenantCode()).append("\n") .append("Work Group: ").append(workflowInstance.getWorkerGroup()).append("\n") diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/ParamsTest.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/ParamsTest.java index ab4e1f9626..455633554b 100644 --- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/ParamsTest.java +++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/ParamsTest.java @@ -47,7 +47,7 @@ public class ParamsTest { command = ParameterUtils.convertParameterPlaceholders(command, timeParams); - logger.info("start process : {}", command); + logger.info("start workflow : {}", command); Calendar calendar = Calendar.getInstance(); calendar.setTime(new Date()); diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/Repository.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/Repository.java index 6418ed3a5a..d3a5b47162 100644 --- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/Repository.java +++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/Repository.java @@ -45,9 +45,9 @@ public class Repository { public List queryWorkflowInstance(final WorkflowDefinition workflowDefinition) { return workflowInstanceDao.queryAll() .stream() - .filter(workflowInstance -> workflowInstance.getProcessDefinitionCode() + .filter(workflowInstance -> workflowInstance.getWorkflowDefinitionCode() 
.equals(workflowDefinition.getCode())) - .filter(workflowInstance -> workflowInstance.getProcessDefinitionVersion() == workflowDefinition + .filter(workflowInstance -> workflowInstance.getWorkflowDefinitionVersion() == workflowDefinition .getVersion()) .sorted(Comparator.comparingInt(WorkflowInstance::getId)) .collect(Collectors.toList()); diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowInstancePauseIT.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowInstancePauseIT.java index 2fd2797ddb..94bc314232 100644 --- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowInstancePauseIT.java +++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowInstancePauseIT.java @@ -284,7 +284,7 @@ public class WorkflowInstancePauseIT extends AbstractMasterIntegrationTest { .assertThat(repository.queryWorkflowInstance(workflowInstanceId)) .satisfies(workflowInstance -> { assertThat(workflowInstance.getState()).isEqualTo(WorkflowExecutionStatus.PAUSE); - assertThat(workflowInstance.getIsSubProcess()).isEqualTo(Flag.NO); + assertThat(workflowInstance.getIsSubWorkflow()).isEqualTo(Flag.NO); }); Assertions .assertThat(repository.queryTaskInstance(workflowInstanceId)) @@ -298,7 +298,7 @@ public class WorkflowInstancePauseIT extends AbstractMasterIntegrationTest { .assertThat(repository.queryWorkflowInstance(subWorkflowDefinition)) .satisfiesExactly(workflowInstance -> { assertThat(workflowInstance.getState()).isEqualTo(WorkflowExecutionStatus.PAUSE); - assertThat(workflowInstance.getIsSubProcess()).isEqualTo(Flag.YES); + assertThat(workflowInstance.getIsSubWorkflow()).isEqualTo(Flag.YES); }); final List taskInstances = repository.queryTaskInstance(subWorkflowDefinition); diff --git 
a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowInstanceRecoverPauseIT.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowInstanceRecoverPauseIT.java index 393ea84b02..6c1ed8f284 100644 --- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowInstanceRecoverPauseIT.java +++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowInstanceRecoverPauseIT.java @@ -92,7 +92,7 @@ public class WorkflowInstanceRecoverPauseIT extends AbstractMasterIntegrationTes .assertThat(repository.queryWorkflowInstance(workflowInstanceId)) .satisfies(workflowInstance -> { assertThat(workflowInstance.getState()).isEqualTo(WorkflowExecutionStatus.PAUSE); - assertThat(workflowInstance.getIsSubProcess()).isEqualTo(Flag.NO); + assertThat(workflowInstance.getIsSubWorkflow()).isEqualTo(Flag.NO); }); }); diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowInstanceRecoverStopIT.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowInstanceRecoverStopIT.java index e676325959..cb65c96def 100644 --- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowInstanceRecoverStopIT.java +++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowInstanceRecoverStopIT.java @@ -92,7 +92,7 @@ public class WorkflowInstanceRecoverStopIT extends AbstractMasterIntegrationTest .assertThat(repository.queryWorkflowInstance(workflowInstanceId)) .satisfies(workflowInstance -> { assertThat(workflowInstance.getState()).isEqualTo(WorkflowExecutionStatus.STOP); - assertThat(workflowInstance.getIsSubProcess()).isEqualTo(Flag.NO); + assertThat(workflowInstance.getIsSubWorkflow()).isEqualTo(Flag.NO); }); }); diff --git 
a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowInstanceStopIT.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowInstanceStopIT.java index d80fa85bc1..a7bef21dda 100644 --- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowInstanceStopIT.java +++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowInstanceStopIT.java @@ -255,7 +255,7 @@ public class WorkflowInstanceStopIT extends AbstractMasterIntegrationTest { .assertThat(repository.queryWorkflowInstance(workflowInstanceId)) .satisfies(workflowInstance -> { assertThat(workflowInstance.getState()).isEqualTo(WorkflowExecutionStatus.STOP); - assertThat(workflowInstance.getIsSubProcess()).isEqualTo(Flag.NO); + assertThat(workflowInstance.getIsSubWorkflow()).isEqualTo(Flag.NO); }); Assertions .assertThat(repository.queryTaskInstance(workflowInstanceId)) @@ -269,7 +269,7 @@ public class WorkflowInstanceStopIT extends AbstractMasterIntegrationTest { .assertThat(repository.queryWorkflowInstance(subWorkflowDefinition)) .satisfiesExactly(workflowInstance -> { assertThat(workflowInstance.getState()).isEqualTo(WorkflowExecutionStatus.STOP); - assertThat(workflowInstance.getIsSubProcess()).isEqualTo(Flag.YES); + assertThat(workflowInstance.getIsSubWorkflow()).isEqualTo(Flag.YES); }); Assertions diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowSchedulingIT.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowSchedulingIT.java index 3ea319a41b..55eb93c08a 100644 --- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowSchedulingIT.java +++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowSchedulingIT.java @@ -71,7 +71,7 @@ 
public class WorkflowSchedulingIT extends AbstractMasterIntegrationTest { final WorkflowDefinition workflow = context.getWorkflows().get(0); final Schedule schedule = Schedule.builder() - .processDefinitionCode(workflow.getCode()) + .workflowDefinitionCode(workflow.getCode()) .startTime(new Date()) .endTime(DateUtils.addDays(new Date(), 1)) .timezoneId(TimeZone.getDefault().getID()) @@ -82,7 +82,7 @@ public class WorkflowSchedulingIT extends AbstractMasterIntegrationTest { .updateTime(new Date()) .userId(1) .releaseState(ReleaseState.ONLINE) - .processInstancePriority(Priority.MEDIUM) + .workflowInstancePriority(Priority.MEDIUM) .build(); scheduleMapper.insert(schedule); diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowStartIT.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowStartIT.java index 039b2f4ced..e360280259 100644 --- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowStartIT.java +++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/it/cases/WorkflowStartIT.java @@ -121,7 +121,7 @@ public class WorkflowStartIT extends AbstractMasterIntegrationTest { .matches( workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.SUCCESS) .matches( - workflowInstance -> workflowInstance.getIsSubProcess() == Flag.NO); + workflowInstance -> workflowInstance.getIsSubWorkflow() == Flag.NO); final List subWorkflowInstance = repository.queryWorkflowInstance(context.getWorkflows().get(1)); @@ -130,7 +130,7 @@ public class WorkflowStartIT extends AbstractMasterIntegrationTest { .hasSize(1) .satisfiesExactly(workflowInstance -> { assertThat(workflowInstance.getState()).isEqualTo(WorkflowExecutionStatus.SUCCESS); - assertThat(workflowInstance.getIsSubProcess()).isEqualTo(Flag.YES); + assertThat(workflowInstance.getIsSubWorkflow()).isEqualTo(Flag.YES); }); Assertions @@ 
-173,7 +173,7 @@ public class WorkflowStartIT extends AbstractMasterIntegrationTest { .matches( workflowInstance -> workflowInstance.getState() == WorkflowExecutionStatus.FAILURE) .matches( - workflowInstance -> workflowInstance.getIsSubProcess() == Flag.NO); + workflowInstance -> workflowInstance.getIsSubWorkflow() == Flag.NO); final List subWorkflowInstance = repository.queryWorkflowInstance(context.getWorkflows().get(1)); @@ -182,7 +182,7 @@ public class WorkflowStartIT extends AbstractMasterIntegrationTest { .hasSize(1) .satisfiesExactly(workflowInstance -> { assertThat(workflowInstance.getState()).isEqualTo(WorkflowExecutionStatus.FAILURE); - assertThat(workflowInstance.getIsSubProcess()).isEqualTo(Flag.YES); + assertThat(workflowInstance.getIsSubWorkflow()).isEqualTo(Flag.YES); }); Assertions diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/GlobalTaskDispatchWaitingQueueTest.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/GlobalTaskDispatchWaitingQueueTest.java index 11c5ea2424..1f10f48090 100644 --- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/GlobalTaskDispatchWaitingQueueTest.java +++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/GlobalTaskDispatchWaitingQueueTest.java @@ -178,7 +178,7 @@ class GlobalTaskDispatchWaitingQueueTest { private ITaskExecutionRunnable createTaskExecuteRunnable() { WorkflowInstance workflowInstance = new WorkflowInstance(); - workflowInstance.setProcessInstancePriority(Priority.MEDIUM); + workflowInstance.setWorkflowInstancePriority(Priority.MEDIUM); TaskInstance taskInstance = new TaskInstance(); taskInstance.setId(RandomUtils.nextInt()); diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicCommandUtilsTest.java 
b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicCommandUtilsTest.java index 003a78900c..9074679d77 100644 --- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicCommandUtilsTest.java +++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicCommandUtilsTest.java @@ -52,7 +52,7 @@ class DynamicCommandUtilsTest { workflowInstance.setGlobalParams("{\"prop\":\"value\"}"); workflowInstance.setExecutorId(1); workflowInstance.setWarningGroupId(1); - workflowInstance.setProcessInstancePriority(null); // update this + workflowInstance.setWorkflowInstancePriority(null); // update this workflowInstance.setWorkerGroup("worker"); workflowInstance.setTenantCode("unit-root"); workflowInstance.setDryRun(0); @@ -64,14 +64,14 @@ class DynamicCommandUtilsTest { subProcessDefinitionVersion, parameters); Assertions.assertEquals(CommandType.DYNAMIC_GENERATION, command.getCommandType()); - Assertions.assertEquals(subProcessDefinitionCode, command.getProcessDefinitionCode()); - Assertions.assertEquals(subProcessDefinitionVersion, command.getProcessDefinitionVersion()); + Assertions.assertEquals(subProcessDefinitionCode, command.getWorkflowDefinitionCode()); + Assertions.assertEquals(subProcessDefinitionVersion, command.getWorkflowDefinitionVersion()); Assertions.assertEquals(TaskDependType.TASK_POST, command.getTaskDependType()); Assertions.assertEquals(workflowInstance.getFailureStrategy(), command.getFailureStrategy()); Assertions.assertEquals(workflowInstance.getWarningType(), command.getWarningType()); Assertions.assertEquals(workflowInstance.getExecutorId(), command.getExecutorId()); Assertions.assertEquals(workflowInstance.getWarningGroupId(), command.getWarningGroupId()); - Assertions.assertEquals(workflowInstance.getProcessInstancePriority(), command.getProcessInstancePriority()); + 
Assertions.assertEquals(workflowInstance.getWorkflowInstancePriority(), command.getWorkflowInstancePriority()); Assertions.assertEquals(workflowInstance.getWorkerGroup(), command.getWorkerGroup()); Assertions.assertEquals(workflowInstance.getDryRun(), command.getDryRun()); Assertions.assertEquals(workflowInstance.getTenantCode(), command.getTenantCode()); diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/utils/WorkflowInstanceUtilsTest.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/utils/WorkflowInstanceUtilsTest.java index 52c51d48e2..603fe50eb2 100644 --- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/utils/WorkflowInstanceUtilsTest.java +++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/utils/WorkflowInstanceUtilsTest.java @@ -46,7 +46,7 @@ public class WorkflowInstanceUtilsTest { workflowInstance.setCommandType(CommandType.REPEAT_RUNNING); workflowInstance.setState(WorkflowExecutionStatus.SUCCESS); workflowInstance.setHost("127.0.0.1"); - workflowInstance.setIsSubProcess(Flag.NO); + workflowInstance.setIsSubWorkflow(Flag.NO); workflowInstance.setRunTimes(1); workflowInstance.setMaxTryTimes(0); workflowInstance.setScheduleTime(Date.valueOf("2023-08-01")); diff --git a/dolphinscheduler-master/src/test/resources/it/pause/workflow_with_sub_workflow_task_success.yaml b/dolphinscheduler-master/src/test/resources/it/pause/workflow_with_sub_workflow_task_success.yaml index 2192122a8a..e565f7ce3d 100644 --- a/dolphinscheduler-master/src/test/resources/it/pause/workflow_with_sub_workflow_task_success.yaml +++ b/dolphinscheduler-master/src/test/resources/it/pause/workflow_with_sub_workflow_task_success.yaml @@ -52,8 +52,8 @@ tasks: version: 1 projectCode: 1 userId: 1 - taskType: SUB_PROCESS - taskParams: '{"localParams":[],"resourceList":[],"processDefinitionCode":1}' + taskType: SUB_WORKFLOW + taskParams: 
'{"localParams":[],"resourceList":[],"workflowDefinitionCode":1}' workerGroup: default createTime: 2024-08-12 00:00:00 updateTime: 2021-08-12 00:00:00 @@ -83,8 +83,8 @@ tasks: taskRelations: - projectCode: 1 - processDefinitionCode: 2 - processDefinitionVersion: 1 + workflowDefinitionCode: 2 + workflowDefinitionVersion: 1 preTaskCode: 0 preTaskVersion: 0 postTaskCode: 3 @@ -92,8 +92,8 @@ taskRelations: createTime: 2024-08-12 00:00:00 updateTime: 2024-08-12 00:00:00 - projectCode: 1 - processDefinitionCode: 1 - processDefinitionVersion: 1 + workflowDefinitionCode: 1 + workflowDefinitionVersion: 1 preTaskCode: 0 preTaskVersion: 0 postTaskCode: 1 @@ -101,8 +101,8 @@ taskRelations: createTime: 2024-08-12 00:00:00 updateTime: 2024-08-12 00:00:00 - projectCode: 1 - processDefinitionCode: 1 - processDefinitionVersion: 1 + workflowDefinitionCode: 1 + workflowDefinitionVersion: 1 preTaskCode: 1 preTaskVersion: 1 postTaskCode: 2 diff --git a/dolphinscheduler-master/src/test/resources/it/recover_paused/workflow_with_sub_workflow_task_success.yaml b/dolphinscheduler-master/src/test/resources/it/recover_paused/workflow_with_sub_workflow_task_success.yaml index 2192122a8a..e565f7ce3d 100644 --- a/dolphinscheduler-master/src/test/resources/it/recover_paused/workflow_with_sub_workflow_task_success.yaml +++ b/dolphinscheduler-master/src/test/resources/it/recover_paused/workflow_with_sub_workflow_task_success.yaml @@ -52,8 +52,8 @@ tasks: version: 1 projectCode: 1 userId: 1 - taskType: SUB_PROCESS - taskParams: '{"localParams":[],"resourceList":[],"processDefinitionCode":1}' + taskType: SUB_WORKFLOW + taskParams: '{"localParams":[],"resourceList":[],"workflowDefinitionCode":1}' workerGroup: default createTime: 2024-08-12 00:00:00 updateTime: 2021-08-12 00:00:00 @@ -83,8 +83,8 @@ tasks: taskRelations: - projectCode: 1 - processDefinitionCode: 2 - processDefinitionVersion: 1 + workflowDefinitionCode: 2 + workflowDefinitionVersion: 1 preTaskCode: 0 preTaskVersion: 0 postTaskCode: 3 @@ 
-92,8 +92,8 @@ taskRelations: createTime: 2024-08-12 00:00:00 updateTime: 2024-08-12 00:00:00 - projectCode: 1 - processDefinitionCode: 1 - processDefinitionVersion: 1 + workflowDefinitionCode: 1 + workflowDefinitionVersion: 1 preTaskCode: 0 preTaskVersion: 0 postTaskCode: 1 @@ -101,8 +101,8 @@ taskRelations: createTime: 2024-08-12 00:00:00 updateTime: 2024-08-12 00:00:00 - projectCode: 1 - processDefinitionCode: 1 - processDefinitionVersion: 1 + workflowDefinitionCode: 1 + workflowDefinitionVersion: 1 preTaskCode: 1 preTaskVersion: 1 postTaskCode: 2 diff --git a/dolphinscheduler-master/src/test/resources/it/recover_stopped/workflow_with_sub_workflow_task_success.yaml b/dolphinscheduler-master/src/test/resources/it/recover_stopped/workflow_with_sub_workflow_task_success.yaml index 2192122a8a..e565f7ce3d 100644 --- a/dolphinscheduler-master/src/test/resources/it/recover_stopped/workflow_with_sub_workflow_task_success.yaml +++ b/dolphinscheduler-master/src/test/resources/it/recover_stopped/workflow_with_sub_workflow_task_success.yaml @@ -52,8 +52,8 @@ tasks: version: 1 projectCode: 1 userId: 1 - taskType: SUB_PROCESS - taskParams: '{"localParams":[],"resourceList":[],"processDefinitionCode":1}' + taskType: SUB_WORKFLOW + taskParams: '{"localParams":[],"resourceList":[],"workflowDefinitionCode":1}' workerGroup: default createTime: 2024-08-12 00:00:00 updateTime: 2021-08-12 00:00:00 @@ -83,8 +83,8 @@ tasks: taskRelations: - projectCode: 1 - processDefinitionCode: 2 - processDefinitionVersion: 1 + workflowDefinitionCode: 2 + workflowDefinitionVersion: 1 preTaskCode: 0 preTaskVersion: 0 postTaskCode: 3 @@ -92,8 +92,8 @@ taskRelations: createTime: 2024-08-12 00:00:00 updateTime: 2024-08-12 00:00:00 - projectCode: 1 - processDefinitionCode: 1 - processDefinitionVersion: 1 + workflowDefinitionCode: 1 + workflowDefinitionVersion: 1 preTaskCode: 0 preTaskVersion: 0 postTaskCode: 1 @@ -101,8 +101,8 @@ taskRelations: createTime: 2024-08-12 00:00:00 updateTime: 2024-08-12 
00:00:00 - projectCode: 1 - processDefinitionCode: 1 - processDefinitionVersion: 1 + workflowDefinitionCode: 1 + workflowDefinitionVersion: 1 preTaskCode: 1 preTaskVersion: 1 postTaskCode: 2 diff --git a/dolphinscheduler-master/src/test/resources/it/start/workflow_with_sub_workflow_task_failed.yaml b/dolphinscheduler-master/src/test/resources/it/start/workflow_with_sub_workflow_task_failed.yaml index 09d5d6f11e..977a8a231c 100644 --- a/dolphinscheduler-master/src/test/resources/it/start/workflow_with_sub_workflow_task_failed.yaml +++ b/dolphinscheduler-master/src/test/resources/it/start/workflow_with_sub_workflow_task_failed.yaml @@ -52,8 +52,8 @@ tasks: version: 1 projectCode: 1 userId: 1 - taskType: SUB_PROCESS - taskParams: '{"localParams":[],"resourceList":[],"processDefinitionCode":1}' + taskType: SUB_WORKFLOW + taskParams: '{"localParams":[],"resourceList":[],"workflowDefinitionCode":1}' workerGroup: default createTime: 2024-08-12 00:00:00 updateTime: 2021-08-12 00:00:00 @@ -72,8 +72,8 @@ tasks: taskRelations: - projectCode: 1 - processDefinitionCode: 2 - processDefinitionVersion: 1 + workflowDefinitionCode: 2 + workflowDefinitionVersion: 1 preTaskCode: 0 preTaskVersion: 0 postTaskCode: 2 @@ -81,8 +81,8 @@ taskRelations: createTime: 2024-08-12 00:00:00 updateTime: 2024-08-12 00:00:00 - projectCode: 1 - processDefinitionCode: 1 - processDefinitionVersion: 1 + workflowDefinitionCode: 1 + workflowDefinitionVersion: 1 preTaskCode: 0 preTaskVersion: 0 postTaskCode: 1 diff --git a/dolphinscheduler-master/src/test/resources/it/start/workflow_with_sub_workflow_task_success.yaml b/dolphinscheduler-master/src/test/resources/it/start/workflow_with_sub_workflow_task_success.yaml index 19920c0d83..12da9b1792 100644 --- a/dolphinscheduler-master/src/test/resources/it/start/workflow_with_sub_workflow_task_success.yaml +++ b/dolphinscheduler-master/src/test/resources/it/start/workflow_with_sub_workflow_task_success.yaml @@ -52,8 +52,8 @@ tasks: version: 1 projectCode: 1 
userId: 1 - taskType: SUB_PROCESS - taskParams: '{"localParams":[],"resourceList":[],"processDefinitionCode":1}' + taskType: SUB_WORKFLOW + taskParams: '{"localParams":[],"resourceList":[],"workflowDefinitionCode":1}' workerGroup: default createTime: 2024-08-12 00:00:00 updateTime: 2021-08-12 00:00:00 @@ -72,8 +72,8 @@ tasks: taskRelations: - projectCode: 1 - processDefinitionCode: 2 - processDefinitionVersion: 1 + workflowDefinitionCode: 2 + workflowDefinitionVersion: 1 preTaskCode: 0 preTaskVersion: 0 postTaskCode: 2 @@ -81,8 +81,8 @@ taskRelations: createTime: 2024-08-12 00:00:00 updateTime: 2024-08-12 00:00:00 - projectCode: 1 - processDefinitionCode: 1 - processDefinitionVersion: 1 + workflowDefinitionCode: 1 + workflowDefinitionVersion: 1 preTaskCode: 0 preTaskVersion: 0 postTaskCode: 1 diff --git a/dolphinscheduler-master/src/test/resources/it/stop/workflow_with_sub_workflow_task_success.yaml b/dolphinscheduler-master/src/test/resources/it/stop/workflow_with_sub_workflow_task_success.yaml index 2192122a8a..e565f7ce3d 100644 --- a/dolphinscheduler-master/src/test/resources/it/stop/workflow_with_sub_workflow_task_success.yaml +++ b/dolphinscheduler-master/src/test/resources/it/stop/workflow_with_sub_workflow_task_success.yaml @@ -52,8 +52,8 @@ tasks: version: 1 projectCode: 1 userId: 1 - taskType: SUB_PROCESS - taskParams: '{"localParams":[],"resourceList":[],"processDefinitionCode":1}' + taskType: SUB_WORKFLOW + taskParams: '{"localParams":[],"resourceList":[],"workflowDefinitionCode":1}' workerGroup: default createTime: 2024-08-12 00:00:00 updateTime: 2021-08-12 00:00:00 @@ -83,8 +83,8 @@ tasks: taskRelations: - projectCode: 1 - processDefinitionCode: 2 - processDefinitionVersion: 1 + workflowDefinitionCode: 2 + workflowDefinitionVersion: 1 preTaskCode: 0 preTaskVersion: 0 postTaskCode: 3 @@ -92,8 +92,8 @@ taskRelations: createTime: 2024-08-12 00:00:00 updateTime: 2024-08-12 00:00:00 - projectCode: 1 - processDefinitionCode: 1 - processDefinitionVersion: 1 + 
workflowDefinitionCode: 1 + workflowDefinitionVersion: 1 preTaskCode: 0 preTaskVersion: 0 postTaskCode: 1 @@ -101,8 +101,8 @@ taskRelations: createTime: 2024-08-12 00:00:00 updateTime: 2024-08-12 00:00:00 - projectCode: 1 - processDefinitionCode: 1 - processDefinitionVersion: 1 + workflowDefinitionCode: 1 + workflowDefinitionVersion: 1 preTaskCode: 1 preTaskVersion: 1 postTaskCode: 2 diff --git a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/ProcessScheduleTask.java b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/ProcessScheduleTask.java index 22b45127ef..09d6946092 100644 --- a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/ProcessScheduleTask.java +++ b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/ProcessScheduleTask.java @@ -74,7 +74,7 @@ public class ProcessScheduleTask extends QuartzJobBean { } WorkflowDefinition workflowDefinition = - processService.findProcessDefinitionByCode(schedule.getProcessDefinitionCode()); + processService.findWorkflowDefinitionByCode(schedule.getWorkflowDefinitionCode()); // release state : online/offline ReleaseState releaseState = workflowDefinition.getReleaseState(); if (releaseState == ReleaseState.OFFLINE) { @@ -94,7 +94,7 @@ public class ProcessScheduleTask extends QuartzJobBean { .taskDependType(TaskDependType.TASK_POST) .warningType(schedule.getWarningType()) .warningGroupId(schedule.getWarningGroupId()) - .workflowInstancePriority(schedule.getProcessInstancePriority()) + .workflowInstancePriority(schedule.getWorkflowInstancePriority()) .workerGroup(WorkerGroupUtils.getWorkerGroupOrDefault(schedule.getWorkerGroup())) .tenantCode(schedule.getTenantCode()) .environmentCode(schedule.getEnvironmentCode()) diff 
--git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/WorkflowAlertManager.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/WorkflowAlertManager.java index fa92ad1315..1be47623d4 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/WorkflowAlertManager.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/WorkflowAlertManager.java @@ -112,8 +112,8 @@ public class WorkflowAlertManager { String res = ""; WorkflowDefinitionLog workflowDefinitionLog = workflowDefinitionLogMapper - .queryByDefinitionCodeAndVersion(workflowInstance.getProcessDefinitionCode(), - workflowInstance.getProcessDefinitionVersion()); + .queryByDefinitionCodeAndVersion(workflowInstance.getWorkflowDefinitionCode(), + workflowInstance.getWorkflowDefinitionVersion()); String modifyBy = ""; if (workflowDefinitionLog != null) { @@ -127,17 +127,17 @@ public class WorkflowAlertManager { .projectCode(projectUser.getProjectCode()) .projectName(projectUser.getProjectName()) .owner(projectUser.getUserName()) - .processId(workflowInstance.getId()) - .processDefinitionCode(workflowInstance.getProcessDefinitionCode()) - .processName(workflowInstance.getName()) - .processType(workflowInstance.getCommandType()) - .processState(workflowInstance.getState()) + .workflowInstanceId(workflowInstance.getId()) + .workflowDefinitionCode(workflowInstance.getWorkflowDefinitionCode()) + .workflowInstanceName(workflowInstance.getName()) + .commandType(workflowInstance.getCommandType()) + .workflowExecutionStatus(workflowInstance.getState()) .modifyBy(modifyBy) .recovery(workflowInstance.getRecovery()) .runTimes(workflowInstance.getRunTimes()) - .processStartTime(workflowInstance.getStartTime()) - .processEndTime(workflowInstance.getEndTime()) - .processHost(workflowInstance.getHost()) + .workflowStartTime(workflowInstance.getStartTime()) + 
.workflowEndTime(workflowInstance.getEndTime()) + .workflowHost(workflowInstance.getHost()) .build(); successTaskList.add(workflowAlertContent); res = JSONUtils.toJsonString(successTaskList); @@ -152,9 +152,9 @@ public class WorkflowAlertManager { .projectCode(projectUser.getProjectCode()) .projectName(projectUser.getProjectName()) .owner(projectUser.getUserName()) - .processId(workflowInstance.getId()) - .processDefinitionCode(workflowInstance.getProcessDefinitionCode()) - .processName(workflowInstance.getName()) + .workflowInstanceId(workflowInstance.getId()) + .workflowDefinitionCode(workflowInstance.getWorkflowDefinitionCode()) + .workflowInstanceName(workflowInstance.getName()) .modifyBy(modifyBy) .taskCode(task.getTaskCode()) .taskName(task.getName()) @@ -186,8 +186,8 @@ public class WorkflowAlertManager { List toleranceTaskInstanceList = new ArrayList<>(); WorkflowDefinitionLog workflowDefinitionLog = workflowDefinitionLogMapper - .queryByDefinitionCodeAndVersion(workflowInstance.getProcessDefinitionCode(), - workflowInstance.getProcessDefinitionVersion()); + .queryByDefinitionCodeAndVersion(workflowInstance.getWorkflowDefinitionCode(), + workflowInstance.getWorkflowDefinitionVersion()); String modifyBy = ""; if (workflowDefinitionLog != null) { User operator = userMapper.selectById(workflowDefinitionLog.getOperator()); @@ -196,9 +196,9 @@ public class WorkflowAlertManager { for (TaskInstance taskInstance : toleranceTaskList) { WorkflowAlertContent workflowAlertContent = WorkflowAlertContent.builder() - .processId(workflowInstance.getId()) - .processDefinitionCode(workflowInstance.getProcessDefinitionCode()) - .processName(workflowInstance.getName()) + .workflowInstanceId(workflowInstance.getId()) + .workflowDefinitionCode(workflowInstance.getWorkflowDefinitionCode()) + .workflowInstanceName(workflowInstance.getName()) .modifyBy(modifyBy) .taskCode(taskInstance.getTaskCode()) .taskName(taskInstance.getName()) @@ -258,8 +258,8 @@ public class 
WorkflowAlertManager { alert.setAlertGroupId(workflowInstance.getWarningGroupId()); alert.setCreateTime(new Date()); alert.setProjectCode(projectUser.getProjectCode()); - alert.setProcessDefinitionCode(workflowInstance.getProcessDefinitionCode()); - alert.setProcessInstanceId(workflowInstance.getId()); + alert.setWorkflowDefinitionCode(workflowInstance.getWorkflowDefinitionCode()); + alert.setWorkflowInstanceId(workflowInstance.getId()); alert.setAlertType(workflowInstance.getState().isSuccess() ? AlertType.WORKFLOW_INSTANCE_SUCCESS : AlertType.WORKFLOW_INSTANCE_FAILURE); alertDao.addAlert(alert); @@ -272,7 +272,7 @@ public class WorkflowAlertManager { * @return */ public boolean isNeedToSendWarning(WorkflowInstance workflowInstance) { - if (Flag.YES == workflowInstance.getIsSubProcess()) { + if (Flag.YES == workflowInstance.getIsSubWorkflow()) { return false; } boolean sendWarning = false; @@ -318,8 +318,8 @@ public class WorkflowAlertManager { alert.setUpdateTime(new Date()); alert.setCreateTime(new Date()); alert.setProjectCode(workflowInstance.getWorkflowDefinition().getProjectCode()); - alert.setProcessDefinitionCode(workflowInstance.getProcessDefinitionCode()); - alert.setProcessInstanceId(workflowInstance.getId()); + alert.setWorkflowDefinitionCode(workflowInstance.getWorkflowDefinitionCode()); + alert.setWorkflowInstanceId(workflowInstance.getId()); alert.setAlertType(AlertType.CLOSE_ALERT); alertDao.addAlert(alert); } @@ -331,7 +331,7 @@ public class WorkflowAlertManager { * @param projectUser projectUser */ public void sendWorkflowTimeoutAlert(WorkflowInstance workflowInstance, ProjectUser projectUser) { - alertDao.sendProcessTimeoutAlert(workflowInstance, projectUser); + alertDao.sendWorkflowTimeoutAlert(workflowInstance, projectUser); } /** @@ -346,8 +346,8 @@ public class WorkflowAlertManager { alert.setAlertGroupId(workflowInstance.getWarningGroupId()); alert.setCreateTime(new Date()); alert.setProjectCode(result.getProjectCode()); - 
alert.setProcessDefinitionCode(workflowInstance.getProcessDefinitionCode()); - alert.setProcessInstanceId(workflowInstance.getId()); + alert.setWorkflowDefinitionCode(workflowInstance.getWorkflowDefinitionCode()); + alert.setWorkflowInstanceId(workflowInstance.getId()); // might need to change to data quality status alert.setAlertType(workflowInstance.getState().isSuccess() ? AlertType.WORKFLOW_INSTANCE_SUCCESS : AlertType.WORKFLOW_INSTANCE_FAILURE); @@ -364,8 +364,8 @@ public class WorkflowAlertManager { alert.setContent(content); alert.setAlertGroupId(workflowInstance.getWarningGroupId()); alert.setCreateTime(new Date()); - alert.setProcessDefinitionCode(workflowInstance.getProcessDefinitionCode()); - alert.setProcessInstanceId(workflowInstance.getId()); + alert.setWorkflowDefinitionCode(workflowInstance.getWorkflowDefinitionCode()); + alert.setWorkflowInstanceId(workflowInstance.getId()); alert.setAlertType(AlertType.TASK_FAILURE); alertDao.addAlert(alert); } @@ -378,8 +378,8 @@ public class WorkflowAlertManager { public String getDataQualityAlterContent(DqExecuteResult result) { DqExecuteResultAlertContent content = DqExecuteResultAlertContent.newBuilder() - .processDefinitionId(result.getProcessDefinitionId()) - .processDefinitionName(result.getProcessDefinitionName()) + .processDefinitionId(result.getWorkflowDefinitionId()) + .processDefinitionName(result.getWorkflowDefinitionName()) .processInstanceId(result.getProcessInstanceId()) .processInstanceName(result.getProcessInstanceName()) .taskInstanceId(result.getTaskInstanceId()) @@ -409,8 +409,8 @@ public class WorkflowAlertManager { public String getTaskAlterContent(TaskInstance taskInstance) { TaskAlertContent content = TaskAlertContent.builder() - .processInstanceName(taskInstance.getProcessInstanceName()) - .processInstanceId(taskInstance.getProcessInstanceId()) + .processInstanceName(taskInstance.getWorkflowInstanceName()) + .processInstanceId(taskInstance.getWorkflowInstanceId()) 
.taskInstanceId(taskInstance.getId()) .taskName(taskInstance.getName()) .taskType(taskInstance.getTaskType()) @@ -445,8 +445,8 @@ public class WorkflowAlertManager { List blockingNodeList = new ArrayList<>(1); WorkflowDefinitionLog workflowDefinitionLog = workflowDefinitionLogMapper - .queryByDefinitionCodeAndVersion(workflowInstance.getProcessDefinitionCode(), - workflowInstance.getProcessDefinitionVersion()); + .queryByDefinitionCodeAndVersion(workflowInstance.getWorkflowDefinitionCode(), + workflowInstance.getWorkflowDefinitionVersion()); String modifyBy = ""; if (workflowDefinitionLog != null) { @@ -458,15 +458,15 @@ public class WorkflowAlertManager { .projectCode(projectUser.getProjectCode()) .projectName(projectUser.getProjectName()) .owner(projectUser.getUserName()) - .processId(workflowInstance.getId()) - .processName(workflowInstance.getName()) - .processType(workflowInstance.getCommandType()) - .processState(workflowInstance.getState()) + .workflowInstanceId(workflowInstance.getId()) + .workflowInstanceName(workflowInstance.getName()) + .commandType(workflowInstance.getCommandType()) + .workflowExecutionStatus(workflowInstance.getState()) .modifyBy(modifyBy) .runTimes(workflowInstance.getRunTimes()) - .processStartTime(workflowInstance.getStartTime()) - .processEndTime(workflowInstance.getEndTime()) - .processHost(workflowInstance.getHost()) + .workflowStartTime(workflowInstance.getStartTime()) + .workflowEndTime(workflowInstance.getEndTime()) + .workflowHost(workflowInstance.getHost()) .build(); blockingNodeList.add(workflowAlertContent); String content = JSONUtils.toJsonString(blockingNodeList); @@ -475,8 +475,8 @@ public class WorkflowAlertManager { alert.setAlertGroupId(workflowInstance.getWarningGroupId()); alert.setCreateTime(new Date()); alert.setProjectCode(projectUser.getProjectCode()); - alert.setProcessDefinitionCode(workflowInstance.getProcessDefinitionCode()); - alert.setProcessInstanceId(workflowInstance.getId()); + 
alert.setWorkflowDefinitionCode(workflowInstance.getWorkflowDefinitionCode()); + alert.setWorkflowInstanceId(workflowInstance.getId()); alert.setAlertType(AlertType.WORKFLOW_INSTANCE_BLOCKED); alertDao.addAlert(alert); } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/command/CommandServiceImpl.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/command/CommandServiceImpl.java index fca121de81..06518ae9ec 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/command/CommandServiceImpl.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/command/CommandServiceImpl.java @@ -89,7 +89,7 @@ public class CommandServiceImpl implements CommandService { return result; } // add command timezone - Schedule schedule = scheduleMapper.queryByProcessDefinitionCode(command.getProcessDefinitionCode()); + Schedule schedule = scheduleMapper.queryByWorkflowDefinitionCode(command.getWorkflowDefinitionCode()); if (schedule != null) { Map commandParams = StringUtils.isNotBlank(command.getCommandParam()) ? 
JSONUtils.toMap(command.getCommandParam()) @@ -179,7 +179,7 @@ public class CommandServiceImpl implements CommandService { parentWorkflowInstance.getScheduleTime(), task.getWorkerGroup(), task.getEnvironmentCode(), - parentWorkflowInstance.getProcessInstancePriority(), + parentWorkflowInstance.getWorkflowInstancePriority(), parentWorkflowInstance.getDryRun(), subProcessInstanceId, subWorkflowDefinition.getVersion(), diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/cron/CronUtils.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/cron/CronUtils.java index c76bb773ef..e7a165025e 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/cron/CronUtils.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/cron/CronUtils.java @@ -276,7 +276,7 @@ public class CronUtils { calendar.add(Calendar.DATE, 1); break; default: - log.error("Dependent process definition's cycleEnum is {},not support!!", cycleEnum); + log.error("Dependent workflow definition's cycleEnum is {}, not supported!", cycleEnum); break; } maxExpirationTime = calendar.getTime(); diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/expand/CuringParamsService.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/expand/CuringParamsService.java index d764951c73..0c353d5639 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/expand/CuringParamsService.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/expand/CuringParamsService.java @@ -42,12 +42,12 @@ public interface CuringParamsService { /** * time function extension - * @param processInstanceId + * @param workflowInstanceId * @param timezone * @param placeholderName * @return */ - String timeFunctionExtension(Integer processInstanceId, String timezone, String placeholderName); + String 
timeFunctionExtension(Integer workflowInstanceId, String timezone, String placeholderName); /** * convert parameter placeholders @@ -59,7 +59,7 @@ public interface CuringParamsService { /** * curing global params - * @param processInstanceId + * @param workflowInstanceId * @param globalParamMap * @param globalParamList * @param commandType @@ -67,7 +67,7 @@ public interface CuringParamsService { * @param timezone * @return */ - String curingGlobalParams(Integer processInstanceId, Map globalParamMap, + String curingGlobalParams(Integer workflowInstanceId, Map globalParamMap, List globalParamList, CommandType commandType, Date scheduleTime, String timezone); diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/expand/CuringParamsServiceImpl.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/expand/CuringParamsServiceImpl.java index 43666cfb13..1cea96b7e3 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/expand/CuringParamsServiceImpl.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/expand/CuringParamsServiceImpl.java @@ -92,14 +92,15 @@ public class CuringParamsServiceImpl implements CuringParamsService { } @Override - public String timeFunctionExtension(Integer processInstanceId, String timezone, String placeholderName) { - return timePlaceholderResolverExpandService.timeFunctionExtension(processInstanceId, timezone, placeholderName); + public String timeFunctionExtension(Integer workflowInstanceId, String timezone, String placeholderName) { + return timePlaceholderResolverExpandService.timeFunctionExtension(workflowInstanceId, timezone, + placeholderName); } /** * here it is judged whether external expansion calculation is required and the calculation result is obtained * - * @param processInstanceId + * @param workflowInstanceId * @param globalParamMap * @param globalParamList * @param commandType @@ -108,7 +109,7 @@ public 
class CuringParamsServiceImpl implements CuringParamsService { * @return */ @Override - public String curingGlobalParams(Integer processInstanceId, Map globalParamMap, + public String curingGlobalParams(Integer workflowInstanceId, Map globalParamMap, List globalParamList, CommandType commandType, Date scheduleTime, String timezone) { if (globalParamList == null || globalParamList.isEmpty()) { @@ -134,7 +135,7 @@ public class CuringParamsServiceImpl implements CuringParamsService { String str = val; // whether external scaling calculation is required if (timeFunctionNeedExpand(val)) { - str = timeFunctionExtension(processInstanceId, timezone, val); + str = timeFunctionExtension(workflowInstanceId, timezone, val); } resolveMap.put(entry.getKey(), str); } @@ -249,7 +250,7 @@ public class CuringParamsServiceImpl implements CuringParamsService { String val = property.getValue(); // whether external scaling calculation is required if (timeFunctionNeedExpand(val)) { - val = timeFunctionExtension(taskInstance.getProcessInstanceId(), timeZone, val); + val = timeFunctionExtension(taskInstance.getWorkflowInstanceId(), timeZone, val); } else { // handle some chain parameter assign, such as `{"var1": "${var2}", "var2": 1}` should be convert to // `{"var1": 1, "var2": 1}` @@ -287,10 +288,10 @@ public class CuringParamsServiceImpl implements CuringParamsService { params.put(PARAMETER_TASK_INSTANCE_ID, Integer.toString(taskInstance.getId())); params.put(PARAMETER_TASK_DEFINITION_NAME, taskInstance.getName()); params.put(PARAMETER_TASK_DEFINITION_CODE, Long.toString(taskInstance.getTaskCode())); - params.put(PARAMETER_WORKFLOW_INSTANCE_ID, Integer.toString(taskInstance.getProcessInstanceId())); + params.put(PARAMETER_WORKFLOW_INSTANCE_ID, Integer.toString(taskInstance.getWorkflowInstanceId())); // todo: set workflow definitionName and projectName params.put(PARAMETER_WORKFLOW_DEFINITION_NAME, null); - params.put(PARAMETER_WORKFLOW_DEFINITION_CODE, 
Long.toString(workflowInstance.getProcessDefinitionCode())); + params.put(PARAMETER_WORKFLOW_DEFINITION_CODE, Long.toString(workflowInstance.getWorkflowDefinitionCode())); params.put(PARAMETER_PROJECT_NAME, null); params.put(PARAMETER_PROJECT_CODE, Long.toString(workflowInstance.getProjectCode())); return params; diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/expand/TimePlaceholderResolverExpandService.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/expand/TimePlaceholderResolverExpandService.java index 98a0bb2b00..a1d5d737e3 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/expand/TimePlaceholderResolverExpandService.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/expand/TimePlaceholderResolverExpandService.java @@ -31,5 +31,5 @@ public interface TimePlaceholderResolverExpandService { * @param placeholderName * @return */ - String timeFunctionExtension(Integer processInstanceId, String timeZone, String placeholderName); + String timeFunctionExtension(Integer workflowInstanceId, String timeZone, String placeholderName); } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/expand/TimePlaceholderResolverExpandServiceImpl.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/expand/TimePlaceholderResolverExpandServiceImpl.java index 2ac37b0887..a43c20d44a 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/expand/TimePlaceholderResolverExpandServiceImpl.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/expand/TimePlaceholderResolverExpandServiceImpl.java @@ -28,7 +28,7 @@ public class TimePlaceholderResolverExpandServiceImpl implements TimePlaceholder } @Override - public String timeFunctionExtension(Integer processInstanceId, String timeZone, String placeholderName) { + public String 
timeFunctionExtension(Integer workflowInstanceId, String timeZone, String placeholderName) { return null; } } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java index 2c72eae29b..9a349b8cf5 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java @@ -26,12 +26,9 @@ import org.apache.dolphinscheduler.dao.entity.Command; import org.apache.dolphinscheduler.dao.entity.DagData; import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.DqComparisonType; -import org.apache.dolphinscheduler.dao.entity.DqExecuteResult; import org.apache.dolphinscheduler.dao.entity.DqRule; import org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql; import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry; -import org.apache.dolphinscheduler.dao.entity.Environment; -import org.apache.dolphinscheduler.dao.entity.ProjectUser; import org.apache.dolphinscheduler.dao.entity.Schedule; import org.apache.dolphinscheduler.dao.entity.TaskDefinition; import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog; @@ -50,76 +47,52 @@ import java.util.List; import java.util.Map; import java.util.Optional; -import org.springframework.transaction.annotation.Transactional; - public interface ProcessService { - WorkflowInstance constructProcessInstance(Command command, - String host) throws CronParseException, CodeGenerateUtils.CodeGenerateException; + WorkflowInstance constructWorkflowInstance(Command command, + String host) throws CronParseException, CodeGenerateUtils.CodeGenerateException; - Optional findWorkflowInstanceDetailById(int processId); + Optional findWorkflowInstanceDetailById(int workflowInstanceId); - 
WorkflowInstance findProcessInstanceById(int processId); + WorkflowInstance findWorkflowInstanceById(int workflowInstanceId); - WorkflowDefinition findProcessDefinition(Long processDefinitionCode, int processDefinitionVersion); + WorkflowDefinition findWorkflowDefinition(Long workflowDefinitionCode, int workflowDefinitionVersion); - WorkflowDefinition findProcessDefinitionByCode(Long processDefinitionCode); + WorkflowDefinition findWorkflowDefinitionByCode(Long workflowDefinitionCode); - int deleteWorkProcessInstanceById(int processInstanceId); + int deleteWorkflowInstanceById(int workflowInstanceId); - int deleteAllSubWorkProcessByParentId(int processInstanceId); + int deleteAllSubWorkflowByParentId(int workflowInstanceId); - void removeTaskLogFile(Integer processInstanceId); + void removeTaskLogFile(Integer workflowInstanceId); List findAllSubWorkflowDefinitionCode(long workflowDefinitionCode); - String getTenantForProcess(String tenantCode, int userId); + String getTenantForWorkflow(String tenantCode, int userId); - Environment findEnvironmentByCode(Long environmentCode); + int deleteWorkflowMapByParentId(int parentWorkflowId); - void setSubProcessParam(WorkflowInstance subWorkflowInstance); + WorkflowInstance findSubWorkflowInstance(Integer parentWorkflowInstanceId, Integer parentTaskId); - @Transactional - boolean submitTask(WorkflowInstance workflowInstance, TaskInstance taskInstance); - - void createSubWorkProcess(WorkflowInstance parentWorkflowInstance, TaskInstance task); - - void packageTaskInstance(TaskInstance taskInstance, WorkflowInstance workflowInstance); - - void updateTaskDefinitionResources(TaskDefinition taskDefinition); - - int deleteWorkProcessMapByParentId(int parentWorkProcessId); - - WorkflowInstance findSubWorkflowInstance(Integer parentProcessId, Integer parentTaskId); - - WorkflowInstance findParentWorkflowInstance(Integer subProcessId); + WorkflowInstance findParentWorkflowInstance(Integer subWorkflowInstanceId); void 
changeOutParam(TaskInstance taskInstance); Schedule querySchedule(int id); - List queryReleaseSchedulerListByProcessDefinitionCode(long processDefinitionCode); - - List queryNeedFailoverProcessInstances(String host); - - List queryNeedFailoverProcessInstanceHost(); - - @Transactional - void processNeedFailoverProcessInstances(WorkflowInstance workflowInstance); + List queryReleaseSchedulerListByWorkflowDefinitionCode(long workflowDefinitionCode); DataSource findDataSourceById(int id); - ProjectUser queryProjectWithUserByProcessInstanceId(int processInstanceId); - List listUnauthorized(int userId, T[] needChecks, AuthorizationType authorizationType); User getUserById(int userId); String formatTaskAppId(TaskInstance taskInstance); - int switchVersion(WorkflowDefinition workflowDefinition, WorkflowDefinitionLog processDefinitionLog); + int switchVersion(WorkflowDefinition workflowDefinition, WorkflowDefinitionLog workflowDefinitionLog); - int switchProcessTaskRelationVersion(WorkflowDefinition workflowDefinition); + int switchWorkflowTaskRelationVersion(WorkflowDefinition workflowDefinition); int switchTaskDefinitionVersion(long taskCode, int taskVersion); @@ -128,9 +101,9 @@ public interface ProcessService { int saveTaskDefine(User operator, long projectCode, List taskDefinitionLogs, Boolean syncDefine); int saveWorkflowDefine(User operator, WorkflowDefinition workflowDefinition, Boolean syncDefine, - Boolean isFromProcessDefine); + Boolean isFromWorkflowDefinition); - int saveTaskRelation(User operator, long projectCode, long processDefinitionCode, int processDefinitionVersion, + int saveTaskRelation(User operator, long projectCode, long workflowDefinitionCode, int workflowDefinitionVersion, List taskRelationList, List taskDefinitionLogs, Boolean syncDefine); @@ -140,21 +113,11 @@ public interface ProcessService { DagData genDagData(WorkflowDefinition workflowDefinition); - List findRelationByCode(long processDefinitionCode, int processDefinitionVersion); + List 
findRelationByCode(long workflowDefinitionCode, int workflowDefinitionVersion); List transformTask(List taskRelationList, List taskDefinitionLogs); - DqExecuteResult getDqExecuteResultByTaskInstanceId(int taskInstanceId); - - int updateDqExecuteResultUserId(int taskInstanceId); - - int updateDqExecuteResultState(DqExecuteResult dqExecuteResult); - - int deleteDqExecuteResultByTaskInstanceId(int taskInstanceId); - - int deleteTaskStatisticsValueByTaskInstanceId(int taskInstanceId); - DqRule getDqRule(int ruleId); List getRuleInputEntry(int ruleId); @@ -166,15 +129,13 @@ public interface ProcessService { TaskGroupQueue insertIntoTaskGroupQueue(Integer taskId, String taskName, Integer groupId, - Integer processId, + Integer workflowInstanceId, Integer priority, TaskGroupQueueStatus status); - WorkflowInstance loadNextProcess4Serial(long code, int state, int id); + String findConfigYamlByName(String clusterName); - public String findConfigYamlByName(String clusterName); - - void forceProcessInstanceSuccessByTaskInstanceId(TaskInstance taskInstance); + void forceWorkflowInstanceSuccessByTaskInstanceId(TaskInstance taskInstance); void setGlobalParamIfCommanded(WorkflowDefinition workflowDefinition, Map cmdParam); } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessServiceImpl.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessServiceImpl.java index 8918a87203..31c3772b70 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessServiceImpl.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessServiceImpl.java @@ -21,14 +21,8 @@ import static java.util.stream.Collectors.toSet; import static org.apache.dolphinscheduler.common.constants.CommandKeyConstants.CMD_PARAM_COMPLEMENT_DATA_END_DATE; import static 
org.apache.dolphinscheduler.common.constants.CommandKeyConstants.CMD_PARAM_COMPLEMENT_DATA_SCHEDULE_DATE_LIST; import static org.apache.dolphinscheduler.common.constants.CommandKeyConstants.CMD_PARAM_COMPLEMENT_DATA_START_DATE; -import static org.apache.dolphinscheduler.common.constants.CommandKeyConstants.CMD_PARAM_EMPTY_SUB_PROCESS; -import static org.apache.dolphinscheduler.common.constants.CommandKeyConstants.CMD_PARAM_RECOVER_WORKFLOW_ID_STRING; -import static org.apache.dolphinscheduler.common.constants.CommandKeyConstants.CMD_PARAM_START_PARAMS; -import static org.apache.dolphinscheduler.common.constants.CommandKeyConstants.CMD_PARAM_SUB_PROCESS; import static org.apache.dolphinscheduler.common.constants.CommandKeyConstants.CMD_PARAM_SUB_WORKFLOW_DEFINITION_CODE; -import static org.apache.dolphinscheduler.common.constants.CommandKeyConstants.CMD_PARAM_SUB_WORKFLOW_PARENT_INSTANCE_ID; import static org.apache.dolphinscheduler.common.constants.Constants.LOCAL_PARAMS; -import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.TASK_INSTANCE_ID; import org.apache.dolphinscheduler.common.constants.CommandKeyConstants; import org.apache.dolphinscheduler.common.constants.Constants; @@ -52,13 +46,9 @@ import org.apache.dolphinscheduler.dao.entity.Command; import org.apache.dolphinscheduler.dao.entity.DagData; import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.DqComparisonType; -import org.apache.dolphinscheduler.dao.entity.DqExecuteResult; import org.apache.dolphinscheduler.dao.entity.DqRule; import org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql; import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry; -import org.apache.dolphinscheduler.dao.entity.DqTaskStatisticsValue; -import org.apache.dolphinscheduler.dao.entity.Environment; -import org.apache.dolphinscheduler.dao.entity.ProjectUser; import org.apache.dolphinscheduler.dao.entity.Schedule; import 
org.apache.dolphinscheduler.dao.entity.TaskDefinition; import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog; @@ -76,19 +66,13 @@ import org.apache.dolphinscheduler.dao.mapper.ClusterMapper; import org.apache.dolphinscheduler.dao.mapper.CommandMapper; import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper; import org.apache.dolphinscheduler.dao.mapper.DqComparisonTypeMapper; -import org.apache.dolphinscheduler.dao.mapper.DqExecuteResultMapper; import org.apache.dolphinscheduler.dao.mapper.DqRuleExecuteSqlMapper; import org.apache.dolphinscheduler.dao.mapper.DqRuleInputEntryMapper; import org.apache.dolphinscheduler.dao.mapper.DqRuleMapper; -import org.apache.dolphinscheduler.dao.mapper.DqTaskStatisticsValueMapper; -import org.apache.dolphinscheduler.dao.mapper.EnvironmentMapper; -import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper; import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionLogMapper; import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; -import org.apache.dolphinscheduler.dao.mapper.TaskGroupMapper; import org.apache.dolphinscheduler.dao.mapper.TaskGroupQueueMapper; -import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; import org.apache.dolphinscheduler.dao.mapper.WorkflowDefinitionLogMapper; @@ -109,10 +93,9 @@ import org.apache.dolphinscheduler.extract.base.client.Clients; import org.apache.dolphinscheduler.extract.common.ILogService; import org.apache.dolphinscheduler.plugin.task.api.enums.Direct; import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus; -import org.apache.dolphinscheduler.plugin.task.api.enums.dp.DqTaskState; import org.apache.dolphinscheduler.plugin.task.api.model.Property; import org.apache.dolphinscheduler.plugin.task.api.model.ResourceInfo; -import 
org.apache.dolphinscheduler.plugin.task.api.parameters.SubProcessParameters; +import org.apache.dolphinscheduler.plugin.task.api.parameters.SubWorkflowParameters; import org.apache.dolphinscheduler.plugin.task.api.parameters.TaskTimeoutParameter; import org.apache.dolphinscheduler.plugin.task.api.utils.TaskTypeUtils; import org.apache.dolphinscheduler.service.command.CommandService; @@ -149,7 +132,6 @@ import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; -import org.springframework.transaction.annotation.Transactional; import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; import com.fasterxml.jackson.core.type.TypeReference; @@ -168,10 +150,10 @@ public class ProcessServiceImpl implements ProcessService { private UserMapper userMapper; @Autowired - private WorkflowDefinitionMapper processDefineMapper; + private WorkflowDefinitionMapper workflowDefinitionMapper; @Autowired - private WorkflowDefinitionLogMapper processDefineLogMapper; + private WorkflowDefinitionLogMapper workflowDefinitionLogMapper; // todo replace with workflowInstanceDao @Autowired @@ -198,9 +180,6 @@ public class ProcessServiceImpl implements ProcessService { @Autowired private WorkflowInstanceRelationMapper workflowInstanceRelationMapper; - @Autowired - private TaskInstanceMapper taskInstanceMapper; - @Autowired private CommandMapper commandMapper; @@ -210,12 +189,6 @@ public class ProcessServiceImpl implements ProcessService { @Autowired private TenantMapper tenantMapper; - @Autowired - private ProjectMapper projectMapper; - - @Autowired - private DqExecuteResultMapper dqExecuteResultMapper; - @Autowired private DqRuleMapper dqRuleMapper; @@ -228,9 +201,6 @@ public class ProcessServiceImpl implements ProcessService { @Autowired private DqComparisonTypeMapper dqComparisonTypeMapper; - @Autowired - private DqTaskStatisticsValueMapper dqTaskStatisticsValueMapper; - @Autowired private 
TaskDefinitionMapper taskDefinitionMapper; @@ -243,15 +213,9 @@ public class ProcessServiceImpl implements ProcessService { @Autowired private WorkflowTaskRelationLogMapper workflowTaskRelationLogMapper; - @Autowired - private EnvironmentMapper environmentMapper; - @Autowired private TaskGroupQueueMapper taskGroupQueueMapper; - @Autowired - private TaskGroupMapper taskGroupMapper; - @Autowired private ClusterMapper clusterMapper; @@ -262,38 +226,39 @@ public class ProcessServiceImpl implements ProcessService { private CommandService commandService; /** - * find process instance detail by id + * find workflow instance detail by id * - * @param processId processId - * @return process instance + * @param workflowInstanceId workflowInstanceId + * @return workflow instance */ @Override - public Optional findWorkflowInstanceDetailById(int processId) { - return Optional.ofNullable(workflowInstanceMapper.queryDetailById(processId)); + public Optional findWorkflowInstanceDetailById(int workflowInstanceId) { + return Optional.ofNullable(workflowInstanceMapper.queryDetailById(workflowInstanceId)); } /** - * find process instance by id + * find workflow instance by id * - * @param processId processId - * @return process instance + * @param workflowInstanceId workflowInstanceId + * @return workflow instance */ @Override - public WorkflowInstance findProcessInstanceById(int processId) { - return workflowInstanceMapper.selectById(processId); + public WorkflowInstance findWorkflowInstanceById(int workflowInstanceId) { + return workflowInstanceMapper.selectById(workflowInstanceId); } /** - * find process define by code and version. + * find workflow define by code and version. 
* - * @param processDefinitionCode processDefinitionCode - * @return process definition + * @param workflowDefinitionCode workflowDefinitionCode + * @return workflow definition */ @Override - public WorkflowDefinition findProcessDefinition(Long processDefinitionCode, int version) { - WorkflowDefinition workflowDefinition = processDefineMapper.queryByCode(processDefinitionCode); - if (workflowDefinition == null || workflowDefinition.getVersion() != version) { - workflowDefinition = processDefineLogMapper.queryByDefinitionCodeAndVersion(processDefinitionCode, version); + public WorkflowDefinition findWorkflowDefinition(Long workflowDefinitionCode, int workflowDefinitionVersion) { + WorkflowDefinition workflowDefinition = workflowDefinitionMapper.queryByCode(workflowDefinitionCode); + if (workflowDefinition == null || workflowDefinition.getVersion() != workflowDefinitionVersion) { + workflowDefinition = workflowDefinitionLogMapper.queryByDefinitionCodeAndVersion(workflowDefinitionCode, + workflowDefinitionVersion); if (workflowDefinition != null) { workflowDefinition.setId(0); } @@ -302,43 +267,43 @@ public class ProcessServiceImpl implements ProcessService { } /** - * find process define by code. + * find workflow define by code. 
* - * @param processDefinitionCode processDefinitionCode - * @return process definition + * @param workflowDefinitionCode workflowDefinitionCode + * @return workflow definition */ @Override - public WorkflowDefinition findProcessDefinitionByCode(Long processDefinitionCode) { - return processDefineMapper.queryByCode(processDefinitionCode); + public WorkflowDefinition findWorkflowDefinitionByCode(Long workflowDefinitionCode) { + return workflowDefinitionMapper.queryByCode(workflowDefinitionCode); } /** - * delete work process instance by id + * delete work workflow instance by id * - * @param processInstanceId processInstanceId - * @return delete process instance result + * @param workflowInstanceId workflowInstanceId + * @return delete workflow instance result */ @Override - public int deleteWorkProcessInstanceById(int processInstanceId) { - return workflowInstanceMapper.deleteById(processInstanceId); + public int deleteWorkflowInstanceById(int workflowInstanceId) { + return workflowInstanceMapper.deleteById(workflowInstanceId); } /** - * delete all sub process by parent instance id + * delete all sub workflow by parent instance id * - * @param processInstanceId processInstanceId - * @return delete all sub process instance result + * @param workflowInstanceId workflowInstanceId + * @return delete all sub workflow instance result */ @Override - public int deleteAllSubWorkProcessByParentId(int processInstanceId) { + public int deleteAllSubWorkflowByParentId(int workflowInstanceId) { - List subProcessIdList = workflowInstanceRelationMapper.querySubIdListByParentId(processInstanceId); + List subWorkflowIdList = workflowInstanceRelationMapper.querySubIdListByParentId(workflowInstanceId); - for (Integer subId : subProcessIdList) { - deleteAllSubWorkProcessByParentId(subId); - deleteWorkProcessMapByParentId(subId); + for (Integer subId : subWorkflowIdList) { + deleteAllSubWorkflowByParentId(subId); + deleteWorkflowMapByParentId(subId); removeTaskLogFile(subId); - 
deleteWorkProcessInstanceById(subId); + deleteWorkflowInstanceById(subId); } return 1; } @@ -346,11 +311,11 @@ public class ProcessServiceImpl implements ProcessService { /** * remove task log file * - * @param processInstanceId processInstanceId + * @param workflowInstanceId workflowInstanceId */ @Override - public void removeTaskLogFile(Integer processInstanceId) { - List taskInstanceList = taskInstanceDao.queryByWorkflowInstanceId(processInstanceId); + public void removeTaskLogFile(Integer workflowInstanceId) { + List taskInstanceList = taskInstanceDao.queryByWorkflowInstanceId(workflowInstanceId); if (CollectionUtils.isEmpty(taskInstanceList)) { return; } @@ -367,7 +332,7 @@ public class ProcessServiceImpl implements ProcessService { } /** - * recursive query sub process definition id by parent id. + * recursive query sub workflow definition id by parent id. * * @param parentCode parentCode */ @@ -383,8 +348,8 @@ public class ProcessServiceImpl implements ProcessService { String parameter = taskNode.getTaskParams(); ObjectNode parameterJson = JSONUtils.parseObject(parameter); if (parameterJson.get(CMD_PARAM_SUB_WORKFLOW_DEFINITION_CODE) != null) { - SubProcessParameters subProcessParam = JSONUtils.parseObject(parameter, SubProcessParameters.class); - long subWorkflowDefinitionCode = subProcessParam.getProcessDefinitionCode(); + SubWorkflowParameters subProcessParam = JSONUtils.parseObject(parameter, SubWorkflowParameters.class); + long subWorkflowDefinitionCode = subProcessParam.getWorkflowDefinitionCode(); subWorkflowDefinitionCodes.add(subWorkflowDefinitionCode); subWorkflowDefinitionCodes.addAll(findAllSubWorkflowDefinitionCode(subWorkflowDefinitionCode)); } @@ -393,30 +358,30 @@ public class ProcessServiceImpl implements ProcessService { } /** - * generate a new work process instance from command. + * generate a new work workflow instance from command. 
* - * @param workflowDefinition processDefinition + * @param workflowDefinition workflowDefinition * @param command command * @param cmdParam cmdParam map - * @return process instance + * @return workflow instance */ - private WorkflowInstance generateNewProcessInstance(WorkflowDefinition workflowDefinition, - Command command, - Map cmdParam) { + private WorkflowInstance generateNewWorkflowInstance(WorkflowDefinition workflowDefinition, + Command command, + Map cmdParam) { WorkflowInstance workflowInstance = new WorkflowInstance(workflowDefinition); - workflowInstance.setProcessDefinitionCode(workflowDefinition.getCode()); - workflowInstance.setProcessDefinitionVersion(workflowDefinition.getVersion()); + workflowInstance.setWorkflowDefinitionCode(workflowDefinition.getCode()); + workflowInstance.setWorkflowDefinitionVersion(workflowDefinition.getVersion()); workflowInstance.setProjectCode(workflowDefinition.getProjectCode()); workflowInstance.setStateWithDesc(WorkflowExecutionStatus.RUNNING_EXECUTION, "init running"); workflowInstance.setRecovery(Flag.NO); workflowInstance.setStartTime(new Date()); - // the new process instance restart time is null. + // the new workflow instance restart time is null. 
workflowInstance.setRestartTime(null); workflowInstance.setRunTimes(1); workflowInstance.setMaxTryTimes(0); workflowInstance.setCommandParam(command.getCommandParam()); workflowInstance.setCommandType(command.getCommandType()); - workflowInstance.setIsSubProcess(Flag.NO); + workflowInstance.setIsSubWorkflow(Flag.NO); workflowInstance.setTaskDependType(command.getTaskDependType()); workflowInstance.setFailureStrategy(command.getFailureStrategy()); workflowInstance.setExecutorId(command.getExecutorId()); @@ -452,8 +417,8 @@ public class ProcessServiceImpl implements ProcessService { workflowInstance.getScheduleTime(), timezoneId); workflowInstance.setGlobalParams(globalParams); - // set process instance priority - workflowInstance.setProcessInstancePriority(command.getProcessInstancePriority()); + // set workflow instance priority + workflowInstance.setWorkflowInstancePriority(command.getWorkflowInstancePriority()); workflowInstance.setWorkerGroup(WorkerGroupUtils.getWorkerGroupOrDefault(command.getWorkerGroup())); workflowInstance.setEnvironmentCode(EnvironmentUtils.getEnvironmentCodeOrDefault(command.getEnvironmentCode())); workflowInstance.setTimeout(workflowDefinition.getTimeout()); @@ -507,7 +472,7 @@ public class ProcessServiceImpl implements ProcessService { * @return tenant code */ @Override - public String getTenantForProcess(String tenantCode, int userId) { + public String getTenantForWorkflow(String tenantCode, int userId) { if (StringUtils.isNoneBlank(tenantCode) && !Constants.DEFAULT.equals(tenantCode)) { return tenantCode; } @@ -521,22 +486,6 @@ public class ProcessServiceImpl implements ProcessService { return tenant.getTenantCode(); } - /** - * get an environment - * use the code of the environment to find a environment. 
- * - * @param environmentCode environmentCode - * @return Environment - */ - @Override - public Environment findEnvironmentByCode(Long environmentCode) { - Environment environment = null; - if (environmentCode >= 0) { - environment = environmentMapper.queryByEnvironmentCode(environmentCode); - } - return environment; - } - /** * check command parameters is valid * @@ -558,34 +507,35 @@ public class ProcessServiceImpl implements ProcessService { } /** - * construct process instance according to one command. + * construct workflow instance according to one command. * * @param command command * @param host host - * @return process instance + * @return workflow instance */ @Override - public @Nullable WorkflowInstance constructProcessInstance(Command command, - String host) throws CronParseException, CodeGenerateException { + public @Nullable WorkflowInstance constructWorkflowInstance(Command command, + String host) throws CronParseException, CodeGenerateException { WorkflowInstance workflowInstance; WorkflowDefinition workflowDefinition; CommandType commandType = command.getCommandType(); workflowDefinition = - this.findProcessDefinition(command.getProcessDefinitionCode(), command.getProcessDefinitionVersion()); + this.findWorkflowDefinition(command.getWorkflowDefinitionCode(), + command.getWorkflowDefinitionVersion()); if (workflowDefinition == null) { - log.error("cannot find the work process define! define code : {}", command.getProcessDefinitionCode()); - throw new IllegalArgumentException("Cannot find the process definition for this workflowInstance"); + log.error("cannot find the work workflow define! 
define code : {}", command.getWorkflowDefinitionCode()); + throw new IllegalArgumentException("Cannot find the workflow definition for this workflowInstance"); } Map cmdParam = JSONUtils.toMap(command.getCommandParam()); if (cmdParam == null) { cmdParam = new HashMap<>(); } - int processInstanceId = command.getProcessInstanceId(); - if (processInstanceId == 0) { - workflowInstance = generateNewProcessInstance(workflowDefinition, command, cmdParam); + int workflowInstanceId = command.getWorkflowInstanceId(); + if (workflowInstanceId == 0) { + workflowInstance = generateNewWorkflowInstance(workflowDefinition, command, cmdParam); } else { - workflowInstance = this.findWorkflowInstanceDetailById(processInstanceId).orElse(null); + workflowInstance = this.findWorkflowInstanceDetailById(workflowInstanceId).orElse(null); setGlobalParamIfCommanded(workflowDefinition, cmdParam); if (workflowInstance == null) { return null; @@ -593,7 +543,7 @@ public class ProcessServiceImpl implements ProcessService { } CommandType commandTypeIfComplement = getCommandTypeIfComplement(workflowInstance, command); - // reset global params while repeat running and recover tolerance fault process is needed by cmdParam + // reset global params while repeat running and recover tolerance fault workflow is needed by cmdParam if (commandTypeIfComplement == CommandType.REPEAT_RUNNING || commandTypeIfComplement == CommandType.RECOVER_TOLERANCE_FAULT_PROCESS || commandTypeIfComplement == CommandType.RECOVER_SERIAL_WAIT) { @@ -614,16 +564,16 @@ public class ProcessServiceImpl implements ProcessService { // reset command parameter if (workflowInstance.getCommandParam() != null) { - Map processCmdParam = JSONUtils.toMap(workflowInstance.getCommandParam()); + Map workflowCmdParam = JSONUtils.toMap(workflowInstance.getCommandParam()); Map finalCmdParam = cmdParam; - processCmdParam.forEach((key, value) -> { + workflowCmdParam.forEach((key, value) -> { if (!finalCmdParam.containsKey(key)) { 
finalCmdParam.put(key, value); } }); } - // reset command parameter if sub process - if (cmdParam.containsKey(CommandKeyConstants.CMD_PARAM_SUB_PROCESS)) { + // reset command parameter if sub workflow + if (cmdParam.containsKey(CommandKeyConstants.CMD_PARAM_SUB_WORKFLOW)) { workflowInstance.setCommandParam(command.getCommandParam()); } if (Boolean.FALSE.equals(checkCmdParam(command, cmdParam))) { @@ -670,7 +620,7 @@ public class ProcessServiceImpl implements ProcessService { case START_CURRENT_TASK_PROCESS: break; case RECOVER_TOLERANCE_FAULT_PROCESS: - // recover tolerance fault process + // recover tolerance fault workflow // If the workflow instance is in ready state, we will change to running, this can avoid the workflow // instance // status is not correct with taskInstance status @@ -736,44 +686,9 @@ public class ProcessServiceImpl implements ProcessService { } /** - * get process definition by command - * If it is a fault-tolerant command, get the specified version of ProcessDefinition through ProcessInstance - * Otherwise, get the latest version of ProcessDefinition + * return complement data if the workflow start with complement data * - * @return ProcessDefinition - */ - private @Nullable WorkflowDefinition getProcessDefinitionByCommand(long processDefinitionCode, - Map cmdParam) { - if (cmdParam != null) { - int processInstanceId = 0; - if (cmdParam.containsKey(CommandKeyConstants.CMD_PARAM_RECOVER_WORKFLOW_ID_STRING)) { - processInstanceId = - Integer.parseInt(cmdParam.get(CommandKeyConstants.CMD_PARAM_RECOVER_WORKFLOW_ID_STRING)); - } else if (cmdParam.containsKey(CommandKeyConstants.CMD_PARAM_SUB_PROCESS)) { - processInstanceId = Integer.parseInt(cmdParam.get(CommandKeyConstants.CMD_PARAM_SUB_PROCESS)); - } else if (cmdParam.containsKey(CommandKeyConstants.CMD_PARAM_RECOVERY_WAITING_THREAD)) { - processInstanceId = - Integer.parseInt(cmdParam.get(CommandKeyConstants.CMD_PARAM_RECOVERY_WAITING_THREAD)); - } - - if (processInstanceId != 0) { - 
WorkflowInstance workflowInstance = this.findWorkflowInstanceDetailById(processInstanceId).orElse(null); - if (workflowInstance == null) { - return null; - } - - return processDefineLogMapper.queryByDefinitionCodeAndVersion( - workflowInstance.getProcessDefinitionCode(), workflowInstance.getProcessDefinitionVersion()); - } - } - - return processDefineMapper.queryByCode(processDefinitionCode); - } - - /** - * return complement data if the process start with complement data - * - * @param workflowInstance processInstance + * @param workflowInstance workflowInstance * @param command command * @return command type */ @@ -788,8 +703,8 @@ public class ProcessServiceImpl implements ProcessService { /** * initialize complement data parameters * - * @param workflowDefinition processDefinition - * @param workflowInstance processInstance + * @param workflowDefinition workflowDefinition + * @param workflowInstance workflowInstance * @param cmdParam cmdParam */ private void initComplementDataParam(WorkflowDefinition workflowDefinition, @@ -804,14 +719,14 @@ public class ProcessServiceImpl implements ProcessService { List complementDate = Lists.newLinkedList(); if (start != null && end != null) { List listSchedules = - queryReleaseSchedulerListByProcessDefinitionCode(workflowInstance.getProcessDefinitionCode()); + queryReleaseSchedulerListByWorkflowDefinitionCode(workflowInstance.getWorkflowDefinitionCode()); complementDate = CronUtils.getSelfFireDateList(start, end, listSchedules); } if (cmdParam.containsKey(CMD_PARAM_COMPLEMENT_DATA_SCHEDULE_DATE_LIST)) { complementDate = CronUtils.getSelfScheduleDateList(cmdParam); } - if (CollectionUtils.isNotEmpty(complementDate) && Flag.NO == workflowInstance.getIsSubProcess()) { + if (CollectionUtils.isNotEmpty(complementDate) && Flag.NO == workflowInstance.getIsSubWorkflow()) { workflowInstance.setScheduleTime(complementDate.get(0)); } @@ -825,104 +740,6 @@ public class ProcessServiceImpl implements ProcessService { 
workflowInstance.setGlobalParams(globalParams); } - /** - * set sub work process parameters. - * handle sub work process instance, update relation table and command parameters - * set sub work process flag, extends parent work process command parameters - * - * @param subWorkflowInstance subProcessInstance - */ - @Override - public void setSubProcessParam(WorkflowInstance subWorkflowInstance) { - String cmdParam = subWorkflowInstance.getCommandParam(); - if (Strings.isNullOrEmpty(cmdParam)) { - return; - } - Map paramMap = JSONUtils.toMap(cmdParam); - // write sub process id into cmd param. - if (paramMap.containsKey(CMD_PARAM_SUB_PROCESS) - && CMD_PARAM_EMPTY_SUB_PROCESS.equals(paramMap.get(CMD_PARAM_SUB_PROCESS))) { - paramMap.remove(CMD_PARAM_SUB_PROCESS); - paramMap.put(CMD_PARAM_SUB_PROCESS, String.valueOf(subWorkflowInstance.getId())); - subWorkflowInstance.setCommandParam(JSONUtils.toJsonString(paramMap)); - subWorkflowInstance.setIsSubProcess(Flag.YES); - workflowInstanceDao.upsertWorkflowInstance(subWorkflowInstance); - } - // copy parent instance user def params to sub process.. 
- String parentInstanceId = paramMap.get(CMD_PARAM_SUB_WORKFLOW_PARENT_INSTANCE_ID); - if (!Strings.isNullOrEmpty(parentInstanceId)) { - WorkflowInstance parentInstance = - findWorkflowInstanceDetailById(Integer.parseInt(parentInstanceId)).orElse(null); - if (parentInstance != null) { - subWorkflowInstance.setGlobalParams( - joinGlobalParams(parentInstance.getGlobalParams(), subWorkflowInstance.getGlobalParams())); - subWorkflowInstance - .setVarPool(joinVarPool(parentInstance.getVarPool(), subWorkflowInstance.getVarPool())); - workflowInstanceDao.upsertWorkflowInstance(subWorkflowInstance); - } else { - log.error("sub process command params error, cannot find parent instance: {} ", cmdParam); - } - } - WorkflowInstanceRelation workflowInstanceRelation = - JSONUtils.parseObject(cmdParam, WorkflowInstanceRelation.class); - if (workflowInstanceRelation == null || workflowInstanceRelation.getParentProcessInstanceId() == 0) { - return; - } - // update sub process id to process map table - workflowInstanceRelation.setProcessInstanceId(subWorkflowInstance.getId()); - - workflowInstanceMapDao.updateById(workflowInstanceRelation); - } - - /** - * join parent global params into sub process. - * only the keys doesn't in sub process global would be joined. - * - * @param parentGlobalParams parentGlobalParams - * @param subGlobalParams subGlobalParams - * @return global params join - */ - private String joinGlobalParams(String parentGlobalParams, String subGlobalParams) { - - // Since JSONUtils.toList return unmodified list, we need to creat a new List here. 
- List parentParams = Lists.newArrayList(JSONUtils.toList(parentGlobalParams, Property.class)); - List subParams = JSONUtils.toList(subGlobalParams, Property.class); - - Set parentParamKeys = parentParams.stream().map(Property::getProp).collect(toSet()); - - // We will combine the params of parent workflow and sub workflow - // If the params are defined in both, we will use parent's params to override the sub workflow(ISSUE-7962) - // todo: Do we need to consider the other attribute of Property? - // e.g. the subProp's type is not equals with parent, or subProp's direct is not equals with parent - // It's suggested to add node name in property, this kind of problem can be solved. - List extraSubParams = subParams.stream() - .filter(subProp -> !parentParamKeys.contains(subProp.getProp())).collect(Collectors.toList()); - parentParams.addAll(extraSubParams); - return JSONUtils.toJsonString(parentParams); - } - - /** - * join parent var pool params into sub process. - * only the keys doesn't in sub process global would be joined. 
- * - * @param parentValPool - * @param subValPool - * @return - */ - private String joinVarPool(String parentValPool, String subValPool) { - List parentValPools = Lists.newArrayList(JSONUtils.toList(parentValPool, Property.class)); - parentValPools = parentValPools.stream().filter(valPool -> valPool.getDirect() == Direct.OUT) - .collect(Collectors.toList()); - - List subValPools = Lists.newArrayList(JSONUtils.toList(subValPool, Property.class)); - - Set parentValPoolKeys = parentValPools.stream().map(Property::getProp).collect(toSet()); - List extraSubValPools = subValPools.stream().filter(sub -> !parentValPoolKeys.contains(sub.getProp())) - .collect(Collectors.toList()); - parentValPools.addAll(extraSubValPools); - return JSONUtils.toJsonString(parentValPools); - } - /** * initialize task instance * @@ -940,233 +757,6 @@ public class ProcessServiceImpl implements ProcessService { taskInstanceDao.updateById(taskInstance); } - /** - * // todo: This method need to refactor, we find when the db down, but the taskInstanceId is not 0. 
It's better to change to void, rather than return TaskInstance - * submit task to db - * submit sub process to command - * - * @param workflowInstance processInstance - * @param taskInstance taskInstance - * @return task instance - */ - @Override - @Transactional - public boolean submitTask(WorkflowInstance workflowInstance, TaskInstance taskInstance) { - log.info("Start save taskInstance to database : {}, processInstance id:{}, state: {}", - taskInstance.getName(), - taskInstance.getProcessInstanceId(), - workflowInstance.getState()); - // submit to db - if (!taskInstanceDao.submitTaskInstanceToDB(taskInstance, workflowInstance)) { - log.error("Save taskInstance to db error, task name:{}, process id:{} state: {} ", - taskInstance.getName(), - taskInstance.getWorkflowInstance().getId(), - workflowInstance.getState()); - return false; - } - - if (!taskInstance.getState().isFinished()) { - createSubWorkProcess(workflowInstance, taskInstance); - } - - log.info( - "End save taskInstance to db successfully:{}, taskInstanceName: {}, taskInstance state:{}, processInstanceId:{}, processInstanceState: {}", - taskInstance.getId(), - taskInstance.getName(), - taskInstance.getState(), - workflowInstance.getId(), - workflowInstance.getState()); - return true; - } - - /** - * set work process instance map - * consider o - * repeat running does not generate new sub process instance - * set map {parent instance id, task instance id, 0(child instance id)} - * - * @param parentInstance parentInstance - * @param parentTask parentTask - * @param processMap processMap - * @return process instance map - */ - private WorkflowInstanceRelation setProcessInstanceMap(WorkflowInstance parentInstance, TaskInstance parentTask, - WorkflowInstanceRelation processMap) { - if (processMap != null) { - return processMap; - } - if (parentInstance.getCommandType() == CommandType.REPEAT_RUNNING) { - // update current task id to map - processMap = findPreviousTaskProcessMap(parentInstance, parentTask); - 
if (processMap != null) { - processMap.setParentTaskInstanceId(parentTask.getId()); - workflowInstanceMapDao.updateById(processMap); - return processMap; - } - } - // new task - processMap = new WorkflowInstanceRelation(); - processMap.setParentProcessInstanceId(parentInstance.getId()); - processMap.setParentTaskInstanceId(parentTask.getId()); - workflowInstanceMapDao.insert(processMap); - return processMap; - } - - /** - * find previous task work process map. - * - * @param parentWorkflowInstance parentProcessInstance - * @param parentTask parentTask - * @return process instance map - */ - private WorkflowInstanceRelation findPreviousTaskProcessMap(WorkflowInstance parentWorkflowInstance, - TaskInstance parentTask) { - - Integer preTaskId = 0; - List preTaskList = - taskInstanceDao.queryPreviousTaskListByWorkflowInstanceId(parentWorkflowInstance.getId()); - for (TaskInstance task : preTaskList) { - if (task.getName().equals(parentTask.getName())) { - preTaskId = task.getId(); - WorkflowInstanceRelation map = - workflowInstanceMapDao.queryWorkflowMapByParent(parentWorkflowInstance.getId(), preTaskId); - if (map != null) { - return map; - } - } - } - log.info("sub process instance is not found,parent task:{},parent instance:{}", - parentTask.getId(), parentWorkflowInstance.getId()); - return null; - } - - /** - * create sub work process command - * - * @param parentWorkflowInstance parentProcessInstance - * @param task task - */ - @Override - public void createSubWorkProcess(WorkflowInstance parentWorkflowInstance, TaskInstance task) { - if (!TaskTypeUtils.isSubWorkflowTask(task.getTaskType())) { - return; - } - // check create sub work flow firstly - WorkflowInstanceRelation instanceMap = - workflowInstanceMapDao.queryWorkflowMapByParent(parentWorkflowInstance.getId(), task.getId()); - if (null != instanceMap - && CommandType.RECOVER_TOLERANCE_FAULT_PROCESS == parentWorkflowInstance.getCommandType()) { - // recover failover tolerance would not create a new command 
when the sub command already have been created - return; - } - instanceMap = setProcessInstanceMap(parentWorkflowInstance, task, instanceMap); - WorkflowInstance childInstance = null; - if (instanceMap.getProcessInstanceId() != 0) { - childInstance = findProcessInstanceById(instanceMap.getProcessInstanceId()); - } - if (childInstance != null && childInstance.getState() == WorkflowExecutionStatus.SUCCESS - && CommandType.START_FAILURE_TASK_PROCESS == parentWorkflowInstance.getCommandType()) { - log.info("sub process instance {} status is success, so skip creating command", childInstance.getId()); - return; - } - Command subProcessCommand = - commandService.createSubProcessCommand(parentWorkflowInstance, childInstance, instanceMap, task); - if (subProcessCommand == null) { - log.error("create sub process command failed, so skip creating command"); - return; - } - updateSubProcessDefinitionByParent(parentWorkflowInstance, subProcessCommand.getProcessDefinitionCode()); - initSubInstanceState(childInstance); - commandService.createCommand(subProcessCommand); - log.info("sub process command created: {} ", subProcessCommand); - } - - /** - * initialize sub work flow state - * child instance state would be initialized when 'recovery from pause/stop/failure' - */ - private void initSubInstanceState(WorkflowInstance childInstance) { - if (childInstance != null) { - childInstance.setStateWithDesc(WorkflowExecutionStatus.RUNNING_EXECUTION, "init sub workflow instance"); - workflowInstanceDao.updateById(childInstance); - } - } - - /** - * update sub process definition - * - * @param parentWorkflowInstance parentProcessInstance - * @param childDefinitionCode childDefinitionId - */ - private void updateSubProcessDefinitionByParent(WorkflowInstance parentWorkflowInstance, long childDefinitionCode) { - WorkflowDefinition fatherDefinition = - this.findProcessDefinition(parentWorkflowInstance.getProcessDefinitionCode(), - parentWorkflowInstance.getProcessDefinitionVersion()); - 
WorkflowDefinition childDefinition = this.findProcessDefinitionByCode(childDefinitionCode); - if (childDefinition != null && fatherDefinition != null) { - childDefinition.setWarningGroupId(fatherDefinition.getWarningGroupId()); - processDefineMapper.updateById(childDefinition); - } - } - - /** - * package task instance - */ - @Override - public void packageTaskInstance(TaskInstance taskInstance, WorkflowInstance workflowInstance) { - taskInstance.setWorkflowInstance(workflowInstance); - taskInstance.setProcessDefine(workflowInstance.getWorkflowDefinition()); - taskInstance.setProcessInstancePriority(workflowInstance.getProcessInstancePriority()); - TaskDefinition taskDefinition = taskDefinitionDao.findTaskDefinition( - taskInstance.getTaskCode(), - taskInstance.getTaskDefinitionVersion()); - this.updateTaskDefinitionResources(taskDefinition); - taskInstance.setTaskDefine(taskDefinition); - taskInstance.setTestFlag(workflowInstance.getTestFlag()); - } - - /** - * Update {@link ResourceInfo} information in {@link TaskDefinition} - * - * @param taskDefinition the given {@link TaskDefinition} - */ - @Override - public void updateTaskDefinitionResources(TaskDefinition taskDefinition) { - Map taskParameters = JSONUtils.parseObject( - taskDefinition.getTaskParams(), - new TypeReference>() { - }); - if (taskParameters != null) { - // if contains mainJar field, query resource from database - // Flink, Spark, MR - if (taskParameters.containsKey("mainJar")) { - Object mainJarObj = taskParameters.get("mainJar"); - ResourceInfo mainJar = JSONUtils.parseObject( - JSONUtils.toJsonString(mainJarObj), - ResourceInfo.class); - ResourceInfo resourceInfo = - updateResourceInfo(taskDefinitionMapper.queryByCode(taskDefinition.getCode()).getId(), mainJar); - if (resourceInfo != null) { - taskParameters.put("mainJar", resourceInfo); - } - } - // update resourceList information - if (taskParameters.containsKey("resourceList")) { - String resourceListStr = 
JSONUtils.toJsonString(taskParameters.get("resourceList")); - List resourceInfos = JSONUtils.toList(resourceListStr, ResourceInfo.class); - List updatedResourceInfos = resourceInfos - .stream() - .map(resourceInfo -> updateResourceInfo( - taskDefinitionMapper.queryByCode(taskDefinition.getCode()).getId(), resourceInfo)) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - taskParameters.put("resourceList", updatedResourceInfos); - } - // set task parameters - taskDefinition.setTaskParams(JSONUtils.toJsonString(taskParameters)); - } - } - /** * update {@link ResourceInfo} by given original ResourceInfo * @@ -1191,51 +781,51 @@ public class ProcessServiceImpl implements ProcessService { } /** - * delete work process map by parent process id + * delete work workflow map by parent workflow id * - * @param parentWorkProcessId parentWorkProcessId - * @return delete process map result + * @param parentWorkflowId parentWorkflowId + * @return delete workflow map result */ @Override - public int deleteWorkProcessMapByParentId(int parentWorkProcessId) { - return workflowInstanceRelationMapper.deleteByParentProcessId(parentWorkProcessId); + public int deleteWorkflowMapByParentId(int parentWorkflowId) { + return workflowInstanceRelationMapper.deleteByParentWorkflowInstanceId(parentWorkflowId); } /** - * find sub process instance + * find sub workflow instance * - * @param parentProcessId parentProcessId + * @param parentWorkflowInstanceId parentWorkflowInstanceId * @param parentTaskId parentTaskId - * @return process instance + * @return workflow instance */ @Override - public WorkflowInstance findSubWorkflowInstance(Integer parentProcessId, Integer parentTaskId) { + public WorkflowInstance findSubWorkflowInstance(Integer parentWorkflowInstanceId, Integer parentTaskId) { WorkflowInstance workflowInstance = null; WorkflowInstanceRelation workflowInstanceRelation = - workflowInstanceRelationMapper.queryByParentId(parentProcessId, parentTaskId); - if 
(workflowInstanceRelation == null || workflowInstanceRelation.getProcessInstanceId() == 0) { + workflowInstanceRelationMapper.queryByParentId(parentWorkflowInstanceId, parentTaskId); + if (workflowInstanceRelation == null || workflowInstanceRelation.getWorkflowInstanceId() == 0) { return workflowInstance; } - workflowInstance = findProcessInstanceById(workflowInstanceRelation.getProcessInstanceId()); + workflowInstance = findWorkflowInstanceById(workflowInstanceRelation.getWorkflowInstanceId()); return workflowInstance; } /** - * find parent process instance + * find parent workflow instance * - * @param subProcessId subProcessId - * @return process instance + * @param subWorkflowInstanceId subWorkflowId + * @return workflow instance */ @Override - public WorkflowInstance findParentWorkflowInstance(Integer subProcessId) { + public WorkflowInstance findParentWorkflowInstance(Integer subWorkflowInstanceId) { WorkflowInstance workflowInstance = null; WorkflowInstanceRelation workflowInstanceRelation = - workflowInstanceRelationMapper.queryBySubProcessId(subProcessId); - if (workflowInstanceRelation == null || workflowInstanceRelation.getProcessInstanceId() == 0) { + workflowInstanceRelationMapper.queryBySubWorkflowId(subWorkflowInstanceId); + if (workflowInstanceRelation == null || workflowInstanceRelation.getWorkflowInstanceId() == 0) { return workflowInstance; } - workflowInstance = findProcessInstanceById(workflowInstanceRelation.getParentProcessInstanceId()); + workflowInstance = findWorkflowInstanceById(workflowInstanceRelation.getParentWorkflowInstanceId()); return workflowInstance; } @@ -1305,58 +895,14 @@ public class ProcessServiceImpl implements ProcessService { } /** - * query Schedule by processDefinitionCode + * query Schedule by workflowDefinitionCode * - * @param processDefinitionCode processDefinitionCode + * @param workflowDefinitionCode workflowDefinitionCode * @see Schedule */ @Override - public List 
queryReleaseSchedulerListByProcessDefinitionCode(long processDefinitionCode) { - return scheduleMapper.queryReleaseSchedulerListByProcessDefinitionCode(processDefinitionCode); - } - - /** - * query need failover process instance - * - * @param host host - * @return process instance list - */ - @Override - public List queryNeedFailoverProcessInstances(String host) { - return workflowInstanceMapper.queryByHostAndStatus(host, - WorkflowExecutionStatus.getNeedFailoverWorkflowInstanceState()); - } - - @Override - public List queryNeedFailoverProcessInstanceHost() { - return workflowInstanceMapper - .queryNeedFailoverProcessInstanceHost(WorkflowExecutionStatus.getNeedFailoverWorkflowInstanceState()); - } - - /** - * process need failover process instance - * - * @param workflowInstance processInstance - */ - @Override - @Transactional - public void processNeedFailoverProcessInstances(WorkflowInstance workflowInstance) { - // updateProcessInstance host is null to mark this processInstance has been failover - // and insert a failover command - workflowInstance.setHost(Constants.NULL); - workflowInstanceMapper.updateById(workflowInstance); - - // 2 insert into recover command - Command cmd = new Command(); - cmd.setProcessDefinitionCode(workflowInstance.getProcessDefinitionCode()); - cmd.setProcessDefinitionVersion(workflowInstance.getProcessDefinitionVersion()); - cmd.setProcessInstanceId(workflowInstance.getId()); - cmd.setCommandParam(JSONUtils.toJsonString(createCommandParams(workflowInstance))); - cmd.setExecutorId(workflowInstance.getExecutorId()); - cmd.setCommandType(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS); - cmd.setProcessInstancePriority(workflowInstance.getProcessInstancePriority()); - cmd.setTestFlag(workflowInstance.getTestFlag()); - commandService.createCommand(cmd); + public List queryReleaseSchedulerListByWorkflowDefinitionCode(long workflowDefinitionCode) { + return 
scheduleMapper.queryReleaseSchedulerListByWorkflowDefinitionCode(workflowDefinitionCode); } /** @@ -1370,17 +916,6 @@ public class ProcessServiceImpl implements ProcessService { return dataSourceMapper.selectById(id); } - /** - * query project name and user name by processInstanceId. - * - * @param processInstanceId processInstanceId - * @return projectName and userName - */ - @Override - public ProjectUser queryProjectWithUserByProcessInstanceId(int processInstanceId) { - return projectMapper.queryProjectWithUserByProcessInstanceId(processInstanceId); - } - /** * get user by user id * @@ -1397,12 +932,12 @@ public class ProcessServiceImpl implements ProcessService { */ @Override public String formatTaskAppId(TaskInstance taskInstance) { - WorkflowInstance workflowInstance = findProcessInstanceById(taskInstance.getProcessInstanceId()); + WorkflowInstance workflowInstance = findWorkflowInstanceById(taskInstance.getWorkflowInstanceId()); if (workflowInstance == null) { return ""; } - WorkflowDefinition definition = findProcessDefinition(workflowInstance.getProcessDefinitionCode(), - workflowInstance.getProcessDefinitionVersion()); + WorkflowDefinition definition = findWorkflowDefinition(workflowInstance.getWorkflowDefinitionCode(), + workflowInstance.getWorkflowDefinitionVersion()); if (definition == null) { return ""; } @@ -1440,20 +975,20 @@ public class ProcessServiceImpl implements ProcessService { } /** - * switch process definition version to process definition log version + * switch workflow definition version to workflow definition log version */ @Override - public int switchVersion(WorkflowDefinition workflowDefinition, WorkflowDefinitionLog processDefinitionLog) { - if (null == workflowDefinition || null == processDefinitionLog) { + public int switchVersion(WorkflowDefinition workflowDefinition, WorkflowDefinitionLog workflowDefinitionLog) { + if (null == workflowDefinition || null == workflowDefinitionLog) { return Constants.DEFINITION_FAILURE; } - 
processDefinitionLog.setId(workflowDefinition.getId()); - processDefinitionLog.setReleaseState(ReleaseState.OFFLINE); - processDefinitionLog.setFlag(Flag.YES); + workflowDefinitionLog.setId(workflowDefinition.getId()); + workflowDefinitionLog.setReleaseState(ReleaseState.OFFLINE); + workflowDefinitionLog.setFlag(Flag.YES); - int result = processDefineMapper.updateById(processDefinitionLog); + int result = workflowDefinitionMapper.updateById(workflowDefinitionLog); if (result > 0) { - result = switchProcessTaskRelationVersion(processDefinitionLog); + result = switchWorkflowTaskRelationVersion(workflowDefinitionLog); if (result <= 0) { return Constants.EXIT_CODE_FAILURE; } @@ -1462,14 +997,15 @@ public class ProcessServiceImpl implements ProcessService { } @Override - public int switchProcessTaskRelationVersion(WorkflowDefinition workflowDefinition) { + public int switchWorkflowTaskRelationVersion(WorkflowDefinition workflowDefinition) { List workflowTaskRelationList = - workflowTaskRelationMapper.queryByProcessCode(workflowDefinition.getCode()); + workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinition.getCode()); if (!workflowTaskRelationList.isEmpty()) { - workflowTaskRelationMapper.deleteByCode(workflowDefinition.getProjectCode(), workflowDefinition.getCode()); + workflowTaskRelationMapper.deleteByWorkflowDefinitionCode(workflowDefinition.getProjectCode(), + workflowDefinition.getCode()); } List workflowTaskRelationListFromLog = workflowTaskRelationLogMapper - .queryByProcessCodeAndVersion(workflowDefinition.getCode(), workflowDefinition.getVersion()).stream() + .queryByWorkflowCodeAndVersion(workflowDefinition.getCode(), workflowDefinition.getVersion()).stream() .map(WorkflowTaskRelation::new).collect(Collectors.toList()); int batchInsert = workflowTaskRelationMapper.batchInsert(workflowTaskRelationListFromLog); if (batchInsert == 0) { @@ -1604,31 +1140,32 @@ public class ProcessServiceImpl implements ProcessService { } /** - * save 
processDefinition (including create or update processDefinition) + * save workflowDefinition (including create or update workflowDefinition) */ @Override public int saveWorkflowDefine(User operator, WorkflowDefinition workflowDefinition, Boolean syncDefine, - Boolean isFromProcessDefine) { - WorkflowDefinitionLog processDefinitionLog = new WorkflowDefinitionLog(workflowDefinition); - Integer version = processDefineLogMapper.queryMaxVersionForDefinition(workflowDefinition.getCode()); + Boolean isFromWorkflowDefinition) { + WorkflowDefinitionLog workflowDefinitionLog = new WorkflowDefinitionLog(workflowDefinition); + Integer version = workflowDefinitionLogMapper.queryMaxVersionForDefinition(workflowDefinition.getCode()); int insertVersion = version == null || version == 0 ? Constants.VERSION_FIRST : version + 1; - processDefinitionLog.setVersion(insertVersion); - processDefinitionLog - .setReleaseState(!isFromProcessDefine || processDefinitionLog.getReleaseState() == ReleaseState.ONLINE - ? ReleaseState.ONLINE - : ReleaseState.OFFLINE); - processDefinitionLog.setOperator(operator.getId()); - processDefinitionLog.setOperateTime(workflowDefinition.getUpdateTime()); - processDefinitionLog.setId(null); - int insertLog = processDefineLogMapper.insert(processDefinitionLog); + workflowDefinitionLog.setVersion(insertVersion); + workflowDefinitionLog + .setReleaseState( + !isFromWorkflowDefinition || workflowDefinitionLog.getReleaseState() == ReleaseState.ONLINE + ? 
ReleaseState.ONLINE + : ReleaseState.OFFLINE); + workflowDefinitionLog.setOperator(operator.getId()); + workflowDefinitionLog.setOperateTime(workflowDefinition.getUpdateTime()); + workflowDefinitionLog.setId(null); + int insertLog = workflowDefinitionLogMapper.insert(workflowDefinitionLog); int result = 1; if (Boolean.TRUE.equals(syncDefine)) { if (workflowDefinition.getId() == null) { - result = processDefineMapper.insert(processDefinitionLog); - workflowDefinition.setId(processDefinitionLog.getId()); + result = workflowDefinitionMapper.insert(workflowDefinitionLog); + workflowDefinition.setId(workflowDefinitionLog.getId()); } else { - processDefinitionLog.setId(workflowDefinition.getId()); - result = processDefineMapper.updateById(processDefinitionLog); + workflowDefinitionLog.setId(workflowDefinition.getId()); + result = workflowDefinitionMapper.updateById(workflowDefinitionLog); } } return (insertLog & result) > 0 ? insertVersion : 0; @@ -1638,8 +1175,8 @@ public class ProcessServiceImpl implements ProcessService { * save task relations */ @Override - public int saveTaskRelation(User operator, long projectCode, long processDefinitionCode, - int processDefinitionVersion, + public int saveTaskRelation(User operator, long projectCode, long workflowDefinitionCode, + int workflowDefinitionVersion, List taskRelationList, List taskDefinitionLogs, Boolean syncDefine) { @@ -1653,41 +1190,41 @@ public class ProcessServiceImpl implements ProcessService { .collect(Collectors.toMap(TaskDefinition::getCode, taskDefinitionLog -> taskDefinitionLog)); } Date now = new Date(); - for (WorkflowTaskRelationLog processTaskRelationLog : taskRelationList) { - processTaskRelationLog.setProjectCode(projectCode); - processTaskRelationLog.setProcessDefinitionCode(processDefinitionCode); - processTaskRelationLog.setProcessDefinitionVersion(processDefinitionVersion); + for (WorkflowTaskRelationLog workflowTaskRelationLog : taskRelationList) { + 
workflowTaskRelationLog.setProjectCode(projectCode); + workflowTaskRelationLog.setWorkflowDefinitionCode(workflowDefinitionCode); + workflowTaskRelationLog.setWorkflowDefinitionVersion(workflowDefinitionVersion); if (taskDefinitionLogMap != null) { TaskDefinitionLog preTaskDefinitionLog = - taskDefinitionLogMap.get(processTaskRelationLog.getPreTaskCode()); + taskDefinitionLogMap.get(workflowTaskRelationLog.getPreTaskCode()); if (preTaskDefinitionLog != null) { - processTaskRelationLog.setPreTaskVersion(preTaskDefinitionLog.getVersion()); + workflowTaskRelationLog.setPreTaskVersion(preTaskDefinitionLog.getVersion()); } TaskDefinitionLog postTaskDefinitionLog = - taskDefinitionLogMap.get(processTaskRelationLog.getPostTaskCode()); + taskDefinitionLogMap.get(workflowTaskRelationLog.getPostTaskCode()); if (postTaskDefinitionLog != null) { - processTaskRelationLog.setPostTaskVersion(postTaskDefinitionLog.getVersion()); + workflowTaskRelationLog.setPostTaskVersion(postTaskDefinitionLog.getVersion()); } } - processTaskRelationLog.setCreateTime(now); - processTaskRelationLog.setUpdateTime(now); - processTaskRelationLog.setOperator(operator.getId()); - processTaskRelationLog.setOperateTime(now); + workflowTaskRelationLog.setCreateTime(now); + workflowTaskRelationLog.setUpdateTime(now); + workflowTaskRelationLog.setOperator(operator.getId()); + workflowTaskRelationLog.setOperateTime(now); } int insert = taskRelationList.size(); if (Boolean.TRUE.equals(syncDefine)) { List workflowTaskRelationList = - workflowTaskRelationMapper.queryByProcessCode(processDefinitionCode); + workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode); if (!workflowTaskRelationList.isEmpty()) { - Set processTaskRelationSet = + Set workflowTaskRelationSet = workflowTaskRelationList.stream().map(WorkflowTaskRelation::hashCode).collect(toSet()); Set taskRelationSet = taskRelationList.stream().map(WorkflowTaskRelationLog::hashCode).collect(toSet()); - boolean result = 
CollectionUtils.isEqualCollection(processTaskRelationSet, taskRelationSet); + boolean result = CollectionUtils.isEqualCollection(workflowTaskRelationSet, taskRelationSet); if (result) { return Constants.EXIT_CODE_SUCCESS; } - workflowTaskRelationMapper.deleteByCode(projectCode, processDefinitionCode); + workflowTaskRelationMapper.deleteByWorkflowDefinitionCode(projectCode, workflowDefinitionCode); } List workflowTaskRelations = taskRelationList.stream().map(WorkflowTaskRelation::new).collect(Collectors.toList()); @@ -1703,9 +1240,10 @@ public class ProcessServiceImpl implements ProcessService { if (!workflowTaskRelationList.isEmpty()) { Set processDefinitionCodes = workflowTaskRelationList .stream() - .map(WorkflowTaskRelation::getProcessDefinitionCode) + .map(WorkflowTaskRelation::getWorkflowDefinitionCode) .collect(toSet()); - List workflowDefinitionList = processDefineMapper.queryByCodes(processDefinitionCodes); + List workflowDefinitionList = + workflowDefinitionMapper.queryByCodes(processDefinitionCodes); // check process definition is already online for (WorkflowDefinition workflowDefinition : workflowDefinitionList) { if (workflowDefinition.getReleaseState() == ReleaseState.ONLINE) { @@ -1717,10 +1255,10 @@ public class ProcessServiceImpl implements ProcessService { } /** - * Generate the DAG Graph based on the process definition id + * Generate the DAG Graph based on the workflow definition id * Use temporarily before refactoring taskNode * - * @param workflowDefinition process definition + * @param workflowDefinition workflow definition * @return dag graph */ @Override @@ -1728,9 +1266,9 @@ public class ProcessServiceImpl implements ProcessService { List taskRelations = this.findRelationByCode(workflowDefinition.getCode(), workflowDefinition.getVersion()); List taskNodeList = transformTask(taskRelations, Lists.newArrayList()); - ProcessDag processDag = DagHelper.getProcessDag(taskNodeList, new ArrayList<>(taskRelations)); + WorkflowDag workflowDag = 
DagHelper.getWorkflowDag(taskNodeList, new ArrayList<>(taskRelations)); // Generate concrete Dag to be executed - return DagHelper.buildDagGraph(processDag); + return DagHelper.buildDagGraph(workflowDag); } /** @@ -1748,13 +1286,13 @@ public class ProcessServiceImpl implements ProcessService { } /** - * find process task relation list by process + * find workflow task relation list by workflow */ @Override - public List findRelationByCode(long processDefinitionCode, int processDefinitionVersion) { - List processTaskRelationLogList = workflowTaskRelationLogMapper - .queryByProcessCodeAndVersion(processDefinitionCode, processDefinitionVersion); - return processTaskRelationLogList.stream().map(r -> (WorkflowTaskRelation) r).collect(Collectors.toList()); + public List findRelationByCode(long workflowDefinitionCode, int workflowDefinitionVersion) { + List workflowTaskRelationLogList = workflowTaskRelationLogMapper + .queryByWorkflowCodeAndVersion(workflowDefinitionCode, workflowDefinitionVersion); + return workflowTaskRelationLogList.stream().map(r -> (WorkflowTaskRelation) r).collect(Collectors.toList()); } /** @@ -1817,56 +1355,6 @@ public class ProcessServiceImpl implements ProcessService { return taskNodeList; } - @Override - public DqExecuteResult getDqExecuteResultByTaskInstanceId(int taskInstanceId) { - return dqExecuteResultMapper.getExecuteResultById(taskInstanceId); - } - - @Override - public int updateDqExecuteResultUserId(int taskInstanceId) { - DqExecuteResult dqExecuteResult = - dqExecuteResultMapper - .selectOne(new QueryWrapper().eq(TASK_INSTANCE_ID, taskInstanceId)); - if (dqExecuteResult == null) { - return -1; - } - - WorkflowInstance workflowInstance = workflowInstanceMapper.selectById(dqExecuteResult.getProcessInstanceId()); - if (workflowInstance == null) { - return -1; - } - - WorkflowDefinition workflowDefinition = - processDefineMapper.queryByCode(workflowInstance.getProcessDefinitionCode()); - if (workflowDefinition == null) { - return -1; - } 
- - dqExecuteResult.setProcessDefinitionId(workflowDefinition.getId()); - dqExecuteResult.setUserId(workflowDefinition.getUserId()); - dqExecuteResult.setState(DqTaskState.DEFAULT.getCode()); - return dqExecuteResultMapper.updateById(dqExecuteResult); - } - - @Override - public int updateDqExecuteResultState(DqExecuteResult dqExecuteResult) { - return dqExecuteResultMapper.updateById(dqExecuteResult); - } - - @Override - public int deleteDqExecuteResultByTaskInstanceId(int taskInstanceId) { - return dqExecuteResultMapper.delete( - new QueryWrapper() - .eq(TASK_INSTANCE_ID, taskInstanceId)); - } - - @Override - public int deleteTaskStatisticsValueByTaskInstanceId(int taskInstanceId) { - return dqTaskStatisticsValueMapper.delete( - new QueryWrapper() - .eq(TASK_INSTANCE_ID, taskInstanceId)); - } - @Override public DqRule getDqRule(int ruleId) { return dqRuleMapper.selectById(ruleId); @@ -1899,7 +1387,7 @@ public class ProcessServiceImpl implements ProcessService { .taskId(taskInstanceId) .taskName(taskName) .groupId(taskGroupId) - .processId(workflowInstanceId) + .workflowInstanceId(workflowInstanceId) .priority(taskGroupPriority) .status(status) .forceStart(Flag.NO.getCode()) @@ -1911,11 +1399,6 @@ public class ProcessServiceImpl implements ProcessService { return taskGroupQueue; } - @Override - public WorkflowInstance loadNextProcess4Serial(long code, int state, int id) { - return this.workflowInstanceMapper.loadNextProcess4Serial(code, state, id); - } - protected void deleteCommandWithCheck(int commandId) { int delete = this.commandMapper.deleteById(commandId); if (delete != 1) { @@ -1943,8 +1426,8 @@ public class ProcessServiceImpl implements ProcessService { } @Override - public void forceProcessInstanceSuccessByTaskInstanceId(TaskInstance task) { - WorkflowInstance workflowInstance = findWorkflowInstanceDetailById(task.getProcessInstanceId()).orElse(null); + public void forceWorkflowInstanceSuccessByTaskInstanceId(TaskInstance task) { + WorkflowInstance 
workflowInstance = findWorkflowInstanceDetailById(task.getWorkflowInstanceId()).orElse(null); if (workflowInstance != null && (workflowInstance.getState().isFailure() || workflowInstance.getState().isStop())) { List validTaskList = @@ -1952,8 +1435,8 @@ public class ProcessServiceImpl implements ProcessService { workflowInstance.getTestFlag()); List instanceTaskCodeList = validTaskList.stream().map(TaskInstance::getTaskCode).collect(Collectors.toList()); - List taskRelations = findRelationByCode(workflowInstance.getProcessDefinitionCode(), - workflowInstance.getProcessDefinitionVersion()); + List taskRelations = findRelationByCode(workflowInstance.getWorkflowDefinitionCode(), + workflowInstance.getWorkflowDefinitionVersion()); List taskDefinitionLogs = taskDefinitionLogDao.queryTaskDefineLogList(taskRelations); List definiteTaskCodeList = taskDefinitionLogs.stream().filter(definitionLog -> definitionLog.getFlag() == Flag.YES) @@ -1972,15 +1455,4 @@ public class ProcessServiceImpl implements ProcessService { } } - private Map createCommandParams(WorkflowInstance workflowInstance) { - Map commandMap = - JSONUtils.parseObject(workflowInstance.getCommandParam(), new TypeReference>() { - }); - Map recoverFailoverCommandParams = new HashMap<>(); - Optional.ofNullable(MapUtils.getObject(commandMap, CMD_PARAM_START_PARAMS)) - .ifPresent(startParams -> recoverFailoverCommandParams.put(CMD_PARAM_START_PARAMS, startParams)); - recoverFailoverCommandParams.put(CMD_PARAM_RECOVER_WORKFLOW_ID_STRING, workflowInstance.getId()); - return recoverFailoverCommandParams; - } - } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessDag.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/WorkflowDag.java similarity index 62% rename from dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessDag.java rename to 
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/WorkflowDag.java index c4f42c0e51..0d853f39e8 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessDag.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/WorkflowDag.java @@ -22,7 +22,10 @@ import org.apache.dolphinscheduler.service.model.TaskNode; import java.util.List; -public class ProcessDag { +import lombok.Data; + +@Data +public class WorkflowDag { /** * DAG edge list @@ -34,49 +37,9 @@ public class ProcessDag { */ private List nodes; - /** - * getter method - * - * @return the edges - * @see ProcessDag#edges - */ - public List getEdges() { - return edges; - } - - /** - * setter method - * - * @param edges the edges to set - * @see ProcessDag#edges - */ - public void setEdges(List edges) { - this.edges = edges; - } - - /** - * getter method - * - * @return the nodes - * @see ProcessDag#nodes - */ - public List getNodes() { - return nodes; - } - - /** - * setter method - * - * @param nodes the nodes to set - * @see ProcessDag#nodes - */ - public void setNodes(List nodes) { - this.nodes = nodes; - } - @Override public String toString() { - return "ProcessDag{" + return "WorkflowDag{" + "edges=" + edges + ", nodes=" + nodes + '}'; diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/StandByTaskInstancePriorityQueue.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/StandByTaskInstancePriorityQueue.java index d02c5715e3..8cde9c1497 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/StandByTaskInstancePriorityQueue.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/StandByTaskInstancePriorityQueue.java @@ -151,7 +151,7 @@ public class StandByTaskInstancePriorityQueue implements TaskPriorityQueue { - /** - * processInstancePriority - 
*/ - private int processInstancePriority; + private int workflowInstancePriority; - /** - * processInstanceId - */ - private int processInstanceId; + private int workflowInstanceId; - /** - * taskInstancePriority - */ private int taskInstancePriority; - /** - * taskId - */ private int taskId; - /** - * taskExecutionContext - */ private TaskExecutionContext taskExecutionContext; - /** - * groupName - */ private String groupName; - /** - * context - */ private Map context; - - /** - * checkpoint - */ private long checkpoint; private int taskGroupPriority; @@ -76,13 +51,13 @@ public class TaskPriority implements Comparable { this.checkpoint = System.currentTimeMillis(); } - public TaskPriority(int processInstancePriority, - int processInstanceId, + public TaskPriority(int workflowInstancePriority, + int workflowInstanceId, int taskInstancePriority, int taskId, int taskGroupPriority, String groupName) { - this.processInstancePriority = processInstancePriority; - this.processInstanceId = processInstanceId; + this.workflowInstancePriority = workflowInstancePriority; + this.workflowInstanceId = workflowInstanceId; this.taskInstancePriority = taskInstancePriority; this.taskId = taskId; this.taskGroupPriority = taskGroupPriority; @@ -90,91 +65,19 @@ public class TaskPriority implements Comparable { this.checkpoint = System.currentTimeMillis(); } - public int getProcessInstancePriority() { - return processInstancePriority; - } - - public void setProcessInstancePriority(int processInstancePriority) { - this.processInstancePriority = processInstancePriority; - } - - public int getProcessInstanceId() { - return processInstanceId; - } - - public void setProcessInstanceId(int processInstanceId) { - this.processInstanceId = processInstanceId; - } - - public int getTaskInstancePriority() { - return taskInstancePriority; - } - - public void setTaskInstancePriority(int taskInstancePriority) { - this.taskInstancePriority = taskInstancePriority; - } - - public int getTaskId() { - 
return taskId; - } - - public Map getContext() { - return context; - } - - public void setTaskId(int taskId) { - this.taskId = taskId; - } - - public String getGroupName() { - return groupName; - } - - public void setGroupName(String groupName) { - this.groupName = groupName; - } - - public void setContext(Map context) { - this.context = context; - } - - public TaskExecutionContext getTaskExecutionContext() { - return taskExecutionContext; - } - - public void setTaskExecutionContext(TaskExecutionContext taskExecutionContext) { - this.taskExecutionContext = taskExecutionContext; - } - - public long getCheckpoint() { - return checkpoint; - } - - public void setCheckpoint(long checkpoint) { - this.checkpoint = checkpoint; - } - - public int getTaskGroupPriority() { - return taskGroupPriority; - } - - public void setTaskGroupPriority(int taskGroupPriority) { - this.taskGroupPriority = taskGroupPriority; - } - @Override public int compareTo(TaskPriority other) { - if (this.getProcessInstancePriority() > other.getProcessInstancePriority()) { + if (this.getWorkflowInstancePriority() > other.getWorkflowInstancePriority()) { return 1; } - if (this.getProcessInstancePriority() < other.getProcessInstancePriority()) { + if (this.getWorkflowInstancePriority() < other.getWorkflowInstancePriority()) { return -1; } - if (this.getProcessInstanceId() > other.getProcessInstanceId()) { + if (this.getWorkflowInstanceId() > other.getWorkflowInstanceId()) { return 1; } - if (this.getProcessInstanceId() < other.getProcessInstanceId()) { + if (this.getWorkflowInstanceId() < other.getWorkflowInstanceId()) { return -1; } @@ -214,8 +117,8 @@ public class TaskPriority implements Comparable { return false; } TaskPriority that = (TaskPriority) o; - return processInstancePriority == that.processInstancePriority - && processInstanceId == that.processInstanceId + return workflowInstancePriority == that.workflowInstancePriority + && workflowInstanceId == that.workflowInstanceId && 
taskInstancePriority == that.taskInstancePriority && taskId == that.taskId && taskGroupPriority == that.taskGroupPriority @@ -224,8 +127,8 @@ public class TaskPriority implements Comparable { @Override public int hashCode() { - return Objects.hash(processInstancePriority, - processInstanceId, + return Objects.hash(workflowInstancePriority, + workflowInstanceId, taskInstancePriority, taskId, taskGroupPriority, @@ -235,10 +138,10 @@ public class TaskPriority implements Comparable { @Override public String toString() { return "TaskPriority{" - + "processInstancePriority=" - + processInstancePriority - + ", processInstanceId=" - + processInstanceId + + "workflowInstancePriority=" + + workflowInstancePriority + + ", workflowInstanceId=" + + workflowInstanceId + ", taskInstancePriority=" + taskInstancePriority + ", taskId=" diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/subworkflow/SubWorkflowServiceImpl.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/subworkflow/SubWorkflowServiceImpl.java index 2bfb1beb9f..22d9396d07 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/subworkflow/SubWorkflowServiceImpl.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/subworkflow/SubWorkflowServiceImpl.java @@ -51,7 +51,7 @@ public class SubWorkflowServiceImpl implements SubWorkflowService { @Override public List getAllDynamicSubWorkflow(long processInstanceId, long taskCode) { List relationSubWorkflows = - relationSubWorkflowMapper.queryAllSubProcessInstance(processInstanceId, taskCode); + relationSubWorkflowMapper.queryAllSubWorkflowInstance(processInstanceId, taskCode); List allSubProcessInstanceId = relationSubWorkflows.stream() .map(RelationSubWorkflow::getSubWorkflowInstanceId).collect(Collectors.toList()); @@ -103,8 +103,8 @@ public class SubWorkflowServiceImpl implements SubWorkflowService { new 
ArrayList<>(JSONUtils.toList(workflowInstance.getVarPool(), Property.class)); WorkflowDefinitionLog processDefinition = workflowDefinitionLogMapper - .queryByDefinitionCodeAndVersion(workflowInstance.getProcessDefinitionCode(), - workflowInstance.getProcessDefinitionVersion()); + .queryByDefinitionCodeAndVersion(workflowInstance.getWorkflowDefinitionCode(), + workflowInstance.getWorkflowDefinitionVersion()); List globalParamList = JSONUtils.toList(processDefinition.getGlobalParams(), Property.class); Set ouputParamSet = outputParamList.stream().map(Property::getProp).collect(Collectors.toSet()); diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/DagHelper.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/DagHelper.java index da283cad25..cc4e9b7100 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/DagHelper.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/DagHelper.java @@ -30,7 +30,7 @@ import org.apache.dolphinscheduler.plugin.task.api.task.ConditionsLogicTaskChann import org.apache.dolphinscheduler.plugin.task.api.task.SwitchLogicTaskChannelFactory; import org.apache.dolphinscheduler.plugin.task.api.utils.TaskTypeUtils; import org.apache.dolphinscheduler.service.model.TaskNode; -import org.apache.dolphinscheduler.service.process.ProcessDag; +import org.apache.dolphinscheduler.service.process.WorkflowDag; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; @@ -96,7 +96,7 @@ public class DagHelper { List startNodeList = startNodeNameList; if (taskDependType != TaskDependType.TASK_POST && CollectionUtils.isEmpty(startNodeList)) { - log.error("start node list is empty! cannot continue run the process "); + log.error("start node list is empty! 
cannot continue run the workflow "); return destFlowNodeList; } @@ -214,13 +214,13 @@ public class DagHelper { * @param startNodeNameList startNodeNameList * @param recoveryNodeCodeList recoveryNodeCodeList * @param depNodeType depNodeType - * @return process dag + * @return workflow dag * @throws Exception if error throws Exception */ - public static ProcessDag generateFlowDag(List totalTaskNodeList, - List startNodeNameList, - List recoveryNodeCodeList, - TaskDependType depNodeType) throws Exception { + public static WorkflowDag generateFlowDag(List totalTaskNodeList, + List startNodeNameList, + List recoveryNodeCodeList, + TaskDependType depNodeType) throws Exception { List destTaskNodeList = generateFlowNodeListByStartNode(totalTaskNodeList, startNodeNameList, recoveryNodeCodeList, depNodeType); @@ -228,10 +228,10 @@ public class DagHelper { return null; } List taskNodeRelations = generateRelationListByFlowNodes(destTaskNodeList); - ProcessDag processDag = new ProcessDag(); - processDag.setEdges(taskNodeRelations); - processDag.setNodes(destTaskNodeList); - return processDag; + WorkflowDag workflowDag = new WorkflowDag(); + workflowDag.setEdges(taskNodeRelations); + workflowDag.setNodes(destTaskNodeList); + return workflowDag; } /** @@ -343,7 +343,7 @@ public class DagHelper { } /** - * parse condition task find the branch process + * parse condition task find the branch workflow * set skip flag for another one. */ public static List parseConditionTask(Long nodeCode, @@ -382,7 +382,7 @@ public class DagHelper { } /** - * parse condition task find the branch process + * parse condition task find the branch workflow * set skip flag for another one. 
* * @param nodeCode @@ -461,23 +461,23 @@ public class DagHelper { /*** * build dag graph - * @param processDag processDag + * @param workflowDag workflowDag * @return dag */ - public static DAG buildDagGraph(ProcessDag processDag) { + public static DAG buildDagGraph(WorkflowDag workflowDag) { DAG dag = new DAG<>(); // add vertex - if (CollectionUtils.isNotEmpty(processDag.getNodes())) { - for (TaskNode node : processDag.getNodes()) { + if (CollectionUtils.isNotEmpty(workflowDag.getNodes())) { + for (TaskNode node : workflowDag.getNodes()) { dag.addNode(node.getCode(), node); } } // add edge - if (CollectionUtils.isNotEmpty(processDag.getEdges())) { - for (TaskNodeRelation edge : processDag.getEdges()) { + if (CollectionUtils.isNotEmpty(workflowDag.getEdges())) { + for (TaskNodeRelation edge : workflowDag.getEdges()) { dag.addEdge(edge.getStartNode(), edge.getEndNode()); } } @@ -485,12 +485,12 @@ public class DagHelper { } /** - * get process dag + * get workflow dag * * @param taskNodeList task node list - * @return Process dag + * @return workflow dag */ - public static ProcessDag getProcessDag(List taskNodeList) { + public static WorkflowDag getWorkflowDag(List taskNodeList) { List taskNodeRelations = new ArrayList<>(); // Traverse node information and build relationships @@ -506,20 +506,20 @@ public class DagHelper { } } - ProcessDag processDag = new ProcessDag(); - processDag.setEdges(taskNodeRelations); - processDag.setNodes(taskNodeList); - return processDag; + WorkflowDag workflowDag = new WorkflowDag(); + workflowDag.setEdges(taskNodeRelations); + workflowDag.setNodes(taskNodeList); + return workflowDag; } /** - * get process dag + * get workflow dag * * @param taskNodeList task node list - * @return Process dag + * @return workflow dag */ - public static ProcessDag getProcessDag(List taskNodeList, - List workflowTaskRelations) { + public static WorkflowDag getWorkflowDag(List taskNodeList, + List workflowTaskRelations) { Map taskNodeMap = new HashMap<>(); 
taskNodeList.forEach(taskNode -> { @@ -539,10 +539,10 @@ public class DagHelper { .add(new TaskNodeRelation(preNode.getCode(), postNode.getCode())); } } - ProcessDag processDag = new ProcessDag(); - processDag.setEdges(taskNodeRelations); - processDag.setNodes(taskNodeList); - return processDag; + WorkflowDag workflowDag = new WorkflowDag(); + workflowDag.setEdges(taskNodeRelations); + workflowDag.setNodes(taskNodeList); + return workflowDag; } /** diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/ParamUtils.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/ParamUtils.java index 15a893fd7c..54349af38b 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/ParamUtils.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/ParamUtils.java @@ -59,17 +59,17 @@ public class ParamUtils { /** * Get sub workflow parameters - * @param instanceMap process instance map - * @param parentWorkflowInstance parent process instance + * @param instanceMap workflow instance map + * @param parentWorkflowInstance parent workflow instance * @param fatherParams fatherParams * @return sub workflow parameters */ public static String getSubWorkFlowParam(WorkflowInstanceRelation instanceMap, WorkflowInstance parentWorkflowInstance, Map fatherParams) { - // set sub work process command - String processMapStr = JSONUtils.toJsonString(instanceMap); - Map cmdParam = JSONUtils.toMap(processMapStr); + // set sub work workflow command + String workflowMapStr = JSONUtils.toJsonString(instanceMap); + Map cmdParam = JSONUtils.toMap(workflowMapStr); if (parentWorkflowInstance.isComplementData()) { Map parentParam = JSONUtils.toMap(parentWorkflowInstance.getCommandParam()); String endTime = parentParam.get(CMD_PARAM_COMPLEMENT_DATA_END_DATE); @@ -82,13 +82,13 @@ public class ParamUtils { if (StringUtils.isNotEmpty(scheduleTime)) { 
cmdParam.put(CMD_PARAM_COMPLEMENT_DATA_SCHEDULE_DATE_LIST, scheduleTime); } - processMapStr = JSONUtils.toJsonString(cmdParam); + workflowMapStr = JSONUtils.toJsonString(cmdParam); } if (MapUtils.isNotEmpty(fatherParams)) { cmdParam.put(CMD_PARAM_FATHER_PARAMS, JSONUtils.toJsonString(fatherParams)); - processMapStr = JSONUtils.toJsonString(cmdParam); + workflowMapStr = JSONUtils.toJsonString(cmdParam); } - return processMapStr; + return workflowMapStr; } } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/ProcessUtils.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/ProcessUtils.java index e8349d0cf0..307907265b 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/ProcessUtils.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/ProcessUtils.java @@ -57,9 +57,9 @@ public class ProcessUtils { .setExecutePath(FileUtils.getTaskInstanceWorkingDirectory( taskExecutionContext.getTenantCode(), taskExecutionContext.getProjectCode(), - taskExecutionContext.getProcessDefineCode(), - taskExecutionContext.getProcessDefineVersion(), - taskExecutionContext.getProcessInstanceId(), + taskExecutionContext.getWorkflowDefinitionCode(), + taskExecutionContext.getWorkflowDefinitionVersion(), + taskExecutionContext.getWorkflowInstanceId(), taskExecutionContext.getTaskInstanceId())); } FileUtils.createDirectoryWith755(Paths.get(taskExecutionContext.getExecutePath())); diff --git a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/alert/WorkflowAlertManagerTest.java b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/alert/WorkflowAlertManagerTest.java index f0cac401b2..80af7ada8f 100644 --- a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/alert/WorkflowAlertManagerTest.java +++ 
b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/alert/WorkflowAlertManagerTest.java @@ -89,8 +89,8 @@ public class WorkflowAlertManagerTest { workflowInstance.setState(WorkflowExecutionStatus.SUCCESS); workflowInstance.setCommandType(CommandType.COMPLEMENT_DATA); workflowInstance.setWarningGroupId(1); - workflowInstance.setProcessDefinitionCode(1L); - workflowInstance.setProcessDefinitionVersion(1); + workflowInstance.setWorkflowDefinitionCode(1L); + workflowInstance.setWorkflowDefinitionVersion(1); ProjectUser projectUser = new ProjectUser(); TaskInstance taskInstance = new TaskInstance(); @@ -116,8 +116,8 @@ public class WorkflowAlertManagerTest { workflowInstance.setEndTime(new Date()); workflowInstance.setHost("127.0.0.1"); workflowInstance.setWarningGroupId(1); - workflowInstance.setProcessDefinitionCode(1L); - workflowInstance.setProcessDefinitionVersion(1); + workflowInstance.setWorkflowDefinitionCode(1L); + workflowInstance.setWorkflowDefinitionVersion(1); ProjectUser projectUser = new ProjectUser(); diff --git a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/command/MessageServiceImplTest.java b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/command/MessageServiceImplTest.java index 81bed486c2..dc4547dcb5 100644 --- a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/command/MessageServiceImplTest.java +++ b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/command/MessageServiceImplTest.java @@ -82,7 +82,7 @@ class MessageServiceImplTest { WorkflowInstance childInstance = null; WorkflowInstanceRelation instanceMap = new WorkflowInstanceRelation(); - instanceMap.setParentProcessInstanceId(1); + instanceMap.setParentWorkflowInstanceId(1); instanceMap.setParentTaskInstanceId(10); Command command; @@ -160,7 +160,7 @@ class MessageServiceImplTest { @Test public void testCreateCommand() { Command command = new Command(); - 
command.setProcessDefinitionCode(123); + command.setWorkflowDefinitionCode(123); command.setCommandParam("{\"ProcessInstanceId\":222}"); command.setCommandType(CommandType.START_PROCESS); int mockResult = 1; diff --git a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/expand/CuringParamsServiceTest.java b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/expand/CuringParamsServiceTest.java index ee2f1d7f8a..6ae13626f0 100644 --- a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/expand/CuringParamsServiceTest.java +++ b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/expand/CuringParamsServiceTest.java @@ -31,7 +31,7 @@ import org.apache.dolphinscheduler.plugin.task.api.enums.DataType; import org.apache.dolphinscheduler.plugin.task.api.enums.Direct; import org.apache.dolphinscheduler.plugin.task.api.model.Property; import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; -import org.apache.dolphinscheduler.plugin.task.api.parameters.SubProcessParameters; +import org.apache.dolphinscheduler.plugin.task.api.parameters.SubWorkflowParameters; import org.apache.commons.collections4.MapUtils; @@ -209,14 +209,14 @@ public class CuringParamsServiceTest { workflowDefinition.setProjectCode(3000001L); workflowDefinition.setCode(200001L); - workflowInstance.setProcessDefinitionCode(workflowDefinition.getCode()); + workflowInstance.setWorkflowDefinitionCode(workflowDefinition.getCode()); workflowInstance.setProjectCode(workflowDefinition.getProjectCode()); taskInstance.setTaskCode(taskDefinition.getCode()); taskInstance.setTaskDefinitionVersion(taskDefinition.getVersion()); taskInstance.setProjectCode(workflowDefinition.getProjectCode()); - taskInstance.setProcessInstanceId(workflowInstance.getId()); + taskInstance.setWorkflowInstanceId(workflowInstance.getId()); - AbstractParameters parameters = new SubProcessParameters(); + AbstractParameters 
parameters = new SubWorkflowParameters(); Mockito.when(projectParameterMapper.queryByProjectCode(Mockito.anyLong())).thenReturn(Collections.emptyList()); diff --git a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java index 5969c3417e..5878cbbc4a 100644 --- a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java +++ b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java @@ -39,31 +39,19 @@ import org.apache.dolphinscheduler.dao.entity.WorkflowDefinition; import org.apache.dolphinscheduler.dao.entity.WorkflowDefinitionLog; import org.apache.dolphinscheduler.dao.entity.WorkflowInstance; import org.apache.dolphinscheduler.dao.entity.WorkflowTaskRelationLog; -import org.apache.dolphinscheduler.dao.mapper.CommandMapper; -import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper; -import org.apache.dolphinscheduler.dao.mapper.DqComparisonTypeMapper; -import org.apache.dolphinscheduler.dao.mapper.DqExecuteResultMapper; import org.apache.dolphinscheduler.dao.mapper.DqRuleExecuteSqlMapper; import org.apache.dolphinscheduler.dao.mapper.DqRuleInputEntryMapper; import org.apache.dolphinscheduler.dao.mapper.DqRuleMapper; -import org.apache.dolphinscheduler.dao.mapper.ErrorCommandMapper; import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionLogMapper; import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; -import org.apache.dolphinscheduler.dao.mapper.TaskGroupMapper; import org.apache.dolphinscheduler.dao.mapper.TaskGroupQueueMapper; -import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; -import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; import 
org.apache.dolphinscheduler.dao.mapper.WorkflowDefinitionLogMapper; import org.apache.dolphinscheduler.dao.mapper.WorkflowDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.WorkflowInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.WorkflowTaskRelationLogMapper; -import org.apache.dolphinscheduler.dao.mapper.WorkflowTaskRelationMapper; import org.apache.dolphinscheduler.dao.repository.TaskDefinitionDao; import org.apache.dolphinscheduler.dao.repository.TaskDefinitionLogDao; -import org.apache.dolphinscheduler.dao.repository.TaskInstanceDao; -import org.apache.dolphinscheduler.dao.repository.WorkflowInstanceDao; -import org.apache.dolphinscheduler.plugin.task.api.TaskPluginManager; import org.apache.dolphinscheduler.plugin.task.api.enums.Direct; import org.apache.dolphinscheduler.plugin.task.api.enums.dp.DataType; import org.apache.dolphinscheduler.plugin.task.api.enums.dp.DqTaskState; @@ -102,52 +90,36 @@ public class ProcessServiceTest { @InjectMocks private ProcessServiceImpl processService; - @Mock - private CommandMapper commandMapper; + @Mock private WorkflowTaskRelationLogMapper workflowTaskRelationLogMapper; + @Mock - private ErrorCommandMapper errorCommandMapper; - @Mock - private WorkflowDefinitionMapper processDefineMapper; + private WorkflowDefinitionMapper workflowDefinitionMapper; + @Mock private WorkflowInstanceMapper workflowInstanceMapper; - @Mock - private WorkflowInstanceDao workflowInstanceDao; @Mock - private TaskInstanceDao taskInstanceDao; - - @Mock - private TaskDefinitionLogDao taskDefinitionLogDao; + private WorkflowDefinitionLogMapper workflowDefinitionLogMapper; @Mock private TaskDefinitionDao taskDefinitionDao; @Mock - private UserMapper userMapper; - @Mock - private TenantMapper tenantMapper; + private TaskDefinitionLogDao taskDefinitionLogDao; @Mock - private TaskInstanceMapper taskInstanceMapper; + private UserMapper userMapper; + @Mock private TaskDefinitionLogMapper taskDefinitionLogMapper; - @Mock - private 
TaskDefinitionMapper taskDefinitionMapper; - @Mock - private WorkflowTaskRelationMapper workflowTaskRelationMapper; - @Mock - private WorkflowDefinitionLogMapper processDefineLogMapper; - @Mock - private TaskGroupMapper taskGroupMapper; - @Mock - private DataSourceMapper dataSourceMapper; - @Mock - private TaskGroupQueueMapper taskGroupQueueMapper; @Mock - private DqExecuteResultMapper dqExecuteResultMapper; + private TaskDefinitionMapper taskDefinitionMapper; + + @Mock + private TaskGroupQueueMapper taskGroupQueueMapper; @Mock private DqRuleMapper dqRuleMapper; @@ -158,15 +130,9 @@ public class ProcessServiceTest { @Mock private DqRuleExecuteSqlMapper dqRuleExecuteSqlMapper; - @Mock - private DqComparisonTypeMapper dqComparisonTypeMapper; - @Mock CuringParamsService curingGlobalParamsService; - @Mock - TaskPluginManager taskPluginManager; - @Test public void testGetUserById() { User user = new User(); @@ -179,17 +145,17 @@ public class ProcessServiceTest { public void testFormatTaskAppId() { TaskInstance taskInstance = new TaskInstance(); taskInstance.setId(333); - taskInstance.setProcessInstanceId(222); - when(processService.findProcessInstanceById(taskInstance.getProcessInstanceId())).thenReturn(null); + taskInstance.setWorkflowInstanceId(222); + when(processService.findWorkflowInstanceById(taskInstance.getWorkflowInstanceId())).thenReturn(null); Assertions.assertEquals("", processService.formatTaskAppId(taskInstance)); WorkflowDefinition workflowDefinition = new WorkflowDefinition(); workflowDefinition.setId(111); WorkflowInstance workflowInstance = new WorkflowInstance(); workflowInstance.setId(222); - workflowInstance.setProcessDefinitionVersion(1); - workflowInstance.setProcessDefinitionCode(1L); - when(processService.findProcessInstanceById(taskInstance.getProcessInstanceId())) + workflowInstance.setWorkflowDefinitionVersion(1); + workflowInstance.setWorkflowDefinitionCode(1L); + 
when(processService.findWorkflowInstanceById(taskInstance.getWorkflowInstanceId())) .thenReturn(workflowInstance); Assertions.assertEquals("", processService.formatTaskAppId(taskInstance)); } @@ -203,7 +169,7 @@ public class ProcessServiceTest { WorkflowDefinition workflowDefinition = new WorkflowDefinition(); workflowDefinition.setCode(parentProcessDefineCode); workflowDefinition.setVersion(parentProcessDefineVersion); - when(processDefineMapper.selectById(parentProcessDefineId)).thenReturn(workflowDefinition); + when(workflowDefinitionMapper.selectById(parentProcessDefineId)).thenReturn(workflowDefinition); long postTaskCode = 2L; int postTaskVersion = 2; @@ -213,12 +179,12 @@ public class ProcessServiceTest { processTaskRelationLog.setPostTaskCode(postTaskCode); processTaskRelationLog.setPostTaskVersion(postTaskVersion); relationLogList.add(processTaskRelationLog); - when(workflowTaskRelationLogMapper.queryByProcessCodeAndVersion(parentProcessDefineCode, + when(workflowTaskRelationLogMapper.queryByWorkflowCodeAndVersion(parentProcessDefineCode, parentProcessDefineVersion)).thenReturn(relationLogList); List taskDefinitionLogs = new ArrayList<>(); TaskDefinitionLog taskDefinitionLog1 = new TaskDefinitionLog(); - taskDefinitionLog1.setTaskParams("{\"processDefinitionCode\": 123L}"); + taskDefinitionLog1.setTaskParams("{\"workflowDefinitionCode\": 123L}"); taskDefinitionLogs.add(taskDefinitionLog1); when(taskDefinitionLogMapper.queryByTaskDefinitions(Mockito.anySet())).thenReturn(taskDefinitionLogs); @@ -411,9 +377,9 @@ public class ProcessServiceTest { WorkflowTaskRelationLog processTaskRelation = new WorkflowTaskRelationLog(); processTaskRelation.setName("def 1"); - processTaskRelation.setProcessDefinitionVersion(1); + processTaskRelation.setWorkflowDefinitionVersion(1); processTaskRelation.setProjectCode(1L); - processTaskRelation.setProcessDefinitionCode(1L); + processTaskRelation.setWorkflowDefinitionCode(1L); processTaskRelation.setPostTaskCode(3L); 
processTaskRelation.setPreTaskCode(2L); processTaskRelation.setUpdateTime(new Date()); @@ -447,7 +413,7 @@ public class ProcessServiceTest { taskDefinitionLogs.add(td2); when(taskDefinitionLogDao.queryTaskDefineLogList(any())).thenReturn(taskDefinitionLogs); - when(workflowTaskRelationLogMapper.queryByProcessCodeAndVersion(Mockito.anyLong(), Mockito.anyInt())) + when(workflowTaskRelationLogMapper.queryByWorkflowCodeAndVersion(Mockito.anyLong(), Mockito.anyInt())) .thenReturn(list); DAG stringTaskNodeTaskNodeRelationDAG = @@ -458,7 +424,7 @@ public class ProcessServiceTest { @Test public void testChangeOutParam() { TaskInstance taskInstance = new TaskInstance(); - taskInstance.setProcessInstanceId(62); + taskInstance.setWorkflowInstanceId(62); WorkflowInstance workflowInstance = new WorkflowInstance(); workflowInstance.setId(62); taskInstance.setVarPool("[{\"direct\":\"OUT\",\"prop\":\"test1\",\"type\":\"VARCHAR\",\"value\":\"\"}]"); diff --git a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/queue/StandByTaskInstancePriorityQueueTest.java b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/queue/StandByTaskInstancePriorityQueueTest.java index 80a2513929..6c22944b50 100644 --- a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/queue/StandByTaskInstancePriorityQueueTest.java +++ b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/queue/StandByTaskInstancePriorityQueueTest.java @@ -113,7 +113,7 @@ public class StandByTaskInstancePriorityQueueTest { queue.put(taskInstanceMediumPriority); Assertions.assertTrue(queue.contains(taskInstanceMediumPriority)); TaskInstance taskInstance2 = createTaskInstance("medium2", Priority.MEDIUM, 1); - taskInstance2.setProcessInstanceId(2); + taskInstance2.setWorkflowInstanceId(2); Assertions.assertFalse(queue.contains(taskInstance2)); } diff --git 
a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/utils/DagHelperTest.java b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/utils/DagHelperTest.java index daaa6ac81d..f5d8b3674b 100644 --- a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/utils/DagHelperTest.java +++ b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/utils/DagHelperTest.java @@ -31,7 +31,7 @@ import org.apache.dolphinscheduler.plugin.task.api.task.ConditionsLogicTaskChann import org.apache.dolphinscheduler.plugin.task.api.task.DependentLogicTaskChannelFactory; import org.apache.dolphinscheduler.plugin.task.api.task.SwitchLogicTaskChannelFactory; import org.apache.dolphinscheduler.service.model.TaskNode; -import org.apache.dolphinscheduler.service.process.ProcessDag; +import org.apache.dolphinscheduler.service.process.WorkflowDag; import java.io.IOException; import java.util.ArrayList; @@ -82,10 +82,10 @@ public class DagHelperTest { taskNodes.add(subNode); taskNodes.add(subNextNode); - ProcessDag processDag = new ProcessDag(); - processDag.setEdges(taskNodeRelations); - processDag.setNodes(taskNodes); - DAG dag = DagHelper.buildDagGraph(processDag); + WorkflowDag workflowDag = new WorkflowDag(); + workflowDag.setEdges(taskNodeRelations); + workflowDag.setNodes(taskNodes); + DAG dag = DagHelper.buildDagGraph(workflowDag); boolean canSubmit = DagHelper.haveAllNodeAfterNode(parentNodeCode, dag); Assertions.assertTrue(canSubmit); @@ -180,14 +180,14 @@ public class DagHelperTest { List destTaskNodeList = DagHelper.generateFlowNodeListByStartNode(taskNodeList, startNodes, recoveryNodes, TaskDependType.TASK_POST); List taskNodeRelations = DagHelper.generateRelationListByFlowNodes(destTaskNodeList); - ProcessDag processDag = new ProcessDag(); - processDag.setEdges(taskNodeRelations); - processDag.setNodes(destTaskNodeList); + WorkflowDag workflowDag = new WorkflowDag(); + 
workflowDag.setEdges(taskNodeRelations); + workflowDag.setNodes(destTaskNodeList); // 1->2->3->5->7 // 4->3->6 // 1->2->8->5->7 - DAG dag = DagHelper.buildDagGraph(processDag); + DAG dag = DagHelper.buildDagGraph(workflowDag); TaskNode taskNode3 = dag.getNode(3L); Map completeTaskList = new HashMap<>(); Map skipNodeList = new HashMap<>(); @@ -303,14 +303,14 @@ public class DagHelperTest { List destTaskNodeList = DagHelper.generateFlowNodeListByStartNode(taskNodeList, startNodes, recoveryNodes, TaskDependType.TASK_POST); List taskNodeRelations = DagHelper.generateRelationListByFlowNodes(destTaskNodeList); - ProcessDag processDag = new ProcessDag(); - processDag.setEdges(taskNodeRelations); - processDag.setNodes(destTaskNodeList); + WorkflowDag workflowDag = new WorkflowDag(); + workflowDag.setEdges(taskNodeRelations); + workflowDag.setNodes(destTaskNodeList); // 1->2->3->5->7 // 4->3->6 // 1->2->8->5->7 - DAG dag = DagHelper.buildDagGraph(processDag); + DAG dag = DagHelper.buildDagGraph(workflowDag); Map completeTaskList = new HashMap<>(); Map skipNodeList = new HashMap<>(); @@ -524,11 +524,11 @@ public class DagHelperTest { List destTaskNodeList = DagHelper.generateFlowNodeListByStartNode(taskNodeList, startNodes, recoveryNodes, TaskDependType.TASK_POST); List taskNodeRelations = DagHelper.generateRelationListByFlowNodes(destTaskNodeList); - ProcessDag processDag = new ProcessDag(); - processDag.setEdges(taskNodeRelations); - processDag.setNodes(destTaskNodeList); + WorkflowDag workflowDag = new WorkflowDag(); + workflowDag.setEdges(taskNodeRelations); + workflowDag.setNodes(destTaskNodeList); - DAG dag = DagHelper.buildDagGraph(processDag); + DAG dag = DagHelper.buildDagGraph(workflowDag); Map skipTaskNodeList = new HashMap<>(); Map completeTaskList = new HashMap<>(); completeTaskList.put(0L, new TaskInstance()); @@ -652,10 +652,10 @@ public class DagHelperTest { List destTaskNodeList = DagHelper.generateFlowNodeListByStartNode(taskNodeList, startNodes, 
recoveryNodes, TaskDependType.TASK_POST); List taskNodeRelations = DagHelper.generateRelationListByFlowNodes(destTaskNodeList); - ProcessDag processDag = new ProcessDag(); - processDag.setEdges(taskNodeRelations); - processDag.setNodes(destTaskNodeList); - return DagHelper.buildDagGraph(processDag); + WorkflowDag workflowDag = new WorkflowDag(); + workflowDag.setEdges(taskNodeRelations); + workflowDag.setNodes(destTaskNodeList); + return DagHelper.buildDagGraph(workflowDag); } @Test @@ -674,8 +674,8 @@ public class DagHelperTest { ProcessData processData = JSONUtils.parseObject(shellJson, ProcessData.class); assert processData != null; List taskNodeList = processData.getTasks(); - ProcessDag processDag = DagHelper.getProcessDag(taskNodeList); - DAG dag = DagHelper.buildDagGraph(processDag); + WorkflowDag workflowDag = DagHelper.getWorkflowDag(taskNodeList); + DAG dag = DagHelper.buildDagGraph(workflowDag); Assertions.assertNotNull(dag); } diff --git a/dolphinscheduler-standalone-server/src/main/resources/application.yaml b/dolphinscheduler-standalone-server/src/main/resources/application.yaml index b13b8f04a7..3304d8ad5f 100644 --- a/dolphinscheduler-standalone-server/src/main/resources/application.yaml +++ b/dolphinscheduler-standalone-server/src/main/resources/application.yaml @@ -27,10 +27,10 @@ spring: init: schema-locations: classpath:sql/dolphinscheduler_h2.sql datasource: - driver-class-name: org.h2.Driver - url: jdbc:h2:mem:dolphinscheduler;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true - username: sa - password: "" + driver-class-name: org.h2.Driver + url: jdbc:h2:mem:dolphinscheduler;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true + username: sa + password: "" quartz: job-store-type: jdbc jdbc: diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskExecutionContext.java 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskExecutionContext.java index 9004bc2a91..4b14639652 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskExecutionContext.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskExecutionContext.java @@ -45,131 +45,56 @@ public class TaskExecutionContext implements Serializable { private static final long serialVersionUID = -1L; - /** - * task id - */ private int taskInstanceId; - /** - * task name - */ private String taskName; - /** - * task first submit time. - */ private long firstSubmitTime; - /** - * task start time - */ private long startTime; - /** - * task type - */ private String taskType; private String workflowInstanceHost; - /** - * host - */ private String host; - /** - * task execute path - */ private String executePath; - /** - * log path - */ private String logPath; - /** - * applicationId path - */ private String appInfoPath; - /** - * task json - */ private String taskJson; - /** - * processId - */ private int processId; - /** - * processCode - */ - private Long processDefineCode; + private Long workflowDefinitionCode; - /** - * processVersion - */ - private int processDefineVersion; + private int workflowDefinitionVersion; - /** - * appIds - */ private String appIds; - /** - * process instance id - */ - private int processInstanceId; + private int workflowInstanceId; - /** - * process instance schedule time - */ private long scheduleTime; - /** - * process instance global parameters - */ private String globalParams; - /** - * execute user id - */ private int executorId; - /** - * command type if complement - */ private int cmdTypeIfComplement; - /** - * tenant code - */ private String tenantCode; - /** - * process define id - */ - private int processDefineId; + private int 
workflowDefinitionId; - /** - * project id - */ private int projectId; - /** - * project code - */ private long projectCode; - /** - * taskParams - */ private String taskParams; - /** - * environmentConfig - */ private String environmentConfig; /** @@ -178,81 +103,40 @@ public class TaskExecutionContext implements Serializable { */ private Map definedParams; - /** - * prepare params map - */ private Map prepareParamsMap; - /** - * task AppId - */ private String taskAppId; - /** - * task timeout strategy - */ private TaskTimeoutStrategy taskTimeoutStrategy; - /** - * task timeout - */ private int taskTimeout; - /** - * worker group - */ private String workerGroup; - /** - * current execution status - */ private TaskExecutionStatus currentExecutionStatus; private ResourceParametersHelper resourceParametersHelper; - /** - * endTime - */ private long endTime; - /** - * sql TaskExecutionContext - */ private SQLTaskExecutionContext sqlTaskExecutionContext; - /** - * k8s TaskExecutionContext - */ + private K8sTaskExecutionContext k8sTaskExecutionContext; private ResourceContext resourceContext; - /** - * taskInstance varPool - */ private String varPool; - /** - * dry run flag - */ private int dryRun; private Map paramsMap; private DataQualityTaskExecutionContext dataQualityTaskExecutionContext; - /** - * cpu quota - */ private Integer cpuQuota; - /** - * max memory - */ private Integer memoryMax; - /** - * test flag - */ private int testFlag; private boolean logBufferEnable; diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/k8s/impl/K8sTaskExecutor.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/k8s/impl/K8sTaskExecutor.java index 986c9dc8a7..e8ca6370f0 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/k8s/impl/K8sTaskExecutor.java +++ 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/k8s/impl/K8sTaskExecutor.java @@ -205,7 +205,7 @@ public class K8sTaskExecutor extends AbstractK8sTaskExecutor { @Override public void eventReceived(Action action, Job job) { try { - LogUtils.setWorkflowAndTaskInstanceIDMDC(taskRequest.getProcessInstanceId(), + LogUtils.setWorkflowAndTaskInstanceIDMDC(taskRequest.getWorkflowInstanceId(), taskRequest.getTaskInstanceId()); LogUtils.setTaskInstanceLogFullPathMDC(taskRequest.getLogPath()); log.info("event received : job:{} action:{}", job.getMetadata().getName(), action); @@ -230,7 +230,7 @@ public class K8sTaskExecutor extends AbstractK8sTaskExecutor { @Override public void onClose(WatcherException e) { - LogUtils.setWorkflowAndTaskInstanceIDMDC(taskRequest.getProcessInstanceId(), + LogUtils.setWorkflowAndTaskInstanceIDMDC(taskRequest.getWorkflowInstanceId(), taskRequest.getTaskInstanceId()); log.error("[K8sJobExecutor-{}] fail in k8s: {}", job.getMetadata().getName(), e.getMessage()); taskResponse.setExitStatusCode(EXIT_CODE_FAILURE); @@ -266,7 +266,7 @@ public class K8sTaskExecutor extends AbstractK8sTaskExecutor { String containerName = String.format("%s-%s", taskName, taskInstanceId); podLogOutputFuture = collectPodLogExecutorService.submit(() -> { TaskOutputParameterParser taskOutputParameterParser = new TaskOutputParameterParser(); - LogUtils.setWorkflowAndTaskInstanceIDMDC(taskRequest.getProcessInstanceId(), + LogUtils.setWorkflowAndTaskInstanceIDMDC(taskRequest.getWorkflowInstanceId(), taskRequest.getTaskInstanceId()); LogUtils.setTaskInstanceLogFullPathMDC(taskRequest.getLogPath()); try ( diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/DynamicParameters.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/DynamicParameters.java 
index 2a594655f9..57cd4f6b20 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/DynamicParameters.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/DynamicParameters.java @@ -24,18 +24,17 @@ import java.util.List; import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; +import lombok.EqualsAndHashCode; import lombok.NoArgsConstructor; +@EqualsAndHashCode(callSuper = true) @Data @Builder @NoArgsConstructor @AllArgsConstructor public class DynamicParameters extends AbstractParameters { - /** - * process definition id - */ - private long processDefinitionCode; + private long workflowDefinitionCode; private int maxNumOfSubWorkflowInstances; @@ -54,6 +53,6 @@ public class DynamicParameters extends AbstractParameters { } catch (Exception e) { return false; } - return this.processDefinitionCode != 0; + return this.workflowDefinitionCode != 0; } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SubProcessParameters.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SubWorkflowParameters.java similarity index 70% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SubProcessParameters.java rename to dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SubWorkflowParameters.java index 655ba7540c..48a771d1fb 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SubProcessParameters.java +++ 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SubWorkflowParameters.java @@ -17,21 +17,18 @@ package org.apache.dolphinscheduler.plugin.task.api.parameters; -public class SubProcessParameters extends AbstractParameters { +import lombok.Data; +import lombok.EqualsAndHashCode; - private long processDefinitionCode; +@EqualsAndHashCode(callSuper = true) +@Data +public class SubWorkflowParameters extends AbstractParameters { - public void setProcessDefinitionCode(long processDefinitionCode) { - this.processDefinitionCode = processDefinitionCode; - } - - public long getProcessDefinitionCode() { - return this.processDefinitionCode; - } + private long workflowDefinitionCode; @Override public boolean checkParameters() { - return this.processDefinitionCode > 0; + return this.workflowDefinitionCode > 0; } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/SubWorkflowLogicTaskChannel.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/SubWorkflowLogicTaskChannel.java index 354be54986..9474eb0452 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/SubWorkflowLogicTaskChannel.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/SubWorkflowLogicTaskChannel.java @@ -19,12 +19,12 @@ package org.apache.dolphinscheduler.plugin.task.api.task; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; -import org.apache.dolphinscheduler.plugin.task.api.parameters.SubProcessParameters; +import org.apache.dolphinscheduler.plugin.task.api.parameters.SubWorkflowParameters; public class SubWorkflowLogicTaskChannel extends 
AbstractLogicTaskChannel { @Override public AbstractParameters parseParameters(String taskParams) { - return JSONUtils.parseObject(taskParams, SubProcessParameters.class); + return JSONUtils.parseObject(taskParams, SubWorkflowParameters.class); } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/SubWorkflowLogicTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/SubWorkflowLogicTaskChannelFactory.java index 06bb6c5ebe..6cb533d2ce 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/SubWorkflowLogicTaskChannelFactory.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/SubWorkflowLogicTaskChannelFactory.java @@ -25,7 +25,7 @@ import com.google.auto.service.AutoService; @AutoService(TaskChannelFactory.class) public class SubWorkflowLogicTaskChannelFactory implements TaskChannelFactory { - public static final String NAME = "SUB_PROCESS"; + public static final String NAME = "SUB_WORKFLOW"; @Override public String getName() { diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/LogUtils.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/LogUtils.java index 61d901d80c..011e96a2ce 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/LogUtils.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/LogUtils.java @@ -90,9 +90,9 @@ public class LogUtils { public static String getTaskInstanceLogFullPath(TaskExecutionContext taskExecutionContext) { 
return getTaskInstanceLogFullPath( DateUtils.timeStampToDate(taskExecutionContext.getFirstSubmitTime()), - taskExecutionContext.getProcessDefineCode(), - taskExecutionContext.getProcessDefineVersion(), - taskExecutionContext.getProcessInstanceId(), + taskExecutionContext.getWorkflowDefinitionCode(), + taskExecutionContext.getWorkflowDefinitionVersion(), + taskExecutionContext.getWorkflowInstanceId(), taskExecutionContext.getTaskInstanceId()); } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTask.java index ec8adc3eef..f5ff69dc92 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTask.java @@ -138,8 +138,8 @@ public class DataQualityTask extends AbstractYarnTask { inputParameter.put(RULE_NAME, ArgsUtils.wrapperSingleQuotes(dataQualityTaskExecutionContext.getRuleName())); inputParameter.put(CREATE_TIME, ArgsUtils.wrapperSingleQuotes(now)); inputParameter.put(UPDATE_TIME, ArgsUtils.wrapperSingleQuotes(now)); - inputParameter.put(PROCESS_DEFINITION_ID, String.valueOf(dqTaskExecutionContext.getProcessDefineId())); - inputParameter.put(PROCESS_INSTANCE_ID, String.valueOf(dqTaskExecutionContext.getProcessInstanceId())); + inputParameter.put(PROCESS_DEFINITION_ID, String.valueOf(dqTaskExecutionContext.getWorkflowDefinitionId())); + inputParameter.put(PROCESS_INSTANCE_ID, String.valueOf(dqTaskExecutionContext.getWorkflowInstanceId())); inputParameter.put(TASK_INSTANCE_ID, String.valueOf(dqTaskExecutionContext.getTaskInstanceId())); if (StringUtils.isEmpty(inputParameter.get(DATA_TIME))) { @@ -154,8 
+154,8 @@ public class DataQualityTask extends AbstractYarnTask { if (StringUtils.isNotEmpty(dataQualityTaskExecutionContext.getHdfsPath())) { inputParameter.put(ERROR_OUTPUT_PATH, dataQualityTaskExecutionContext.getHdfsPath() - + SLASH + dqTaskExecutionContext.getProcessDefineId() - + UNDERLINE + dqTaskExecutionContext.getProcessInstanceId() + + SLASH + dqTaskExecutionContext.getWorkflowDefinitionId() + + UNDERLINE + dqTaskExecutionContext.getWorkflowInstanceId() + UNDERLINE + dqTaskExecutionContext.getTaskName()); } else { inputParameter.put(ERROR_OUTPUT_PATH, ""); diff --git a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/demo/ProcessDefinitionDemo.java b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/demo/ProcessDefinitionDemo.java index 5cdc48c230..86e26f898d 100644 --- a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/demo/ProcessDefinitionDemo.java +++ b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/demo/ProcessDefinitionDemo.java @@ -763,7 +763,7 @@ public class ProcessDefinitionDemo { String taskCode = String.valueOf(taskCodes.get(0)).replaceAll("\\[|\\]", ""); WorkflowDefinitionLog processDefinitionLog = new WorkflowDefinitionLog(); - processDefinitionLog.setName("demo_sub_process"); + processDefinitionLog.setName("demo_sub_workflow"); processDefinitionLog.setDescription("Start the production line"); processDefinitionLog.setGlobalParams("[]"); processDefinitionLog.setLocations(null); @@ -798,7 +798,7 @@ public class ProcessDefinitionDemo { taskDefinitionLog.setTimeoutFlag(TimeoutFlag.CLOSE); taskDefinitionLog.setTimeoutNotifyStrategy(null); taskDefinitionLog.setWorkerGroup("default"); - taskDefinitionLog.setTaskType("SUB_PROCESS"); + taskDefinitionLog.setTaskType("SUB_WORKFLOW"); taskDefinitionLogs.add(taskDefinitionLog); } TaskDefinitionLog taskDefinitionLogFirst = taskDefinitionLogs.get(0); diff --git 
a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/lineage/MigrateLineageService.java b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/lineage/MigrateLineageService.java index 6338cdf879..5d2d0c5e3a 100644 --- a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/lineage/MigrateLineageService.java +++ b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/lineage/MigrateLineageService.java @@ -84,9 +84,9 @@ public class MigrateLineageService { .findFirst() .ifPresent(processTaskRelation -> { workflowTaskLineage - .setWorkflowDefinitionCode(processTaskRelation.getProcessDefinitionCode()); + .setWorkflowDefinitionCode(processTaskRelation.getWorkflowDefinitionCode()); workflowTaskLineage - .setWorkflowDefinitionVersion(processTaskRelation.getProcessDefinitionVersion()); + .setWorkflowDefinitionVersion(processTaskRelation.getWorkflowDefinitionVersion()); }); } } diff --git a/dolphinscheduler-ui/public/images/task-icons/sub_process.png b/dolphinscheduler-ui/public/images/task-icons/sub_workflow.png similarity index 100% rename from dolphinscheduler-ui/public/images/task-icons/sub_process.png rename to dolphinscheduler-ui/public/images/task-icons/sub_workflow.png diff --git a/dolphinscheduler-ui/public/images/task-icons/sub_process_hover.png b/dolphinscheduler-ui/public/images/task-icons/sub_workflow_hover.png similarity index 100% rename from dolphinscheduler-ui/public/images/task-icons/sub_process_hover.png rename to dolphinscheduler-ui/public/images/task-icons/sub_workflow_hover.png diff --git a/dolphinscheduler-ui/src/locales/en_US/home.ts b/dolphinscheduler-ui/src/locales/en_US/home.ts index df7638b91e..3daba6a795 100644 --- a/dolphinscheduler-ui/src/locales/en_US/home.ts +++ b/dolphinscheduler-ui/src/locales/en_US/home.ts @@ -17,8 +17,8 @@ export default { task_state_statistics: 'Task State Statistics', - process_state_statistics: 'Process State Statistics', - 
process_definition_statistics: 'Process Definition Statistics', + workflow_state_statistics: 'Workflow State Statistics', + workflow_definition_statistics: 'Workflow Definition Statistics', number: 'Number', state: 'State', submitted_success: 'SUBMITTED_SUCCESS', diff --git a/dolphinscheduler-ui/src/locales/en_US/project.ts b/dolphinscheduler-ui/src/locales/en_US/project.ts index c80beb0e57..61e8dac82b 100644 --- a/dolphinscheduler-ui/src/locales/en_US/project.ts +++ b/dolphinscheduler-ui/src/locales/en_US/project.ts @@ -132,7 +132,7 @@ export default { failure_send: 'Failure', all_send: 'All', warning_group_tip: 'Please select alarm group', - whether_complement_data: 'Whether it is a complement process?', + whether_complement_data: 'Whether it is a complement workflow?', schedule_date: 'Schedule date', select_date: 'Select Date', enter_date: 'Enter Date', @@ -165,7 +165,7 @@ export default { dry_run_flag: 'Dry-run Flag', executor: 'Executor', host: 'Host', - start_process: 'Start Process', + start_process: 'Start Workflow', execute_from_the_current_node: 'Execute from the current node', recover_tolerance_fault_process: 'Recover tolerance fault process', resume_the_suspension_process: 'Resume the suspension process', @@ -266,7 +266,6 @@ export default { task_type: 'Task Type', create_task: 'Create Task', workflow_instance: 'Workflow Instance', - workflow_name: 'Workflow Name', workflow_name_tips: 'Please select workflow name', workflow_state: 'Workflow State', version: 'Version', @@ -339,10 +338,9 @@ export default { rows: 'Rows', cols: 'Cols', copy_success: 'Copy Success', - workflow_name: 'Workflow Name', description: 'Description', timeout_alert: 'Timeout Alert', - process_execute_type: 'Process execute type', + workflow_execute_type: 'Workflow execute type', parallel: 'parallel', serial_wait: 'Serial wait', serial_discard: 'Serial discard', @@ -386,7 +384,6 @@ export default { task_type: 'Task Type', task_type_tips: 'Please select a task type (required)', 
workflow_name: 'Workflow Name', - workflow_name_tips: 'Please select a workflow (required)', child_node: 'Child Node', child_node_tips: 'Please select a child node (required)', run_flag: 'Run flag', @@ -673,8 +670,7 @@ export default { waiting_dependent_complete: 'Waiting Dependent complete', project_name: 'Project Name', project_name_tips: 'Please select a project(required)', - process_name: 'Workflow Name', - process_name_tips: 'Please select a workflow(required)', + workflow_name_tips: 'Please select a workflow(required)', cycle_time: 'Cycle Time', cycle_time_tips: 'Please select a cycle time(required)', date_tips: 'Please select a date(required)', diff --git a/dolphinscheduler-ui/src/locales/zh_CN/home.ts b/dolphinscheduler-ui/src/locales/zh_CN/home.ts index ab68a665a9..18999cc866 100644 --- a/dolphinscheduler-ui/src/locales/zh_CN/home.ts +++ b/dolphinscheduler-ui/src/locales/zh_CN/home.ts @@ -17,8 +17,8 @@ export default { task_state_statistics: '任务实例状态统计', - process_state_statistics: '工作流实例状态统计', - process_definition_statistics: '工作流定义统计', + workflow_state_statistics: '工作流实例状态统计', + workflow_definition_statistics: '工作流定义统计', number: '数量', state: '状态', submitted_success: '提交成功', diff --git a/dolphinscheduler-ui/src/locales/zh_CN/project.ts b/dolphinscheduler-ui/src/locales/zh_CN/project.ts index 434dbec3a3..225ab4eaca 100644 --- a/dolphinscheduler-ui/src/locales/zh_CN/project.ts +++ b/dolphinscheduler-ui/src/locales/zh_CN/project.ts @@ -264,7 +264,6 @@ export default { create_task: '创建任务', workflow_instance: '工作流实例', workflow_name: '工作流名称', - workflow_name_tips: '请选择工作流名称', workflow_state: '工作流状态', version: '版本', current_version: '当前版本', @@ -334,10 +333,9 @@ export default { rows: '行数', cols: '列数', copy_success: '复制成功', - workflow_name: '工作流名称', description: '描述', timeout_alert: '超时告警', - process_execute_type: '执行策略', + workflow_execute_type: '执行策略', parallel: '并行', serial_wait: '串行等待', serial_discard: '串行抛弃', @@ -380,8 +378,6 @@ export default { name_tips: 
'请输入名称(必填)', task_type: '任务类型', task_type_tips: '请选择任务类型(必选)', - workflow_name: '工作流名称', - workflow_name_tips: '请选择工作流(必选)', child_node: '子节点', child_node_tips: '请选择子节点(必选)', run_flag: '运行标志', @@ -659,8 +655,8 @@ export default { waiting_dependent_complete: '等待依赖完成', project_name: '项目名称', project_name_tips: '项目名称(必填)', - process_name: '工作流名称', - process_name_tips: '工作流名称(必填)', + workflow_name: '工作流名称', + workflow_name_tips: '工作流名称(必填)', cycle_time: '时间周期', cycle_time_tips: '时间周期(必填)', date_tips: '日期(必填)', diff --git a/dolphinscheduler-ui/src/service/modules/audit/types.ts b/dolphinscheduler-ui/src/service/modules/audit/types.ts index e25efada31..b59b146284 100644 --- a/dolphinscheduler-ui/src/service/modules/audit/types.ts +++ b/dolphinscheduler-ui/src/service/modules/audit/types.ts @@ -21,7 +21,7 @@ interface AuditListReq { endDate?: string moduleType?: string operationType?: string - processName?: string + workflowName?: string projectName?: string resourceType?: string startDate?: string diff --git a/dolphinscheduler-ui/src/service/modules/data-quality/types.ts b/dolphinscheduler-ui/src/service/modules/data-quality/types.ts index 1a8f71a4de..a8065560be 100644 --- a/dolphinscheduler-ui/src/service/modules/data-quality/types.ts +++ b/dolphinscheduler-ui/src/service/modules/data-quality/types.ts @@ -36,11 +36,11 @@ interface ResultListReq extends ListReq { interface ResultItem { id: number - processDefinitionId: number - processDefinitionName: string - processDefinitionCode: number - processInstanceId: number - processInstanceName: string + workflowDefinitionId: number + workflowDefinitionName: string + workflowDefinitionCode: number + workflowInstanceId: number + workflowInstanceName: string projectCode: number taskInstanceId: number taskName: string diff --git a/dolphinscheduler-ui/src/service/modules/executors/index.ts b/dolphinscheduler-ui/src/service/modules/executors/index.ts index ead8dcec0c..35fcb44755 100644 --- 
a/dolphinscheduler-ui/src/service/modules/executors/index.ts +++ b/dolphinscheduler-ui/src/service/modules/executors/index.ts @@ -16,7 +16,7 @@ */ import { axios } from '@/service/service' -import { ExecuteReq, ExecuteTaskReq, ProcessInstanceReq } from './types' +import { ExecuteReq, ExecuteTaskReq, WorkflowInstanceReq } from './types' export function execute(data: ExecuteReq, code: number): any { return axios({ @@ -34,12 +34,12 @@ export function executeTask(data: ExecuteTaskReq, code: number): any { }) } -export function startProcessInstance( - data: ProcessInstanceReq, +export function startWorkflowInstance( + data: WorkflowInstanceReq, code: number ): any { return axios({ - url: `/projects/${code}/executors/start-process-instance`, + url: `/projects/${code}/executors/start-workflow-instance`, method: 'post', data }) diff --git a/dolphinscheduler-ui/src/service/modules/executors/types.ts b/dolphinscheduler-ui/src/service/modules/executors/types.ts index b864441022..a443d30066 100644 --- a/dolphinscheduler-ui/src/service/modules/executors/types.ts +++ b/dolphinscheduler-ui/src/service/modules/executors/types.ts @@ -38,11 +38,11 @@ type Exec = interface ExecuteReq { executeType: Execute - processInstanceId: number + workflowInstanceId: number } interface ExecuteTaskReq { - processInstanceId: number + workflowInstanceId: number startNodeList: number taskDependType: string } @@ -51,13 +51,13 @@ interface ProjectCodeReq { projectCode: number } -interface ProcessDefinitionCodeReq { - processDefinitionCode: number +interface WorkflowDefinitionCodeReq { + workflowDefinitionCode: number } -interface ProcessInstanceReq extends ProcessDefinitionCodeReq { +interface WorkflowInstanceReq extends WorkflowDefinitionCodeReq { failureStrategy: 'END' | 'CONTINUE' - processInstancePriority: 'HIGHEST' | 'HIGH' | 'MEDIUM' | 'LOW' | 'LOWEST' + workflowInstancePriority: 'HIGHEST' | 'HIGH' | 'MEDIUM' | 'LOW' | 'LOWEST' scheduleTime: string warningGroupId: number warningType: 'NONE' | 
'SUCCESS' | 'FAILURE' | 'ALL' @@ -78,6 +78,6 @@ export { ExecuteReq, ExecuteTaskReq, ProjectCodeReq, - ProcessDefinitionCodeReq, - ProcessInstanceReq + WorkflowDefinitionCodeReq, + WorkflowInstanceReq } diff --git a/dolphinscheduler-ui/src/service/modules/projects-analysis/index.ts b/dolphinscheduler-ui/src/service/modules/projects-analysis/index.ts index ed651c2139..cd8d9a15b2 100644 --- a/dolphinscheduler-ui/src/service/modules/projects-analysis/index.ts +++ b/dolphinscheduler-ui/src/service/modules/projects-analysis/index.ts @@ -33,9 +33,9 @@ export function countDefinitionByUser(params: CodeReq): any { }) } -export function countProcessInstanceState(params: StateReq): any { +export function countWorkflowInstanceState(params: StateReq): any { return axios({ - url: '/projects/analysis/process-state-count', + url: '/projects/analysis/workflow-state-count', method: 'get', params }) diff --git a/dolphinscheduler-ui/src/service/modules/projects-analysis/types.ts b/dolphinscheduler-ui/src/service/modules/projects-analysis/types.ts index aca1a190f3..5743052ebe 100644 --- a/dolphinscheduler-ui/src/service/modules/projects-analysis/types.ts +++ b/dolphinscheduler-ui/src/service/modules/projects-analysis/types.ts @@ -30,7 +30,7 @@ interface UserList { count: number } -interface ProcessDefinitionRes { +interface WorkflowDefinitionRes { count: number userList: UserList[] } @@ -75,7 +75,7 @@ interface ListReq { export { CodeReq, StateReq, - ProcessDefinitionRes, + WorkflowDefinitionRes, WorkflowInstanceCountVo, TaskInstanceCountVo, TaskQueueRes, diff --git a/dolphinscheduler-ui/src/service/modules/schedules/index.ts b/dolphinscheduler-ui/src/service/modules/schedules/index.ts index ae530658f4..17f5fb2141 100644 --- a/dolphinscheduler-ui/src/service/modules/schedules/index.ts +++ b/dolphinscheduler-ui/src/service/modules/schedules/index.ts @@ -23,11 +23,11 @@ import { ScheduleReq, WorkerGroupIdReq, CreateScheduleReq, - ProcessDefinitionCodeReq + WorkflowDefinitionCodeReq } 
from './types' export function queryScheduleListPaging( - params: ListReq & ProcessDefinitionCodeReq, + params: ListReq & WorkflowDefinitionCodeReq, projectCode: number ): any { return axios({ @@ -63,7 +63,7 @@ export function previewSchedule(data: ScheduleReq, projectCode: number): any { }) } -export function updateScheduleByProcessDefinitionCode( +export function updateScheduleByWorkflowDefinitionCode( data: CreateScheduleReq, projectCode: ProjectCodeReq, code: CodeReq diff --git a/dolphinscheduler-ui/src/service/modules/schedules/types.ts b/dolphinscheduler-ui/src/service/modules/schedules/types.ts index 8c7c3d346e..9fbe519892 100644 --- a/dolphinscheduler-ui/src/service/modules/schedules/types.ts +++ b/dolphinscheduler-ui/src/service/modules/schedules/types.ts @@ -33,8 +33,8 @@ interface ListReq { searchVal?: string } -interface ProcessDefinitionCodeReq { - processDefinitionCode?: number +interface WorkflowDefinitionCodeReq { + workflowDefinitionCode?: number } interface ScheduleReq { @@ -45,14 +45,14 @@ interface WorkerGroupIdReq { workerGroupId?: number } -interface ScheduleListReq extends ListReq, ProcessDefinitionCodeReq { - processDefinitionId: number +interface ScheduleListReq extends ListReq, WorkflowDefinitionCodeReq { + workflowDefinitionId: number } -interface CreateScheduleReq extends ScheduleReq, ProcessDefinitionCodeReq { +interface CreateScheduleReq extends ScheduleReq, WorkflowDefinitionCodeReq { environmentCode?: number failureStrategy?: 'END' | 'CONTINUE' - processInstancePriority?: 'HIGHEST' | 'HIGH' | 'MEDIUM' | 'LOW' | 'LOWEST' + workflowInstancePriority?: 'HIGHEST' | 'HIGH' | 'MEDIUM' | 'LOW' | 'LOWEST' warningGroupId?: number warningType?: 'NONE' | 'SUCCESS' | 'FAILURE' | 'ALL' workerGroup?: string @@ -79,7 +79,7 @@ export { IdReq, CodeReq, ListReq, - ProcessDefinitionCodeReq, + WorkflowDefinitionCodeReq, ScheduleReq, WorkerGroupIdReq, ScheduleListReq, diff --git a/dolphinscheduler-ui/src/service/modules/task-group/types.ts 
b/dolphinscheduler-ui/src/service/modules/task-group/types.ts index 1970083c47..c328fbebf9 100644 --- a/dolphinscheduler-ui/src/service/modules/task-group/types.ts +++ b/dolphinscheduler-ui/src/service/modules/task-group/types.ts @@ -63,9 +63,9 @@ interface TaskGroupQueue { taskName: string projectName: string projectCode: string - processInstanceName: string + workflowInstanceName: string groupId: number - processId: number + workflowInstanceId: number priority: number forceStart: number inQueue: number diff --git a/dolphinscheduler-ui/src/service/modules/task-instances/types.ts b/dolphinscheduler-ui/src/service/modules/task-instances/types.ts index 744894473e..b8a6c77c1e 100644 --- a/dolphinscheduler-ui/src/service/modules/task-instances/types.ts +++ b/dolphinscheduler-ui/src/service/modules/task-instances/types.ts @@ -29,9 +29,9 @@ interface TaskListReq { endDate?: string executorName?: string host?: string - processInstanceId?: number - processInstanceName?: string - processDefinitionName?: string + workflowInstanceId?: number + workflowInstanceName?: string + workflowDefinitionName?: string searchVal?: string startDate?: string stateType?: string @@ -59,7 +59,7 @@ interface TotalList { taskComplete: boolean firstRun: boolean environmentCode: number - processInstance?: any + workflowInstance?: any pid: number appLink: string taskCode: any @@ -69,8 +69,8 @@ interface TotalList { state: string workerGroup: string conditionsTask: boolean - processInstancePriority?: any - processInstanceId: number + workflowInstancePriority?: any + workflowInstanceId: number dependency: Dependency alertFlag: string dependentResult?: any @@ -78,19 +78,19 @@ interface TotalList { switchDependency: SwitchDependency maxRetryTimes: number executorName: string - subProcess: boolean + subWorkflow: boolean submitTime: string taskGroupId: number name: string taskDefinitionVersion: number - processInstanceName: string + workflowInstanceName: string taskGroupPriority: number taskDefine?: any 
dryRun: number flag: string taskParams: string duration: string - processDefine?: any + workflowDefinition?: any taskType: string taskInstancePriority: string logPath: string diff --git a/dolphinscheduler-ui/src/service/modules/process-definition/index.ts b/dolphinscheduler-ui/src/service/modules/workflow-definition/index.ts similarity index 58% rename from dolphinscheduler-ui/src/service/modules/process-definition/index.ts rename to dolphinscheduler-ui/src/service/modules/workflow-definition/index.ts index 8adc0441ac..7e6900617f 100644 --- a/dolphinscheduler-ui/src/service/modules/process-definition/index.ts +++ b/dolphinscheduler-ui/src/service/modules/workflow-definition/index.ts @@ -24,32 +24,32 @@ import { LimitReq, PageReq, ListReq, - ProcessDefinitionReq, + WorkflowDefinitionReq, TargetCodeReq } from './types' export function queryListPaging(params: PageReq & ListReq, code: number): any { return axios({ - url: `/projects/${code}/process-definition`, + url: `/projects/${code}/workflow-definition`, method: 'get', params }) } -export function createProcessDefinition( - data: ProcessDefinitionReq, +export function createWorkflowDefinition( + data: WorkflowDefinitionReq, projectCode: number ): any { return axios({ - url: `/projects/${projectCode}/process-definition`, + url: `/projects/${projectCode}/workflow-definition`, method: 'post', data }) } -export function queryProcessDefinitionList(projectCode: number): any { +export function queryWorkflowDefinitionList(projectCode: number): any { return axios({ - url: `/projects/${projectCode}/process-definition/query-process-definition-list`, + url: `/projects/${projectCode}/workflow-definition/query-workflow-definition-list`, method: 'get' }) } @@ -59,7 +59,7 @@ export function batchCopyByCodes( code: number ): any { return axios({ - url: `/projects/${code}/process-definition/batch-copy`, + url: `/projects/${code}/workflow-definition/batch-copy`, method: 'post', data }) @@ -67,7 +67,7 @@ export function 
batchCopyByCodes( export function batchDeleteByCodes(data: CodesReq, code: number): any { return axios({ - url: `/projects/${code}/process-definition/batch-delete`, + url: `/projects/${code}/workflow-definition/batch-delete`, method: 'post', data }) @@ -75,7 +75,7 @@ export function batchDeleteByCodes(data: CodesReq, code: number): any { export function batchExportByCodes(data: CodesReq, code: number): any { return axios({ - url: `/projects/${code}/process-definition/batch-export`, + url: `/projects/${code}/workflow-definition/batch-export`, method: 'post', responseType: 'blob', data @@ -87,7 +87,7 @@ export function batchMoveByCodes( code: CodeReq ): any { return axios({ - url: `/projects/${code}/process-definition/batch-move`, + url: `/projects/${code}/workflow-definition/batch-move`, method: 'post', data }) @@ -98,15 +98,15 @@ export function getTaskListByDefinitionCodes( code: number ): any { return axios({ - url: `/projects/${code}/process-definition/batch-query-tasks`, + url: `/projects/${code}/workflow-definition/batch-query-tasks`, method: 'get', params }) } -export function importProcessDefinition(data: FormData, code: number): any { +export function importWorkflowDefinition(data: FormData, code: number): any { return axios({ - url: `/projects/${code}/process-definition/import`, + url: `/projects/${code}/workflow-definition/import`, method: 'post', data }) @@ -114,17 +114,17 @@ export function importProcessDefinition(data: FormData, code: number): any { export function queryList(code: CodeReq): any { return axios({ - url: `/projects/${code}/process-definition/list`, + url: `/projects/${code}/workflow-definition/list`, method: 'get' }) } -export function queryProcessDefinitionByName( +export function queryWorkflowDefinitionByName( params: NameReq, code: CodeReq ): any { return axios({ - url: `/projects/${code}/process-definition/query-by-name`, + url: `/projects/${code}/workflow-definition/query-by-name`, method: 'get', params }) @@ -132,7 +132,7 @@ export 
function queryProcessDefinitionByName( export function querySimpleList(code: number): any { return axios({ - url: `/projects/${code}/process-definition/simple-list`, + url: `/projects/${code}/workflow-definition/simple-list`, method: 'get' }) } @@ -142,37 +142,40 @@ export function verifyName( projectCode: number ): any { return axios({ - url: `/projects/${projectCode}/process-definition/verify-name`, + url: `/projects/${projectCode}/workflow-definition/verify-name`, method: 'get', params }) } -export function queryProcessDefinitionByCode( +export function queryWorkflowDefinitionByCode( code: number, projectCode: number ): any { return axios({ - url: `/projects/${projectCode}/process-definition/${code}`, + url: `/projects/${projectCode}/workflow-definition/${code}`, method: 'get' }) } -export function updateProcessDefinition( - data: ProcessDefinitionReq & ReleaseStateReq, +export function updateWorkflowDefinition( + data: WorkflowDefinitionReq & ReleaseStateReq, code: number, projectCode: number ): any { return axios({ - url: `/projects/${projectCode}/process-definition/${code}`, + url: `/projects/${projectCode}/workflow-definition/${code}`, method: 'put', data }) } -export function deleteByCode(code: number, processCode: number): any { +export function deleteByCode( + code: number, + workflowDefinitionCode: number +): any { return axios({ - url: `/projects/${code}/process-definition/${processCode}`, + url: `/projects/${code}/workflow-definition/${workflowDefinitionCode}`, method: 'delete' }) } @@ -180,10 +183,10 @@ export function deleteByCode(code: number, processCode: number): any { export function release( data: NameReq & ReleaseStateReq, code: number, - processCode: number + workflowDefinitionCode: number ): any { return axios({ - url: `/projects/${code}/process-definition/${processCode}/release`, + url: `/projects/${code}/workflow-definition/${workflowDefinitionCode}/release`, method: 'post', data }) @@ -191,13 +194,13 @@ export function release( export 
function getTasksByDefinitionList( projectCode: number, - processCode: number + workflowDefinitionCode: number ): any { return axios({ - url: `/projects/${projectCode}/process-definition/query-task-definition-list`, + url: `/projects/${projectCode}/workflow-definition/query-task-definition-list`, method: 'get', params: { - processDefinitionCode: processCode + workflowDefinitionCode: workflowDefinitionCode } }) } @@ -205,10 +208,10 @@ export function getTasksByDefinitionList( export function queryVersions( params: PageReq, code: number, - processCode: number + workflowDefinitionCode: number ): any { return axios({ - url: `/projects/${code}/process-definition/${processCode}/versions`, + url: `/projects/${code}/workflow-definition/${workflowDefinitionCode}/versions`, method: 'get', params }) @@ -216,44 +219,44 @@ export function queryVersions( export function switchVersion( code: number, - processCode: number, + workflowDefinitionCode: number, version: number ): any { return axios({ - url: `/projects/${code}/process-definition/${processCode}/versions/${version}`, + url: `/projects/${code}/workflow-definition/${workflowDefinitionCode}/versions/${version}`, method: 'get' }) } export function deleteVersion( code: number, - processCode: number, + workflowDefinitionCode: number, version: number ): any { return axios({ - url: `/projects/${code}/process-definition/${processCode}/versions/${version}`, + url: `/projects/${code}/workflow-definition/${workflowDefinitionCode}/versions/${version}`, method: 'delete' }) } export function viewTree( code: number, - processCode: number, + workflowDefinitionCode: number, params: LimitReq ): any { return axios({ - url: `/projects/${code}/process-definition/${processCode}/view-tree`, + url: `/projects/${code}/workflow-definition/${workflowDefinitionCode}/view-tree`, method: 'get', params }) } -export function viewProcessDefinitionVariables( +export function viewWorkflowDefinitionVariables( code: number, - processCode: number + 
workflowDefinitionCode: number ): any { return axios({ - url: `/projects/${code}/process-definition/${processCode}/view-variables`, + url: `/projects/${code}/workflow-definition/${workflowDefinitionCode}/view-variables`, method: 'get' }) } diff --git a/dolphinscheduler-ui/src/service/modules/process-definition/types.ts b/dolphinscheduler-ui/src/service/modules/workflow-definition/types.ts similarity index 96% rename from dolphinscheduler-ui/src/service/modules/process-definition/types.ts rename to dolphinscheduler-ui/src/service/modules/workflow-definition/types.ts index 7bd71db907..e67065a3a7 100644 --- a/dolphinscheduler-ui/src/service/modules/process-definition/types.ts +++ b/dolphinscheduler-ui/src/service/modules/workflow-definition/types.ts @@ -53,7 +53,7 @@ interface ListReq extends PageReq { userId?: number } -interface ProcessDefinitionReq { +interface WorkflowDefinitionReq { name: string locations: string taskDefinitionJson: string @@ -85,7 +85,7 @@ export { LimitReq, PageReq, ListReq, - ProcessDefinitionReq, + WorkflowDefinitionReq, TargetCodeReq, SimpleListRes } diff --git a/dolphinscheduler-ui/src/service/modules/process-instances/index.ts b/dolphinscheduler-ui/src/service/modules/workflow-instances/index.ts similarity index 61% rename from dolphinscheduler-ui/src/service/modules/process-instances/index.ts rename to dolphinscheduler-ui/src/service/modules/workflow-instances/index.ts index be8c019600..b78b5f4811 100644 --- a/dolphinscheduler-ui/src/service/modules/process-instances/index.ts +++ b/dolphinscheduler-ui/src/service/modules/workflow-instances/index.ts @@ -18,31 +18,31 @@ import { axios } from '@/service/service' import { CodeReq, - ProcessInstanceListReq, + WorkflowInstanceListReq, BatchDeleteReq, SubIdReq, TaskReq, LongestReq, - ProcessInstanceReq + WorkflowInstanceReq } from './types' -export function queryProcessInstanceListPaging( - params: ProcessInstanceListReq, +export function queryWorkflowInstanceListPaging( + params: 
WorkflowInstanceListReq, code: number ): any { return axios({ - url: `/projects/${code}/process-instances`, + url: `/projects/${code}/workflow-instances`, method: 'get', params }) } -export function batchDeleteProcessInstanceByIds( +export function batchDeleteWorkflowInstanceByIds( data: BatchDeleteReq, code: number ): any { return axios({ - url: `/projects/${code}/process-instances/batch-delete`, + url: `/projects/${code}/workflow-instances/batch-delete`, method: 'post', data }) @@ -53,80 +53,80 @@ export function queryParentInstanceBySubId( code: CodeReq ): any { return axios({ - url: `/projects/${code}/process-instances/query-parent-by-sub`, + url: `/projects/${code}/workflow-instances/query-parent-by-sub`, method: 'get', params }) } -export function querySubProcessInstanceByTaskCode( +export function querySubWorkflowInstanceByTaskCode( params: TaskReq, code: CodeReq ): any { return axios({ - url: `/projects/${code.projectCode}/process-instances/query-sub-by-parent`, + url: `/projects/${code.projectCode}/workflow-instances/query-sub-by-parent`, method: 'get', params }) } -export function queryTopNLongestRunningProcessInstance( +export function queryTopNLongestRunningWorkflowInstance( params: LongestReq, code: CodeReq ): any { return axios({ - url: `/projects/${code}/process-instances/top-n`, + url: `/projects/${code}/workflow-instances/top-n`, method: 'get', params }) } -export function queryProcessInstanceById( +export function queryWorkflowInstanceById( instanceId: number, projectCode: number ): any { return axios({ - url: `/projects/${projectCode}/process-instances/${instanceId}`, + url: `/projects/${projectCode}/workflow-instances/${instanceId}`, method: 'get' }) } -export function updateProcessInstance( - data: ProcessInstanceReq, +export function updateWorkflowInstance( + data: WorkflowInstanceReq, id: number, code: number ): any { return axios({ - url: `/projects/${code}/process-instances/${id}`, + url: `/projects/${code}/workflow-instances/${id}`, 
method: 'put', data }) } -export function deleteProcessInstanceById(id: number, code: number): any { +export function deleteWorkflowInstanceById(id: number, code: number): any { return axios({ - url: `/projects/${code}/process-instances/${id}`, + url: `/projects/${code}/workflow-instances/${id}`, method: 'delete' }) } -export function queryTaskListByProcessId(id: number, code: number): any { +export function queryTaskListByWorkflowId(id: number, code: number): any { return axios({ - url: `/projects/${code}/process-instances/${id}/tasks`, + url: `/projects/${code}/workflow-instances/${id}/tasks`, method: 'get' }) } export function viewGanttTree(id: number, code: number): any { return axios({ - url: `/projects/${code}/process-instances/${id}/view-gantt`, + url: `/projects/${code}/workflow-instances/${id}/view-gantt`, method: 'get' }) } export function viewVariables(id: number, code: number): any { return axios({ - url: `/projects/${code}/process-instances/${id}/view-variables`, + url: `/projects/${code}/workflow-instances/${id}/view-variables`, method: 'get' }) } diff --git a/dolphinscheduler-ui/src/service/modules/process-instances/types.ts b/dolphinscheduler-ui/src/service/modules/workflow-instances/types.ts similarity index 89% rename from dolphinscheduler-ui/src/service/modules/process-instances/types.ts rename to dolphinscheduler-ui/src/service/modules/workflow-instances/types.ts index da932e5614..61f34e7990 100644 --- a/dolphinscheduler-ui/src/service/modules/process-instances/types.ts +++ b/dolphinscheduler-ui/src/service/modules/workflow-instances/types.ts @@ -20,21 +20,21 @@ interface CodeReq { projectCode: number } -interface ProcessInstanceListReq { +interface WorkflowInstanceListReq { pageNo: number pageSize: number endDate?: string executorName?: string host?: string - processDefineCode?: number - processDefiniteCode?: string + workflowDefinitionCode?: number + workflowDefinitionVersion?: number searchVal?: string startDate?: string stateType?: string } 
interface BatchDeleteReq { - processInstanceIds: string + workflowInstanceIds: string projectName?: string alertGroup?: string createTime?: string @@ -71,7 +71,7 @@ interface IdReq { id: number } -interface ProcessInstanceReq { +interface WorkflowInstanceReq { syncDefine: boolean flag?: string globalParams?: string @@ -88,7 +88,7 @@ interface IWorkflowInstance { state: IWorkflowExecutionState commandType: string scheduleTime?: string - processDefinitionCode?: number + workflowDefinitionCode?: number startTime: string endTime: string duration?: string @@ -105,12 +105,12 @@ interface IWorkflowInstance { export { CodeReq, - ProcessInstanceListReq, + WorkflowInstanceListReq, BatchDeleteReq, SubIdReq, TaskReq, LongestReq, IdReq, - ProcessInstanceReq, + WorkflowInstanceReq, IWorkflowInstance } diff --git a/dolphinscheduler-ui/src/service/modules/process-task-relation/index.ts b/dolphinscheduler-ui/src/service/modules/workflow-task-relation/index.ts similarity index 75% rename from dolphinscheduler-ui/src/service/modules/process-task-relation/index.ts rename to dolphinscheduler-ui/src/service/modules/workflow-task-relation/index.ts index dcc4738572..0559b2da99 100644 --- a/dolphinscheduler-ui/src/service/modules/process-task-relation/index.ts +++ b/dolphinscheduler-ui/src/service/modules/workflow-task-relation/index.ts @@ -17,7 +17,7 @@ import { ProjectCodeReq, - ProcessDefinitionCodeReq, + WorkflowDefinitionCodeReq, PreTaskCodesReq, PostTaskCodesReq, TaskCodeReq, @@ -27,7 +27,7 @@ import { axios } from '@/service/service' export function save(data: SaveReq, projectCode: ProjectCodeReq): any { return axios({ - url: `/projects/${projectCode}/process-task-relation`, + url: `/projects/${projectCode}/workflow-task-relation`, method: 'post', data }) @@ -35,18 +35,18 @@ export function save(data: SaveReq, projectCode: ProjectCodeReq): any { export function deleteEdge(data: SaveReq): any { return axios({ - url: 
`/projects/${data.projectCode}/process-task-relation/${data.processDefinitionCode}/${data.preTaskCode}/${data.postTaskCode}`, + url: `/projects/${data.projectCode}/workflow-task-relation/${data.workflowDefinitionCode}/${data.preTaskCode}/${data.postTaskCode}`, method: 'delete' }) } export function deleteRelation( - data: ProcessDefinitionCodeReq, + data: WorkflowDefinitionCodeReq, projectCode: ProjectCodeReq, taskCode: TaskCodeReq ): any { return axios({ - url: `/projects/${projectCode}/process-task-relation/${taskCode}`, + url: `/projects/${projectCode}/workflow-task-relation/${taskCode}`, method: 'delete', data }) @@ -57,7 +57,7 @@ export function queryDownstreamRelation( taskCode: TaskCodeReq ): any { return axios({ - url: `/projects/${projectCode}/process-task-relation/${taskCode}/downstream`, + url: `/projects/${projectCode}/workflow-task-relation/${taskCode}/downstream`, method: 'get' }) } @@ -68,7 +68,7 @@ export function deleteDownstreamRelation( taskCode: TaskCodeReq ): any { return axios({ - url: `/projects/${projectCode}/process-task-relation/${taskCode}/downstream`, + url: `/projects/${projectCode}/workflow-task-relation/${taskCode}/downstream`, method: 'delete', data }) @@ -79,7 +79,7 @@ export function queryUpstreamRelation( taskCode: TaskCodeReq ): any { return axios({ - url: `/projects/${projectCode}/process-task-relation/${taskCode}/upstream`, + url: `/projects/${projectCode}/workflow-task-relation/${taskCode}/upstream`, method: 'get' }) } @@ -90,7 +90,7 @@ export function deleteUpstreamRelation( taskCode: TaskCodeReq ): any { return axios({ - url: `/projects/${projectCode}/process-task-relation/${taskCode}/upstream`, + url: `/projects/${projectCode}/workflow-task-relation/${taskCode}/upstream`, method: 'delete', data }) diff --git a/dolphinscheduler-ui/src/service/modules/process-task-relation/types.ts b/dolphinscheduler-ui/src/service/modules/workflow-task-relation/types.ts similarity index 87% rename from 
dolphinscheduler-ui/src/service/modules/process-task-relation/types.ts rename to dolphinscheduler-ui/src/service/modules/workflow-task-relation/types.ts index d33ce903b8..582248cbf7 100644 --- a/dolphinscheduler-ui/src/service/modules/process-task-relation/types.ts +++ b/dolphinscheduler-ui/src/service/modules/workflow-task-relation/types.ts @@ -19,8 +19,8 @@ interface ProjectCodeReq { projectCode: string } -interface ProcessDefinitionCodeReq { - processDefinitionCode: string +interface WorkflowDefinitionCodeReq { + workflowDefinitionCode: string } interface PreTaskCodesReq { @@ -35,14 +35,14 @@ interface TaskCodeReq { taskCode: string } -interface SaveReq extends ProcessDefinitionCodeReq, ProjectCodeReq { +interface SaveReq extends WorkflowDefinitionCodeReq, ProjectCodeReq { postTaskCode: string preTaskCode: string } export { ProjectCodeReq, - ProcessDefinitionCodeReq, + WorkflowDefinitionCodeReq, PreTaskCodesReq, PostTaskCodesReq, TaskCodeReq, diff --git a/dolphinscheduler-ui/src/store/project/task-node.ts b/dolphinscheduler-ui/src/store/project/task-node.ts index 23a077b672..971c2b80e4 100644 --- a/dolphinscheduler-ui/src/store/project/task-node.ts +++ b/dolphinscheduler-ui/src/store/project/task-node.ts @@ -65,7 +65,7 @@ export const useTaskNodeStore = defineStore({ actions: { updateDefinition(definition?: EditWorkflowDefinition, code?: number) { if (!definition) return - const { processTaskRelationList = [], taskDefinitionList = [] } = + const { workflowTaskRelationList = [], taskDefinitionList = [] } = definition const preTaskOptions: { value: number; label: string }[] = [] @@ -76,7 +76,7 @@ export const useTaskNodeStore = defineStore({ if (task.code === code) return if ( task.taskType === 'CONDITIONS' && - processTaskRelationList.filter( + workflowTaskRelationList.filter( (relation: { preTaskCode: number }) => relation.preTaskCode === task.code ).length >= 2 @@ -94,7 +94,7 @@ export const useTaskNodeStore = defineStore({ if (!code) return const preTasks: 
number[] = [] const postTaskOptions: { value: number; label: string }[] = [] - processTaskRelationList.forEach( + workflowTaskRelationList.forEach( (relation: { preTaskCode: number; postTaskCode: number }) => { if (relation.preTaskCode === code) { postTaskOptions.push({ diff --git a/dolphinscheduler-ui/src/store/project/task-type.ts b/dolphinscheduler-ui/src/store/project/task-type.ts index 9eb1d8cd41..e1c56da4d1 100644 --- a/dolphinscheduler-ui/src/store/project/task-type.ts +++ b/dolphinscheduler-ui/src/store/project/task-type.ts @@ -29,8 +29,8 @@ export const TASK_TYPES_MAP = { SHELL: { alias: 'SHELL' }, - SUB_PROCESS: { - alias: 'SUB_PROCESS' + SUB_WORKFLOW: { + alias: 'SUB_WORKFLOW' }, DYNAMIC: { alias: 'DYNAMIC' diff --git a/dolphinscheduler-ui/src/store/project/types.ts b/dolphinscheduler-ui/src/store/project/types.ts index bc8dbd0b2b..b3a077e6dc 100644 --- a/dolphinscheduler-ui/src/store/project/types.ts +++ b/dolphinscheduler-ui/src/store/project/types.ts @@ -22,7 +22,7 @@ type TaskExecuteType = 'STREAM' | 'BATCH' type TaskType = | 'SHELL' - | 'SUB_PROCESS' + | 'SUB_WORKFLOW' | 'DYNAMIC' | 'PROCEDURE' | 'SQL' diff --git a/dolphinscheduler-ui/src/views/data-quality/task-result/use-table.ts b/dolphinscheduler-ui/src/views/data-quality/task-result/use-table.ts index 333171b0b9..1882d5142a 100644 --- a/dolphinscheduler-ui/src/views/data-quality/task-result/use-table.ts +++ b/dolphinscheduler-ui/src/views/data-quality/task-result/use-table.ts @@ -67,7 +67,7 @@ export function useTable() { }, { title: t('data_quality.task_result.workflow_instance'), - key: 'processInstanceName', + key: 'workflowInstanceName', ...COLUMN_WIDTH_CONFIG['name'], render: (row: ResultItem) => h( @@ -78,9 +78,9 @@ export function useTable() { name: 'workflow-instance-detail', params: { projectCode: row.projectCode, - id: row.processInstanceId + id: row.workflowInstanceId }, - query: { code: row.processDefinitionCode } + query: { code: row.workflowDefinitionCode } }) }, { @@ -88,7 +88,7 
@@ export function useTable() { h( NEllipsis, COLUMN_WIDTH_CONFIG['linkEllipsis'], - () => row.processInstanceName + () => row.workflowInstanceName ) } ) diff --git a/dolphinscheduler-ui/src/views/home/components/definition-card.tsx b/dolphinscheduler-ui/src/views/home/components/definition-card.tsx index fb73f5be81..ef555e95eb 100644 --- a/dolphinscheduler-ui/src/views/home/components/definition-card.tsx +++ b/dolphinscheduler-ui/src/views/home/components/definition-card.tsx @@ -16,7 +16,7 @@ */ import { defineComponent, PropType } from 'vue' -import { useProcessDefinition } from '../use-process-definition' +import { useWorkflowDefinition } from '../use-workflow-definition' import BarChart from '@/components/chart/modules/Bar' import Card from '@/components/card' @@ -30,23 +30,23 @@ const DefinitionCard = defineComponent({ name: 'DefinitionCard', props, setup() { - const { getProcessDefinition } = useProcessDefinition() - const processDefinition = getProcessDefinition() + const { getWorkflowDefinition } = useWorkflowDefinition() + const workflowDefinition = getWorkflowDefinition() - return { processDefinition } + return { workflowDefinition: workflowDefinition } }, render() { - const { title, processDefinition } = this + const { title, workflowDefinition } = this return ( {{ default: () => - processDefinition.xAxisData.length > 0 && - processDefinition.seriesData.length > 0 && ( + workflowDefinition.xAxisData.length > 0 && + workflowDefinition.seriesData.length > 0 && ( ) }} diff --git a/dolphinscheduler-ui/src/views/home/index.tsx b/dolphinscheduler-ui/src/views/home/index.tsx index 93a27f86a2..6752ba3870 100644 --- a/dolphinscheduler-ui/src/views/home/index.tsx +++ b/dolphinscheduler-ui/src/views/home/index.tsx @@ -20,7 +20,7 @@ import { NGrid, NGi } from 'naive-ui' import { startOfToday, getTime } from 'date-fns' import { useI18n } from 'vue-i18n' import { useTaskState } from './use-task-state' -import { useProcessState } from './use-process-state' +import { 
useWorkflowState } from './use-workflow-state' import StateCard from './components/state-card' import DefinitionCard from './components/definition-card' @@ -30,22 +30,23 @@ export default defineComponent({ const { t, locale } = useI18n() const dateRef = ref([getTime(startOfToday()), Date.now()]) const taskStateRef = ref() - const processStateRef = ref() + const workflowStateRef = ref() const { getTaskState, taskVariables } = useTaskState() - const { getProcessState, processVariables } = useProcessState() + const { getWorkflowState, workflowVariables: workflowVariables } = + useWorkflowState() const initData = () => { taskStateRef.value = getTaskState(dateRef.value) || taskStateRef.value - processStateRef.value = - getProcessState(dateRef.value) || processStateRef.value + workflowStateRef.value = + getWorkflowState(dateRef.value) || workflowStateRef.value } const handleTaskDate = (val: any) => { taskStateRef.value = getTaskState(val) } - const handleProcessDate = (val: any) => { - processStateRef.value = getProcessState(val) + const handleWorkflowDate = (val: any) => { + workflowStateRef.value = getWorkflowState(val) } onMounted(() => { @@ -61,11 +62,11 @@ export default defineComponent({ t, dateRef, handleTaskDate, - handleProcessDate, + handleWorkflowDate: handleWorkflowDate, taskStateRef, - processStateRef, + workflowStateRef: workflowStateRef, ...toRefs(taskVariables), - ...toRefs(processVariables) + ...toRefs(workflowVariables) } }, render() { @@ -73,9 +74,9 @@ export default defineComponent({ t, dateRef, handleTaskDate, - handleProcessDate, + handleWorkflowDate, taskLoadingRef, - processLoadingRef + workflowLoadingRef } = this return ( @@ -93,18 +94,18 @@ export default defineComponent({ - + diff --git a/dolphinscheduler-ui/src/views/home/use-process-definition.ts b/dolphinscheduler-ui/src/views/home/use-workflow-definition.ts similarity index 83% rename from dolphinscheduler-ui/src/views/home/use-process-definition.ts rename to 
dolphinscheduler-ui/src/views/home/use-workflow-definition.ts index c7ba6edf62..feb409fd18 100644 --- a/dolphinscheduler-ui/src/views/home/use-process-definition.ts +++ b/dolphinscheduler-ui/src/views/home/use-workflow-definition.ts @@ -17,14 +17,14 @@ import { useAsyncState } from '@vueuse/core' import { countDefinitionByUser } from '@/service/modules/projects-analysis' -import type { ProcessDefinitionRes } from '@/service/modules/projects-analysis/types' +import type { WorkflowDefinitionRes } from '@/service/modules/projects-analysis/types' import type { DefinitionChartData } from './types' -export function useProcessDefinition() { - const getProcessDefinition = () => { +export function useWorkflowDefinition() { + const getWorkflowDefinition = () => { const { state } = useAsyncState( countDefinitionByUser({}).then( - (res: ProcessDefinitionRes): DefinitionChartData => { + (res: WorkflowDefinitionRes): DefinitionChartData => { const xAxisData = res.userList.map((item) => item.userName) const seriesData = res.userList.map((item) => item.count) @@ -36,5 +36,5 @@ export function useProcessDefinition() { return state } - return { getProcessDefinition } + return { getWorkflowDefinition } } diff --git a/dolphinscheduler-ui/src/views/home/use-process-state.ts b/dolphinscheduler-ui/src/views/home/use-workflow-state.ts similarity index 79% rename from dolphinscheduler-ui/src/views/home/use-process-state.ts rename to dolphinscheduler-ui/src/views/home/use-workflow-state.ts index 95e9df081c..edbcf159a5 100644 --- a/dolphinscheduler-ui/src/views/home/use-process-state.ts +++ b/dolphinscheduler-ui/src/views/home/use-workflow-state.ts @@ -16,7 +16,7 @@ */ import { useAsyncState } from '@vueuse/core' -import { countProcessInstanceState } from '@/service/modules/projects-analysis' +import { countWorkflowInstanceState } from '@/service/modules/projects-analysis' import { format } from 'date-fns' import { toLower } from 'lodash' import { useI18n } from 'vue-i18n' @@ -24,17 +24,17 
@@ import type { WorkflowInstanceCountVo } from '@/service/modules/projects-analysi import type { StateData } from './types' import { reactive, ref } from 'vue' -export function useProcessState() { +export function useWorkflowState() { const { t } = useI18n() - const processVariables = reactive({ - processLoadingRef: ref(false) + const workflowVariables = reactive({ + workflowLoadingRef: ref(false) }) - const getProcessState = (date: Array) => { - if (processVariables.processLoadingRef) return - processVariables.processLoadingRef = true + const getWorkflowState = (date: Array) => { + if (workflowVariables.workflowLoadingRef) return + workflowVariables.workflowLoadingRef = true const { state } = useAsyncState( - countProcessInstanceState({ + countWorkflowInstanceState({ startDate: !date ? '' : format(date[0], 'yyyy-MM-dd HH:mm:ss'), endDate: !date ? '' : format(date[1], 'yyyy-MM-dd HH:mm:ss') }).then((res: WorkflowInstanceCountVo): StateData => { @@ -52,7 +52,7 @@ export function useProcessState() { } }) - processVariables.processLoadingRef = false + workflowVariables.workflowLoadingRef = false return { table, chart } }), { table: [], chart: [] } @@ -61,5 +61,5 @@ export function useProcessState() { return state } - return { getProcessState, processVariables } + return { getWorkflowState, workflowVariables } } diff --git a/dolphinscheduler-ui/src/views/monitor/statistics/statistics/list-command-table.tsx b/dolphinscheduler-ui/src/views/monitor/statistics/statistics/list-command-table.tsx index 11c3e58e47..11fb9c1099 100644 --- a/dolphinscheduler-ui/src/views/monitor/statistics/statistics/list-command-table.tsx +++ b/dolphinscheduler-ui/src/views/monitor/statistics/statistics/list-command-table.tsx @@ -75,13 +75,13 @@ const ListCommandTable = defineComponent({ width: 300, render: (row: any) => { return h('div', [ - `Definition Code:${row.processDefinitionCode} `, + `Definition Code:${row.workflowDefinitionCode} `, h('br'), - `Definition 
Version:${row.processDefinitionVersion} `, + `Definition Version:${row.workflowDefinitionVersion} `, h('br'), - `Instance Id:${row.processInstanceId} `, + `Instance Id:${row.workflowInstanceId} `, h('br'), - `Instance Priority:${row.processInstancePriority} ` + `Instance Priority:${row.workflowInstancePriority} ` ]) } }, diff --git a/dolphinscheduler-ui/src/views/monitor/statistics/statistics/list-error-command-table.tsx b/dolphinscheduler-ui/src/views/monitor/statistics/statistics/list-error-command-table.tsx index 04a2297299..ea1da974c5 100644 --- a/dolphinscheduler-ui/src/views/monitor/statistics/statistics/list-error-command-table.tsx +++ b/dolphinscheduler-ui/src/views/monitor/statistics/statistics/list-error-command-table.tsx @@ -75,13 +75,13 @@ const ListErrorCommandTable = defineComponent({ width: 300, render: (row: any) => { return h('div', [ - `Definition Code:${row.processDefinitionCode} `, + `Definition Code:${row.workflowDefinitionCode} `, h('br'), - `Definition Version:${row.processDefinitionVersion} `, + `Definition Version:${row.workflowDefinitionVersion} `, h('br'), - `Instance Id:${row.processInstanceId} `, + `Instance Id:${row.workflowInstanceId} `, h('br'), - `Instance Priority:${row.processInstancePriority} ` + `Instance Priority:${row.workflowInstancePriority} ` ]) } }, diff --git a/dolphinscheduler-ui/src/views/projects/components/dependencies/use-dependencies.ts b/dolphinscheduler-ui/src/views/projects/components/dependencies/use-dependencies.ts index 9de2bef762..a16a3c00f3 100644 --- a/dolphinscheduler-ui/src/views/projects/components/dependencies/use-dependencies.ts +++ b/dolphinscheduler-ui/src/views/projects/components/dependencies/use-dependencies.ts @@ -32,10 +32,10 @@ export function useDependencies() { } as DependentTaskReq const res = await queryDependentTasks(projectCode, dependentTaskReq) res - .filter((item: any) => item.processDefinitionCode !== workflowCode) + .filter((item: any) => item.workflowDefinitionCode !== workflowCode) 
.forEach((item: any) => { tasks.push( - item.processDefinitionName + '->' + item.taskDefinitionName + item.workflowDefinitionName + '->' + item.taskDefinitionName ) }) } @@ -53,10 +53,10 @@ export function useDependencies() { } as DependentTaskReq const res = await queryDependentTasks(projectCode, dependentTaskReq) res - .filter((item: any) => item.processDefinitionCode !== workflowCode) + .filter((item: any) => item.workflowDefinitionCode !== workflowCode) .forEach((item: any) => { tasks.push( - item.processDefinitionName + '->' + item.taskDefinitionName + item.workflowDefinitionName + '->' + item.taskDefinitionName ) }) } @@ -112,8 +112,8 @@ export function useDependencies() { (res: any) => { res.data .filter((item: any) => { - if (item.processDefinitionCode) { - return item.processDefinitionCode !== workflowCode + if (item.workflowDefinitionCode) { + return item.workflowDefinitionCode !== workflowCode } else { return false } @@ -121,10 +121,10 @@ export function useDependencies() { .forEach((item: any) => { dependentTaskLinks.push({ text: - item.processDefinitionName + '->' + item.taskDefinitionName, + item.workflowDefinitionName + '->' + item.taskDefinitionName, show: true, action: () => { - const url = `/projects/${item.projectCode}/workflow/definitions/${item.processDefinitionCode}` + const url = `/projects/${item.projectCode}/workflow/definitions/${item.workflowDefinitionCode}` window.open(url, '_blank') } }) @@ -149,14 +149,14 @@ export function useDependencies() { await queryDependentTasks(projectCode, dependentTaskReq).then( (res: any) => { res - .filter((item: any) => item.processDefinitionCode !== workflowCode) + .filter((item: any) => item.workflowDefinitionCode !== workflowCode) .forEach((item: any) => { dependentTaskLinks.push({ text: - item.processDefinitionName + '->' + item.taskDefinitionName, + item.workflowDefinitionName + '->' + item.taskDefinitionName, show: true, action: () => { - const url = 
`/projects/${item.projectCode}/workflow/definitions/${item.processDefinitionCode}` + const url = `/projects/${item.projectCode}/workflow/definitions/${item.workflowDefinitionCode}` window.open(url, '_blank') } }) diff --git a/dolphinscheduler-ui/src/views/projects/overview/components/definition-card.tsx b/dolphinscheduler-ui/src/views/projects/overview/components/definition-card.tsx index b9199504b8..a04a61b99f 100644 --- a/dolphinscheduler-ui/src/views/projects/overview/components/definition-card.tsx +++ b/dolphinscheduler-ui/src/views/projects/overview/components/definition-card.tsx @@ -16,7 +16,7 @@ */ import { defineComponent, PropType } from 'vue' -import { useProcessDefinition } from '../use-process-definition' +import { useWorkflowDefinition } from '../use-workflow-definition' import BarChart from '@/components/chart/modules/Bar' import Card from '@/components/card' @@ -30,23 +30,23 @@ const DefinitionCard = defineComponent({ name: 'DefinitionCard', props, setup() { - const { getProcessDefinition } = useProcessDefinition() - const processDefinition = getProcessDefinition() + const { getWorkflowDefinition } = useWorkflowDefinition() + const workflowDefinition = getWorkflowDefinition() - return { processDefinition } + return { workflowDefinition: workflowDefinition } }, render() { - const { title, processDefinition } = this + const { title, workflowDefinition } = this return ( - processDefinition.xAxisData.length > 0 && - processDefinition.seriesData.length > 0 && ( + workflowDefinition.xAxisData.length > 0 && + workflowDefinition.seriesData.length > 0 && ( {{ default: () => ( ) }} diff --git a/dolphinscheduler-ui/src/views/projects/overview/index.tsx b/dolphinscheduler-ui/src/views/projects/overview/index.tsx index f7dee7616f..f2dfa235fe 100644 --- a/dolphinscheduler-ui/src/views/projects/overview/index.tsx +++ b/dolphinscheduler-ui/src/views/projects/overview/index.tsx @@ -20,7 +20,7 @@ import { NGrid, NGi } from 'naive-ui' import { startOfToday, getTime } 
from 'date-fns' import { useI18n } from 'vue-i18n' import { useTaskState } from './use-task-state' -import { useProcessState } from './use-process-state' +import { useWorkflowState } from './use-workflow-state' import StateCard from './components/state-card' import DefinitionCard from './components/definition-card' @@ -30,22 +30,22 @@ const workflowMonitor = defineComponent({ const { t, locale } = useI18n() const dateRef = ref([getTime(startOfToday()), Date.now()]) const taskStateRef = ref() - const processStateRef = ref() + const workflowStateRef = ref() const { getTaskState, taskVariables } = useTaskState() - const { getProcessState, processVariables } = useProcessState() + const { getWorkflowState, workflowVariables } = useWorkflowState() const handleTaskDate = (val: any) => { taskStateRef.value = getTaskState(val) } - const handleProcessDate = (val: any) => { - processStateRef.value = getProcessState(val) + const handleWorkflowDate = (val: any) => { + workflowStateRef.value = getWorkflowState(val) } const initData = () => { taskStateRef.value = getTaskState(dateRef.value) || taskStateRef.value - processStateRef.value = - getProcessState(dateRef.value) || processStateRef.value + workflowStateRef.value = + getWorkflowState(dateRef.value) || workflowStateRef.value } onMounted(() => { @@ -61,11 +61,11 @@ const workflowMonitor = defineComponent({ t, dateRef, handleTaskDate, - handleProcessDate, + handleWorkflowDate: handleWorkflowDate, taskStateRef, - processStateRef, + workflowStateRef: workflowStateRef, ...toRefs(taskVariables), - ...toRefs(processVariables) + ...toRefs(workflowVariables) } }, render() { @@ -73,9 +73,9 @@ const workflowMonitor = defineComponent({ t, dateRef, handleTaskDate, - handleProcessDate, + handleWorkflowDate, taskLoadingRef, - processLoadingRef + workflowLoadingRef } = this return ( @@ -93,18 +93,18 @@ const workflowMonitor = defineComponent({ - + diff --git a/dolphinscheduler-ui/src/views/projects/overview/use-process-definition.ts 
b/dolphinscheduler-ui/src/views/projects/overview/use-workflow-definition.ts similarity index 83% rename from dolphinscheduler-ui/src/views/projects/overview/use-process-definition.ts rename to dolphinscheduler-ui/src/views/projects/overview/use-workflow-definition.ts index ebe9a5f025..6c922aec5b 100644 --- a/dolphinscheduler-ui/src/views/projects/overview/use-process-definition.ts +++ b/dolphinscheduler-ui/src/views/projects/overview/use-workflow-definition.ts @@ -18,17 +18,17 @@ import { useRoute } from 'vue-router' import { useAsyncState } from '@vueuse/core' import { countDefinitionByUser } from '@/service/modules/projects-analysis' -import type { ProcessDefinitionRes } from '@/service/modules/projects-analysis/types' +import type { WorkflowDefinitionRes } from '@/service/modules/projects-analysis/types' import type { DefinitionChartData } from './types' -export function useProcessDefinition() { +export function useWorkflowDefinition() { const route = useRoute() - const getProcessDefinition = () => { + const getWorkflowDefinition = () => { const { state } = useAsyncState( countDefinitionByUser({ projectCode: Number(route.params.projectCode) - }).then((res: ProcessDefinitionRes): DefinitionChartData => { + }).then((res: WorkflowDefinitionRes): DefinitionChartData => { const xAxisData = res.userList.map((item) => item.userName) const seriesData = res.userList.map((item) => item.count) @@ -39,5 +39,5 @@ export function useProcessDefinition() { return state } - return { getProcessDefinition } + return { getWorkflowDefinition } } diff --git a/dolphinscheduler-ui/src/views/projects/overview/use-process-state.ts b/dolphinscheduler-ui/src/views/projects/overview/use-workflow-state.ts similarity index 80% rename from dolphinscheduler-ui/src/views/projects/overview/use-process-state.ts rename to dolphinscheduler-ui/src/views/projects/overview/use-workflow-state.ts index 5add218a30..6e30016ed2 100644 --- 
a/dolphinscheduler-ui/src/views/projects/overview/use-process-state.ts +++ b/dolphinscheduler-ui/src/views/projects/overview/use-workflow-state.ts @@ -17,7 +17,7 @@ import { useRoute } from 'vue-router' import { useAsyncState } from '@vueuse/core' -import { countProcessInstanceState } from '@/service/modules/projects-analysis' +import { countWorkflowInstanceState } from '@/service/modules/projects-analysis' import { format } from 'date-fns' import { toLower } from 'lodash' import { useI18n } from 'vue-i18n' @@ -25,18 +25,18 @@ import type { WorkflowInstanceCountVo } from '@/service/modules/projects-analysi import type { StateData } from './types' import { reactive, ref } from 'vue' -export function useProcessState() { +export function useWorkflowState() { const route = useRoute() const { t } = useI18n() - const processVariables = reactive({ - processLoadingRef: ref(false) + const workflowVariables = reactive({ + workflowLoadingRef: ref(false) }) - const getProcessState = (date: Array) => { - if (processVariables.processLoadingRef) return - processVariables.processLoadingRef = true + const getWorkflowState = (date: Array) => { + if (workflowVariables.workflowLoadingRef) return + workflowVariables.workflowLoadingRef = true const { state } = useAsyncState( - countProcessInstanceState({ + countWorkflowInstanceState({ startDate: !date ? '' : format(date[0], 'yyyy-MM-dd HH:mm:ss'), endDate: !date ? '' : format(date[1], 'yyyy-MM-dd HH:mm:ss'), projectCode: Number(route.params.projectCode) @@ -54,7 +54,7 @@ export function useProcessState() { name: t('home.' 
+ toLower(item.state)) } }) - processVariables.processLoadingRef = false + workflowVariables.workflowLoadingRef = false return { table, chart } }), @@ -64,5 +64,5 @@ export function useProcessState() { return state } - return { getProcessState, processVariables } + return { getWorkflowState, workflowVariables } } diff --git a/dolphinscheduler-ui/src/views/projects/task/components/node/detail-modal.tsx b/dolphinscheduler-ui/src/views/projects/task/components/node/detail-modal.tsx index e6d9398ece..66bc7559ae 100644 --- a/dolphinscheduler-ui/src/views/projects/task/components/node/detail-modal.tsx +++ b/dolphinscheduler-ui/src/views/projects/task/components/node/detail-modal.tsx @@ -40,7 +40,7 @@ import { import { NIcon } from 'naive-ui' import { TASK_TYPES_MAP } from '../../constants/task-type' import { Router, useRouter } from 'vue-router' -import { querySubProcessInstanceByTaskCode } from '@/service/modules/process-instances' +import { querySubWorkflowInstanceByTaskCode } from '@/service/modules/workflow-instances' import { useTaskNodeStore } from '@/store/project/task-node' import type { ITaskData, @@ -77,7 +77,7 @@ const props = { definition: { type: Object as PropType> }, - processInstance: { + workflowInstance: { type: Object as PropType }, taskInstance: { @@ -138,7 +138,7 @@ const NodeDetailModal = defineComponent({ } } - const initHeaderLinks = (processInstance: any, taskType?: ITaskType) => { + const initHeaderLinks = (workflowInstance: any, taskType?: ITaskType) => { headerLinks.value = [ { text: t('project.node.instructions'), @@ -180,7 +180,7 @@ const NodeDetailModal = defineComponent({ { text: t('project.node.enter_this_child_node'), show: - props.data.taskType === 'SUB_PROCESS' || + props.data.taskType === 'SUB_WORKFLOW' || props.data.taskType === 'DYNAMIC', disabled: !props.data.id || @@ -188,20 +188,20 @@ const NodeDetailModal = defineComponent({ !props.taskInstance), action: () => { if (router.currentRoute.value.name === 'workflow-instance-detail') 
{ - querySubProcessInstanceByTaskCode( + querySubWorkflowInstanceByTaskCode( { taskId: props.taskInstance?.id }, { projectCode: props.projectCode } ).then((res: any) => { router.push({ name: 'workflow-instance-detail', - params: { id: res.subProcessInstanceId }, - query: { code: props.data.taskParams?.processDefinitionCode } + params: { id: res.subWorkflowInstanceId }, + query: { code: props.data.taskParams?.workflowDefinitionCode } }) }) } else { router.push({ name: 'workflow-definition-detail', - params: { code: props.data.taskParams?.processDefinitionCode } + params: { code: props.data.taskParams?.workflowDefinitionCode } }) } }, @@ -213,7 +213,7 @@ const NodeDetailModal = defineComponent({ const onTaskTypeChange = (taskType: ITaskType) => { // eslint-disable-next-line vue/no-mutating-props props.data.taskType = taskType - initHeaderLinks(props.processInstance, props.data.taskType) + initHeaderLinks(props.workflowInstance, props.data.taskType) } provide( @@ -235,7 +235,7 @@ const NodeDetailModal = defineComponent({ () => [props.show, props.data], async () => { if (!props.show) return - initHeaderLinks(props.processInstance, props.data.taskType) + initHeaderLinks(props.workflowInstance, props.data.taskType) taskStore.init() const nodeData = formatModel(props.data) await nextTick() diff --git a/dolphinscheduler-ui/src/views/projects/task/components/node/fields/index.ts b/dolphinscheduler-ui/src/views/projects/task/components/node/fields/index.ts index 673aced136..058f0a00c0 100644 --- a/dolphinscheduler-ui/src/views/projects/task/components/node/fields/index.ts +++ b/dolphinscheduler-ui/src/views/projects/task/components/node/fields/index.ts @@ -29,7 +29,7 @@ export { useDelayTime } from './use-delay-time' export { useTimeoutAlarm } from './use-timeout-alarm' export { usePreTasks } from './use-pre-tasks' export { useTaskType } from './use-task-type' -export { useProcessName } from './use-process-name' +export { useWorkflowName } from './use-workflow-name' export { 
useChildNode } from './use-child-node' export { useTargetTaskName } from './use-target-task-name' export { useDatasource } from './use-datasource' diff --git a/dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-child-node.ts b/dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-child-node.ts index 98b232f271..a880bc29a9 100644 --- a/dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-child-node.ts +++ b/dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-child-node.ts @@ -19,21 +19,21 @@ import { ref, onMounted } from 'vue' import { useI18n } from 'vue-i18n' import { querySimpleList, - queryProcessDefinitionByCode -} from '@/service/modules/process-definition' + queryWorkflowDefinitionByCode +} from '@/service/modules/workflow-definition' import type { IJsonItem } from '../types' export function useChildNode({ model, projectCode, from, - processName, + workflowName, code }: { model: { [field: string]: any } projectCode: number from?: number - processName?: number + workflowName?: number code?: number }): IJsonItem { const { t } = useI18n() @@ -41,7 +41,7 @@ export function useChildNode({ const options = ref([] as { label: string; value: string }[]) const loading = ref(false) - const getProcessList = async () => { + const getWorkflowList = async () => { if (loading.value) return loading.value = true const res = await querySimpleList(projectCode) @@ -53,22 +53,22 @@ export function useChildNode({ })) loading.value = false } - const getProcessListByCode = async (processCode: number) => { - if (!processCode) return - const res = await queryProcessDefinitionByCode(processCode, projectCode) + const getWorkflowListByCode = async (workflowCode: number) => { + if (!workflowCode) return + const res = await queryWorkflowDefinitionByCode(workflowCode, projectCode) model.definition = res } onMounted(() => { - if (from === 1 && processName) { - getProcessListByCode(processName) + if (from === 1 && 
workflowName) { + getWorkflowListByCode(workflowName) } - getProcessList() + getWorkflowList() }) return { type: 'select', - field: 'processDefinitionCode', + field: 'workflowDefinitionCode', span: 24, name: t('project.node.child_node'), props: { diff --git a/dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-dependent.ts b/dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-dependent.ts index 4a8e8cf2c0..8fd407f6fb 100644 --- a/dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-dependent.ts +++ b/dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-dependent.ts @@ -23,9 +23,9 @@ import { useTaskNodeStore } from '@/store/project/task-node' import { queryAllProjectListForDependent } from '@/service/modules/projects' import { tasksState } from '@/common/common' import { - queryProcessDefinitionList, + queryWorkflowDefinitionList, getTasksByDefinitionList -} from '@/service/modules/process-definition' +} from '@/service/modules/workflow-definition' import { Router, useRouter } from 'vue-router' import type { IJsonItem, @@ -61,7 +61,7 @@ export function useDependent(model: { [field: string]: any }): IJsonItem[] { const dependentResult = nodeStore.getDependentResult const TasksStateConfig = tasksState(t) const projectList = ref([] as IRenderOption[]) - const processCache = {} as { + const workflowCache = {} as { [key: number]: IRenderOption[] } const taskCache = {} as { @@ -214,26 +214,26 @@ export function useDependent(model: { [field: string]: any }): IJsonItem[] { })) return projectList } - const getProcessList = async (code: number) => { - if (processCache[code]) { - return processCache[code] + const getWorkflowList = async (code: number) => { + if (workflowCache[code]) { + return workflowCache[code] } - const result = await queryProcessDefinitionList(code) - const processList = result.map((item: { code: number; name: string }) => ({ + const result = await queryWorkflowDefinitionList(code) 
+ const workflowList = result.map((item: { code: number; name: string }) => ({ value: item.code, label: () => h(NEllipsis, null, item.name), filterLabel: item.name })) - processCache[code] = processList + workflowCache[code] = workflowList - return processList + return workflowList } - const getTaskList = async (code: number, processCode: number) => { - if (taskCache[processCode]) { - return taskCache[processCode] + const getTaskList = async (code: number, workflowCode: number) => { + if (taskCache[workflowCode]) { + return taskCache[workflowCode] } - const result = await getTasksByDefinitionList(code, processCode) + const result = await getTasksByDefinitionList(code, workflowCode) const taskList = result.map((item: { code: number; name: string }) => ({ value: item.code, label: () => h(NEllipsis, null, item.name), @@ -244,7 +244,7 @@ export function useDependent(model: { [field: string]: any }): IJsonItem[] { label: 'ALL', filterLabel: 'ALL' }) - taskCache[processCode] = taskList + taskCache[workflowCode] = taskList return taskList } @@ -286,7 +286,7 @@ export function useDependent(model: { [field: string]: any }): IJsonItem[] { } if (dependItem.projectCode) { itemListOptions.value[itemIndex].definitionCodeOptions = - await getProcessList(dependItem.projectCode) + await getWorkflowList(dependItem.projectCode) } if (dependItem.projectCode && dependItem.definitionCode) { itemListOptions.value[itemIndex].depTaskCodeOptions = @@ -349,7 +349,7 @@ export function useDependent(model: { [field: string]: any }): IJsonItem[] { const options = selectOptions?.value[i] || {} const itemListOptions = options?.dependItemList || [] const itemOptions = {} as IDependentItemOptions - itemOptions.definitionCodeOptions = await getProcessList( + itemOptions.definitionCodeOptions = await getWorkflowList( projectCode ) itemListOptions[j] = itemOptions @@ -376,7 +376,7 @@ export function useDependent(model: { [field: string]: any }): IJsonItem[] { type: 'select', field: 'definitionCode', 
span: 24, - name: t('project.node.process_name'), + name: t('project.node.workflow_name'), props: { filterable: true, filter: (query: string, option: IRenderOption) => { @@ -384,10 +384,10 @@ export function useDependent(model: { [field: string]: any }): IJsonItem[] { .toLowerCase() .includes(query.toLowerCase()) }, - onUpdateValue: async (processCode: number) => { + onUpdateValue: async (workflowCode: number) => { const item = model.dependTaskList[i].dependItemList[j] selectOptions.value[i].dependItemList[j].depTaskCodeOptions = - await getTaskList(item.projectCode, processCode) + await getTaskList(item.projectCode, workflowCode) item.depTaskCode = item.dependentType === 'DEPENDENT_ON_WORKFLOW' ? 0 : -1 } @@ -401,7 +401,7 @@ export function useDependent(model: { [field: string]: any }): IJsonItem[] { trigger: ['input', 'blur'], validator(validate: any, value: string) { if (!value) { - return Error(t('project.node.process_name_tips')) + return Error(t('project.node.workflow_name_tips')) } } } diff --git a/dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-switch.ts b/dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-switch.ts index 0b3a070220..db4d0a2ea9 100644 --- a/dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-switch.ts +++ b/dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-switch.ts @@ -17,7 +17,7 @@ import { ref, watch, onMounted, nextTick } from 'vue' import { useI18n } from 'vue-i18n' import { useTaskNodeStore } from '@/store/project/task-node' -import { queryProcessDefinitionByCode } from '@/service/modules/process-definition' +import { queryWorkflowDefinitionByCode } from '@/service/modules/workflow-definition' import { findIndex } from 'lodash' import type { IJsonItem } from '../types' @@ -33,8 +33,8 @@ export function useSwitch( if (loading.value) return loading.value = true branchFlowOptions.value = [] - const res = await queryProcessDefinitionByCode( - 
model.processName, + const res = await queryWorkflowDefinitionByCode( + model.workflowDefinitionName, projectCode ) res?.taskDefinitionList.forEach((item: any) => { @@ -75,9 +75,9 @@ export function useSwitch( } watch( - () => [model.processName, model.nextCode], + () => [model.workflowName, model.nextCode], () => { - if (model.processName) { + if (model.workflowName) { getOtherTaskDefinitionList() } } diff --git a/dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-task-definition.ts b/dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-task-definition.ts index d740e366c1..1f33f7da9e 100644 --- a/dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-task-definition.ts +++ b/dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-task-definition.ts @@ -15,7 +15,7 @@ * limitations under the License. */ -import { useTaskType, useProcessName } from '.' +import { useTaskType, useWorkflowName } from '.' import type { IJsonItem, ITaskData } from '../types' export const useTaskDefinition = ({ @@ -34,12 +34,12 @@ export const useTaskDefinition = ({ if (from === 0) return [] return [ useTaskType(model, readonly), - useProcessName({ + useWorkflowName({ model, projectCode, isCreate: !data?.id, from, - processName: data?.processName, + workflowName: data?.workflowDefinitionName, taskCode: data?.code }) ] diff --git a/dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-process-name.ts b/dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-workflow-name.ts similarity index 80% rename from dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-process-name.ts rename to dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-workflow-name.ts index f9cdccc224..86060968fe 100644 --- a/dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-process-name.ts +++ 
b/dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-workflow-name.ts @@ -19,24 +19,24 @@ import { ref, onMounted } from 'vue' import { useI18n } from 'vue-i18n' import { querySimpleList, - queryProcessDefinitionByCode -} from '@/service/modules/process-definition' + queryWorkflowDefinitionByCode +} from '@/service/modules/workflow-definition' import { useTaskNodeStore } from '@/store/project/task-node' import type { IJsonItem } from '../types' -export function useProcessName({ +export function useWorkflowName({ model, projectCode, isCreate, from, - processName, + workflowName, taskCode }: { model: { [field: string]: any } projectCode: number isCreate: boolean from?: number - processName?: number + workflowName?: number taskCode?: number }): IJsonItem { const { t } = useI18n() @@ -44,7 +44,7 @@ export function useProcessName({ const options = ref([] as { label: string; value: string }[]) const loading = ref(false) - const getProcessList = async () => { + const getWorkflowList = async () => { if (loading.value) return loading.value = true const res = await querySimpleList(projectCode) @@ -54,27 +54,27 @@ export function useProcessName({ })) loading.value = false } - const getProcessListByCode = async (processCode: number) => { - if (!processCode) return - const res = await queryProcessDefinitionByCode(processCode, projectCode) + const getWorkflowListByCode = async (workflowCode: number) => { + if (!workflowCode) return + const res = await queryWorkflowDefinitionByCode(workflowCode, projectCode) model.definition = res taskStore.updateDefinition(res, taskCode) } const onChange = (code: number) => { - getProcessListByCode(code) + getWorkflowListByCode(code) } onMounted(() => { - if (from === 1 && processName) { - getProcessListByCode(processName) + if (from === 1 && workflowName) { + getWorkflowListByCode(workflowName) } - getProcessList() + getWorkflowList() }) return { type: 'select', - field: 'processName', + field: 'workflowName', span: 24, 
name: t('project.node.workflow_name'), props: { diff --git a/dolphinscheduler-ui/src/views/projects/task/components/node/format-data.ts b/dolphinscheduler-ui/src/views/projects/task/components/node/format-data.ts index e803ca5160..fc5a1d9466 100644 --- a/dolphinscheduler-ui/src/views/projects/task/components/node/format-data.ts +++ b/dolphinscheduler-ui/src/views/projects/task/components/node/format-data.ts @@ -28,14 +28,14 @@ import type { import { ref } from 'vue' export function formatParams(data: INodeData): { - processDefinitionCode: string + workflowDefinitionCode: string upstreamCodes: string taskDefinitionJsonObj: object } { const rdbmsSourceTypes = ref(['MYSQL', 'ORACLE', 'SQLSERVER', 'HANA']) const taskParams: ITaskParams = {} - if (data.taskType === 'SUB_PROCESS' || data.taskType === 'DYNAMIC') { - taskParams.processDefinitionCode = data.processDefinitionCode + if (data.taskType === 'SUB_WORKFLOW' || data.taskType === 'DYNAMIC') { + taskParams.workflowDefinitionCode = data.workflowDefinitionCode } if (data.taskType === 'JAVA') { @@ -496,7 +496,7 @@ export function formatParams(data: INodeData): { } if (data.taskType === 'DYNAMIC') { - taskParams.processDefinitionCode = data.processDefinitionCode + taskParams.workflowDefinitionCode = data.workflowDefinitionCode taskParams.maxNumOfSubWorkflowInstances = data.maxNumOfSubWorkflowInstances taskParams.degreeOfParallelism = data.degreeOfParallelism taskParams.filterCondition = data.filterCondition @@ -513,7 +513,9 @@ export function formatParams(data: INodeData): { } } const params = { - processDefinitionCode: data.processName ? String(data.processName) : '', + workflowDefinitionCode: data.workflowDefinitionName + ? 
String(data.workflowDefinitionName) + : '', upstreamCodes: data?.preTasks?.join(','), taskDefinitionJsonObj: { code: data.code, @@ -554,7 +556,7 @@ export function formatParams(data: INodeData): { taskExecuteType: data.taskExecuteType } } as { - processDefinitionCode: string + workflowDefinitionCode: string upstreamCodes: string taskDefinitionJsonObj: { timeout: number; timeoutNotifyStrategy: string } } diff --git a/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/index.ts b/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/index.ts index 7da82e2a13..6f777da549 100644 --- a/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/index.ts +++ b/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/index.ts @@ -18,7 +18,7 @@ import { useFlink } from './use-flink' import { useFlinkStream } from './use-flink-stream' import { useShell } from './use-shell' -import { useSubProcess } from './use-sub-process' +import { useSubWorkflow } from './use-sub-workflow' import { usePython } from './use-python' import { useSpark } from './use-spark' import { useMr } from './use-mr' @@ -56,7 +56,7 @@ import { useAliyunServerlessSpark } from './use-aliyun-serverless-spark' export default { SHELL: useShell, - SUB_PROCESS: useSubProcess, + SUB_WORKFLOW: useSubWorkflow, DYNAMIC: useDynamic, PYTHON: usePython, SPARK: useSpark, diff --git a/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-datasync.ts b/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-datasync.ts index 843f90f349..2c3f26c7b9 100644 --- a/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-datasync.ts +++ b/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-datasync.ts @@ -50,12 +50,12 @@ export function useDatasync({ if (from === 1) { extra = [ Fields.useTaskType(model, readonly), - Fields.useProcessName({ + Fields.useWorkflowName({ model, projectCode, isCreate: !data?.id, from, - processName: 
data?.processName + workflowName: data?.workflowDefinitionName }) ] } diff --git a/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-dms.ts b/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-dms.ts index 7fe0922a73..b635b8b26a 100644 --- a/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-dms.ts +++ b/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-dms.ts @@ -51,12 +51,12 @@ export function useDms({ if (from === 1) { extra = [ Fields.useTaskType(model, readonly), - Fields.useProcessName({ + Fields.useWorkflowName({ model, projectCode, isCreate: !data?.id, from, - processName: data?.processName + workflowName: data?.workflowDefinitionName }) ] } diff --git a/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-dynamic.ts b/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-dynamic.ts index 8f498fbfe9..8d20d87c9f 100644 --- a/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-dynamic.ts +++ b/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-dynamic.ts @@ -72,7 +72,7 @@ export function useDynamic({ model, projectCode, from, - processName: data?.processName, + workflowName: data?.workflowDefinitionName, code: from === 1 ? 
0 : Number(workflowCode) }), ...Fields.useDynamic(model), diff --git a/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-hive-cli.ts b/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-hive-cli.ts index e9e485a9b7..a2f34f8f8d 100644 --- a/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-hive-cli.ts +++ b/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-hive-cli.ts @@ -50,12 +50,12 @@ export function useHiveCli({ if (from === 1) { extra = [ Fields.useTaskType(model, readonly), - Fields.useProcessName({ + Fields.useWorkflowName({ model, projectCode, isCreate: !data?.id, from, - processName: data?.processName + workflowName: data?.workflowDefinitionName }) ] } diff --git a/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-java.ts b/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-java.ts index 1eb99de965..875a872f0a 100644 --- a/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-java.ts +++ b/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-java.ts @@ -58,12 +58,12 @@ export function useJava({ if (from === 1) { extra = [ Fields.useTaskType(model, readonly), - Fields.useProcessName({ + Fields.useWorkflowName({ model, projectCode, isCreate: !data?.id, from, - processName: data?.processName + workflowName: data?.workflowDefinitionName }) ] } diff --git a/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-pytorch.ts b/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-pytorch.ts index 2c005700f6..91df96706f 100644 --- a/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-pytorch.ts +++ b/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-pytorch.ts @@ -55,12 +55,12 @@ export function usePytorch({ if (from === 1) { extra = [ Fields.useTaskType(model, readonly), - Fields.useProcessName({ + Fields.useWorkflowName({ model, projectCode, 
isCreate: !data?.id, from, - processName: data?.processName + workflowName: data?.workflowDefinitionName }) ] } diff --git a/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-sub-process.ts b/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-sub-workflow.ts similarity index 94% rename from dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-sub-process.ts rename to dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-sub-workflow.ts index 8c855666bc..540460700c 100644 --- a/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-sub-process.ts +++ b/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-sub-workflow.ts @@ -20,7 +20,7 @@ import * as Fields from '../fields/index' import { useRouter } from 'vue-router' import type { IJsonItem, INodeData, ITaskData } from '../types' -export function useSubProcess({ +export function useSubWorkflow({ projectCode, from = 0, readonly, @@ -34,7 +34,7 @@ export function useSubProcess({ const router = useRouter() const workflowCode = router.currentRoute.value.params.code const model = reactive({ - taskType: 'SUB_PROCESS', + taskType: 'SUB_WORKFLOW', name: '', flag: 'YES', description: '', @@ -64,7 +64,7 @@ export function useSubProcess({ model, projectCode, from, - processName: data?.processName, + workflowName: data?.workflowDefinitionName, code: from === 1 ? 
0 : Number(workflowCode) }), Fields.usePreTasks() diff --git a/dolphinscheduler-ui/src/views/projects/task/components/node/types.ts b/dolphinscheduler-ui/src/views/projects/task/components/node/types.ts index a6b9bda8b4..ff4dcae33a 100644 --- a/dolphinscheduler-ui/src/views/projects/task/components/node/types.ts +++ b/dolphinscheduler-ui/src/views/projects/task/components/node/types.ts @@ -376,7 +376,7 @@ interface ITaskParams { startupScript?: string executionTimeout?: string startTimeout?: string - processDefinitionCode?: number + workflowDefinitionCode?: number conditionResult?: { successNode?: number[] failedNode?: number[] @@ -484,7 +484,7 @@ interface INodeData Omit { id?: string taskType?: ITaskType - processName?: number + workflowDefinitionName?: number delayTime?: number description?: string environmentCode?: number | null diff --git a/dolphinscheduler-ui/src/views/projects/task/constants/task-type.ts b/dolphinscheduler-ui/src/views/projects/task/constants/task-type.ts index 0234df55fa..22235fa0e2 100644 --- a/dolphinscheduler-ui/src/views/projects/task/constants/task-type.ts +++ b/dolphinscheduler-ui/src/views/projects/task/constants/task-type.ts @@ -16,7 +16,7 @@ */ export type TaskType = | 'SHELL' - | 'SUB_PROCESS' + | 'SUB_WORKFLOW' | 'DYNAMIC' | 'PROCEDURE' | 'SQL' @@ -63,8 +63,8 @@ export const TASK_TYPES_MAP = { SHELL: { alias: 'SHELL' }, - SUB_PROCESS: { - alias: 'SUB_PROCESS' + SUB_WORKFLOW: { + alias: 'SUB_WORKFLOW' }, DYNAMIC: { alias: 'DYNAMIC' diff --git a/dolphinscheduler-ui/src/views/projects/task/instance/batch-task.tsx b/dolphinscheduler-ui/src/views/projects/task/instance/batch-task.tsx index 9e2eed243b..6094dc6d9e 100644 --- a/dolphinscheduler-ui/src/views/projects/task/instance/batch-task.tsx +++ b/dolphinscheduler-ui/src/views/projects/task/instance/batch-task.tsx @@ -55,12 +55,12 @@ const BatchTaskInstance = defineComponent({ pageNo: variables.page, searchVal: variables.searchVal, taskCode: variables.taskCode, - processInstanceId: 
variables.processInstanceId, + workflowInstanceId: variables.workflowInstanceId, host: variables.host, stateType: variables.stateType, datePickerRange: variables.datePickerRange, executorName: variables.executorName, - processInstanceName: variables.processInstanceName + workflowInstanceName: variables.workflowInstanceName }) } @@ -84,8 +84,8 @@ const BatchTaskInstance = defineComponent({ onSearch() } - const onClearSearchProcessInstanceName = () => { - variables.processInstanceName = null + const onClearSearchWorkflowInstanceName = () => { + variables.workflowInstanceName = null onSearch() } @@ -190,7 +190,7 @@ const BatchTaskInstance = defineComponent({ onSearch, onClearSearchTaskCode, onClearSearchTaskName, - onClearSearchProcessInstanceName, + onClearSearchWorkflowInstanceName, onClearSearchExecutorName, onClearSearchHost, onClearSearchStateType, @@ -233,11 +233,11 @@ const BatchTaskInstance = defineComponent({ /> { - variables.processDefinitionName = null + variables.workflowDefinitionName = null onSearch() } @@ -206,7 +206,7 @@ const BatchTaskInstance = defineComponent({ /> h( ButtonLink, @@ -107,7 +107,7 @@ export function useTable() { onClick: () => { const routeUrl = router.resolve({ name: 'workflow-instance-detail', - params: { id: row.processInstanceId }, + params: { id: row.workflowInstanceId }, query: { code: projectCode } }) window.open(routeUrl.href, '_blank') @@ -120,7 +120,7 @@ export function useTable() { { style: 'max-width: 580px;line-height: 1.5' }, - () => row.processInstanceName + () => row.workflowInstanceName ) } ) @@ -297,12 +297,12 @@ export function useTable() { : variables.page, searchVal: variables.searchVal, taskCode: variables.taskCode, - processInstanceId: variables.processInstanceId, + workflowInstanceId: variables.workflowInstanceId, host: variables.host, stateType: variables.stateType, datePickerRange: variables.datePickerRange, executorName: variables.executorName, - processInstanceName: variables.processInstanceName + 
workflowInstanceName: variables.workflowInstanceName }) }) } @@ -315,7 +315,7 @@ export function useTable() { pageNo: params.pageNo, searchVal: params.searchVal, taskCode: params.taskCode, - processInstanceId: params.processInstanceId, + workflowInstanceId: params.workflowInstanceId, host: params.host, stateType: params.stateType, startDate: params.datePickerRange @@ -325,7 +325,7 @@ export function useTable() { ? format(parseTime(params.datePickerRange[1]), 'yyyy-MM-dd HH:mm:ss') : '', executorName: params.executorName, - processInstanceName: params.processInstanceName + workflowInstanceName: params.workflowInstanceName } const { state } = useAsyncState( diff --git a/dolphinscheduler-ui/src/views/projects/workflow/components/dag/dag-save-modal.tsx b/dolphinscheduler-ui/src/views/projects/workflow/components/dag/dag-save-modal.tsx index 81d7f54019..a6b78cda38 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/components/dag/dag-save-modal.tsx +++ b/dolphinscheduler-ui/src/views/projects/workflow/components/dag/dag-save-modal.tsx @@ -38,7 +38,7 @@ import { NGrid } from 'naive-ui' import { useRoute } from 'vue-router' -import { verifyName } from '@/service/modules/process-definition' +import { verifyName } from '@/service/modules/workflow-definition' import './x6-style.scss' import { positiveIntegerRegex } from '@/utils/regex' import type { SaveForm, WorkflowDefinition, WorkflowInstance } from './types' @@ -123,10 +123,10 @@ export default defineComponent({ if (!valid) { const params = { name: formValue.value.name, - code: props.definition?.processDefinition.code + code: props.definition?.workflowDefinition.code } as { name: string; code?: number } if ( - props.definition?.processDefinition.name !== formValue.value.name + props.definition?.workflowDefinition.name !== formValue.value.name ) { verifyName(params, projectCode).then(() => context.emit('save', formValue.value) @@ -142,21 +142,23 @@ export default defineComponent({ } const updateModalData = () => 
{ - const process = props.definition?.processDefinition - if (process) { - formValue.value.name = process.name - formValue.value.description = process.description - formValue.value.executionType = process.executionType || 'PARALLEL' - if (process.timeout && process.timeout > 0) { + const workflow = props.definition?.workflowDefinition + if (workflow) { + formValue.value.name = workflow.name + formValue.value.description = workflow.description + formValue.value.executionType = workflow.executionType || 'PARALLEL' + if (workflow.timeout && workflow.timeout > 0) { formValue.value.timeoutFlag = true - formValue.value.timeout = process.timeout + formValue.value.timeout = workflow.timeout } - formValue.value.globalParams = process.globalParamList.map((param) => ({ - key: param.prop, - value: param.value, - direct: param.direct, - type: param.type - })) + formValue.value.globalParams = workflow.globalParamList.map( + (param) => ({ + key: param.prop, + value: param.value, + direct: param.direct, + type: param.type + }) + ) } } @@ -165,7 +167,7 @@ export default defineComponent({ onMounted(() => updateModalData()) watch( - () => props.definition?.processDefinition, + () => props.definition?.workflowDefinition, () => updateModalData() ) @@ -178,7 +180,7 @@ export default defineComponent({ autoFocus={false} > - + - {this.startupParam?.processInstancePriority} + {this.startupParam?.workflowInstancePriority}
  • diff --git a/dolphinscheduler-ui/src/views/projects/workflow/components/dag/dag-toolbar.tsx b/dolphinscheduler-ui/src/views/projects/workflow/components/dag/dag-toolbar.tsx index 02c62b9bd6..42182abee4 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/components/dag/dag-toolbar.tsx +++ b/dolphinscheduler-ui/src/views/projects/workflow/components/dag/dag-toolbar.tsx @@ -62,7 +62,7 @@ const props = { default: null }, definition: { - // The same as the structure responsed by the queryProcessDefinitionByCode api + // The same as the structure responsed by the queryWorkflowDefinitionByCode api type: Object as PropType, default: null }, @@ -212,10 +212,10 @@ export default defineComponent({ {route.name === 'workflow-instance-detail' ? props.instance?.name - : props.definition?.processDefinition?.name || + : props.definition?.workflowDefinition?.name || t('project.dag.create')} - {props.definition?.processDefinition?.name && ( + {props.definition?.workflowDefinition?.name && ( ( @@ -226,7 +226,7 @@ export default defineComponent({ const name = route.name === 'workflow-instance-detail' ? props.instance?.name - : props.definition?.processDefinition?.name + : props.definition?.workflowDefinition?.name copy(name) }} class={Styles['toolbar-btn']} @@ -240,7 +240,7 @@ export default defineComponent({ }} > )} - {props.definition?.processDefinition?.name && ( + {props.definition?.workflowDefinition?.name && ( ( @@ -273,7 +273,7 @@ export default defineComponent({ )}
    {route.name !== 'workflow-instance-detail' && - props.definition?.processDefinition?.releaseState === 'ONLINE' && ( + props.definition?.workflowDefinition?.releaseState === 'ONLINE' && ( {t('project.dag.online')} @@ -509,7 +509,7 @@ export default defineComponent({ secondary round disabled={ - props.definition?.processDefinition?.releaseState === 'ONLINE' && + props.definition?.workflowDefinition?.releaseState === 'ONLINE' && !props.instance } onClick={() => { diff --git a/dolphinscheduler-ui/src/views/projects/workflow/components/dag/dag.module.scss b/dolphinscheduler-ui/src/views/projects/workflow/components/dag/dag.module.scss index 99c2ce6f5e..af0606b299 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/components/dag/dag.module.scss +++ b/dolphinscheduler-ui/src/views/projects/workflow/components/dag/dag.module.scss @@ -104,8 +104,8 @@ $bgLight: #ffffff; &.icon-shell { background-image: url('/images/task-icons/shell.png'); } - &.icon-sub_process { - background-image: url('/images/task-icons/sub_process.png'); + &.icon-sub_workflow { + background-image: url('/images/task-icons/sub_workflow.png'); } &.icon-dynamic { background-image: url('/images/task-icons/dynamic.png'); @@ -220,8 +220,8 @@ $bgLight: #ffffff; &.icon-shell { background-image: url('/images/task-icons/shell_hover.png'); } - &.icon-sub_process { - background-image: url('/images/task-icons/sub_process_hover.png'); + &.icon-sub_workflow { + background-image: url('/images/task-icons/sub_workflow_hover.png'); } &.icon-dynamic { background-image: url('/images/task-icons/dynamic_hover.png'); diff --git a/dolphinscheduler-ui/src/views/projects/workflow/components/dag/index.tsx b/dolphinscheduler-ui/src/views/projects/workflow/components/dag/index.tsx index c9febe14e0..78f0220285 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/components/dag/index.tsx +++ b/dolphinscheduler-ui/src/views/projects/workflow/components/dag/index.tsx @@ -118,7 +118,7 @@ export default 
defineComponent({ appendTask, editTask, copyTask, - processDefinition, + workflowDefinition, removeTasks } = useTaskEdit({ graph, definition: toRef(props, 'definition') }) @@ -132,7 +132,7 @@ export default defineComponent({ if (props.definition) { return ( route.name === 'workflow-definition-detail' && - props.definition!.processDefinition.releaseState === 'ONLINE' + props.definition!.workflowDefinition.releaseState === 'ONLINE' ) } else { return false @@ -155,7 +155,7 @@ export default defineComponent({ props.instance.state === 'STOP' ) } else if (props.definition) { - return props.definition!.processDefinition.releaseState === 'OFFLINE' + return props.definition!.workflowDefinition.releaseState === 'OFFLINE' } else { return false } @@ -231,11 +231,11 @@ export default defineComponent({ const connects = getConnects( nodes, edges, - processDefinition.value.taskDefinitionList as any + workflowDefinition.value.taskDefinitionList as any ) const locations = getLocations(nodes) context.emit('save', { - taskDefinitions: processDefinition.value.taskDefinitionList, + taskDefinitions: workflowDefinition.value.taskDefinitionList, saveForm, connects, locations @@ -297,7 +297,7 @@ export default defineComponent({ ) => { executeTask( { - processInstanceId: Number(route.params.id), + workflowInstanceId: Number(route.params.id), startNodeList: startNodeList, taskDependType: taskDependType }, @@ -399,7 +399,7 @@ export default defineComponent({ {!!props.definition && ( @@ -414,11 +414,11 @@ export default defineComponent({ readonly={props.readonly} show={taskModalVisible.value} projectCode={props.projectCode} - processInstance={props.instance} + workflowInstance={props.instance} taskInstance={currentTaskInstance.value} onViewLog={handleViewLog} data={currTask.value as any} - definition={processDefinition} + definition={workflowDefinition} onSubmit={taskConfirm} onCancel={taskCancel} /> @@ -450,7 +450,7 @@ export default defineComponent({ /> {!!props.definition && ( diff --git 
a/dolphinscheduler-ui/src/views/projects/workflow/components/dag/types.ts b/dolphinscheduler-ui/src/views/projects/workflow/components/dag/types.ts index c80f2f9216..f136fa5be0 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/components/dag/types.ts +++ b/dolphinscheduler-ui/src/views/projects/workflow/components/dag/types.ts @@ -18,7 +18,7 @@ import type { TaskType } from '@/store/project/types' export type { ITaskState } from '@/common/types' -export interface ProcessDefinition { +export interface WorkflowDefinition { id: number code: number name: string @@ -48,9 +48,9 @@ export interface ProcessDefinition { export interface Connect { id?: number name: string - processDefinitionVersion?: number + workflowDefinitionVersion?: number projectCode?: number - processDefinitionCode?: number + workflowDefinitionCode?: number preTaskCode: number preTaskVersion: number postTaskCode: number @@ -99,8 +99,8 @@ export type NodeData = { } & Partial export interface WorkflowDefinition { - processDefinition: ProcessDefinition - processTaskRelationList: Connect[] + workflowDefinition: WorkflowDefinition + workflowTaskRelationList: Connect[] taskDefinitionList: TaskDefinition[] } @@ -111,7 +111,7 @@ export interface WorkflowInstance { commandType: string commandParam: string failureStrategy: string - processInstancePriority: string + workflowInstancePriority: string workerGroup: string tenantCode: string warningType: string @@ -119,8 +119,8 @@ export interface WorkflowInstance { } export interface EditWorkflowDefinition { - processDefinition: ProcessDefinition - processTaskRelationList: Connect[] + workflowDefinition: WorkflowDefinition + workflowTaskRelationList: Connect[] taskDefinitionList: NodeData[] } @@ -163,7 +163,7 @@ export interface IStartupParam { commandType: string commandParam: string failureStrategy: string - processInstancePriority: string + workflowInstancePriority: string workerGroup: string tenantCode: string warningType: string diff --git 
a/dolphinscheduler-ui/src/views/projects/workflow/components/dag/use-business-mapper.ts b/dolphinscheduler-ui/src/views/projects/workflow/components/dag/use-business-mapper.ts index d8cb125b27..142e8217ca 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/components/dag/use-business-mapper.ts +++ b/dolphinscheduler-ui/src/views/projects/workflow/components/dag/use-business-mapper.ts @@ -25,7 +25,7 @@ import { get } from 'lodash' */ export function useBusinessMapper() { /** - * Get connects, connects and processTaskRelationList are the same + * Get connects, connects and workflowTaskRelationList are the same * @param {Node[]} nodes * @param {Edge[]} edges * @param {TaskDefinition[]} taskDefinitions diff --git a/dolphinscheduler-ui/src/views/projects/workflow/components/dag/use-custom-cell-builder.ts b/dolphinscheduler-ui/src/views/projects/workflow/components/dag/use-custom-cell-builder.ts index d6705d20c6..c9ca1a1277 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/components/dag/use-custom-cell-builder.ts +++ b/dolphinscheduler-ui/src/views/projects/workflow/components/dag/use-custom-cell-builder.ts @@ -119,9 +119,9 @@ export function useCustomCellBuilder() { const edges: Edge.Metadata[] = [] const locations = - parseLocationStr(definition.processDefinition.locations) || [] + parseLocationStr(definition.workflowDefinition.locations) || [] const tasks = definition.taskDefinitionList - const connects = definition.processTaskRelationList + const connects = definition.workflowTaskRelationList const taskTypeMap = {} as { [key in string]: TaskType } tasks.forEach((task) => { diff --git a/dolphinscheduler-ui/src/views/projects/workflow/components/dag/use-node-status.ts b/dolphinscheduler-ui/src/views/projects/workflow/components/dag/use-node-status.ts index f5c75d73af..0b2c68a0c5 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/components/dag/use-node-status.ts +++ 
b/dolphinscheduler-ui/src/views/projects/workflow/components/dag/use-node-status.ts @@ -20,7 +20,7 @@ import { useRoute } from 'vue-router' import { useI18n } from 'vue-i18n' import { tasksState } from '@/common/common' import { NODE, NODE_STATUS_MARKUP } from './dag-config' -import { queryTaskListByProcessId } from '@/service/modules/process-instances' +import { queryTaskListByWorkflowId } from '@/service/modules/workflow-instances' import NodeStatus from '@/views/projects/workflow/components/dag/dag-node-status' import { useTaskNodeStore } from '@/store/project/task-node' import type { IWorkflowTaskInstance, ITaskState } from './types' @@ -73,7 +73,7 @@ export function useNodeStatus(options: Options) { const projectCode = Number(route.params.projectCode) const instanceId = Number(route.params.id) - queryTaskListByProcessId(instanceId, projectCode).then((res: any) => { + queryTaskListByWorkflowId(instanceId, projectCode).then((res: any) => { window.$message.success(t('project.workflow.refresh_status_succeeded')) taskList.value = res.taskList if (taskList.value) { diff --git a/dolphinscheduler-ui/src/views/projects/workflow/components/dag/use-task-edit.ts b/dolphinscheduler-ui/src/views/projects/workflow/components/dag/use-task-edit.ts index 361a1fef39..1c23885c61 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/components/dag/use-task-edit.ts +++ b/dolphinscheduler-ui/src/views/projects/workflow/components/dag/use-task-edit.ts @@ -52,10 +52,10 @@ export function useTaskEdit(options: Options) { } = useCellUpdate({ graph }) - const processDefinition = ref( + const workflowDefinition = ref( definition?.value || { - processDefinition: {}, - processTaskRelationList: [], + workflowDefinition: {}, + workflowTaskRelationList: [], taskDefinitionList: [] } ) as Ref @@ -72,7 +72,7 @@ export function useTaskEdit(options: Options) { */ function appendTask(code: number, type: TaskType, coordinate: Coordinate) { addNode(code + '', type, '', 'YES', coordinate) - 
processDefinition.value.taskDefinitionList.push({ + workflowDefinition.value.taskDefinitionList.push({ code, taskType: type, name: '' @@ -92,7 +92,7 @@ export function useTaskEdit(options: Options) { coordinate: Coordinate ) { addNode(code + '', type, name, flag, coordinate) - const definition = processDefinition.value.taskDefinitionList.find( + const definition = workflowDefinition.value.taskDefinitionList.find( (t) => t.code === targetCode ) @@ -102,7 +102,7 @@ export function useTaskEdit(options: Options) { name } as NodeData - processDefinition.value.taskDefinitionList.push(newDefinition) + workflowDefinition.value.taskDefinitionList.push(newDefinition) } /** @@ -110,15 +110,15 @@ export function useTaskEdit(options: Options) { * @param {number} codes */ function removeTasks(codes: number[], cells: any[]) { - processDefinition.value.taskDefinitionList = - processDefinition.value.taskDefinitionList.filter( + workflowDefinition.value.taskDefinitionList = + workflowDefinition.value.taskDefinitionList.filter( (task) => !codes.includes(task.code) ) codes.forEach((code: number) => { remove( - processDefinition.value.processTaskRelationList, - (process) => - process.postTaskCode === code || process.preTaskCode === code + workflowDefinition.value.workflowTaskRelationList, + (workflow) => + workflow.postTaskCode === code || workflow.preTaskCode === code ) }) cells?.forEach((cell) => { @@ -126,10 +126,10 @@ export function useTaskEdit(options: Options) { const preTaskCode = cell.getSourceCellId() const postTaskCode = cell.getTargetCellId() remove( - processDefinition.value.processTaskRelationList, - (process) => - String(process.postTaskCode) === postTaskCode && - String(process.preTaskCode) === preTaskCode + workflowDefinition.value.workflowTaskRelationList, + (workflow) => + String(workflow.postTaskCode) === postTaskCode && + String(workflow.preTaskCode) === preTaskCode ) } }) @@ -145,7 +145,7 @@ export function useTaskEdit(options: Options) { * @param {number} code */ 
function editTask(code: number) { - const definition = processDefinition.value.taskDefinitionList.find( + const definition = workflowDefinition.value.taskDefinitionList.find( (t) => t.code === code ) if (definition) { @@ -165,8 +165,8 @@ export function useTaskEdit(options: Options) { const taskDef = formatParams(data).taskDefinitionJsonObj as NodeData // override target config - processDefinition.value.taskDefinitionList = - processDefinition.value.taskDefinitionList.map((task) => { + workflowDefinition.value.taskDefinitionList = + workflowDefinition.value.taskDefinitionList.map((task) => { if (task.code === currTask.value?.code) { setNodeName(task.code + '', taskDef.name) let fillColor = '#ffffff' @@ -199,22 +199,22 @@ export function useTaskEdit(options: Options) { if (!currTask.value.name) { removeNode(String(currTask.value.code)) remove( - processDefinition.value.taskDefinitionList, + workflowDefinition.value.taskDefinitionList, (task) => task.code === currTask.value.code ) } } function updatePreTasks(preTasks: number[], code: number) { - if (processDefinition.value?.processTaskRelationList?.length) { + if (workflowDefinition.value?.workflowTaskRelationList?.length) { remove( - processDefinition.value.processTaskRelationList, - (process) => process.postTaskCode === code + workflowDefinition.value.workflowTaskRelationList, + (workflow) => workflow.postTaskCode === code ) } if (!preTasks?.length) return preTasks.forEach((task) => { - processDefinition.value?.processTaskRelationList.push({ + workflowDefinition.value?.workflowTaskRelationList.push({ postTaskCode: code, preTaskCode: task, name: '', @@ -230,12 +230,12 @@ export function useTaskEdit(options: Options) { const targets = getTargets(String(code)) targets.forEach((target: number) => { if ( - !processDefinition.value?.processTaskRelationList.find( + !workflowDefinition.value?.workflowTaskRelationList.find( (relation) => relation.postTaskCode === target && relation.preTaskCode === code ) ) { - 
processDefinition.value?.processTaskRelationList.push({ + workflowDefinition.value?.workflowTaskRelationList.push({ postTaskCode: target, preTaskCode: code, name: '', @@ -258,13 +258,13 @@ export function useTaskEdit(options: Options) { }) watch(definition, () => { - if (definition.value) processDefinition.value = definition.value + if (definition.value) workflowDefinition.value = definition.value }) return { currTask, taskModalVisible, - processDefinition, + workflowDefinition, taskConfirm, taskCancel, appendTask, diff --git a/dolphinscheduler-ui/src/views/projects/workflow/definition/components/start-modal.tsx b/dolphinscheduler-ui/src/views/projects/workflow/definition/components/start-modal.tsx index 40fd41a975..a53f660ab2 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/definition/components/start-modal.tsx +++ b/dolphinscheduler-ui/src/views/projects/workflow/definition/components/start-modal.tsx @@ -199,7 +199,7 @@ export default defineComponent({ const restructureForm = async (form: any) => { await initProjectPreferences(props.row.projectCode) if (projectPreferences.value?.taskPriority) { - form.processInstancePriority = projectPreferences.value.taskPriority + form.workflowInstancePriority = projectPreferences.value.taskPriority } if (projectPreferences.value?.warningType) { form.warningType = projectPreferences.value.warningType @@ -387,12 +387,12 @@ export default defineComponent({ )} { if (projectPreferences.value?.taskPriority) { - timingForm.processInstancePriority = + timingForm.workflowInstancePriority = projectPreferences.value.taskPriority } if (projectPreferences.value?.warningType) { @@ -297,8 +297,8 @@ export default defineComponent({ timingState.timingForm.timezoneId = props.row.timezoneId timingState.timingForm.failureStrategy = props.row.failureStrategy timingState.timingForm.warningType = props.row.warningType - timingState.timingForm.processInstancePriority = - props.row.processInstancePriority + 
timingState.timingForm.workflowInstancePriority = + props.row.workflowInstancePriority timingState.timingForm.workerGroup = props.row.workerGroup timingState.timingForm.tenantCode = props.row.tenantCode initWarningGroup() @@ -466,12 +466,12 @@ export default defineComponent({ )} { const startState = reactive({ startFormRef: ref(), startForm: { - processDefinitionCode: -1, + workflowDefinitionCode: -1, startEndTime: [new Date(year, month, day), new Date(year, month, day)], scheduleTime: '', dataDateType: 1, @@ -61,7 +61,7 @@ export const useForm = () => { taskDependType: 'TASK_POST', complementDependentMode: 'OFF_MODE', runMode: 'RUN_MODE_SERIAL', - processInstancePriority: 'MEDIUM', + workflowInstancePriority: 'MEDIUM', workerGroup: 'default', tenantCode: 'default', environmentCode: null, @@ -117,7 +117,7 @@ export const useForm = () => { timezoneId: Intl.DateTimeFormat().resolvedOptions().timeZone, failureStrategy: 'CONTINUE', warningType: 'NONE', - processInstancePriority: 'MEDIUM', + workflowInstancePriority: 'MEDIUM', warningGroupId: null as null | number, workerGroup: 'default', tenantCode: 'default', diff --git a/dolphinscheduler-ui/src/views/projects/workflow/definition/components/use-modal.ts b/dolphinscheduler-ui/src/views/projects/workflow/definition/components/use-modal.ts index 7b97a0be4d..44f25dde46 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/definition/components/use-modal.ts +++ b/dolphinscheduler-ui/src/views/projects/workflow/definition/components/use-modal.ts @@ -23,12 +23,12 @@ import type { Router } from 'vue-router' import { format } from 'date-fns' import { batchCopyByCodes, - importProcessDefinition, - queryProcessDefinitionByCode -} from '@/service/modules/process-definition' + importWorkflowDefinition, + queryWorkflowDefinitionByCode +} from '@/service/modules/workflow-definition' import { queryAllEnvironmentList } from '@/service/modules/environment' import { listAlertGroupById } from '@/service/modules/alert-group' 
-import { startProcessInstance } from '@/service/modules/executors' +import { startWorkflowInstance } from '@/service/modules/executors' import { createSchedule, updateSchedule, @@ -36,7 +36,7 @@ import { } from '@/service/modules/schedules' import { parseTime } from '@/common/common' import { EnvironmentItem } from '@/service/modules/environment/types' -import { ITimingState, ProcessInstanceReq } from './types' +import { ITimingState, WorkflowInstanceReq } from './types' import { queryTenantList } from '@/service/modules/tenants' import { queryWorkerGroupsByProjectCode } from '@/service/modules/projects-worker-group' @@ -76,7 +76,7 @@ export function useModal( const formData = new FormData() formData.append('file', state.importForm.file) const code = Number(router.currentRoute.value.params.projectCode) - await importProcessDefinition(formData, code) + await importWorkflowDefinition(formData, code) window.$message.success(t('project.workflow.success')) state.saving = false ctx.emit('updateList') @@ -93,13 +93,13 @@ export function useModal( if (state.saving) return state.saving = true try { - state.startForm.processDefinitionCode = code + state.startForm.workflowDefinitionCode = code state.startForm.version = version const params = omit(state.startForm, [ 'startEndTime', 'scheduleTime', 'dataDateType' - ]) as ProcessInstanceReq + ]) as WorkflowInstanceReq if (state.startForm.dataDateType === 1) { const start = format( new Date(state.startForm.startEndTime[0]), @@ -122,7 +122,7 @@ export function useModal( params.startParams = !_.isEmpty(variables.startParamsList) ? 
JSON.stringify(variables.startParamsList) : '' - await startProcessInstance(params, variables.projectCode) + await startWorkflowInstance(params, variables.projectCode) window.$message.success(t('project.workflow.success')) state.saving = false ctx.emit('updateList') @@ -139,7 +139,7 @@ export function useModal( state.saving = true try { const data: any = getTimingData() - data.processDefinitionCode = code + data.workflowDefinitionCode = code await createSchedule(data, variables.projectCode) window.$message.success(t('project.workflow.success')) @@ -210,7 +210,7 @@ export function useModal( }), failureStrategy: state.timingForm.failureStrategy, warningType: state.timingForm.warningType, - processInstancePriority: state.timingForm.processInstancePriority, + workflowInstancePriority: state.timingForm.workflowInstancePriority, warningGroupId: state.timingForm.warningGroupId ? state.timingForm.warningGroupId : 0, @@ -263,9 +263,9 @@ export function useModal( variables.startParamsList = cloneDeep(cachedStartParams[code]) return } - queryProcessDefinitionByCode(code, variables.projectCode).then( + queryWorkflowDefinitionByCode(code, variables.projectCode).then( (res: any) => { - variables.startParamsList = res.processDefinition.globalParamList + variables.startParamsList = res.workflowDefinition.globalParamList cachedStartParams[code] = cloneDeep(variables.startParamsList) } ) diff --git a/dolphinscheduler-ui/src/views/projects/workflow/definition/components/use-table.ts b/dolphinscheduler-ui/src/views/projects/workflow/definition/components/use-table.ts index dce5ffedff..aefaaaa590 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/definition/components/use-table.ts +++ b/dolphinscheduler-ui/src/views/projects/workflow/definition/components/use-table.ts @@ -22,7 +22,7 @@ import { deleteVersion, queryVersions, switchVersion -} from '@/service/modules/process-definition' +} from '@/service/modules/workflow-definition' import { DeleteOutlined, 
ExclamationCircleOutlined } from '@vicons/antd' import { NSpace, NTooltip, NButton, NPopconfirm, NTag } from 'naive-ui' import type { Router } from 'vue-router' diff --git a/dolphinscheduler-ui/src/views/projects/workflow/definition/create/index.tsx b/dolphinscheduler-ui/src/views/projects/workflow/definition/create/index.tsx index e18e798595..511f830c11 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/definition/create/index.tsx +++ b/dolphinscheduler-ui/src/views/projects/workflow/definition/create/index.tsx @@ -27,7 +27,7 @@ import { Connect, Location } from '../../components/dag/types' -import { createProcessDefinition } from '@/service/modules/process-definition' +import { createWorkflowDefinition } from '@/service/modules/workflow-definition' import { useI18n } from 'vue-i18n' import Styles from './index.module.scss' @@ -63,7 +63,7 @@ export default defineComponent({ } }) - createProcessDefinition( + createWorkflowDefinition( { taskDefinitionJson: JSON.stringify(taskDefinitions), taskRelationJson: JSON.stringify(connects), diff --git a/dolphinscheduler-ui/src/views/projects/workflow/definition/detail/index.tsx b/dolphinscheduler-ui/src/views/projects/workflow/definition/detail/index.tsx index 979be030ca..2831014c55 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/definition/detail/index.tsx +++ b/dolphinscheduler-ui/src/views/projects/workflow/definition/detail/index.tsx @@ -22,9 +22,9 @@ import { useMessage } from 'naive-ui' import { useI18n } from 'vue-i18n' import Dag from '../../components/dag' import { - queryProcessDefinitionByCode, - updateProcessDefinition -} from '@/service/modules/process-definition' + queryWorkflowDefinitionByCode, + updateWorkflowDefinition +} from '@/service/modules/workflow-definition' import { WorkflowDefinition, SaveForm, @@ -60,11 +60,11 @@ export default defineComponent({ const refresh = () => { isLoading.value = true - queryProcessDefinitionByCode(code, projectCode).then((res: any) => { - 
readonly.value = res.processDefinition.releaseState === 'ONLINE' + queryWorkflowDefinitionByCode(code, projectCode).then((res: any) => { + readonly.value = res.workflowDefinition.releaseState === 'ONLINE' definition.value = res isLoading.value = false - if (!res.processDefinition.locations) { + if (!res.workflowDefinition.locations) { setTimeout(() => { const graph = dagRef.value const { submit } = useGraphAutoLayout({ graph }) @@ -89,7 +89,7 @@ export default defineComponent({ } }) - updateProcessDefinition( + updateWorkflowDefinition( { taskDefinitionJson: JSON.stringify(taskDefinitions), taskRelationJson: JSON.stringify(connects), diff --git a/dolphinscheduler-ui/src/views/projects/workflow/definition/index.tsx b/dolphinscheduler-ui/src/views/projects/workflow/definition/index.tsx index 467ac85f59..75276815ac 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/definition/index.tsx +++ b/dolphinscheduler-ui/src/views/projects/workflow/definition/index.tsx @@ -161,7 +161,7 @@ export default defineComponent({ type='primary' size='small' onClick={this.createDefinition} - class='btn-create-process' + class='btn-create-workflow' > {t('project.workflow.create_workflow')} diff --git a/dolphinscheduler-ui/src/views/projects/workflow/definition/timing/index.tsx b/dolphinscheduler-ui/src/views/projects/workflow/definition/timing/index.tsx index 2e97cb3512..4a94d7dc8e 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/definition/timing/index.tsx +++ b/dolphinscheduler-ui/src/views/projects/workflow/definition/timing/index.tsx @@ -37,7 +37,7 @@ export default defineComponent({ pageNo: variables.page, searchVal: variables.searchVal, projectCode: variables.projectCode, - processDefinitionCode: variables.processDefinitionCode + workflowDefinitionCode: variables.workflowDefinitionCode }) } diff --git a/dolphinscheduler-ui/src/views/projects/workflow/definition/timing/types.ts b/dolphinscheduler-ui/src/views/projects/workflow/definition/timing/types.ts index 
8d6231658a..2c593471fb 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/definition/timing/types.ts +++ b/dolphinscheduler-ui/src/views/projects/workflow/definition/timing/types.ts @@ -19,6 +19,6 @@ export interface ISearchParam { pageSize: number pageNo: number projectCode: number - processDefinitionCode: number + workflowDefinitionCode: number searchVal: string | undefined } diff --git a/dolphinscheduler-ui/src/views/projects/workflow/definition/timing/use-table.ts b/dolphinscheduler-ui/src/views/projects/workflow/definition/timing/use-table.ts index 0b79b6fd77..4d0196df70 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/definition/timing/use-table.ts +++ b/dolphinscheduler-ui/src/views/projects/workflow/definition/timing/use-table.ts @@ -59,7 +59,7 @@ export function useTable() { totalPage: ref(1), showRef: ref(false), loadingRef: ref(false), - processDefinitionCode: router.currentRoute.value.params.definitionCode + workflowDefinitionCode: router.currentRoute.value.params.definitionCode ? 
ref(Number(router.currentRoute.value.params.definitionCode)) : ref(), dependenciesData: ref({ @@ -192,7 +192,7 @@ export function useTable() { }, { title: t('project.workflow.workflow_name'), - key: 'processDefinitionName', + key: 'workflowDefinitionName', ...COLUMN_WIDTH_CONFIG['name'] }, { @@ -401,7 +401,7 @@ export function useTable() { variables.row = row getDependentTaskLinks( variables.projectCode, - row.processDefinitionCode + row.workflowDefinitionCode ).then((res: any) => { if (res && res.length > 0) { variables.dependenciesData.showRef = true @@ -419,7 +419,7 @@ export function useTable() { pageNo: variables.page, searchVal: variables.searchVal, projectCode: variables.projectCode, - processDefinitionCode: variables.processDefinitionCode + workflowDefinitionCode: variables.workflowDefinitionCode }) }) } @@ -432,7 +432,7 @@ export function useTable() { pageNo: variables.page, searchVal: variables.searchVal, projectCode: variables.projectCode, - processDefinitionCode: variables.processDefinitionCode + workflowDefinitionCode: variables.workflowDefinitionCode }) }) } @@ -446,7 +446,7 @@ export function useTable() { pageNo: variables.page, searchVal: variables.searchVal, projectCode: variables.projectCode, - processDefinitionCode: variables.processDefinitionCode + workflowDefinitionCode: variables.workflowDefinitionCode }) }) variables.dependenciesData.showRef = false @@ -460,7 +460,7 @@ export function useTable() { pageNo: variables.page, searchVal: variables.searchVal, projectCode: variables.projectCode, - processDefinitionCode: variables.processDefinitionCode + workflowDefinitionCode: variables.workflowDefinitionCode }) }) variables.dependenciesData.showRef = false @@ -474,7 +474,7 @@ export function useTable() { variables.row = row getDependentTaskLinks( variables.projectCode, - row.processDefinitionCode + row.workflowDefinitionCode ).then((res: any) => { if (res && res.length > 0) { variables.dependenciesData.showRef = true @@ -492,7 +492,7 @@ export 
function useTable() { pageNo: variables.page, searchVal: variables.searchVal, projectCode: variables.projectCode, - processDefinitionCode: variables.processDefinitionCode + workflowDefinitionCode: variables.workflowDefinitionCode }) }) } diff --git a/dolphinscheduler-ui/src/views/projects/workflow/definition/tree/index.tsx b/dolphinscheduler-ui/src/views/projects/workflow/definition/tree/index.tsx index 4aeee0938f..26d6540eb6 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/definition/tree/index.tsx +++ b/dolphinscheduler-ui/src/views/projects/workflow/definition/tree/index.tsx @@ -43,7 +43,7 @@ import UseD3Tree from '@/views/projects/workflow/definition/tree/use-d3-tree' import Tree from '@/views/projects/workflow/definition/tree/use-d3-tree/tree' import { IChartDataItem } from '@/components/chart/modules/types' import { Router, useRouter } from 'vue-router' -import { viewTree } from '@/service/modules/process-definition' +import { viewTree } from '@/service/modules/workflow-definition' import { SelectMixedOption } from 'naive-ui/lib/select/src/interface' import { tasksState, uuid } from '@/common/common' import type { ITaskTypeNodeOption } from './types' @@ -78,9 +78,9 @@ export default defineComponent({ image: `${import.meta.env.BASE_URL}images/task-icons/shell.png` }, { - taskType: 'SUB_PROCESS', + taskType: 'SUB_WORKFLOW', color: '#4295DA', - image: `${import.meta.env.BASE_URL}images/task-icons/sub_process.png` + image: `${import.meta.env.BASE_URL}images/task-icons/sub_workflow.png` }, { taskType: 'PROCEDURE', diff --git a/dolphinscheduler-ui/src/views/projects/workflow/definition/types.ts b/dolphinscheduler-ui/src/views/projects/workflow/definition/types.ts index df76bf3d91..a474faca2b 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/definition/types.ts +++ b/dolphinscheduler-ui/src/views/projects/workflow/definition/types.ts @@ -56,9 +56,9 @@ export interface ICrontabData { endTime: string environmentCode: number failureStrategy: 
string - processDefinitionCode: number - processDefinitionName: string - processInstancePriority: string + workflowDefinitionCode: number + workflowDefinitionName: string + workflowInstancePriority: string projectName: string releaseState: 'ONLINE' | 'OFFLINE' startTime: string diff --git a/dolphinscheduler-ui/src/views/projects/workflow/definition/use-table.ts b/dolphinscheduler-ui/src/views/projects/workflow/definition/use-table.ts index 2b66cfd693..58eb87f92f 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/definition/use-table.ts +++ b/dolphinscheduler-ui/src/views/projects/workflow/definition/use-table.ts @@ -28,7 +28,7 @@ import { deleteByCode, queryListPaging, release -} from '@/service/modules/process-definition' +} from '@/service/modules/workflow-definition' import { offline, online } from '@/service/modules/schedules' import TableAction from './components/table-action' import styles from './index.module.scss' @@ -176,7 +176,7 @@ export function useTable() { onClick: () => { void router.push({ name: 'workflow-instance-list', - query: { processDefineCode: row.code } + query: { workflowDefinitionCode: row.code } }) } }, diff --git a/dolphinscheduler-ui/src/views/projects/workflow/instance/components/table-action.tsx b/dolphinscheduler-ui/src/views/projects/workflow/instance/components/table-action.tsx index 2b7da5b82f..fffcaddf0e 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/instance/components/table-action.tsx +++ b/dolphinscheduler-ui/src/views/projects/workflow/instance/components/table-action.tsx @@ -30,7 +30,7 @@ import { import { useI18n } from 'vue-i18n' import { useRouter } from 'vue-router' import type { Router } from 'vue-router' -import { IWorkflowInstance } from '@/service/modules/process-instances/types' +import { IWorkflowInstance } from '@/service/modules/workflow-instances/types' const props = { row: { @@ -57,7 +57,7 @@ export default defineComponent({ router.push({ name: 'workflow-instance-detail', params: { id: 
props.row!.id }, - query: { code: props.row!.processDefinitionCode } + query: { code: props.row!.workflowDefinitionCode } }) } @@ -65,7 +65,7 @@ export default defineComponent({ router.push({ name: 'workflow-instance-gantt', params: { id: props.row!.id }, - query: { code: props.row!.processDefinitionCode } + query: { code: props.row!.workflowDefinitionCode } }) } diff --git a/dolphinscheduler-ui/src/views/projects/workflow/instance/components/variables-view.tsx b/dolphinscheduler-ui/src/views/projects/workflow/instance/components/variables-view.tsx index 6c1e302766..dcd37ad03a 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/instance/components/variables-view.tsx +++ b/dolphinscheduler-ui/src/views/projects/workflow/instance/components/variables-view.tsx @@ -18,8 +18,8 @@ import { useRoute } from 'vue-router' import { defineComponent, onMounted, ref, computed } from 'vue' import { useI18n } from 'vue-i18n' -import { viewVariables } from '@/service/modules/process-instances' -import { viewProcessDefinitionVariables } from '@/service/modules/process-definition' +import { viewVariables } from '@/service/modules/workflow-instances' +import { viewWorkflowDefinitionVariables } from '@/service/modules/workflow-definition' import styles from './variables.module.scss' import { NButton } from 'naive-ui' @@ -34,7 +34,7 @@ export default defineComponent({ const instanceId = Number(route.params.id) - const processCode = Number(route.params.code) + const workflowCode = Number(route.params.code) const globalParams = computed(() => { return paramsRef.value && paramsRef.value.globalParams @@ -50,7 +50,7 @@ export default defineComponent({ const getViewVariables = () => { if (Number.isNaN(instanceId)) { - viewProcessDefinitionVariables(projectCode, processCode).then( + viewWorkflowDefinitionVariables(projectCode, workflowCode).then( (res: any) => { paramsRef.value = res } diff --git 
a/dolphinscheduler-ui/src/views/projects/workflow/instance/components/process-instance-condition.tsx b/dolphinscheduler-ui/src/views/projects/workflow/instance/components/workflow-instance-condition.tsx similarity index 84% rename from dolphinscheduler-ui/src/views/projects/workflow/instance/components/process-instance-condition.tsx rename to dolphinscheduler-ui/src/views/projects/workflow/instance/components/workflow-instance-condition.tsx index 8aecceb021..f0fcccb0ac 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/instance/components/process-instance-condition.tsx +++ b/dolphinscheduler-ui/src/views/projects/workflow/instance/components/workflow-instance-condition.tsx @@ -29,13 +29,13 @@ import { defineComponent, getCurrentInstance, h, ref, unref } from 'vue' import { useI18n } from 'vue-i18n' import { format } from 'date-fns' import { workflowExecutionStateType } from '@/common/common' -import { queryProcessDefinitionList } from '@/service/modules/process-definition' +import { queryWorkflowDefinitionList } from '@/service/modules/workflow-definition' import { SelectMixedOption } from 'naive-ui/lib/select/src/interface' import { Router, useRouter } from 'vue-router' import { SelectOption } from 'naive-ui/es/select/src/interface' export default defineComponent({ - name: 'ProcessInstanceCondition', + name: 'WorkflowInstanceCondition', emits: ['handleSearch'], setup(props, ctx) { const router: Router = useRouter() @@ -48,27 +48,27 @@ export default defineComponent({ const projectCode = ref( Number(router.currentRoute.value.params.projectCode) ) - const processDefineCodeRef = router.currentRoute.value.query - .processDefineCode - ? ref(Number(router.currentRoute.value.query.processDefineCode)) + const workflowDefinitionCodeRef = router.currentRoute.value.query + .workflowDefineCode + ? 
ref(Number(router.currentRoute.value.query.workflowDefineCode)) : ref() - const processDefinitionOptions = ref>([]) + const workflowDefinitionOptions = ref>([]) - const initProcessList = (code: number) => { - queryProcessDefinitionList(code).then((result: any) => { + const initWorkflowList = (code: number) => { + queryWorkflowDefinitionList(code).then((result: any) => { result.map((item: { code: number; name: string }) => { const option: SelectMixedOption = { value: item.code, label: () => h(NEllipsis, null, item.name), filterLabel: item.name } - processDefinitionOptions.value.push(option) + workflowDefinitionOptions.value.push(option) }) }) } - initProcessList(projectCode.value) + initWorkflowList(projectCode.value) const handleSearch = () => { let startDate = '' @@ -91,7 +91,7 @@ export default defineComponent({ stateType: stateTypeRef.value, startDate, endDate, - processDefineCode: processDefineCodeRef.value + workflowDefinitionCode: workflowDefinitionCodeRef.value }) } @@ -122,7 +122,7 @@ export default defineComponent({ } const updateValue = (value: number) => { - processDefineCodeRef.value = value + workflowDefinitionCodeRef.value = value } return { @@ -136,8 +136,8 @@ export default defineComponent({ onClearSearchExecutor, onClearSearchHost, trim, - processDefinitionOptions, - processDefineCodeRef, + workflowDefinitionOptions: workflowDefinitionOptions, + workflowDefineCodeRef: workflowDefinitionCodeRef, selectFilter, updateValue } @@ -146,8 +146,8 @@ export default defineComponent({ const { t } = useI18n() const options = workflowExecutionStateType(t) const { - processDefinitionOptions, - processDefineCodeRef, + workflowDefinitionOptions, + workflowDefineCodeRef, selectFilter, updateValue } = this @@ -161,8 +161,8 @@ export default defineComponent({ size: 'small', clearable: true, filterable: true, - options: unref(processDefinitionOptions), - value: processDefineCodeRef, + options: unref(workflowDefinitionOptions), + value: workflowDefineCodeRef, filter: 
selectFilter, onUpdateValue: (value: any) => { updateValue(value) diff --git a/dolphinscheduler-ui/src/views/projects/workflow/instance/detail/index.tsx b/dolphinscheduler-ui/src/views/projects/workflow/instance/detail/index.tsx index 2b87d17d33..1957a6fe91 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/instance/detail/index.tsx +++ b/dolphinscheduler-ui/src/views/projects/workflow/instance/detail/index.tsx @@ -21,9 +21,9 @@ import { useThemeStore } from '@/store/theme/theme' import { useI18n } from 'vue-i18n' import Dag from '../../components/dag' import { - queryProcessInstanceById, - updateProcessInstance -} from '@/service/modules/process-instances' + queryWorkflowInstanceById, + updateWorkflowInstance +} from '@/service/modules/workflow-instances' import { WorkflowDefinition, WorkflowInstance, @@ -56,9 +56,9 @@ export default defineComponent({ const dagInstanceRef = ref() const refresh = () => { - queryProcessInstanceById(id, projectCode).then((res: any) => { + queryWorkflowInstanceById(id, projectCode).then((res: any) => { instance.value = res - if (!res.dagData.processDefinition.locations) { + if (!res.dagData.workflowDefinition.locations) { setTimeout(() => { const graph = dagInstanceRef.value const { submit } = useGraphAutoLayout({ graph }) @@ -86,7 +86,7 @@ export default defineComponent({ } }) - updateProcessInstance( + updateWorkflowInstance( { syncDefine: saveForm.sync, globalParams: JSON.stringify(globalParams), diff --git a/dolphinscheduler-ui/src/views/projects/workflow/instance/gantt/use-gantt.ts b/dolphinscheduler-ui/src/views/projects/workflow/instance/gantt/use-gantt.ts index 195d24ce6b..ab7b28c0ca 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/instance/gantt/use-gantt.ts +++ b/dolphinscheduler-ui/src/views/projects/workflow/instance/gantt/use-gantt.ts @@ -17,7 +17,7 @@ import { reactive } from 'vue' import { useAsyncState } from '@vueuse/core' -import { viewGanttTree } from '@/service/modules/process-instances' 
+import { viewGanttTree } from '@/service/modules/workflow-instances' import { IGanttRes } from './type' export function useGantt() { diff --git a/dolphinscheduler-ui/src/views/projects/workflow/instance/index.tsx b/dolphinscheduler-ui/src/views/projects/workflow/instance/index.tsx index 5e5f88e1c8..690a5c6dd3 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/instance/index.tsx +++ b/dolphinscheduler-ui/src/views/projects/workflow/instance/index.tsx @@ -27,7 +27,7 @@ import { } from 'naive-ui' import { useTable } from './use-table' import Card from '@/components/card' -import ProcessInstanceCondition from './components/process-instance-condition' +import WorkflowInstanceCondition from './components/workflow-instance-condition' import type { IWorkflowInstanceSearch } from './types' export default defineComponent({ @@ -42,7 +42,7 @@ export default defineComponent({ } const handleSearch = (params: IWorkflowInstanceSearch) => { - variables.processDefineCode = params.processDefineCode + variables.workflowDefinitionCode = params.workflowDefinitionCode variables.searchVal = params.searchVal variables.executorName = params.executorName variables.host = params.host @@ -96,7 +96,7 @@ export default defineComponent({ return ( - + diff --git a/dolphinscheduler-ui/src/views/projects/workflow/instance/types.ts b/dolphinscheduler-ui/src/views/projects/workflow/instance/types.ts index 57b9d0b0ec..1b8f0f6e39 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/instance/types.ts +++ b/dolphinscheduler-ui/src/views/projects/workflow/instance/types.ts @@ -29,7 +29,7 @@ interface IWorkflowInstanceSearch { stateType: string startDate: string endDate: string - processDefineCode: number + workflowDefinitionCode: number } export { ICountDownParam, IWorkflowInstanceSearch } diff --git a/dolphinscheduler-ui/src/views/projects/workflow/instance/use-table.ts b/dolphinscheduler-ui/src/views/projects/workflow/instance/use-table.ts index fdf481b0b0..8f65e4caec 100644 --- 
a/dolphinscheduler-ui/src/views/projects/workflow/instance/use-table.ts +++ b/dolphinscheduler-ui/src/views/projects/workflow/instance/use-table.ts @@ -23,10 +23,10 @@ import ButtonLink from '@/components/button-link' import { RowKey } from 'naive-ui/lib/data-table/src/interface' import { NEllipsis, NIcon, NSpin, NTooltip } from 'naive-ui' import { - queryProcessInstanceListPaging, - deleteProcessInstanceById, - batchDeleteProcessInstanceByIds -} from '@/service/modules/process-instances' + queryWorkflowInstanceListPaging, + deleteWorkflowInstanceById, + batchDeleteWorkflowInstanceByIds +} from '@/service/modules/workflow-instances' import { execute } from '@/service/modules/executors' import TableAction from './components/table-action' import { @@ -40,7 +40,7 @@ import { DefaultTableWidth } from '@/common/column-width-config' import type { Router } from 'vue-router' -import type { IWorkflowInstance } from '@/service/modules/process-instances/types' +import type { IWorkflowInstance } from '@/service/modules/workflow-instances/types' import type { ICountDownParam } from './types' import type { ExecuteReq } from '@/service/modules/executors/types' import { IWorkflowExecutionState } from '@/common/types' @@ -64,8 +64,9 @@ export function useTable() { startDate: ref(), endDate: ref(), projectCode: ref(Number(router.currentRoute.value.params.projectCode)), - processDefineCode: router.currentRoute.value.query.processDefineCode - ? ref(Number(router.currentRoute.value.query.processDefineCode)) + workflowDefinitionCode: router.currentRoute.value.query + .workflowDefinitionCode + ? 
ref(Number(router.currentRoute.value.query.workflowDefinitionCode)) : ref(), loadingRef: ref(false) }) @@ -100,7 +101,7 @@ export function useTable() { const routeUrl = router.resolve({ name: 'workflow-instance-detail', params: { id: row.id }, - query: { code: row.processDefinitionCode } + query: { code: row.workflowDefinitionCode } }) window.open(routeUrl.href, '_blank') } @@ -197,14 +198,14 @@ export function useTable() { onReRun: () => _countDownFn({ index, - processInstanceId: _row.id, + workflowInstanceId: _row.id, executeType: 'REPEAT_RUNNING', buttonType: 'run' }), onReStore: () => _countDownFn({ index, - processInstanceId: _row.id, + workflowInstanceId: _row.id, executeType: 'START_FAILURE_TASK_PROCESS', buttonType: 'store' }), @@ -212,13 +213,13 @@ export function useTable() { if (_row.state === 'STOP') { _countDownFn({ index, - processInstanceId: _row.id, + workflowInstanceId: _row.id, executeType: 'RECOVER_SUSPENDED_PROCESS', buttonType: 'suspend' }) } else { _upExecutorsState({ - processInstanceId: _row.id, + workflowInstanceId: _row.id, executeType: 'STOP' }) } @@ -227,13 +228,13 @@ export function useTable() { if (_row.state === 'PAUSE') { _countDownFn({ index, - processInstanceId: _row.id, + workflowInstanceId: _row.id, executeType: 'RECOVER_SUSPENDED_PROCESS', buttonType: 'suspend' }) } else { _upExecutorsState({ - processInstanceId: _row.id, + workflowInstanceId: _row.id, executeType: 'PAUSE' }) } @@ -259,9 +260,9 @@ export function useTable() { stateType: variables.stateType, startDate: variables.startDate, endDate: variables.endDate, - processDefineCode: variables.processDefineCode + workflowDefinitionCode: variables.workflowDefinitionCode } - queryProcessInstanceListPaging({ ...params }, variables.projectCode).then( + queryWorkflowInstanceListPaging({ ...params }, variables.projectCode).then( (res: any) => { variables.totalPage = res.totalPage variables.tableData = res.totalList.map((item: any) => { @@ -273,7 +274,7 @@ export function useTable() 
{ } const deleteInstance = (id: number) => { - deleteProcessInstanceById(id, variables.projectCode).then(() => { + deleteWorkflowInstanceById(id, variables.projectCode).then(() => { window.$message.success(t('project.workflow.success')) if (variables.tableData.length === 1 && variables.page > 1) { variables.page -= 1 @@ -285,10 +286,10 @@ export function useTable() { const batchDeleteInstance = () => { const data = { - processInstanceIds: _.join(variables.checkedRowKeys, ',') + workflowInstanceIds: _.join(variables.checkedRowKeys, ',') } - batchDeleteProcessInstanceByIds(data, variables.projectCode).then(() => { + batchDeleteWorkflowInstanceByIds(data, variables.projectCode).then(() => { window.$message.success(t('project.workflow.success')) if ( diff --git a/dolphinscheduler-ui/src/views/projects/workflow/timing/components/timing-condition.tsx b/dolphinscheduler-ui/src/views/projects/workflow/timing/components/timing-condition.tsx index 7dbe32938e..dfa42c3eec 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/timing/components/timing-condition.tsx +++ b/dolphinscheduler-ui/src/views/projects/workflow/timing/components/timing-condition.tsx @@ -18,7 +18,7 @@ import { SearchOutlined } from '@vicons/antd' import { NButton, NSelect, NIcon, NSpace, NEllipsis } from 'naive-ui' import { defineComponent, h, ref, unref } from 'vue' -import { queryProcessDefinitionList } from '@/service/modules/process-definition' +import { queryWorkflowDefinitionList } from '@/service/modules/workflow-definition' import { SelectMixedOption } from 'naive-ui/lib/select/src/interface' import { Router, useRouter } from 'vue-router' import { SelectOption } from 'naive-ui/es/select/src/interface' @@ -32,31 +32,31 @@ export default defineComponent({ const projectCode = ref( Number(router.currentRoute.value.params.projectCode) ) - const processDefineCodeRef = router.currentRoute.value.query - .processDefineCode - ? 
ref(Number(router.currentRoute.value.query.processDefineCode)) + const workflowDefinitionCodeRef = router.currentRoute.value.query + .workflowDefinitionCode + ? ref(Number(router.currentRoute.value.query.workflowDefinitionCode)) : ref() - const processDefinitionOptions = ref>([]) + const workflowDefinitionOptions = ref>([]) - const initProcessList = (code: number) => { - queryProcessDefinitionList(code).then((result: any) => { + const initWorkflowList = (code: number) => { + queryWorkflowDefinitionList(code).then((result: any) => { result.map((item: { code: number; name: string }) => { const option: SelectMixedOption = { value: item.code, label: () => h(NEllipsis, null, item.name), filterLabel: item.name } - processDefinitionOptions.value.push(option) + workflowDefinitionOptions.value.push(option) }) }) } - initProcessList(projectCode.value) + initWorkflowList(projectCode.value) const handleSearch = () => { ctx.emit('handleSearch', { - processDefinitionCode: processDefineCodeRef.value + workflowDefinitionCode: workflowDefinitionCodeRef.value }) } @@ -70,21 +70,21 @@ export default defineComponent({ } const updateValue = (value: number) => { - processDefineCodeRef.value = value + workflowDefinitionCodeRef.value = value } return { handleSearch, - processDefinitionOptions, - processDefineCodeRef, + workflowDefinitionOptions: workflowDefinitionOptions, + workflowDefinitionCodeRef: workflowDefinitionCodeRef, selectFilter, updateValue } }, render() { const { - processDefineCodeRef, - processDefinitionOptions, + workflowDefinitionCodeRef, + workflowDefinitionOptions, selectFilter, updateValue } = this @@ -97,8 +97,8 @@ export default defineComponent({ size: 'small', clearable: true, filterable: true, - value: processDefineCodeRef, - options: unref(processDefinitionOptions), + value: workflowDefinitionCodeRef, + options: unref(workflowDefinitionOptions), filter: selectFilter, onUpdateValue: (value: any) => { updateValue(value) diff --git 
a/dolphinscheduler-ui/src/views/projects/workflow/timing/index.tsx b/dolphinscheduler-ui/src/views/projects/workflow/timing/index.tsx index 3e6e9d5423..a010ee21f1 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/timing/index.tsx +++ b/dolphinscheduler-ui/src/views/projects/workflow/timing/index.tsx @@ -36,7 +36,7 @@ export default defineComponent({ pageNo: variables.page, searchVal: variables.searchVal, projectCode: variables.projectCode, - processDefinitionCode: variables.processDefinitionCode + workflowDefinitionCode: variables.workflowDefinitionCode }) } @@ -45,7 +45,7 @@ export default defineComponent({ } const handleSearch = (params: ITimingSearch) => { - variables.processDefinitionCode = params.processDefinitionCode + variables.workflowDefinitionCode = params.workflowDefinitionCode variables.page = 1 requestData() } diff --git a/dolphinscheduler-ui/src/views/projects/workflow/timing/types.ts b/dolphinscheduler-ui/src/views/projects/workflow/timing/types.ts index c6f8719ad9..9b187357c1 100644 --- a/dolphinscheduler-ui/src/views/projects/workflow/timing/types.ts +++ b/dolphinscheduler-ui/src/views/projects/workflow/timing/types.ts @@ -16,7 +16,7 @@ */ interface ITimingSearch { - processDefinitionCode: number + workflowDefinitionCode: number } export { ITimingSearch } diff --git a/dolphinscheduler-ui/src/views/resource/task-group/queue/index.tsx b/dolphinscheduler-ui/src/views/resource/task-group/queue/index.tsx index 33ac14e4b9..55babf2b49 100644 --- a/dolphinscheduler-ui/src/views/resource/task-group/queue/index.tsx +++ b/dolphinscheduler-ui/src/views/resource/task-group/queue/index.tsx @@ -57,7 +57,7 @@ const taskGroupQueue = defineComponent({ const searchParamRef = reactive({ groupId: ref(), - processName: '', + workflowInstanceName: '', instanceName: '', pageSize: 10, pageNo: 1 @@ -74,7 +74,7 @@ const taskGroupQueue = defineComponent({ pageNo: variables.page, groupId: searchParamRef.groupId, taskInstanceName: searchParamRef.instanceName, - 
processInstanceName: searchParamRef.processName + workflowInstanceName: searchParamRef.workflowInstanceName }) } @@ -177,7 +177,7 @@ const taskGroupQueue = defineComponent({ placeholder={t('resource.task_group_queue.task_group_name')} />