[DSIP-68] Unify the table structure and table fields in backend and frontend ui sections (#16544)

This commit is contained in:
xiangzihao 2024-09-04 21:22:34 +08:00 committed by GitHub
parent 0f4bce1883
commit fee4955724
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
431 changed files with 3934 additions and 5244 deletions

View File

@ -89,26 +89,35 @@ jobs:
strategy:
matrix:
case:
- name: Tenant
- name: TenantAPITest
class: org.apache.dolphinscheduler.api.test.cases.TenantAPITest
- name: WorkerGroup
- name: WorkerGroupAPITest
class: org.apache.dolphinscheduler.api.test.cases.WorkerGroupAPITest
- name: Project
- name: ProjectAPITest
class: org.apache.dolphinscheduler.api.test.cases.ProjectAPITest
- name: Workflow
class: org.apache.dolphinscheduler.api.test.cases.ProcessDefinitionAPITest
- name: Scheduler
- name: WorkflowDefinitionAPITest
class: org.apache.dolphinscheduler.api.test.cases.WorkflowDefinitionAPITest
- name: SchedulerAPITest
class: org.apache.dolphinscheduler.api.test.cases.SchedulerAPITest
- name: Executor
- name: ExecutorAPITest
class: org.apache.dolphinscheduler.api.test.cases.ExecutorAPITest
- name: ProcessInstance
class: org.apache.dolphinscheduler.api.test.cases.ProcessInstanceAPITest
- name: WorkflowInstanceAPITest
class: org.apache.dolphinscheduler.api.test.cases.WorkflowInstanceAPITest
env:
RECORDING_PATH: /tmp/recording-${{ matrix.case.name }}
steps:
- uses: actions/checkout@v4
with:
submodules: true
- name: Set up JDK 11
uses: actions/setup-java@v4
with:
java-version: 11
distribution: 'adopt'
- name: Collect Workflow Telemetry
uses: ./.github/actions/workflow-telemetry-action
with:
comment_on_pr: false
- name: Cache local Maven repository
uses: actions/cache@v4
with:

View File

@ -132,136 +132,35 @@ jobs:
run: |
/bin/bash ${{ matrix.case.script }}
schema-check:
runs-on: ubuntu-latest
if: ${{ (needs.paths-filter.outputs.db-schema == 'true') || (github.event_name == 'push') }}
timeout-minutes: 20
name: ${{ matrix.case.name }}-${{ matrix.version }}
needs: build
services:
mysql:
image: mysql:5.7
env:
MYSQL_ROOT_PASSWORD: mysql
MYSQL_DATABASE: dolphinscheduler_dev
ports:
- 3306:3306
options: --name=mysql --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=3
postgres:
image: postgres:15
env:
POSTGRES_PASSWORD: postgres
POSTGRES_DB: dolphinscheduler_dev
ports:
- 5432:5432
options: --name=postgres --health-cmd=pg_isready --health-interval=10s --health-timeout=5s --health-retries=5
runs-on: ubuntu-latest
timeout-minutes: 20
strategy:
fail-fast: false
matrix:
db: ["mysql", "postgresql"]
version: ["3.1.9", "3.2.0"]
case:
- name: schema-check-with-mysql
script: .github/workflows/schema-check/mysql/start-job.sh
- name: schema-check-with-postgresql
script: .github/workflows/schema-check/postgresql/start-job.sh
steps:
- name: Set up JDK 8
uses: actions/setup-java@v4
- uses: actions/checkout@v4
with:
java-version: 8
distribution: 'adopt'
- name: Install Atlas and Create Dir
run: |
mkdir -p dolphinscheduler/dev dolphinscheduler/${{ matrix.version }}
curl -sSf https://atlasgo.sh | sh
- name: Download Tarball
submodules: true
- name: Collect Workflow Telemetry
uses: ./.github/actions/workflow-telemetry-action
with:
comment_on_pr: false
- name: Download Binary Package
uses: actions/download-artifact@v4
with:
name: binary-package-8
path: dolphinscheduler/dev
- name: Set Env
path: ds_schema_check_test/dev
- name: Running Schema Check
run: |
VERSION=${{ matrix.version }}
echo "DATABASE_VERSION=${VERSION//\./}" >> $GITHUB_ENV
- name: Prepare
run: |
wget https://archive.apache.org/dist/dolphinscheduler/${{ matrix.version }}/apache-dolphinscheduler-${{ matrix.version }}-bin.tar.gz -P dolphinscheduler/${{ matrix.version }}
tar -xzf dolphinscheduler/${{ matrix.version }}/apache-dolphinscheduler-${{ matrix.version }}-bin.tar.gz -C dolphinscheduler/${{ matrix.version }} --strip-components 1
tar -xzf dolphinscheduler/dev/apache-dolphinscheduler-*-bin.tar.gz -C dolphinscheduler/dev --strip-components 1
if [[ ${{ matrix.db }} == "mysql" ]]; then
MYSQL_JDBC_URL="https://repo.maven.apache.org/maven2/mysql/mysql-connector-java/8.0.16/mysql-connector-java-8.0.16.jar"
MYSQL_JDBC_JAR="mysql-connector-java-8.0.16.jar"
wget ${MYSQL_JDBC_URL} -O /tmp/${MYSQL_JDBC_JAR}
for base_dir in dolphinscheduler/dev dolphinscheduler/${{ matrix.version }}; do
if [[ $base_dir == *"dolphinscheduler/2"* ]]; then
cp /tmp/${MYSQL_JDBC_JAR} ${base_dir}/lib
else
for d in alert-server api-server master-server worker-server tools; do
cp /tmp/${MYSQL_JDBC_JAR} ${base_dir}/${d}/libs
done
fi
done
docker exec -i mysql mysql -uroot -pmysql -e "create database dolphinscheduler_${{ env.DATABASE_VERSION }}";
else
docker exec -i postgres psql -U postgres -c "create database dolphinscheduler_${{ env.DATABASE_VERSION }};"
fi
- name: Check
run: |
if [[ $DATABASE_VERSION -lt 300 ]]; then
chmod +x dolphinscheduler/dev/tools/bin/upgrade-schema.sh dolphinscheduler/${{ matrix.version }}/script/create-dolphinscheduler.sh
else
chmod +x dolphinscheduler/dev/tools/bin/upgrade-schema.sh dolphinscheduler/${{ matrix.version }}/tools/bin/upgrade-schema.sh
fi
if [[ ${{ matrix.db }} == "mysql" ]]; then
export DATABASE="mysql"
export SPRING_DATASOURCE_DRIVER_CLASS_NAME="com.mysql.cj.jdbc.Driver"
export SPRING_DATASOURCE_URL="jdbc:mysql://127.0.0.1:3306/dolphinscheduler_dev?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true&useSSL=false"
export SPRING_DATASOURCE_USERNAME="root"
export SPRING_DATASOURCE_PASSWORD="mysql"
bash dolphinscheduler/dev/tools/bin/upgrade-schema.sh
export SPRING_DATASOURCE_URL="jdbc:mysql://127.0.0.1:3306/dolphinscheduler_${{ env.DATABASE_VERSION }}?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true&useSSL=false"
if [[ $DATABASE_VERSION -lt 300 ]]; then
bash dolphinscheduler/${{ matrix.version }}/script/create-dolphinscheduler.sh
else
bash dolphinscheduler/${{ matrix.version }}/tools/bin/upgrade-schema.sh
fi
bash dolphinscheduler/dev/tools/bin/upgrade-schema.sh
atlas_result=$(atlas schema diff \
--from "mysql://root:mysql@127.0.0.1:3306/dolphinscheduler_${{ env.DATABASE_VERSION }}" \
--to "mysql://root:mysql@127.0.0.1:3306/dolphinscheduler_dev")
if [[ ${atlas_result} != *"Schemas are synced"* ]]; then
echo "================================================================================================"
echo " !!!!! For Contributors !!!!!"
echo "================================================================================================"
echo "Database schema not sync, please add below change in the latest version of dolphinscheduler-dao/src/main/resources/sql/upgrade directory"
echo "${atlas_result}"
exit 1
fi
else
export DATABASE="postgresql"
export SPRING_DATASOURCE_DRIVER_CLASS_NAME="org.postgresql.Driver"
export SPRING_DATASOURCE_URL="jdbc:postgresql://127.0.0.1:5432/dolphinscheduler_dev"
export SPRING_DATASOURCE_USERNAME="postgres"
export SPRING_DATASOURCE_PASSWORD="postgres"
bash dolphinscheduler/dev/tools/bin/upgrade-schema.sh
export SPRING_DATASOURCE_URL="jdbc:postgresql://127.0.0.1:5432/dolphinscheduler_${{ env.DATABASE_VERSION }}"
if [[ $DATABASE_VERSION -lt 300 ]]; then
bash dolphinscheduler/${{ matrix.version }}/script/create-dolphinscheduler.sh
else
bash dolphinscheduler/${{ matrix.version }}/tools/bin/upgrade-schema.sh
fi
bash dolphinscheduler/dev/tools/bin/upgrade-schema.sh
atlas_result=$(atlas schema diff \
--from "postgres://postgres:postgres@127.0.0.1:5432/dolphinscheduler_${{ env.DATABASE_VERSION }}?search_path=public&sslmode=disable" \
--to "postgres://postgres:postgres@127.0.0.1:5432/dolphinscheduler_dev?search_path=public&sslmode=disable")
if [[ ${atlas_result} != *"Schemas are synced"* ]]; then
echo "================================================================================================"
echo " !!!!! For Contributors !!!!!"
echo "================================================================================================"
echo "Database schema not sync, please add below change in the latest version in dolphinscheduler-dao/src/main/resources/sql/upgrade directory"
echo "${atlas_result}"
exit 1
fi
fi
/bin/bash ${{ matrix.case.script }} ${{ matrix.version }}
result:
name: Build
runs-on: ubuntu-latest
@ -275,7 +174,7 @@ jobs:
echo "Skip Build!"
exit 0
fi
if [[ ${{ needs.build.result }} != 'success' || ${{ needs.cluster-test.result }} != 'success' ]]; then
if [[ ${{ needs.build.result }} != 'success' || ${{ needs.cluster-test.result }} != 'success' || ${{ needs.schema-check.result }} != 'success' ]]; then
echo "Build Failed!"
exit -1
fi

View File

@ -92,45 +92,45 @@ jobs:
strategy:
matrix:
case:
- name: Tenant
- name: TenantE2ETest
class: org.apache.dolphinscheduler.e2e.cases.TenantE2ETest
- name: User
- name: UserE2ETest
class: org.apache.dolphinscheduler.e2e.cases.UserE2ETest
- name: WorkerGroup
- name: WorkerGroupE2ETest
class: org.apache.dolphinscheduler.e2e.cases.WorkerGroupE2ETest
- name: Project
- name: ProjectE2ETest
class: org.apache.dolphinscheduler.e2e.cases.ProjectE2ETest
- name: Queue
- name: QueueE2ETest
class: org.apache.dolphinscheduler.e2e.cases.QueueE2ETest
- name: Environment
- name: EnvironmentE2ETest
class: org.apache.dolphinscheduler.e2e.cases.EnvironmentE2ETest
- name: Cluster
- name: ClusterE2ETest
class: org.apache.dolphinscheduler.e2e.cases.ClusterE2ETest
- name: Token
- name: TokenE2ETest
class: org.apache.dolphinscheduler.e2e.cases.TokenE2ETest
- name: Workflow
- name: WorkflowE2ETest
class: org.apache.dolphinscheduler.e2e.cases.WorkflowE2ETest
- name: WorkflowHttp
- name: WorkflowHttpTaskE2ETest
class: org.apache.dolphinscheduler.e2e.cases.WorkflowHttpTaskE2ETest
- name: WorkflowJava
- name: WorkflowJavaTaskE2ETest
class: org.apache.dolphinscheduler.e2e.cases.WorkflowJavaTaskE2ETest
# - name: WorkflowForSwitch
# class: org.apache.dolphinscheduler.e2e.cases.WorkflowSwitchE2ETest
- name: FileManage
- name: FileManageE2ETest
class: org.apache.dolphinscheduler.e2e.cases.FileManageE2ETest
- name: MysqlDataSource
- name: MysqlDataSourceE2ETest
class: org.apache.dolphinscheduler.e2e.cases.MysqlDataSourceE2ETest
- name: ClickhouseDataSource
- name: ClickhouseDataSourceE2ETest
class: org.apache.dolphinscheduler.e2e.cases.ClickhouseDataSourceE2ETest
- name: PostgresDataSource
- name: PostgresDataSourceE2ETest
class: org.apache.dolphinscheduler.e2e.cases.PostgresDataSourceE2ETest
- name: ShellTaskE2ETest
class: org.apache.dolphinscheduler.e2e.cases.tasks.ShellTaskE2ETest
- name: PythonTaskE2ETest
class: org.apache.dolphinscheduler.e2e.cases.tasks.PythonTaskE2ETest
- name: SqlServerDataSource
- name: SqlServerDataSourceE2ETest
class: org.apache.dolphinscheduler.e2e.cases.SqlServerDataSourceE2ETest
- name: HiveDataSource
- name: HiveDataSourceE2ETest
class: org.apache.dolphinscheduler.e2e.cases.HiveDataSourceE2ETest
env:
RECORDING_PATH: /tmp/recording-${{ matrix.case.name }}

View File

@ -0,0 +1,35 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Standalone MySQL service for the schema-check CI job: the dev schema and the
# released-version schema are both installed here and then diffed with Atlas.
version: "3"
services:
  mysql:
    # Fixed container name so the job can run `docker exec -i mysql ...`
    container_name: mysql
    image: mysql:8.0.33
    # NOTE(review): presumably forces the legacy auth plugin so the older
    # mysql-connector-java 8.0.16 driver used by the job can connect — confirm.
    command: --default-authentication-plugin=mysql_native_password
    restart: always
    environment:
      MYSQL_ROOT_PASSWORD: mysql
      # Database that receives the dev (current branch) schema
      MYSQL_DATABASE: dolphinscheduler_dev
    ports:
      - "3306:3306"
    healthcheck:
      # $$ escapes the dollar sign so the variable expands inside the container
      test: mysqladmin ping -h 127.0.0.1 -u root --password=$$MYSQL_ROOT_PASSWORD
      interval: 5s
      timeout: 60s
      retries: 120

View File

@ -0,0 +1,54 @@
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# MySQL schema-check test: installs the dev schema and the released schema
# into separate databases, upgrades the released one to dev with the dev
# tooling, then uses `atlas schema diff` to verify both end up identical.
#
# Args:
#   $1  DS_VERSION       - released DolphinScheduler version (e.g. 3.2.0)
#   $2  DATABASE_VERSION - version with dots stripped (e.g. 320), used as the
#                          target database-name suffix
#
# Exits non-zero (and prints the diff) when the schemas diverge.
set -euox pipefail

DS_VERSION=$1
DATABASE_VERSION=$2

# Install dev schema into dolphinscheduler_dev (upgrade-schema.sh reads the
# connection settings from these SPRING_DATASOURCE_* environment variables)
export DATABASE="mysql"
export SPRING_DATASOURCE_DRIVER_CLASS_NAME="com.mysql.cj.jdbc.Driver"
export SPRING_DATASOURCE_URL="jdbc:mysql://127.0.0.1:3306/dolphinscheduler_dev?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true&useSSL=false"
export SPRING_DATASOURCE_USERNAME="root"
export SPRING_DATASOURCE_PASSWORD="mysql"
bash ds_schema_check_test/dev/tools/bin/upgrade-schema.sh

# Install the target version schema and upgrade it: point the same env vars at
# the versioned database, install with the released tool, then run the dev
# upgrade tool over it so it should match the freshly-installed dev schema
export SPRING_DATASOURCE_URL="jdbc:mysql://127.0.0.1:3306/dolphinscheduler_${DATABASE_VERSION}?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true&useSSL=false"
bash ds_schema_check_test/${DS_VERSION}/tools/bin/upgrade-schema.sh
bash ds_schema_check_test/dev/tools/bin/upgrade-schema.sh

# Compare the schema. set +x stops xtrace so the credential-bearing atlas URLs
# are not echoed into the CI log.
set +x
atlas_result=$(atlas schema diff \
  --from "mysql://root:mysql@127.0.0.1:3306/dolphinscheduler_${DATABASE_VERSION}" \
  --to "mysql://root:mysql@127.0.0.1:3306/dolphinscheduler_dev")
if [[ ${atlas_result} != *"Schemas are synced"* ]]; then
  echo "================================================================================================"
  echo "                             !!!!! For Contributors !!!!!"
  echo "================================================================================================"
  echo "Database schema not sync, please add below change in the latest version of dolphinscheduler-dao/src/main/resources/sql/upgrade directory"
  echo "${atlas_result}"
  exit 1
else
  echo "================================================================================================"
  echo "                             !!!!! For Contributors !!!!!"
  echo "================================================================================================"
  echo "Database schema sync successfully"
  exit 0
fi

View File

@ -0,0 +1,57 @@
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Entry point for the MySQL schema-check CI job: prepares tooling and
# binaries, starts MySQL via docker compose, creates the target database,
# runs running-test.sh, and tears the environment down.
#
# Args:
#   $1  DS_VERSION - released DolphinScheduler version to compare against dev
#
# Precondition: the dev binary tarball has already been downloaded into
# ds_schema_check_test/dev by the calling workflow.
set -euox pipefail

DS_VERSION=$1
DATABASE_VERSION=${DS_VERSION//\./}

# Install the Atlas schema-diff tool and create working directories
mkdir -p ds_schema_check_test/dev ds_schema_check_test/${DS_VERSION}
curl -sSf https://atlasgo.sh | sh

# Fetch the released binary and unpack both it and the dev package
wget https://archive.apache.org/dist/dolphinscheduler/${DS_VERSION}/apache-dolphinscheduler-${DS_VERSION}-bin.tar.gz -P ds_schema_check_test/${DS_VERSION}
tar -xzf ds_schema_check_test/${DS_VERSION}/apache-dolphinscheduler-${DS_VERSION}-bin.tar.gz -C ds_schema_check_test/${DS_VERSION} --strip-components 1
tar -xzf ds_schema_check_test/dev/apache-dolphinscheduler-*-bin.tar.gz -C ds_schema_check_test/dev --strip-components 1

# Versions before 3.0.0 ship create-dolphinscheduler.sh instead of upgrade-schema.sh
if [[ $DATABASE_VERSION -lt 300 ]]; then
  chmod +x ds_schema_check_test/dev/tools/bin/upgrade-schema.sh ds_schema_check_test/${DS_VERSION}/script/create-dolphinscheduler.sh
else
  chmod +x ds_schema_check_test/dev/tools/bin/upgrade-schema.sh ds_schema_check_test/${DS_VERSION}/tools/bin/upgrade-schema.sh
fi

# The MySQL JDBC driver is not bundled with the release; drop it into the lib
# directory of every server module so the upgrade tools can connect
MYSQL_JDBC_URL="https://repo.maven.apache.org/maven2/mysql/mysql-connector-java/8.0.16/mysql-connector-java-8.0.16.jar"
MYSQL_JDBC_JAR="mysql-connector-java-8.0.16.jar"
wget ${MYSQL_JDBC_URL} -O ds_schema_check_test/${MYSQL_JDBC_JAR}
for base_dir in ds_schema_check_test/dev ds_schema_check_test/${DS_VERSION}; do
  # BUGFIX: this 2.x single-lib-dir guard previously matched the pre-rename
  # path "dolphinscheduler/2", which can never occur now that the working
  # directory is ds_schema_check_test — match the current layout instead.
  if [[ $base_dir == *"ds_schema_check_test/2"* ]]; then
    cp ds_schema_check_test/${MYSQL_JDBC_JAR} ${base_dir}/lib
  else
    for d in alert-server api-server master-server worker-server tools; do
      cp ds_schema_check_test/${MYSQL_JDBC_JAR} ${base_dir}/${d}/libs
    done
  fi
done

# Start MySQL and create the database that receives the released schema
docker compose -f .github/workflows/schema-check/mysql/docker-compose-base.yaml up -d --wait
docker exec -i mysql mysql -uroot -pmysql -e "create database dolphinscheduler_${DATABASE_VERSION}";

# Running schema check tests
/bin/bash .github/workflows/schema-check/mysql/running-test.sh ${DS_VERSION} ${DATABASE_VERSION}

# Cleanup
docker compose -f .github/workflows/schema-check/mysql/docker-compose-base.yaml down -v --remove-orphans

View File

@ -0,0 +1,34 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Standalone PostgreSQL service for the schema-check CI job: the dev schema
# and the released-version schema are both installed here and diffed with Atlas.
version: "3"
services:
  postgres:
    # Fixed container name so the job can run `docker exec -i postgres ...`
    container_name: postgres
    image: postgres:14.1
    restart: always
    environment:
      POSTGRES_PASSWORD: postgres
      # Database that receives the dev (current branch) schema
      POSTGRES_DB: dolphinscheduler_dev
    ports:
      - "5432:5432"
    healthcheck:
      test: [ "CMD-SHELL", "pg_isready -U postgres" ]
      interval: 5s
      timeout: 60s
      retries: 120

View File

@ -0,0 +1,54 @@
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# PostgreSQL schema-check test: installs the dev schema and the released
# schema into separate databases, upgrades the released one to dev with the
# dev tooling, then uses `atlas schema diff` to verify both end up identical.
#
# Args:
#   $1  DS_VERSION       - released DolphinScheduler version (e.g. 3.2.0)
#   $2  DATABASE_VERSION - version with dots stripped (e.g. 320), used as the
#                          target database-name suffix
#
# Exits non-zero (and prints the diff) when the schemas diverge.
set -euox pipefail

DS_VERSION=$1
DATABASE_VERSION=$2

# Install dev schema into dolphinscheduler_dev (upgrade-schema.sh reads the
# connection settings from these SPRING_DATASOURCE_* environment variables)
export DATABASE="postgresql"
export SPRING_DATASOURCE_DRIVER_CLASS_NAME="org.postgresql.Driver"
export SPRING_DATASOURCE_USERNAME="postgres"
export SPRING_DATASOURCE_PASSWORD="postgres"
export SPRING_DATASOURCE_URL="jdbc:postgresql://127.0.0.1:5432/dolphinscheduler_dev"
bash ds_schema_check_test/dev/tools/bin/upgrade-schema.sh

# Install the target version schema and upgrade it: point the same env vars at
# the versioned database, install with the released tool, then run the dev
# upgrade tool over it so it should match the freshly-installed dev schema
export SPRING_DATASOURCE_URL="jdbc:postgresql://127.0.0.1:5432/dolphinscheduler_${DATABASE_VERSION}"
bash ds_schema_check_test/${DS_VERSION}/tools/bin/upgrade-schema.sh
bash ds_schema_check_test/dev/tools/bin/upgrade-schema.sh

# Compare the schema. set +x stops xtrace so the credential-bearing atlas URLs
# are not echoed into the CI log.
set +x
atlas_result=$(atlas schema diff \
  --from "postgres://postgres:postgres@127.0.0.1:5432/dolphinscheduler_${DATABASE_VERSION}?search_path=public&sslmode=disable" \
  --to "postgres://postgres:postgres@127.0.0.1:5432/dolphinscheduler_dev?search_path=public&sslmode=disable")
if [[ ${atlas_result} != *"Schemas are synced"* ]]; then
  echo "================================================================================================"
  echo "                             !!!!! For Contributors !!!!!"
  echo "================================================================================================"
  echo "Database schema not sync, please add below change in the latest version of dolphinscheduler-dao/src/main/resources/sql/upgrade directory"
  echo "${atlas_result}"
  exit 1
else
  echo "================================================================================================"
  echo "                             !!!!! For Contributors !!!!!"
  echo "================================================================================================"
  echo "Database schema sync successfully"
  exit 0
fi

View File

@ -0,0 +1,45 @@
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Entry point for the PostgreSQL schema-check CI job: prepares tooling and
# binaries, starts PostgreSQL via docker compose, creates the target database,
# runs running-test.sh, and tears the environment down.
#
# Args:
#   $1  DS_VERSION - released DolphinScheduler version to compare against dev
#
# Precondition: the dev binary tarball has already been downloaded into
# ds_schema_check_test/dev by the calling workflow.
set -euox pipefail

DS_VERSION=$1
DATABASE_VERSION=${DS_VERSION//\./}
CHECK_DIR="ds_schema_check_test"
COMPOSE_FILE=".github/workflows/schema-check/postgresql/docker-compose-base.yaml"

# Install the Atlas schema-diff tool and lay out the working directories
mkdir -p "${CHECK_DIR}/dev" "${CHECK_DIR}/${DS_VERSION}"
curl -sSf https://atlasgo.sh | sh

# Download the released binary, then unpack both it and the dev package
wget "https://archive.apache.org/dist/dolphinscheduler/${DS_VERSION}/apache-dolphinscheduler-${DS_VERSION}-bin.tar.gz" -P "${CHECK_DIR}/${DS_VERSION}"
tar -xzf "${CHECK_DIR}/${DS_VERSION}/apache-dolphinscheduler-${DS_VERSION}-bin.tar.gz" -C "${CHECK_DIR}/${DS_VERSION}" --strip-components 1
tar -xzf ${CHECK_DIR}/dev/apache-dolphinscheduler-*-bin.tar.gz -C "${CHECK_DIR}/dev" --strip-components 1

# Versions before 3.0.0 ship create-dolphinscheduler.sh rather than upgrade-schema.sh
if [[ ${DATABASE_VERSION} -lt 300 ]]; then
  TARGET_TOOL="${CHECK_DIR}/${DS_VERSION}/script/create-dolphinscheduler.sh"
else
  TARGET_TOOL="${CHECK_DIR}/${DS_VERSION}/tools/bin/upgrade-schema.sh"
fi
chmod +x "${CHECK_DIR}/dev/tools/bin/upgrade-schema.sh" "${TARGET_TOOL}"

# Bring up PostgreSQL and create the database that receives the released schema
docker compose -f "${COMPOSE_FILE}" up -d --wait
docker exec -i postgres psql -U postgres -c "create database dolphinscheduler_${DATABASE_VERSION}"

# Run the schema comparison tests, then tear everything down
/bin/bash .github/workflows/schema-check/postgresql/running-test.sh "${DS_VERSION}" "${DATABASE_VERSION}"
docker compose -f "${COMPOSE_FILE}" down -v --remove-orphans

View File

@ -100,8 +100,8 @@ jobs:
-Dsonar.projectKey=apache-dolphinscheduler
-Dsonar.login=e4058004bc6be89decf558ac819aa1ecbee57682
-Dsonar.exclusions=dolphinscheduler-ui/src/**/i18n/locale/*.js,dolphinscheduler-microbench/src/**/*
-Dhttp.keepAlive=false
-Dmaven.wagon.http.pool=false
-Dhttp.keepAlive=false
-Dmaven.wagon.http.pool=false
-Dmaven.wagon.httpconnectionManager.ttlSeconds=120
-DskipUT=true
env:

1
.gitignore vendored
View File

@ -54,3 +54,4 @@ dolphinscheduler-worker/logs
dolphinscheduler-master/logs
dolphinscheduler-api/logs
__pycache__
ds_schema_check_test

View File

@ -19,8 +19,8 @@ manual start or scheduled scheduling. Each time the process definition runs, a p
**Task instance**: The task instance is the instantiation of the task node in the process definition, which identifies
the specific task
**Task type**: Currently supports SHELL, SQL, SUB_PROCESS (sub-process), PROCEDURE, MR, SPARK, PYTHON, DEPENDENT (
depends), and plans to support dynamic plug-in expansion, note: **SUB_PROCESS** need relation with another workflow definition which also a separate process
**Task type**: Currently supports SHELL, SQL, SUB_WORKFLOW, PROCEDURE, MR, SPARK, PYTHON, DEPENDENT (
depends), and plans to support dynamic plug-in expansion. Note: a **SUB_WORKFLOW** needs a relation with another workflow definition, which is also a separate process
definition that can be started and executed separately
**Scheduling method**: The system supports scheduled scheduling and manual scheduling based on cron expressions. Command

View File

@ -919,7 +919,7 @@ No.|parameter name||type|description |notes
```bash
{
"type":"SUB_PROCESS",
"type":"SUB_WORKFLOW",
"id":"tasks-14806",
"name":"SubProcessTask",
"params":{

View File

@ -19,7 +19,7 @@ Before explaining the architecture of the schedule system, let us first understa
**Task instance**: A task instance is the instantiation of a specific task node when a process instance runs, which indicates the specific task execution status
**Task type**: Currently supports SHELL, SQL, SUB_PROCESS (sub-process), PROCEDURE, MR, SPARK, PYTHON, DEPENDENT (dependency), and plans to support dynamic plug-in extension, note: the sub-**SUB_PROCESS** is also A separate process definition that can be launched separately
**Task type**: Currently supports SHELL, SQL, SUB_WORKFLOW, PROCEDURE, MR, SPARK, PYTHON, DEPENDENT (dependency), and plans to support dynamic plug-in extension. Note: **SUB_WORKFLOW** is also a separate process definition that can be launched separately
**Schedule mode** : The system supports timing schedule and manual schedule based on cron expressions. Command type support: start workflow, start execution from current node, resume fault-tolerant workflow, resume pause process, start execution from failed node, complement, timer, rerun, pause, stop, resume waiting thread. Where **recovers the fault-tolerant workflow** and **restores the waiting thread** The two command types are used by the scheduling internal control and cannot be called externally

View File

@ -106,11 +106,11 @@ Save the subprocess_example1 workflow and set the global parameters var1.
![context-subprocess02](../../../../img/new_ui/dev/parameter/context-subprocess02.png)
Create a sub_process task in a new workflow, and use the subprocess_example1 workflow as the sub-node.
Create a sub_workflow task in a new workflow, and use the subprocess_example1 workflow as the sub-node.
![context-subprocess03](../../../../img/new_ui/dev/parameter/context-subprocess03.png)
Create a shell task as a downstream task of the sub_process task, and write the following script:
Create a shell task as a downstream task of the sub_workflow task, and write the following script:
![context-subprocess04](../../../../img/new_ui/dev/parameter/context-subprocess04.png)

View File

@ -2,14 +2,14 @@
This page describes details regarding Project screen in Apache DolphinScheduler. Here, you will see all the functions which can be handled in this screen. The following table explains commonly used terms in Apache DolphinScheduler:
| Glossary | description |
|---------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| DAG | Tasks in a workflow are assembled in form of Directed Acyclic Graph (DAG). A topological traversal is performed from nodes with zero degrees of entry until there are no subsequent nodes. |
| Workflow Definition | Visualization formed by dragging task nodes and establishing task node associations (DAG). |
| Workflow Instance | Instantiation of the workflow definition, which can be generated by manual start or scheduled scheduling. Each time the process definition runs, a workflow instance is generated. |
| Workflow Relation | Shows dynamic status of all the workflows in a project. |
| Task | Task is a discrete action in a Workflow. Apache DolphinScheduler supports SHELL, SQL, SUB_PROCESS (sub-process), PROCEDURE, MR, SPARK, PYTHON, DEPENDENT ( depends), and plans to support dynamic plug-in expansion, (SUB_PROCESS). It is also a separate process definition that can be started and executed separately. |
| Task Instance | Instantiation of the task node in the process definition, which identifies the specific task execution status. |
| Glossary | description |
|---------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| DAG | Tasks in a workflow are assembled in form of Directed Acyclic Graph (DAG). A topological traversal is performed from nodes with zero degrees of entry until there are no subsequent nodes. |
| Workflow Definition | Visualization formed by dragging task nodes and establishing task node associations (DAG). |
| Workflow Instance | Instantiation of the workflow definition, which can be generated by manual start or scheduled scheduling. Each time the process definition runs, a workflow instance is generated. |
| Workflow Relation | Shows dynamic status of all the workflows in a project. |
| Task | Task is a discrete action in a Workflow. Apache DolphinScheduler supports SHELL, SQL, SUB_WORKFLOW, PROCEDURE, MR, SPARK, PYTHON, DEPENDENT ( depends), and plans to support dynamic plug-in expansion, (SUB_WORKFLOW). It is also a separate process definition that can be started and executed separately. |
| Task Instance | Instantiation of the task node in the process definition, which identifies the specific task execution status. |
## Project List

View File

@ -30,7 +30,7 @@ Click the button to view task group usage information:
### Use of Task Groups
**Note**: The use of task groups is applicable to tasks executed by workers, such as `switch` nodes, `condition` nodes, `sub_process` and other node types executed by the master are not controlled by the task group.
**Note**: The use of task groups is applicable to tasks executed by workers, such as `switch` nodes, `condition` nodes, `sub_workflow` and other node types executed by the master are not controlled by the task group.
Let's take the shell node as an example:

View File

@ -7,7 +7,7 @@ The sub-process node is to execute an external workflow definition as a task nod
## Create Task
- Click `Project Management -> Project Name -> Workflow Definition`, and click the `Create Workflow` button to enter the DAG editing page.
- Drag from the toolbar <img src="../../../../img/tasks/icons/sub_process.png" width="15"/> task node to canvas to create a new SubProcess task.
- Drag from the toolbar <img src="../../../../img/tasks/icons/sub_workflow.png" width="15"/> task node to canvas to create a new SubProcess task.
## Task Parameter
@ -30,16 +30,16 @@ Create a shell task to print "hello" and define the workflow as `test_dag01`.
![subprocess_task01](../../../../img/tasks/demo/subprocess_task01.png)
## Create the Sub_process task
## Create the Sub_workflow task
To use the sub_process, you need to create the sub-node task, which is the workflow `test_dag01` we created in the first step. After that, as shown in the diagram below, select the corresponding sub-node in position ⑤.
To use the sub_workflow, you need to create the sub-node task, which is the workflow `test_dag01` we created in the first step. After that, as shown in the diagram below, select the corresponding sub-node in position ⑤.
![subprocess_task02](../../../../img/tasks/demo/subprocess_task02.png)
After creating the sub_process, create a corresponding shell task for printing "world" and link both together. Save the current workflow and run it to get the expected result.
After creating the sub_workflow, create a corresponding shell task for printing "world" and link both together. Save the current workflow and run it to get the expected result.
![subprocess_task03](../../../../img/tasks/demo/subprocess_task03.png)
## Note
When using `sub_process` to recall a sub-node task, you need to ensure that the defined sub-node is online status, otherwise, the sub_process workflow will not work properly.
When using `sub_workflow` to recall a sub-node task, you don't need to ensure that the defined sub-node is online status.

View File

@ -30,4 +30,6 @@ This document records the incompatible updates between each version. You need to
* Remove the `udf-manage` function from the `resource center` ([#16209])
* Remove the `Pigeon` from the `Task Plugin` ([#16218])
* Uniformly name `process` in code as `workflow` ([#16515])
* Deprecated upgrade code of 1.x and 2.x in 3.3.0-release ([#16543])

View File

@ -60,7 +60,7 @@ Execute script: `sh ./tools/bin/migrate-lineage.sh`.
Execution result:
- Migrate lineage data to new table `t_ds_process_task_lineage`.
- Migrate lineage data to new table `t_ds_workflow_task_lineage`.
- This script only performs upsert operations, not deletes. You can delete it manually if you need to.
### Upgrade Service

View File

@ -14,7 +14,7 @@
**任务实例**:任务实例是流程定义中任务节点的实例化,标识着某个具体的任务
**任务类型**:目前支持有 SHELL、SQL、SUB_PROCESS(子流程)、PROCEDURE、MR、SPARK、PYTHON、DEPENDENT(依赖),同时计划支持动态插件扩展,注意:其中 **SUB_PROCESS**类型的任务需要关联另外一个流程定义,被关联的流程定义是可以单独启动执行的
**任务类型**:目前支持有 SHELL、SQL、SUB_WORKFLOW(子工作流)、PROCEDURE、MR、SPARK、PYTHON、DEPENDENT(依赖),同时计划支持动态插件扩展,注意:其中 **SUB_WORKFLOW**类型的任务需要关联另外一个流程定义,被关联的流程定义是可以单独启动执行的
**调度方式**:系统支持基于 cron 表达式的定时调度和手动调度。命令类型支持:启动工作流、从当前节点开始执行、恢复被容错的工作流、恢复暂停流程、从失败节点开始执行、补数、定时、重跑、暂停、停止、恢复等待线程。
其中 **恢复被容错的工作流****恢复等待线程** 两种命令类型是由调度内部控制使用,外部无法调用

View File

@ -918,7 +918,7 @@
```bash
{
"type":"SUB_PROCESS",
"type":"SUB_WORKFLOW",
"id":"tasks-14806",
"name":"SubProcessTask",
"params":{

View File

@ -19,7 +19,7 @@
**任务实例**:任务实例是流程定义中任务节点的实例化,标识着具体的任务执行状态
**任务类型** 目前支持有 SHELL、SQL、SUB_PROCESS(子流程)、PROCEDURE、MR、SPARK、PYTHON、DEPENDENT(依赖),同时计划支持动态插件扩展,注意:其中子 **SUB_PROCESS** 也是一个单独的流程定义,是可以单独启动执行的
**任务类型** 目前支持有 SHELL、SQL、SUB_WORKFLOW(子工作流)、PROCEDURE、MR、SPARK、PYTHON、DEPENDENT(依赖),同时计划支持动态插件扩展,注意:其中子 **SUB_WORKFLOW** 也是一个单独的流程定义,是可以单独启动执行的
**调度方式:** 系统支持基于 cron 表达式的定时调度和手动调度。命令类型支持:启动工作流、从当前节点开始执行、恢复被容错的工作流、恢复暂停流程、从失败节点开始执行、补数、定时、重跑、暂停、停止、恢复等待线程。其中 **恢复被容错的工作流****恢复等待线程** 两种命令类型是由调度内部控制使用,外部无法调用

View File

@ -105,11 +105,11 @@ Node_mysql 运行结果如下:
![context-subprocess02](../../../../img/new_ui/dev/parameter/context-subprocess02.png)
在新的工作流中创建 sub_process 任务,使用 subprocess_example1 工作流作为子节点。
在新的工作流中创建 sub_workflow 任务,使用 subprocess_example1 工作流作为子节点。
![context-subprocess03](../../../../img/new_ui/dev/parameter/context-subprocess03.png)
创建一个 shell 任务作为 sub_process 任务的下游任务,并编写如下脚本:
创建一个 shell 任务作为 sub_workflow 任务的下游任务,并编写如下脚本:
![context-subprocess04](../../../../img/new_ui/dev/parameter/context-subprocess04.png)

View File

@ -32,7 +32,7 @@
#### 任务组的使用
注:任务组的使用适用于由 worker 执行的任务例如【switch】节点、【condition】节点、【sub_process】等由 master 负责执行的节点类型不受任务组控制。
注:任务组的使用适用于由 worker 执行的任务例如【switch】节点、【condition】节点、【sub_workflow】等由 master 负责执行的节点类型不受任务组控制。
我们以 shell 节点为例:

View File

@ -8,7 +8,7 @@
- 点击项目管理 -> 项目名称 -> 工作流定义,点击”创建工作流”按钮,进入 DAG 编辑页面:
- 拖动工具栏的 <img src="../../../../img/tasks/icons/sub_process.png" width="15"/> 任务节点到画板中。
- 拖动工具栏的 <img src="../../../../img/tasks/icons/sub_workflow.png" width="15"/> 任务节点到画板中。
## 任务参数
@ -31,16 +31,16 @@
![subprocess_task01](../../../../img/tasks/demo/subprocess_task01.png)
### 创建 sub_process 任务
### 创建 sub_workflow 任务
在使用 sub_process 的过程中,需要创建所需的子结点任务,也就是我们第一步所创建的 test_dag01 工作流。然后如下图所示,在 ⑤ 的位置选择对应的子结点即可。
在使用 sub_workflow 的过程中,需要创建所需的子结点任务,也就是我们第一步所创建的 test_dag01 工作流。然后如下图所示,在 ⑤ 的位置选择对应的子结点即可。
![subprocess_task02](../../../../img/tasks/demo/subprocess_task02.png)
创建 sub_process 完成之后,再创建一个对应的 shell 任务,用于打印 “world”并将二者连接起来。保存当前工作流并上线运行即可得到想要的结果。
创建 sub_workflow 完成之后,再创建一个对应的 shell 任务,用于打印 “world”并将二者连接起来。保存当前工作流并上线运行即可得到想要的结果。
![subprocess_task03](../../../../img/tasks/demo/subprocess_task03.png)
## 注意事项
在使用 sub_process 调用子结点任务的时候,需要保证定义的子结点为上线状态,否则 sub_process 的工作流无法正常运行
在使用 sub_workflow 调用子结点任务的时候,不需要保证定义的子结点为上线状态

View File

@ -28,4 +28,6 @@
* 从 `资源中心` 中移除了 `udf-manage` 功能 ([#16209])
* 从 `任务插件` 中移除了 `Pigeon` 类型 ([#16218])
* 统一代码中的 `process``workflow` ([#16515])
* 在 3.3.0-release 中废弃了从 1.x 至 2.x 的升级代码 ([#16543])

View File

@ -59,7 +59,7 @@ jar 包 并添加到 `./tools/libs` 目录下,设置以下环境变量
执行结果:
- 原血缘数据迁移至新血缘表 `t_ds_process_task_lineage`。
- 原血缘数据迁移至新血缘表 `t_ds_workflow_task_lineage`。
- 此脚本仅执行 upsert 操作,不执行删除操作,如果需要删除,您可以手动删除。
### 服务升级

View File

Before

Width:  |  Height:  |  Size: 692 B

After

Width:  |  Height:  |  Size: 692 B

View File

@ -25,7 +25,7 @@ import org.apache.dolphinscheduler.api.test.entity.LoginResponseData;
import org.apache.dolphinscheduler.api.test.pages.LoginPage;
import org.apache.dolphinscheduler.api.test.pages.project.ProjectPage;
import org.apache.dolphinscheduler.api.test.pages.workflow.ExecutorPage;
import org.apache.dolphinscheduler.api.test.pages.workflow.ProcessDefinitionPage;
import org.apache.dolphinscheduler.api.test.pages.workflow.WorkflowDefinitionPage;
import org.apache.dolphinscheduler.api.test.utils.JSONUtils;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
@ -50,7 +50,7 @@ import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
//TODO: Some test cases rely on ProcessInstance APIs. Should complete remaining cases after ProcessInstance related API tests done.
//TODO: Some test cases rely on WorkflowInstance APIs. Should complete remaining cases after WorkflowInstance related API tests done.
@DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml")
@Slf4j
public class ExecutorAPITest {
@ -65,13 +65,13 @@ public class ExecutorAPITest {
private static ExecutorPage executorPage;
private static ProcessDefinitionPage processDefinitionPage;
private static WorkflowDefinitionPage workflowDefinitionPage;
private static ProjectPage projectPage;
private static long projectCode;
private static long processDefinitionCode;
private static long workflowDefinitionCode;
private static List<Integer> workflowInstanceIds;
@ -82,7 +82,7 @@ public class ExecutorAPITest {
sessionId =
JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId();
executorPage = new ExecutorPage(sessionId);
processDefinitionPage = new ProcessDefinitionPage(sessionId);
workflowDefinitionPage = new WorkflowDefinitionPage(sessionId);
projectPage = new ProjectPage(sessionId);
loginUser = new User();
loginUser.setUserName("admin");
@ -97,7 +97,7 @@ public class ExecutorAPITest {
@Test
@Order(1)
public void testStartProcessInstance() {
public void testStartWorkflowInstance() {
try {
// create test project
HttpResponse createProjectResponse = projectPage.createProject(loginUser, "project-test");
@ -109,36 +109,36 @@ public class ExecutorAPITest {
// upload test workflow definition json
ClassLoader classLoader = getClass().getClassLoader();
File file = new File(classLoader.getResource("workflow-json/test.json").getFile());
CloseableHttpResponse importProcessDefinitionResponse = processDefinitionPage
.importProcessDefinition(loginUser, projectCode, file);
String data = EntityUtils.toString(importProcessDefinitionResponse.getEntity());
CloseableHttpResponse importWorkflowDefinitionResponse = workflowDefinitionPage
.importWorkflowDefinition(loginUser, projectCode, file);
String data = EntityUtils.toString(importWorkflowDefinitionResponse.getEntity());
Assertions.assertTrue(data.contains("\"success\":true"));
// get workflow definition code
HttpResponse queryAllProcessDefinitionByProjectCodeResponse =
processDefinitionPage.queryAllProcessDefinitionByProjectCode(loginUser, projectCode);
Assertions.assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getSuccess());
Assertions.assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getData().toString()
HttpResponse queryAllWorkflowDefinitionByProjectCodeResponse =
workflowDefinitionPage.queryAllWorkflowDefinitionByProjectCode(loginUser, projectCode);
Assertions.assertTrue(queryAllWorkflowDefinitionByProjectCodeResponse.getBody().getSuccess());
Assertions.assertTrue(queryAllWorkflowDefinitionByProjectCodeResponse.getBody().getData().toString()
.contains("hello world"));
processDefinitionCode =
(long) ((LinkedHashMap<String, Object>) ((LinkedHashMap<String, Object>) ((List<LinkedHashMap>) queryAllProcessDefinitionByProjectCodeResponse
.getBody().getData()).get(0)).get("processDefinition")).get("code");
workflowDefinitionCode =
(long) ((LinkedHashMap<String, Object>) ((LinkedHashMap<String, Object>) ((List<LinkedHashMap>) queryAllWorkflowDefinitionByProjectCodeResponse
.getBody().getData()).get(0)).get("workflowDefinition")).get("code");
// release test workflow
HttpResponse releaseProcessDefinitionResponse = processDefinitionPage.releaseProcessDefinition(loginUser,
projectCode, processDefinitionCode, ReleaseState.ONLINE);
Assertions.assertTrue(releaseProcessDefinitionResponse.getBody().getSuccess());
HttpResponse releaseWorkflowDefinitionResponse = workflowDefinitionPage.releaseWorkflowDefinition(loginUser,
projectCode, workflowDefinitionCode, ReleaseState.ONLINE);
Assertions.assertTrue(releaseWorkflowDefinitionResponse.getBody().getSuccess());
// trigger workflow instance
SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
Date date = new Date();
String scheduleTime = String.format("%s,%s", formatter.format(date), formatter.format(date));
log.info("use current time {} as scheduleTime", scheduleTime);
HttpResponse startProcessInstanceResponse = executorPage.startProcessInstance(loginUser, projectCode,
processDefinitionCode, scheduleTime, FailureStrategy.END, WarningType.NONE);
Assertions.assertTrue(startProcessInstanceResponse.getBody().getSuccess());
HttpResponse startWorkflowInstanceResponse = executorPage.startWorkflowInstance(loginUser, projectCode,
workflowDefinitionCode, scheduleTime, FailureStrategy.END, WarningType.NONE);
Assertions.assertTrue(startWorkflowInstanceResponse.getBody().getSuccess());
workflowInstanceIds = (List<Integer>) startProcessInstanceResponse.getBody().getData();
workflowInstanceIds = (List<Integer>) startWorkflowInstanceResponse.getBody().getData();
} catch (Exception e) {
log.error("failed", e);
Assertions.fail();

View File

@ -39,9 +39,11 @@ import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junitpioneer.jupiter.DisableIfTestFails;
@DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml")
@Slf4j
@DisableIfTestFails
// TODO: Add more detailed permission control related cases after userPage test cases completed
public class ProjectAPITest {

View File

@ -24,8 +24,8 @@ import org.apache.dolphinscheduler.api.test.entity.HttpResponse;
import org.apache.dolphinscheduler.api.test.entity.LoginResponseData;
import org.apache.dolphinscheduler.api.test.pages.LoginPage;
import org.apache.dolphinscheduler.api.test.pages.project.ProjectPage;
import org.apache.dolphinscheduler.api.test.pages.workflow.ProcessDefinitionPage;
import org.apache.dolphinscheduler.api.test.pages.workflow.SchedulerPage;
import org.apache.dolphinscheduler.api.test.pages.workflow.WorkflowDefinitionPage;
import org.apache.dolphinscheduler.api.test.utils.JSONUtils;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.UserType;
@ -42,9 +42,11 @@ import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junitpioneer.jupiter.DisableIfTestFails;
@DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml")
@Slf4j
@DisableIfTestFails
public class SchedulerAPITest {
private static final String username = "admin";
@ -57,13 +59,13 @@ public class SchedulerAPITest {
private static SchedulerPage schedulerPage;
private static ProcessDefinitionPage processDefinitionPage;
private static WorkflowDefinitionPage workflowDefinitionPage;
private static ProjectPage projectPage;
private static long projectCode;
private static long processDefinitionCode;
private static long workflowDefinitionCode;
private static int scheduleId;
@ -75,7 +77,7 @@ public class SchedulerAPITest {
JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId();
projectPage = new ProjectPage(sessionId);
schedulerPage = new SchedulerPage(sessionId);
processDefinitionPage = new ProcessDefinitionPage(sessionId);
workflowDefinitionPage = new WorkflowDefinitionPage(sessionId);
loginUser = new User();
loginUser.setUserName("admin");
loginUser.setId(1);
@ -98,20 +100,20 @@ public class SchedulerAPITest {
.getBody().getData()).get(0)).get("code");
ClassLoader classLoader = getClass().getClassLoader();
File file = new File(classLoader.getResource("workflow-json/test.json").getFile());
processDefinitionPage.importProcessDefinition(loginUser, projectCode, file);
HttpResponse queryAllProcessDefinitionByProjectCodeResponse =
processDefinitionPage.queryAllProcessDefinitionByProjectCode(loginUser, projectCode);
Assertions.assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getSuccess());
processDefinitionCode =
(long) ((LinkedHashMap<String, Object>) ((LinkedHashMap<String, Object>) ((List<LinkedHashMap>) queryAllProcessDefinitionByProjectCodeResponse
.getBody().getData()).get(0)).get("processDefinition")).get("code");
workflowDefinitionPage.importWorkflowDefinition(loginUser, projectCode, file);
HttpResponse queryAllWorkflowDefinitionByProjectCodeResponse =
workflowDefinitionPage.queryAllWorkflowDefinitionByProjectCode(loginUser, projectCode);
Assertions.assertTrue(queryAllWorkflowDefinitionByProjectCodeResponse.getBody().getSuccess());
workflowDefinitionCode =
(long) ((LinkedHashMap<String, Object>) ((LinkedHashMap<String, Object>) ((List<LinkedHashMap>) queryAllWorkflowDefinitionByProjectCodeResponse
.getBody().getData()).get(0)).get("workflowDefinition")).get("code");
processDefinitionPage.releaseProcessDefinition(loginUser, projectCode, processDefinitionCode,
workflowDefinitionPage.releaseWorkflowDefinition(loginUser, projectCode, workflowDefinitionCode,
ReleaseState.ONLINE);
final String schedule =
"{\"startTime\":\"2019-08-08 00:00:00\",\"endTime\":\"2100-08-08 00:00:00\",\"timezoneId\":\"America/Phoenix\",\"crontab\":\"0 0 3/6 * * ? *\"}";
HttpResponse createScheduleResponse =
schedulerPage.createSchedule(loginUser, projectCode, processDefinitionCode, schedule);
schedulerPage.createSchedule(loginUser, projectCode, workflowDefinitionCode, schedule);
Assertions.assertTrue(createScheduleResponse.getBody().getSuccess());
Assertions.assertTrue(createScheduleResponse.getBody().getData().toString().contains("2019-08-08"));
}

View File

@ -35,9 +35,11 @@ import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junitpioneer.jupiter.DisableIfTestFails;
@DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml")
@Slf4j
@DisableIfTestFails
public class TenantAPITest {
private static final String tenant = System.getProperty("user.name");

View File

@ -40,9 +40,11 @@ import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junitpioneer.jupiter.DisableIfTestFails;
@DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml")
@Slf4j
@DisableIfTestFails
public class WorkerGroupAPITest {
private static final String username = "admin";

View File

@ -24,7 +24,7 @@ import org.apache.dolphinscheduler.api.test.entity.HttpResponse;
import org.apache.dolphinscheduler.api.test.entity.LoginResponseData;
import org.apache.dolphinscheduler.api.test.pages.LoginPage;
import org.apache.dolphinscheduler.api.test.pages.project.ProjectPage;
import org.apache.dolphinscheduler.api.test.pages.workflow.ProcessDefinitionPage;
import org.apache.dolphinscheduler.api.test.pages.workflow.WorkflowDefinitionPage;
import org.apache.dolphinscheduler.api.test.utils.JSONUtils;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.UserType;
@ -44,9 +44,11 @@ import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junitpioneer.jupiter.DisableIfTestFails;
@DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml")
@Slf4j
@DisableIfTestFails
public class WorkflowDefinitionAPITest {
private static final String username = "admin";
@ -57,15 +59,15 @@ public class WorkflowDefinitionAPITest {
private static User loginUser;
private static ProcessDefinitionPage processDefinitionPage;
private static WorkflowDefinitionPage workflowDefinitionPage;
private static ProjectPage projectPage;
private static long projectCode;
private static long processDefinitionCode;
private static long workflowDefinitionCode;
private static String processDefinitionName;
private static String workflowDefinitionName;
@BeforeAll
public static void setup() {
@ -73,7 +75,7 @@ public class WorkflowDefinitionAPITest {
HttpResponse loginHttpResponse = loginPage.login(username, password);
sessionId =
JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId();
processDefinitionPage = new ProcessDefinitionPage(sessionId);
workflowDefinitionPage = new WorkflowDefinitionPage(sessionId);
projectPage = new ProjectPage(sessionId);
loginUser = new User();
loginUser.setId(123);
@ -87,7 +89,7 @@ public class WorkflowDefinitionAPITest {
@Test
@Order(1)
public void testImportProcessDefinition() {
public void testImportWorkflowDefinition() {
try {
HttpResponse createProjectResponse = projectPage.createProject(loginUser, "project-test");
HttpResponse queryAllProjectListResponse = projectPage.queryAllProjectList(loginUser);
@ -97,9 +99,9 @@ public class WorkflowDefinitionAPITest {
.getBody().getData()).get(0)).get("code");
ClassLoader classLoader = getClass().getClassLoader();
File file = new File(classLoader.getResource("workflow-json/test.json").getFile());
CloseableHttpResponse importProcessDefinitionResponse = processDefinitionPage
.importProcessDefinition(loginUser, projectCode, file);
String data = EntityUtils.toString(importProcessDefinitionResponse.getEntity());
CloseableHttpResponse importWorkflowDefinitionResponse = workflowDefinitionPage
.importWorkflowDefinition(loginUser, projectCode, file);
String data = EntityUtils.toString(importWorkflowDefinitionResponse.getEntity());
Assertions.assertTrue(data.contains("\"success\":true"));
} catch (Exception e) {
log.error("failed", e);
@ -109,93 +111,95 @@ public class WorkflowDefinitionAPITest {
@Test
@Order(2)
public void testQueryAllProcessDefinitionByProjectCode() {
HttpResponse queryAllProcessDefinitionByProjectCodeResponse =
processDefinitionPage.queryAllProcessDefinitionByProjectCode(loginUser, projectCode);
Assertions.assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getSuccess());
public void testQueryAllWorkflowDefinitionByProjectCode() {
HttpResponse queryAllWorkflowDefinitionByProjectCodeResponse =
workflowDefinitionPage.queryAllWorkflowDefinitionByProjectCode(loginUser, projectCode);
Assertions.assertTrue(queryAllWorkflowDefinitionByProjectCodeResponse.getBody().getSuccess());
Assertions.assertTrue(
queryAllProcessDefinitionByProjectCodeResponse.getBody().getData().toString().contains("hello world"));
processDefinitionCode =
(long) ((LinkedHashMap<String, Object>) ((LinkedHashMap<String, Object>) ((List<LinkedHashMap>) queryAllProcessDefinitionByProjectCodeResponse
.getBody().getData()).get(0)).get("processDefinition")).get("code");
processDefinitionName =
(String) ((LinkedHashMap<String, Object>) ((LinkedHashMap<String, Object>) ((List<LinkedHashMap>) queryAllProcessDefinitionByProjectCodeResponse
.getBody().getData()).get(0)).get("processDefinition")).get("name");
queryAllWorkflowDefinitionByProjectCodeResponse.getBody().getData().toString().contains("hello world"));
workflowDefinitionCode =
(long) ((LinkedHashMap<String, Object>) ((LinkedHashMap<String, Object>) ((List<LinkedHashMap>) queryAllWorkflowDefinitionByProjectCodeResponse
.getBody().getData()).get(0)).get("workflowDefinition")).get("code");
workflowDefinitionName =
(String) ((LinkedHashMap<String, Object>) ((LinkedHashMap<String, Object>) ((List<LinkedHashMap>) queryAllWorkflowDefinitionByProjectCodeResponse
.getBody().getData()).get(0)).get("workflowDefinition")).get("name");
}
@Test
@Order(3)
public void testQueryProcessDefinitionByCode() {
HttpResponse queryProcessDefinitionByCodeResponse =
processDefinitionPage.queryProcessDefinitionByCode(loginUser, projectCode, processDefinitionCode);
Assertions.assertTrue(queryProcessDefinitionByCodeResponse.getBody().getSuccess());
public void testQueryWorkflowDefinitionByCode() {
HttpResponse queryWorkflowDefinitionByCodeResponse =
workflowDefinitionPage.queryWorkflowDefinitionByCode(loginUser, projectCode, workflowDefinitionCode);
Assertions.assertTrue(queryWorkflowDefinitionByCodeResponse.getBody().getSuccess());
Assertions.assertTrue(
queryProcessDefinitionByCodeResponse.getBody().getData().toString().contains("hello world"));
queryWorkflowDefinitionByCodeResponse.getBody().getData().toString().contains("hello world"));
}
@Test
@Order(4)
public void testgetProcessListByProjectCode() {
HttpResponse getProcessListByProjectCodeResponse =
processDefinitionPage.getProcessListByProjectCode(loginUser, projectCode);
Assertions.assertTrue(getProcessListByProjectCodeResponse.getBody().getSuccess());
public void testGetWorkflowListByProjectCode() {
HttpResponse getWorkflowListByProjectCodeResponse =
workflowDefinitionPage.getWorkflowListByProjectCode(loginUser, projectCode);
Assertions.assertTrue(getWorkflowListByProjectCodeResponse.getBody().getSuccess());
Assertions
.assertTrue(getProcessListByProjectCodeResponse.getBody().getData().toString().contains("test_import"));
.assertTrue(
getWorkflowListByProjectCodeResponse.getBody().getData().toString().contains("test_import"));
}
@Test
@Order(5)
public void testQueryProcessDefinitionByName() {
HttpResponse queryProcessDefinitionByNameResponse =
processDefinitionPage.queryProcessDefinitionByName(loginUser, projectCode, processDefinitionName);
Assertions.assertTrue(queryProcessDefinitionByNameResponse.getBody().getSuccess());
public void testQueryWorkflowDefinitionByName() {
HttpResponse queryWorkflowDefinitionByNameResponse =
workflowDefinitionPage.queryWorkflowDefinitionByName(loginUser, projectCode, workflowDefinitionName);
Assertions.assertTrue(queryWorkflowDefinitionByNameResponse.getBody().getSuccess());
Assertions.assertTrue(
queryProcessDefinitionByNameResponse.getBody().getData().toString().contains("hello world"));
queryWorkflowDefinitionByNameResponse.getBody().getData().toString().contains("hello world"));
}
@Test
@Order(6)
public void testQueryProcessDefinitionList() {
HttpResponse queryProcessDefinitionListResponse =
processDefinitionPage.queryProcessDefinitionList(loginUser, projectCode);
Assertions.assertTrue(queryProcessDefinitionListResponse.getBody().getSuccess());
public void testQueryWorkflowDefinitionList() {
HttpResponse queryWorkflowDefinitionListResponse =
workflowDefinitionPage.queryWorkflowDefinitionList(loginUser, projectCode);
Assertions.assertTrue(queryWorkflowDefinitionListResponse.getBody().getSuccess());
Assertions
.assertTrue(queryProcessDefinitionListResponse.getBody().getData().toString().contains("hello world"));
.assertTrue(queryWorkflowDefinitionListResponse.getBody().getData().toString().contains("hello world"));
}
@Test
@Order(7)
public void testReleaseProcessDefinition() {
HttpResponse releaseProcessDefinitionResponse = processDefinitionPage.releaseProcessDefinition(loginUser,
projectCode, processDefinitionCode, ReleaseState.ONLINE);
Assertions.assertTrue(releaseProcessDefinitionResponse.getBody().getSuccess());
public void testReleaseWorkflowDefinition() {
HttpResponse releaseWorkflowDefinitionResponse = workflowDefinitionPage.releaseWorkflowDefinition(loginUser,
projectCode, workflowDefinitionCode, ReleaseState.ONLINE);
Assertions.assertTrue(releaseWorkflowDefinitionResponse.getBody().getSuccess());
HttpResponse queryProcessDefinitionByCodeResponse =
processDefinitionPage.queryProcessDefinitionByCode(loginUser, projectCode, processDefinitionCode);
Assertions.assertTrue(queryProcessDefinitionByCodeResponse.getBody().getSuccess());
HttpResponse queryWorkflowDefinitionByCodeResponse =
workflowDefinitionPage.queryWorkflowDefinitionByCode(loginUser, projectCode, workflowDefinitionCode);
Assertions.assertTrue(queryWorkflowDefinitionByCodeResponse.getBody().getSuccess());
Assertions.assertTrue(
queryProcessDefinitionByCodeResponse.getBody().getData().toString().contains("releaseState=ONLINE"));
queryWorkflowDefinitionByCodeResponse.getBody().getData().toString().contains("releaseState=ONLINE"));
}
@Test
@Order(8)
public void testDeleteProcessDefinitionByCode() {
HttpResponse deleteProcessDefinitionByCodeResponse =
processDefinitionPage.deleteProcessDefinitionByCode(loginUser, projectCode, processDefinitionCode);
Assertions.assertFalse(deleteProcessDefinitionByCodeResponse.getBody().getSuccess());
public void testDeleteWorkflowDefinitionByCode() {
HttpResponse deleteWorkflowDefinitionByCodeResponse =
workflowDefinitionPage.deleteWorkflowDefinitionByCode(loginUser, projectCode, workflowDefinitionCode);
Assertions.assertFalse(deleteWorkflowDefinitionByCodeResponse.getBody().getSuccess());
HttpResponse releaseProcessDefinitionResponse = processDefinitionPage.releaseProcessDefinition(loginUser,
projectCode, processDefinitionCode, ReleaseState.OFFLINE);
Assertions.assertTrue(releaseProcessDefinitionResponse.getBody().getSuccess());
HttpResponse releaseWorkflowDefinitionResponse = workflowDefinitionPage.releaseWorkflowDefinition(loginUser,
projectCode, workflowDefinitionCode, ReleaseState.OFFLINE);
Assertions.assertTrue(releaseWorkflowDefinitionResponse.getBody().getSuccess());
deleteProcessDefinitionByCodeResponse =
processDefinitionPage.deleteProcessDefinitionByCode(loginUser, projectCode, processDefinitionCode);
Assertions.assertTrue(deleteProcessDefinitionByCodeResponse.getBody().getSuccess());
deleteWorkflowDefinitionByCodeResponse =
workflowDefinitionPage.deleteWorkflowDefinitionByCode(loginUser, projectCode, workflowDefinitionCode);
Assertions.assertTrue(deleteWorkflowDefinitionByCodeResponse.getBody().getSuccess());
HttpResponse queryProcessDefinitionListResponse =
processDefinitionPage.queryProcessDefinitionList(loginUser, projectCode);
Assertions.assertTrue(queryProcessDefinitionListResponse.getBody().getSuccess());
HttpResponse queryWorkflowDefinitionListResponse =
workflowDefinitionPage.queryWorkflowDefinitionList(loginUser, projectCode);
Assertions.assertTrue(queryWorkflowDefinitionListResponse.getBody().getSuccess());
Assertions
.assertFalse(queryProcessDefinitionListResponse.getBody().getData().toString().contains("hello world"));
.assertFalse(
queryWorkflowDefinitionListResponse.getBody().getData().toString().contains("hello world"));
}
}

View File

@ -28,8 +28,8 @@ import org.apache.dolphinscheduler.api.test.entity.LoginResponseData;
import org.apache.dolphinscheduler.api.test.pages.LoginPage;
import org.apache.dolphinscheduler.api.test.pages.project.ProjectPage;
import org.apache.dolphinscheduler.api.test.pages.workflow.ExecutorPage;
import org.apache.dolphinscheduler.api.test.pages.workflow.ProcessDefinitionPage;
import org.apache.dolphinscheduler.api.test.pages.workflow.ProcessInstancePage;
import org.apache.dolphinscheduler.api.test.pages.workflow.WorkflowDefinitionPage;
import org.apache.dolphinscheduler.api.test.pages.workflow.WorkflowInstancePage;
import org.apache.dolphinscheduler.api.test.utils.JSONUtils;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
@ -55,10 +55,12 @@ import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junitpioneer.jupiter.DisableIfTestFails;
import org.testcontainers.shaded.org.awaitility.Awaitility;
@DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml")
@Slf4j
@DisableIfTestFails
public class WorkflowInstanceAPITest {
private static final String username = "admin";
@ -69,19 +71,19 @@ public class WorkflowInstanceAPITest {
private static User loginUser;
private static ProcessInstancePage processInstancePage;
private static WorkflowInstancePage workflowInstancePage;
private static ExecutorPage executorPage;
private static ProcessDefinitionPage processDefinitionPage;
private static WorkflowDefinitionPage workflowDefinitionPage;
private static ProjectPage projectPage;
private static long projectCode;
private static long processDefinitionCode;
private static long workflowDefinitionCode;
private static int processInstanceId;
private static int workflowInstanceId;
@BeforeAll
public static void setup() {
@ -89,9 +91,9 @@ public class WorkflowInstanceAPITest {
HttpResponse loginHttpResponse = loginPage.login(username, password);
sessionId =
JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId();
processInstancePage = new ProcessInstancePage(sessionId);
workflowInstancePage = new WorkflowInstancePage(sessionId);
executorPage = new ExecutorPage(sessionId);
processDefinitionPage = new ProcessDefinitionPage(sessionId);
workflowDefinitionPage = new WorkflowDefinitionPage(sessionId);
projectPage = new ProjectPage(sessionId);
loginUser = new User();
loginUser.setUserName("admin");
@ -106,7 +108,7 @@ public class WorkflowInstanceAPITest {
@Test
@Order(1)
public void testQueryProcessInstancesByWorkflowInstanceId() {
public void testQueryWorkflowInstancesByWorkflowInstanceId() {
try {
// create test project
HttpResponse createProjectResponse = projectPage.createProject(loginUser, "project-test");
@ -118,49 +120,50 @@ public class WorkflowInstanceAPITest {
// upload test workflow definition json
ClassLoader classLoader = getClass().getClassLoader();
File file = new File(classLoader.getResource("workflow-json/test.json").getFile());
CloseableHttpResponse importProcessDefinitionResponse = processDefinitionPage
.importProcessDefinition(loginUser, projectCode, file);
String data = EntityUtils.toString(importProcessDefinitionResponse.getEntity());
CloseableHttpResponse importWorkflowDefinitionResponse = workflowDefinitionPage
.importWorkflowDefinition(loginUser, projectCode, file);
String data = EntityUtils.toString(importWorkflowDefinitionResponse.getEntity());
assertTrue(data.contains("\"success\":true"));
// get workflow definition code
HttpResponse queryAllProcessDefinitionByProjectCodeResponse =
processDefinitionPage.queryAllProcessDefinitionByProjectCode(loginUser, projectCode);
assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getSuccess());
assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getData().toString()
HttpResponse queryAllWorkflowDefinitionByProjectCodeResponse =
workflowDefinitionPage.queryAllWorkflowDefinitionByProjectCode(loginUser, projectCode);
assertTrue(queryAllWorkflowDefinitionByProjectCodeResponse.getBody().getSuccess());
assertTrue(queryAllWorkflowDefinitionByProjectCodeResponse.getBody().getData().toString()
.contains("hello world"));
processDefinitionCode =
(long) ((LinkedHashMap<String, Object>) ((LinkedHashMap<String, Object>) ((List<LinkedHashMap>) queryAllProcessDefinitionByProjectCodeResponse
.getBody().getData()).get(0)).get("processDefinition")).get("code");
workflowDefinitionCode =
(long) ((LinkedHashMap<String, Object>) ((LinkedHashMap<String, Object>) ((List<LinkedHashMap>) queryAllWorkflowDefinitionByProjectCodeResponse
.getBody().getData()).get(0)).get("workflowDefinition")).get("code");
// release test workflow
HttpResponse releaseProcessDefinitionResponse = processDefinitionPage.releaseProcessDefinition(loginUser,
projectCode, processDefinitionCode, ReleaseState.ONLINE);
assertTrue(releaseProcessDefinitionResponse.getBody().getSuccess());
HttpResponse releaseWorkflowDefinitionResponse = workflowDefinitionPage.releaseWorkflowDefinition(loginUser,
projectCode, workflowDefinitionCode, ReleaseState.ONLINE);
assertTrue(releaseWorkflowDefinitionResponse.getBody().getSuccess());
// trigger workflow instance
SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
Date date = new Date();
String scheduleTime = String.format("%s,%s", formatter.format(date), formatter.format(date));
log.info("use current time {} as scheduleTime", scheduleTime);
HttpResponse startProcessInstanceResponse = executorPage.startProcessInstance(loginUser, projectCode,
processDefinitionCode, scheduleTime, FailureStrategy.END, WarningType.NONE);
assertTrue(startProcessInstanceResponse.getBody().getSuccess());
final List<Integer> workflowInstanceIds = (List<Integer>) startProcessInstanceResponse.getBody().getData();
HttpResponse startWorkflowInstanceResponse = executorPage.startWorkflowInstance(loginUser, projectCode,
workflowDefinitionCode, scheduleTime, FailureStrategy.END, WarningType.NONE);
assertTrue(startWorkflowInstanceResponse.getBody().getSuccess());
final List<Integer> workflowInstanceIds = (List<Integer>) startWorkflowInstanceResponse.getBody().getData();
assertEquals(1, workflowInstanceIds.size());
processInstanceId = workflowInstanceIds.get(0);
workflowInstanceId = workflowInstanceIds.get(0);
// make sure process instance has completed and successfully persisted into db
// make sure workflow instance has completed and successfully persisted into db
Awaitility.await()
.atMost(30, TimeUnit.SECONDS)
.untilAsserted(() -> {
// query workflow instance by trigger code
HttpResponse queryProcessInstanceListResponse =
processInstancePage.queryProcessInstanceById(loginUser, projectCode, processInstanceId);
assertTrue(queryProcessInstanceListResponse.getBody().getSuccess());
HttpResponse queryWorkflowInstanceListResponse =
workflowInstancePage.queryWorkflowInstanceById(loginUser, projectCode,
workflowInstanceId);
assertTrue(queryWorkflowInstanceListResponse.getBody().getSuccess());
final Map<String, Object> workflowInstance =
(Map<String, Object>) queryProcessInstanceListResponse.getBody().getData();
(Map<String, Object>) queryWorkflowInstanceListResponse.getBody().getData();
assertEquals("SUCCESS", workflowInstance.get("state"));
});
} catch (Exception e) {
@ -171,42 +174,43 @@ public class WorkflowInstanceAPITest {
@Test
@Order(2)
public void testQueryProcessInstanceList() {
HttpResponse queryProcessInstanceListResponse =
processInstancePage.queryProcessInstanceList(loginUser, projectCode, 1, 10);
assertTrue(queryProcessInstanceListResponse.getBody().getSuccess());
assertTrue(queryProcessInstanceListResponse.getBody().getData().toString().contains("test_import"));
public void testQueryWorkflowInstanceList() {
HttpResponse queryWorkflowInstanceListResponse =
workflowInstancePage.queryWorkflowInstanceList(loginUser, projectCode, 1, 10);
assertTrue(queryWorkflowInstanceListResponse.getBody().getSuccess());
assertTrue(queryWorkflowInstanceListResponse.getBody().getData().toString().contains("test_import"));
}
@Test
@Order(3)
public void testQueryTaskListByProcessId() {
HttpResponse queryTaskListByProcessIdResponse =
processInstancePage.queryTaskListByProcessId(loginUser, projectCode, processInstanceId);
assertTrue(queryTaskListByProcessIdResponse.getBody().getSuccess());
assertTrue(queryTaskListByProcessIdResponse.getBody().getData().toString().contains("test_import"));
public void testQueryTaskListByWorkflowInstanceId() {
HttpResponse queryTaskListByWorkflowInstanceIdResponse =
workflowInstancePage.queryTaskListByWorkflowInstanceId(loginUser, projectCode, workflowInstanceId);
assertTrue(queryTaskListByWorkflowInstanceIdResponse.getBody().getSuccess());
assertTrue(queryTaskListByWorkflowInstanceIdResponse.getBody().getData().toString().contains("test_import"));
}
@Test
@Order(4)
public void testQueryProcessInstanceById() {
HttpResponse queryProcessInstanceByIdResponse =
processInstancePage.queryProcessInstanceById(loginUser, projectCode, processInstanceId);
assertTrue(queryProcessInstanceByIdResponse.getBody().getSuccess());
assertTrue(queryProcessInstanceByIdResponse.getBody().getData().toString().contains("test_import"));
public void testQueryWorkflowInstanceById() {
HttpResponse queryWorkflowInstanceByIdResponse =
workflowInstancePage.queryWorkflowInstanceById(loginUser, projectCode, workflowInstanceId);
assertTrue(queryWorkflowInstanceByIdResponse.getBody().getSuccess());
assertTrue(queryWorkflowInstanceByIdResponse.getBody().getData().toString().contains("test_import"));
}
@Test
@Order(5)
public void testDeleteProcessInstanceById() {
HttpResponse deleteProcessInstanceByIdResponse =
processInstancePage.deleteProcessInstanceById(loginUser, projectCode, processInstanceId);
assertTrue(deleteProcessInstanceByIdResponse.getBody().getSuccess());
public void testDeleteWorkflowInstanceById() {
HttpResponse deleteWorkflowInstanceByIdResponse =
workflowInstancePage.deleteWorkflowInstanceById(loginUser, projectCode, workflowInstanceId);
assertTrue(deleteWorkflowInstanceByIdResponse.getBody().getSuccess());
HttpResponse queryProcessInstanceListResponse =
processInstancePage.queryProcessInstanceList(loginUser, projectCode, 1, 10);
assertTrue(queryProcessInstanceListResponse.getBody().getSuccess());
Assertions.assertFalse(queryProcessInstanceListResponse.getBody().getData().toString().contains("test_import"));
HttpResponse queryWorkflowInstanceListResponse =
workflowInstancePage.queryWorkflowInstanceList(loginUser, projectCode, 1, 10);
assertTrue(queryWorkflowInstanceListResponse.getBody().getSuccess());
Assertions
.assertFalse(queryWorkflowInstanceListResponse.getBody().getData().toString().contains("test_import"));
}
}

View File

@ -40,15 +40,15 @@ public class ExecutorPage {
private String sessionId;
public HttpResponse startProcessInstance(User loginUser,
long projectCode,
long processDefinitionCode,
String scheduleTime,
FailureStrategy failureStrategy,
WarningType warningType) {
public HttpResponse startWorkflowInstance(User loginUser,
long projectCode,
long workflowDefinitionCode,
String scheduleTime,
FailureStrategy failureStrategy,
WarningType warningType) {
Map<String, Object> params = new HashMap<>();
params.put("loginUser", loginUser);
params.put("processDefinitionCode", processDefinitionCode);
params.put("workflowDefinitionCode", workflowDefinitionCode);
params.put("scheduleTime", scheduleTime);
params.put("failureStrategy", failureStrategy);
params.put("warningType", warningType);
@ -56,14 +56,14 @@ public class ExecutorPage {
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
String url = String.format("/projects/%s/executors/start-process-instance", projectCode);
String url = String.format("/projects/%s/executors/start-workflow-instance", projectCode);
return requestClient.post(url, headers, params);
}
public HttpResponse queryExecutingWorkflow(User loginUser, long projectCode, long processInstanceCode) {
public HttpResponse queryExecutingWorkflow(User loginUser, long projectCode, long workflowInstanceCode) {
Map<String, Object> params = new HashMap<>();
params.put("loginUser", loginUser);
params.put("id", processInstanceCode);
params.put("id", workflowInstanceCode);
Map<String, String> headers = new HashMap<>();
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
@ -71,11 +71,11 @@ public class ExecutorPage {
return requestClient.get(url, headers, params);
}
public HttpResponse execute(User loginUser, long projectCode, int processInstanceId, ExecuteType executeType) {
public HttpResponse execute(User loginUser, long projectCode, int workflowInstanceId, ExecuteType executeType) {
Map<String, Object> params = new HashMap<>();
params.put("loginUser", loginUser);
params.put("projectCode", projectCode);
params.put("processInstanceId", processInstanceId);
params.put("workflowInstanceId", workflowInstanceId);
params.put("executeType", executeType);
Map<String, String> headers = new HashMap<>();
headers.put(Constants.SESSION_ID_KEY, sessionId);
@ -85,11 +85,11 @@ public class ExecutorPage {
return requestClient.post(url, headers, params);
}
public HttpResponse executeTask(User loginUser, long projectCode, int processInstanceId, String startNodeList,
public HttpResponse executeTask(User loginUser, long projectCode, int workflowInstanceId, String startNodeList,
TaskDependType taskDependType) {
Map<String, Object> params = new HashMap<>();
params.put("loginUser", loginUser);
params.put("processInstanceId", processInstanceId);
params.put("workflowInstanceId", workflowInstanceId);
params.put("startNodeList", startNodeList);
params.put("taskDependType", taskDependType);
Map<String, String> headers = new HashMap<>();

View File

@ -36,11 +36,11 @@ public class SchedulerPage {
private String sessionId;
public HttpResponse createSchedule(User loginUser, long projectCode, long processDefinitionCode, String schedule) {
public HttpResponse createSchedule(User loginUser, long projectCode, long workflowDefinitionCode, String schedule) {
Map<String, Object> params = new HashMap<>();
params.put("loginUser", loginUser);
params.put("projectCode", projectCode);
params.put("processDefinitionCode", processDefinitionCode);
params.put("workflowDefinitionCode", workflowDefinitionCode);
params.put("schedule", schedule);
Map<String, String> headers = new HashMap<>();
headers.put(Constants.SESSION_ID_KEY, sessionId);

View File

@ -36,54 +36,54 @@ import lombok.extern.slf4j.Slf4j;
@Slf4j
@AllArgsConstructor
public class ProcessDefinitionPage {
public class WorkflowDefinitionPage {
private String sessionId;
public CloseableHttpResponse importProcessDefinition(User loginUser, long projectCode, File file) {
public CloseableHttpResponse importWorkflowDefinition(User loginUser, long projectCode, File file) {
Map<String, Object> params = new HashMap<>();
params.put("loginUser", loginUser);
Map<String, String> headers = new HashMap<>();
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
String url = String.format("/projects/%s/process-definition/import", projectCode);
String url = String.format("/projects/%s/workflow-definition/import", projectCode);
return requestClient.postWithFile(url, headers, params, file);
}
public HttpResponse queryAllProcessDefinitionByProjectCode(User loginUser, long projectCode) {
public HttpResponse queryAllWorkflowDefinitionByProjectCode(User loginUser, long projectCode) {
Map<String, Object> params = new HashMap<>();
params.put("loginUser", loginUser);
Map<String, String> headers = new HashMap<>();
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
String url = String.format("/projects/%s/process-definition/all", projectCode);
String url = String.format("/projects/%s/workflow-definition/all", projectCode);
return requestClient.get(url, headers, params);
}
public HttpResponse queryProcessDefinitionByCode(User loginUser, long projectCode, long processDefinitionCode) {
public HttpResponse queryWorkflowDefinitionByCode(User loginUser, long projectCode, long workflowDefinitionCode) {
Map<String, Object> params = new HashMap<>();
params.put("loginUser", loginUser);
Map<String, String> headers = new HashMap<>();
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
String url = String.format("/projects/%s/process-definition/%s", projectCode, processDefinitionCode);
String url = String.format("/projects/%s/workflow-definition/%s", projectCode, workflowDefinitionCode);
return requestClient.get(url, headers, params);
}
public HttpResponse getProcessListByProjectCode(User loginUser, long projectCode) {
public HttpResponse getWorkflowListByProjectCode(User loginUser, long projectCode) {
Map<String, Object> params = new HashMap<>();
params.put("loginUser", loginUser);
Map<String, String> headers = new HashMap<>();
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
String url = String.format("/projects/%s/process-definition/query-process-definition-list", projectCode);
String url = String.format("/projects/%s/workflow-definition/query-workflow-definition-list", projectCode);
return requestClient.get(url, headers, params);
}
public HttpResponse queryProcessDefinitionByName(User loginUser, long projectCode, String name) {
public HttpResponse queryWorkflowDefinitionByName(User loginUser, long projectCode, String name) {
Map<String, Object> params = new HashMap<>();
params.put("loginUser", loginUser);
params.put("name", name);
@ -91,23 +91,23 @@ public class ProcessDefinitionPage {
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
String url = String.format("/projects/%s/process-definition/query-by-name", projectCode);
String url = String.format("/projects/%s/workflow-definition/query-by-name", projectCode);
return requestClient.get(url, headers, params);
}
public HttpResponse queryProcessDefinitionList(User loginUser, long projectCode) {
public HttpResponse queryWorkflowDefinitionList(User loginUser, long projectCode) {
Map<String, Object> params = new HashMap<>();
params.put("loginUser", loginUser);
Map<String, String> headers = new HashMap<>();
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
String url = String.format("/projects/%s/process-definition/list", projectCode);
String url = String.format("/projects/%s/workflow-definition/list", projectCode);
return requestClient.get(url, headers, params);
}
public HttpResponse releaseProcessDefinition(User loginUser, long projectCode, long code,
ReleaseState releaseState) {
public HttpResponse releaseWorkflowDefinition(User loginUser, long projectCode, long code,
ReleaseState releaseState) {
Map<String, Object> params = new HashMap<>();
params.put("loginUser", loginUser);
params.put("code", code);
@ -116,11 +116,11 @@ public class ProcessDefinitionPage {
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
String url = String.format("/projects/%s/process-definition/%s/release", projectCode, code);
String url = String.format("/projects/%s/workflow-definition/%s/release", projectCode, code);
return requestClient.post(url, headers, params);
}
public HttpResponse deleteProcessDefinitionByCode(User loginUser, long projectCode, long code) {
public HttpResponse deleteWorkflowDefinitionByCode(User loginUser, long projectCode, long code) {
Map<String, Object> params = new HashMap<>();
params.put("loginUser", loginUser);
params.put("code", code);
@ -128,7 +128,7 @@ public class ProcessDefinitionPage {
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
String url = String.format("/projects/%s/process-definition/%s", projectCode, code);
String url = String.format("/projects/%s/workflow-definition/%s", projectCode, code);
return requestClient.delete(url, headers, params);
}
}

View File

@ -32,11 +32,11 @@ import lombok.extern.slf4j.Slf4j;
@Slf4j
@AllArgsConstructor
public class ProcessInstancePage {
public class WorkflowInstancePage {
private String sessionId;
public HttpResponse queryProcessInstancesByTriggerCode(User loginUser, long projectCode, long triggerCode) {
public HttpResponse queryWorkflowInstancesByTriggerCode(User loginUser, long projectCode, long triggerCode) {
Map<String, Object> params = new HashMap<>();
params.put("loginUser", loginUser);
params.put("triggerCode", triggerCode);
@ -44,11 +44,11 @@ public class ProcessInstancePage {
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
String url = String.format("/projects/%s/process-instances/trigger", projectCode);
String url = String.format("/projects/%s/workflow-instances/trigger", projectCode);
return requestClient.get(url, headers, params);
}
public HttpResponse queryProcessInstanceList(User loginUser, long projectCode, int pageNo, int pageSize) {
public HttpResponse queryWorkflowInstanceList(User loginUser, long projectCode, int pageNo, int pageSize) {
Map<String, Object> params = new HashMap<>();
params.put("loginUser", loginUser);
params.put("pageNo", pageNo);
@ -57,39 +57,39 @@ public class ProcessInstancePage {
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
String url = String.format("/projects/%s/process-instances", projectCode);
String url = String.format("/projects/%s/workflow-instances", projectCode);
return requestClient.get(url, headers, params);
}
public HttpResponse queryTaskListByProcessId(User loginUser, long projectCode, long processInstanceId) {
public HttpResponse queryTaskListByWorkflowInstanceId(User loginUser, long projectCode, long workflowInstanceId) {
Map<String, Object> params = new HashMap<>();
params.put("loginUser", loginUser);
Map<String, String> headers = new HashMap<>();
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
String url = String.format("/projects/%s/process-instances/%s/tasks", projectCode, processInstanceId);
String url = String.format("/projects/%s/workflow-instances/%s/tasks", projectCode, workflowInstanceId);
return requestClient.get(url, headers, params);
}
public HttpResponse queryProcessInstanceById(User loginUser, long projectCode, long processInstanceId) {
public HttpResponse queryWorkflowInstanceById(User loginUser, long projectCode, long workflowInstanceId) {
Map<String, Object> params = new HashMap<>();
params.put("loginUser", loginUser);
Map<String, String> headers = new HashMap<>();
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
String url = String.format("/projects/%s/process-instances/%s", projectCode, processInstanceId);
String url = String.format("/projects/%s/workflow-instances/%s", projectCode, workflowInstanceId);
return requestClient.get(url, headers, params);
}
public HttpResponse deleteProcessInstanceById(User loginUser, long projectCode, long processInstanceId) {
public HttpResponse deleteWorkflowInstanceById(User loginUser, long projectCode, long workflowInstanceId) {
Map<String, Object> params = new HashMap<>();
params.put("loginUser", loginUser);
Map<String, String> headers = new HashMap<>();
headers.put(Constants.SESSION_ID_KEY, sessionId);
RequestClient requestClient = new RequestClient();
String url = String.format("/projects/%s/process-instances/%s", projectCode, processInstanceId);
String url = String.format("/projects/%s/workflow-instances/%s", projectCode, workflowInstanceId);
return requestClient.delete(url, headers, params);
}

View File

@ -1,5 +1,5 @@
[ {
"processDefinition" : {
"workflowDefinition" : {
"id" : 1,
"code" : 9752686452032,
"name" : "test",
@ -23,12 +23,12 @@
"warningGroupId" : null,
"executionType" : "PARALLEL"
},
"processTaskRelationList" : [ {
"workflowTaskRelationList" : [ {
"id" : 1,
"name" : "",
"processDefinitionVersion" : 1,
"workflowDefinitionVersion" : 1,
"projectCode" : 9752680865600,
"processDefinitionCode" : 9752686452032,
"workflowDefinitionCode" : 9752686452032,
"preTaskCode" : 0,
"preTaskVersion" : 0,
"postTaskCode" : 9752684723008,
@ -78,4 +78,4 @@
"operateTime" : "2023-06-01 20:41:02"
} ],
"schedule" : null
} ]
} ]

View File

@ -31,8 +31,8 @@
</modules>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
<maven.compiler.source>11</maven.compiler.source>
<maven.compiler.target>11</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<junit.version>5.7.2</junit.version>
@ -46,6 +46,7 @@
<guava.version>31.0.1-jre</guava.version>
<jackson.version>2.13.2</jackson.version>
<checkstyle.version>3.1.2</checkstyle.version>
<junit-pioneer.version>2.2.0</junit-pioneer.version>
</properties>
<dependencies>
@ -65,6 +66,14 @@
<artifactId>junit-jupiter</artifactId>
</dependency>
<dependency>
<groupId>org.junit-pioneer</groupId>
<artifactId>junit-pioneer</artifactId>
<version>${junit-pioneer.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>testcontainers</artifactId>

View File

@ -26,10 +26,9 @@ public final class AuditLogConstants {
public static final String CODE = "code";
public static final String CODES = "codes";
public static final String VERSION = "version";
public static final String PROCESS_DEFINITION_CODE = "processDefinitionCode";
public static final String PROCESS_DEFINITION_CODES = "processDefinitionCodes";
public static final String PROCESS_INSTANCE_IDS = "processInstanceIds";
public static final String PROCESS_INSTANCE_ID = "processInstanceId";
public static final String WORKFLOW_DEFINITION_CODES = "workflowDefinitionCodes";
public static final String WORKFLOW_INSTANCE_IDS = "workflowInstanceIds";
public static final String WORKFLOW_INSTANCE_ID = "workflowInstanceId";
public static final String WORKFLOW_DEFINITION_CODE = "workflowDefinitionCode";
public static final String TYPE = "type";
public static final String NAME = "name";

View File

@ -27,15 +27,14 @@ import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.ID;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.NAME;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.PRIORITY;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.PROCESS_DEFINITION_CODE;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.PROCESS_DEFINITION_CODES;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.PROCESS_INSTANCE_ID;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.PROCESS_INSTANCE_IDS;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.QUEUE_ID;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.TYPE;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.USER_ID;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.VERSION;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.WORKFLOW_DEFINITION_CODE;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.WORKFLOW_DEFINITION_CODES;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.WORKFLOW_INSTANCE_ID;
import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.WORKFLOW_INSTANCE_IDS;
import static org.apache.dolphinscheduler.common.enums.AuditModelType.ALARM_GROUP;
import static org.apache.dolphinscheduler.common.enums.AuditModelType.ALARM_INSTANCE;
import static org.apache.dolphinscheduler.common.enums.AuditModelType.CLUSTER;
@ -76,6 +75,7 @@ import static org.apache.dolphinscheduler.common.enums.AuditOperationType.START;
import static org.apache.dolphinscheduler.common.enums.AuditOperationType.SWITCH_VERSION;
import static org.apache.dolphinscheduler.common.enums.AuditOperationType.UPDATE;
import org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants;
import org.apache.dolphinscheduler.api.audit.operator.AuditOperator;
import org.apache.dolphinscheduler.api.audit.operator.impl.AlertGroupAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.AlertInstanceAuditOperatorImpl;
@ -83,7 +83,6 @@ import org.apache.dolphinscheduler.api.audit.operator.impl.ClusterAuditOperatorI
import org.apache.dolphinscheduler.api.audit.operator.impl.DatasourceAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.EnvironmentAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.K8SNamespaceAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.ProcessInstanceAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.ProjectAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.ResourceAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.ScheduleAuditOperatorImpl;
@ -95,6 +94,7 @@ import org.apache.dolphinscheduler.api.audit.operator.impl.TokenAuditOperatorImp
import org.apache.dolphinscheduler.api.audit.operator.impl.UserAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.WorkerGroupAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.WorkflowAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.WorkflowInstanceAuditOperatorImpl;
import org.apache.dolphinscheduler.api.audit.operator.impl.YarnQueueAuditOperatorImpl;
import org.apache.dolphinscheduler.common.enums.AuditModelType;
import org.apache.dolphinscheduler.common.enums.AuditOperationType;
@ -120,22 +120,24 @@ public enum AuditType {
WORKFLOW_EXPORT(WORKFLOW, EXPORT, WorkflowAuditOperatorImpl.class, new String[]{CODES}, new String[]{}),
WORKFLOW_DELETE(WORKFLOW, DELETE, WorkflowAuditOperatorImpl.class, new String[]{CODE}, new String[]{}),
WORKFLOW_BATCH_DELETE(WORKFLOW, BATCH_DELETE, WorkflowAuditOperatorImpl.class, new String[]{CODES}, new String[]{}),
WORKFLOW_START(WORKFLOW, START, WorkflowAuditOperatorImpl.class, new String[]{PROCESS_DEFINITION_CODE},
WORKFLOW_START(WORKFLOW, START, WorkflowAuditOperatorImpl.class,
new String[]{AuditLogConstants.WORKFLOW_DEFINITION_CODE},
new String[]{}),
WORKFLOW_BATCH_START(WORKFLOW, BATCH_START, WorkflowAuditOperatorImpl.class, new String[]{PROCESS_DEFINITION_CODES},
WORKFLOW_BATCH_START(WORKFLOW, BATCH_START, WorkflowAuditOperatorImpl.class,
new String[]{WORKFLOW_DEFINITION_CODES},
new String[]{}),
WORKFLOW_BATCH_RERUN(WORKFLOW, BATCH_RERUN, ProcessInstanceAuditOperatorImpl.class,
new String[]{PROCESS_INSTANCE_IDS},
WORKFLOW_BATCH_RERUN(WORKFLOW, BATCH_RERUN, WorkflowInstanceAuditOperatorImpl.class,
new String[]{WORKFLOW_INSTANCE_IDS},
new String[]{}),
WORKFLOW_EXECUTE(WORKFLOW, EXECUTE, ProcessInstanceAuditOperatorImpl.class, new String[]{PROCESS_INSTANCE_ID},
WORKFLOW_EXECUTE(WORKFLOW, EXECUTE, WorkflowInstanceAuditOperatorImpl.class, new String[]{WORKFLOW_INSTANCE_ID},
new String[]{}),
WORKFLOW_IMPORT(WORKFLOW, IMPORT, WorkflowAuditOperatorImpl.class, new String[]{}, new String[]{CODE}),
WORKFLOW_INSTANCE_UPDATE(WORKFLOW_INSTANCE, UPDATE, ProcessInstanceAuditOperatorImpl.class, new String[]{ID},
WORKFLOW_INSTANCE_UPDATE(WORKFLOW_INSTANCE, UPDATE, WorkflowInstanceAuditOperatorImpl.class, new String[]{ID},
new String[]{}),
WORKFLOW_INSTANCE_DELETE(WORKFLOW_INSTANCE, DELETE, ProcessInstanceAuditOperatorImpl.class, new String[]{ID},
WORKFLOW_INSTANCE_DELETE(WORKFLOW_INSTANCE, DELETE, WorkflowInstanceAuditOperatorImpl.class, new String[]{ID},
new String[]{}),
WORKFLOW_INSTANCE_BATCH_DELETE(WORKFLOW_INSTANCE, BATCH_DELETE, ProcessInstanceAuditOperatorImpl.class,
new String[]{PROCESS_INSTANCE_IDS}, new String[]{}),
WORKFLOW_INSTANCE_BATCH_DELETE(WORKFLOW_INSTANCE, BATCH_DELETE, WorkflowInstanceAuditOperatorImpl.class,
new String[]{WORKFLOW_INSTANCE_IDS}, new String[]{}),
TASK_CREATE(TASK, CREATE, TaskAuditOperatorImpl.class, new String[]{}, new String[]{CODE}),
TASK_UPDATE(TASK, UPDATE, TaskAuditOperatorImpl.class, new String[]{}, new String[]{CODE}),
@ -147,7 +149,8 @@ public enum AuditType {
TASK_INSTANCE_FORCE_SUCCESS(TASK_INSTANCE, FORCE_SUCCESS, TaskInstancesAuditOperatorImpl.class, new String[]{ID},
new String[]{}),
SCHEDULE_CREATE(SCHEDULE, CREATE, ScheduleAuditOperatorImpl.class, new String[]{PROCESS_DEFINITION_CODE},
SCHEDULE_CREATE(SCHEDULE, CREATE, ScheduleAuditOperatorImpl.class,
new String[]{AuditLogConstants.WORKFLOW_DEFINITION_CODE},
new String[]{ID}),
SCHEDULE_UPDATE(SCHEDULE, UPDATE, ScheduleAuditOperatorImpl.class, new String[]{ID}, new String[]{}),
SCHEDULE_ONLINE(SCHEDULE, ONLINE, ScheduleAuditOperatorImpl.class, new String[]{ID}, new String[]{}),

View File

@ -51,7 +51,7 @@ public class ScheduleAuditOperatorImpl extends BaseAuditOperator {
int id = (int) paramsMap.get(paramNameArr[0]);
Schedule schedule = scheduleMapper.selectById(id);
if (schedule != null) {
paramsMap.put(AuditLogConstants.CODE, schedule.getProcessDefinitionCode());
paramsMap.put(AuditLogConstants.CODE, schedule.getWorkflowDefinitionCode());
paramNameArr[0] = AuditLogConstants.CODE;
auditLogList.forEach(auditLog -> auditLog.setDetail(String.valueOf(id)));
}

View File

@ -49,8 +49,8 @@ public class WorkflowAuditOperatorImpl extends BaseAuditOperator {
protected void setObjectByParam(String[] paramNameArr, Map<String, Object> paramsMap,
List<AuditLog> auditLogList) {
if (paramNameArr[0].equals(AuditLogConstants.CODES)
|| paramNameArr[0].equals(AuditLogConstants.PROCESS_DEFINITION_CODES)
|| paramNameArr[0].equals(AuditLogConstants.PROCESS_INSTANCE_IDS)) {
|| paramNameArr[0].equals(AuditLogConstants.WORKFLOW_DEFINITION_CODES)
|| paramNameArr[0].equals(AuditLogConstants.WORKFLOW_INSTANCE_IDS)) {
super.setObjectByParamArr(paramNameArr, paramsMap, auditLogList);
} else {
super.setObjectByParam(paramNameArr, paramsMap, auditLogList);

View File

@ -35,7 +35,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@Service
public class ProcessInstanceAuditOperatorImpl extends BaseAuditOperator {
public class WorkflowInstanceAuditOperatorImpl extends BaseAuditOperator {
@Autowired
private WorkflowInstanceMapper workflowInstanceMapper;
@ -50,7 +50,7 @@ public class ProcessInstanceAuditOperatorImpl extends BaseAuditOperator {
@Override
protected void setObjectByParam(String[] paramNameArr, Map<String, Object> paramsMap,
List<AuditLog> auditLogList) {
if (paramNameArr[0].equals(AuditLogConstants.PROCESS_INSTANCE_IDS)) {
if (paramNameArr[0].equals(AuditLogConstants.WORKFLOW_INSTANCE_IDS)) {
super.setObjectByParamArr(paramNameArr, paramsMap, auditLogList);
} else {
super.setObjectByParam(paramNameArr, paramsMap, auditLogList);

View File

@ -86,13 +86,13 @@ public class DataAnalysisController extends BaseController {
dataAnalysisService.getTaskInstanceStateCountByProject(loginUser, projectCode, startDate, endDate));
}
@Operation(summary = "countProcessInstanceState", description = "COUNT_WORKFLOW_INSTANCE_NOTES")
@Operation(summary = "countWorkflowInstanceState", description = "COUNT_WORKFLOW_INSTANCE_NOTES")
@Parameters({
@Parameter(name = "startDate", description = "START_DATE", schema = @Schema(implementation = String.class)),
@Parameter(name = "endDate", description = "END_DATE", schema = @Schema(implementation = String.class)),
@Parameter(name = "projectCode", description = "PROJECT_CODE", schema = @Schema(implementation = long.class, example = "100"))
})
@GetMapping(value = "/process-state-count")
@GetMapping(value = "/workflow-state-count")
@ResponseStatus(HttpStatus.OK)
@ApiException(COUNT_WORKFLOW_INSTANCE_STATE_ERROR)
public Result<WorkflowInstanceCountVO> getWorkflowInstanceStateCount(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,

View File

@ -85,10 +85,10 @@ public class ExecutorController extends BaseController {
private ExecutorService execService;
/**
* execute process instance
* execute workflow instance
*
* @param loginUser login user
* @param processDefinitionCode process definition code
* @param workflowDefinitionCode workflow definition code
* @param scheduleTime schedule time when CommandType is COMPLEMENT_DATA there are two ways to transfer parameters 1.date range, for example:{"complementStartDate":"2022-01-01 12:12:12","complementEndDate":"2022-01-6 12:12:12"} 2.manual input, for example:{"complementScheduleDateList":"2022-01-01 00:00:00,2022-01-02 12:12:12,2022-01-03 12:12:12"}
* @param failureStrategy failure strategy
* @param startNodeList start nodes list
@ -97,16 +97,16 @@ public class ExecutorController extends BaseController {
* @param warningType warning type
* @param warningGroupId warning group id
* @param runMode run mode
* @param processInstancePriority process instance priority
* @param workflowInstancePriority workflow instance priority
* @param workerGroup worker group
* @param expectedParallelismNumber the expected parallelism number when execute complement in parallel mode
* @param testFlag testFlag
* @param executionOrder complement data in some kind of order
* @return start process result code
* @return start workflow result code
*/
@Operation(summary = "startProcessInstance", description = "RUN_WORKFLOW_INSTANCE_NOTES")
@Operation(summary = "startWorkflowInstance", description = "RUN_WORKFLOW_INSTANCE_NOTES")
@Parameters({
@Parameter(name = "processDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = Long.class), example = "100"),
@Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = Long.class), example = "100"),
@Parameter(name = "scheduleTime", description = "SCHEDULE_TIME", required = true, schema = @Schema(implementation = String.class), example = "2022-04-06 00:00:00,2022-04-06 00:00:00"),
@Parameter(name = "failureStrategy", description = "FAILURE_STRATEGY", required = true, schema = @Schema(implementation = FailureStrategy.class)),
@Parameter(name = "startNodeList", description = "START_NODE_LIST", schema = @Schema(implementation = String.class)),
@ -115,7 +115,7 @@ public class ExecutorController extends BaseController {
@Parameter(name = "warningType", description = "WARNING_TYPE", required = true, schema = @Schema(implementation = WarningType.class)),
@Parameter(name = "warningGroupId", description = "WARNING_GROUP_ID", schema = @Schema(implementation = int.class, example = "100")),
@Parameter(name = "runMode", description = "RUN_MODE", schema = @Schema(implementation = RunMode.class)),
@Parameter(name = "processInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", required = true, schema = @Schema(implementation = Priority.class)),
@Parameter(name = "workflowInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", required = true, schema = @Schema(implementation = Priority.class)),
@Parameter(name = "workerGroup", description = "WORKER_GROUP", schema = @Schema(implementation = String.class, example = "default")),
@Parameter(name = "tenantCode", description = "TENANT_CODE", schema = @Schema(implementation = String.class, example = "default")),
@Parameter(name = "environmentCode", description = "ENVIRONMENT_CODE", schema = @Schema(implementation = Long.class, example = "-1")),
@ -127,12 +127,12 @@ public class ExecutorController extends BaseController {
@Parameter(name = "allLevelDependent", description = "ALL_LEVEL_DEPENDENT", schema = @Schema(implementation = boolean.class, example = "false")),
@Parameter(name = "executionOrder", description = "EXECUTION_ORDER", schema = @Schema(implementation = ExecutionOrder.class))
})
@PostMapping(value = "start-process-instance")
@PostMapping(value = "start-workflow-instance")
@ResponseStatus(HttpStatus.OK)
@ApiException(START_WORKFLOW_INSTANCE_ERROR)
@OperatorLog(auditType = AuditType.WORKFLOW_START)
public Result<List<Integer>> triggerWorkflowDefinition(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "processDefinitionCode") long processDefinitionCode,
@RequestParam(value = "workflowDefinitionCode") long workflowDefinitionCode,
@RequestParam(value = "scheduleTime") String scheduleTime,
@RequestParam(value = "failureStrategy") FailureStrategy failureStrategy,
@RequestParam(value = "startNodeList", required = false) String startNodeList,
@ -141,7 +141,7 @@ public class ExecutorController extends BaseController {
@RequestParam(value = "warningType") WarningType warningType,
@RequestParam(value = "warningGroupId", required = false) Integer warningGroupId,
@RequestParam(value = "runMode", required = false) RunMode runMode,
@RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority,
@RequestParam(value = "workflowInstancePriority", required = false) Priority workflowInstancePriority,
@RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup,
@RequestParam(value = "tenantCode", required = false, defaultValue = "default") String tenantCode,
@RequestParam(value = "environmentCode", required = false, defaultValue = "-1") Long environmentCode,
@ -156,14 +156,14 @@ public class ExecutorController extends BaseController {
case START_PROCESS:
final WorkflowTriggerRequest workflowTriggerRequest = WorkflowTriggerRequest.builder()
.loginUser(loginUser)
.workflowDefinitionCode(processDefinitionCode)
.workflowDefinitionCode(workflowDefinitionCode)
.startNodes(startNodeList)
.failureStrategy(failureStrategy)
.taskDependType(taskDependType)
.execType(execType)
.warningType(warningType)
.warningGroupId(warningGroupId)
.workflowInstancePriority(processInstancePriority)
.workflowInstancePriority(workflowInstancePriority)
.workerGroup(workerGroup)
.tenantCode(tenantCode)
.environmentCode(environmentCode)
@ -176,7 +176,7 @@ public class ExecutorController extends BaseController {
case COMPLEMENT_DATA:
final WorkflowBackFillRequest workflowBackFillRequest = WorkflowBackFillRequest.builder()
.loginUser(loginUser)
.workflowDefinitionCode(processDefinitionCode)
.workflowDefinitionCode(workflowDefinitionCode)
.startNodes(startNodeList)
.failureStrategy(failureStrategy)
.taskDependType(taskDependType)
@ -184,7 +184,7 @@ public class ExecutorController extends BaseController {
.warningType(warningType)
.warningGroupId(warningGroupId)
.backfillRunMode(runMode)
.workflowInstancePriority(processInstancePriority)
.workflowInstancePriority(workflowInstancePriority)
.workerGroup(workerGroup)
.tenantCode(tenantCode)
.environmentCode(environmentCode)
@ -204,12 +204,12 @@ public class ExecutorController extends BaseController {
}
/**
* batch execute process instance
* If any processDefinitionCode cannot be found, the failure information is returned and the status is set to
* batch execute workflow instance
* If any workflowDefinitionCode cannot be found, the failure information is returned and the status is set to
* failed. The successful task will run normally and will not stop
*
* @param loginUser login user
* @param processDefinitionCodes process definition codes
* @param workflowDefinitionCodes workflow definition codes
* @param scheduleTime schedule time
* @param failureStrategy failure strategy
* @param startNodeList start nodes list
@ -218,17 +218,17 @@ public class ExecutorController extends BaseController {
* @param warningType warning type
* @param warningGroupId warning group id
* @param runMode run mode
* @param processInstancePriority process instance priority
* @param workflowInstancePriority workflow instance priority
* @param workerGroup worker group
* @param tenantCode tenant code
* @param expectedParallelismNumber the expected parallelism number when execute complement in parallel mode
* @param testFlag testFlag
* @param executionOrder complement data in some kind of order
* @return start process result code
* @return start workflow result code
*/
@Operation(summary = "batchStartProcessInstance", description = "BATCH_RUN_WORKFLOW_INSTANCE_NOTES")
@Operation(summary = "batchStartWorkflowInstance", description = "BATCH_RUN_WORKFLOW_INSTANCE_NOTES")
@Parameters({
@Parameter(name = "processDefinitionCodes", description = "WORKFLOW_DEFINITION_CODE_LIST", required = true, schema = @Schema(implementation = String.class, example = "1,2,3")),
@Parameter(name = "workflowDefinitionCodes", description = "WORKFLOW_DEFINITION_CODE_LIST", required = true, schema = @Schema(implementation = String.class, example = "1,2,3")),
@Parameter(name = "scheduleTime", description = "SCHEDULE_TIME", required = true, schema = @Schema(implementation = String.class, example = "2022-04-06 00:00:00,2022-04-06 00:00:00")),
@Parameter(name = "failureStrategy", description = "FAILURE_STRATEGY", required = true, schema = @Schema(implementation = FailureStrategy.class)),
@Parameter(name = "startNodeList", description = "START_NODE_LIST", schema = @Schema(implementation = String.class)),
@ -237,7 +237,7 @@ public class ExecutorController extends BaseController {
@Parameter(name = "warningType", description = "WARNING_TYPE", required = true, schema = @Schema(implementation = WarningType.class)),
@Parameter(name = "warningGroupId", description = "WARNING_GROUP_ID", required = true, schema = @Schema(implementation = int.class, example = "100")),
@Parameter(name = "runMode", description = "RUN_MODE", schema = @Schema(implementation = RunMode.class)),
@Parameter(name = "processInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", required = true, schema = @Schema(implementation = Priority.class)),
@Parameter(name = "workflowInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", required = true, schema = @Schema(implementation = Priority.class)),
@Parameter(name = "workerGroup", description = "WORKER_GROUP", schema = @Schema(implementation = String.class, example = "default")),
@Parameter(name = "tenantCode", description = "TENANT_CODE", schema = @Schema(implementation = String.class, example = "default")),
@Parameter(name = "environmentCode", description = "ENVIRONMENT_CODE", schema = @Schema(implementation = Long.class, example = "-1")),
@ -248,12 +248,12 @@ public class ExecutorController extends BaseController {
@Parameter(name = "allLevelDependent", description = "ALL_LEVEL_DEPENDENT", schema = @Schema(implementation = boolean.class, example = "false")),
@Parameter(name = "executionOrder", description = "EXECUTION_ORDER", schema = @Schema(implementation = ExecutionOrder.class))
})
@PostMapping(value = "batch-start-process-instance")
@PostMapping(value = "batch-start-workflow-instance")
@ResponseStatus(HttpStatus.OK)
@ApiException(BATCH_START_WORKFLOW_INSTANCE_ERROR)
@OperatorLog(auditType = AuditType.WORKFLOW_BATCH_START)
public Result<List<Integer>> batchTriggerWorkflowDefinitions(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "processDefinitionCodes") String processDefinitionCodes,
@RequestParam(value = "workflowDefinitionCodes") String workflowDefinitionCodes,
@RequestParam(value = "scheduleTime") String scheduleTime,
@RequestParam(value = "failureStrategy") FailureStrategy failureStrategy,
@RequestParam(value = "startNodeList", required = false) String startNodeList,
@ -262,7 +262,7 @@ public class ExecutorController extends BaseController {
@RequestParam(value = "warningType") WarningType warningType,
@RequestParam(value = "warningGroupId", required = false) Integer warningGroupId,
@RequestParam(value = "runMode", required = false) RunMode runMode,
@RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority,
@RequestParam(value = "workflowInstancePriority", required = false) Priority workflowInstancePriority,
@RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup,
@RequestParam(value = "tenantCode", required = false, defaultValue = "default") String tenantCode,
@RequestParam(value = "environmentCode", required = false, defaultValue = "-1") Long environmentCode,
@ -273,12 +273,12 @@ public class ExecutorController extends BaseController {
@RequestParam(value = "complementDependentMode", required = false) ComplementDependentMode complementDependentMode,
@RequestParam(value = "allLevelDependent", required = false, defaultValue = "false") boolean allLevelDependent,
@RequestParam(value = "executionOrder", required = false) ExecutionOrder executionOrder) {
List<Long> workflowDefinitionCodes = Arrays.stream(processDefinitionCodes.split(Constants.COMMA))
List<Long> workflowDefinitionCodeList = Arrays.stream(workflowDefinitionCodes.split(Constants.COMMA))
.map(Long::parseLong)
.collect(Collectors.toList());
List<Integer> result = new ArrayList<>();
for (Long workflowDefinitionCode : workflowDefinitionCodes) {
for (Long workflowDefinitionCode : workflowDefinitionCodeList) {
Result<List<Integer>> workflowInstanceIds = triggerWorkflowDefinition(loginUser,
workflowDefinitionCode,
scheduleTime,
@ -289,7 +289,7 @@ public class ExecutorController extends BaseController {
warningType,
warningGroupId,
runMode,
processInstancePriority,
workflowInstancePriority,
workerGroup,
tenantCode,
environmentCode,
@ -306,11 +306,11 @@ public class ExecutorController extends BaseController {
}
/**
* do action to process instance: pause, stop, repeat, recover from pause, recover from stop
* do action to workflow instance: pause, stop, repeat, recover from pause, recover from stop
*/
@Operation(summary = "execute", description = "EXECUTE_ACTION_TO_WORKFLOW_INSTANCE_NOTES")
@Parameters({
@Parameter(name = "processInstanceId", description = "WORKFLOW_INSTANCE_ID", required = true, schema = @Schema(implementation = int.class, example = "100")),
@Parameter(name = "workflowInstanceId", description = "WORKFLOW_INSTANCE_ID", required = true, schema = @Schema(implementation = int.class, example = "100")),
@Parameter(name = "executeType", description = "EXECUTE_TYPE", required = true, schema = @Schema(implementation = ExecuteType.class))
})
@PostMapping(value = "/execute")
@ -318,24 +318,24 @@ public class ExecutorController extends BaseController {
@ApiException(EXECUTE_WORKFLOW_INSTANCE_ERROR)
@OperatorLog(auditType = AuditType.WORKFLOW_EXECUTE)
public Result<Void> controlWorkflowInstance(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("processInstanceId") Integer processInstanceId,
@RequestParam("workflowInstanceId") Integer workflowInstanceId,
@RequestParam("executeType") ExecuteType executeType) {
execService.controlWorkflowInstance(loginUser, processInstanceId, executeType);
execService.controlWorkflowInstance(loginUser, workflowInstanceId, executeType);
return Result.success();
}
/**
* batch execute and do action to process instance
* batch execute and do action to workflow instance
*
* @param loginUser login user
* @param processInstanceIds process instance ids, delimiter by "," if more than one id
* @param workflowInstanceIds workflow instance ids, delimiter by "," if more than one id
* @param executeType execute type
* @return execute result code
*/
@Operation(summary = "batchExecute", description = "BATCH_EXECUTE_ACTION_TO_WORKFLOW_INSTANCE_NOTES")
@Parameters({
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true, schema = @Schema(implementation = int.class)),
@Parameter(name = "processInstanceIds", description = "PROCESS_INSTANCE_IDS", required = true, schema = @Schema(implementation = String.class)),
@Parameter(name = "workflowInstanceIds", description = "WORKFLOW_INSTANCE_IDS", required = true, schema = @Schema(implementation = String.class)),
@Parameter(name = "executeType", description = "EXECUTE_TYPE", required = true, schema = @Schema(implementation = ExecuteType.class))
})
@PostMapping(value = "/batch-execute")
@ -343,20 +343,20 @@ public class ExecutorController extends BaseController {
@ApiException(BATCH_EXECUTE_WORKFLOW_INSTANCE_ERROR)
@OperatorLog(auditType = AuditType.WORKFLOW_BATCH_RERUN)
public Result<Void> batchControlWorkflowInstance(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("processInstanceIds") String processInstanceIds,
@RequestParam("workflowInstanceIds") String workflowInstanceIds,
@RequestParam("executeType") ExecuteType executeType) {
String[] processInstanceIdArray = processInstanceIds.split(Constants.COMMA);
String[] workflowInstanceIdArray = workflowInstanceIds.split(Constants.COMMA);
List<String> errorMessage = new ArrayList<>();
for (String strProcessInstanceId : processInstanceIdArray) {
int processInstanceId = Integer.parseInt(strProcessInstanceId);
for (String strWorkflowInstanceId : workflowInstanceIdArray) {
int workflowInstanceId = Integer.parseInt(strWorkflowInstanceId);
try {
execService.controlWorkflowInstance(loginUser, processInstanceId, executeType);
log.info("Success do action {} on workflowInstance: {}", executeType, processInstanceId);
execService.controlWorkflowInstance(loginUser, workflowInstanceId, executeType);
log.info("Success do action {} on workflowInstance: {}", executeType, workflowInstanceId);
} catch (Exception e) {
errorMessage.add("Failed do action " + executeType + " on workflowInstance: " + processInstanceId
errorMessage.add("Failed do action " + executeType + " on workflowInstance: " + workflowInstanceId
+ "reason: " + e.getMessage());
log.error("Failed do action {} on workflowInstance: {}, error: {}", executeType, processInstanceId, e);
log.error("Failed do action {} on workflowInstance: {}, error: {}", executeType, workflowInstanceId, e);
}
}
if (org.apache.commons.collections4.CollectionUtils.isNotEmpty(errorMessage)) {
@ -417,18 +417,18 @@ public class ExecutorController extends BaseController {
}
/**
* do action to process instance: pause, stop, repeat, recover from pause, recover from stop
* do action to workflow instance: pause, stop, repeat, recover from pause, recover from stop
*
* @param loginUser login user
* @param projectCode project code
* @param processInstanceId process instance id
* @param workflowInstanceId workflow instance id
* @param startNodeList start node list
* @param taskDependType task depend type
* @return execute result code
*/
@Operation(summary = "execute-task", description = "EXECUTE_ACTION_TO_WORKFLOW_INSTANCE_NOTES")
@Parameters({
@Parameter(name = "processInstanceId", description = "WORKFLOW_INSTANCE_ID", required = true, schema = @Schema(implementation = int.class, example = "100")),
@Parameter(name = "workflowInstanceId", description = "WORKFLOW_INSTANCE_ID", required = true, schema = @Schema(implementation = int.class, example = "100")),
@Parameter(name = "startNodeList", description = "START_NODE_LIST", required = true, schema = @Schema(implementation = String.class)),
@Parameter(name = "taskDependType", description = "TASK_DEPEND_TYPE", required = true, schema = @Schema(implementation = TaskDependType.class))
})
@ -438,13 +438,13 @@ public class ExecutorController extends BaseController {
@OperatorLog(auditType = AuditType.WORKFLOW_EXECUTE)
public Result executeTask(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@RequestParam("processInstanceId") Integer processInstanceId,
@RequestParam("workflowInstanceId") Integer workflowInstanceId,
@RequestParam("startNodeList") String startNodeList,
@RequestParam("taskDependType") TaskDependType taskDependType) {
log.info("Start to execute task in process instance, projectCode:{}, processInstanceId:{}.",
log.info("Start to execute task in workflow instance, projectCode:{}, workflowInstanceId:{}.",
projectCode,
processInstanceId);
return execService.executeTask(loginUser, projectCode, processInstanceId, startNodeList, taskDependType);
workflowInstanceId);
return execService.executeTask(loginUser, projectCode, workflowInstanceId, startNodeList, taskDependType);
}
}

View File

@ -71,7 +71,7 @@ public class SchedulerController extends BaseController {
public static final String DEFAULT_WARNING_TYPE = "NONE";
public static final String DEFAULT_NOTIFY_GROUP_ID = "1";
public static final String DEFAULT_FAILURE_POLICY = "CONTINUE";
public static final String DEFAULT_PROCESS_INSTANCE_PRIORITY = "MEDIUM";
public static final String DEFAULT_WORKFLOW_INSTANCE_PRIORITY = "MEDIUM";
@Autowired
private SchedulerService schedulerService;
@ -81,19 +81,19 @@ public class SchedulerController extends BaseController {
*
* @param loginUser login user
* @param projectCode project code
* @param processDefinitionCode process definition code
* @param workflowDefinitionCode workflow definition code
* @param schedule scheduler
* @param warningType warning type
* @param warningGroupId warning group id
* @param failureStrategy failure strategy
* @param processInstancePriority process instance priority
* @param workflowInstancePriority workflow instance priority
* @param workerGroup worker group
* @param tenantCode tenant code
* @return create result code
*/
@Operation(summary = "createSchedule", description = "CREATE_SCHEDULE_NOTES")
@Parameters({
@Parameter(name = "processDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class, example = "100")),
@Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class, example = "100")),
@Parameter(name = "schedule", description = "SCHEDULE", schema = @Schema(implementation = String.class, example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','timezoneId':'America/Phoenix','crontab':'0 0 3/6 * * ? *'}")),
@Parameter(name = "warningType", description = "WARNING_TYPE", schema = @Schema(implementation = WarningType.class)),
@Parameter(name = "warningGroupId", description = "WARNING_GROUP_ID", schema = @Schema(implementation = int.class, example = "100")),
@ -101,7 +101,7 @@ public class SchedulerController extends BaseController {
@Parameter(name = "workerGroup", description = "WORKER_GROUP", schema = @Schema(implementation = String.class, example = "default")),
@Parameter(name = "tenantCode", description = "TENANT_CODE", schema = @Schema(implementation = String.class, example = "default")),
@Parameter(name = "environmentCode", description = "ENVIRONMENT_CODE", schema = @Schema(implementation = long.class)),
@Parameter(name = "processInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", schema = @Schema(implementation = Priority.class)),
@Parameter(name = "workflowInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", schema = @Schema(implementation = Priority.class)),
})
@PostMapping()
@ResponseStatus(HttpStatus.CREATED)
@ -109,7 +109,7 @@ public class SchedulerController extends BaseController {
@OperatorLog(auditType = AuditType.SCHEDULE_CREATE)
public Result createSchedule(@Parameter(hidden = true) @RequestAttribute(value = SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@RequestParam(value = "processDefinitionCode") long processDefinitionCode,
@RequestParam(value = "workflowDefinitionCode") long workflowDefinitionCode,
@RequestParam(value = "schedule") String schedule,
@RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType,
@RequestParam(value = "warningGroupId", required = false, defaultValue = DEFAULT_NOTIFY_GROUP_ID) int warningGroupId,
@ -117,16 +117,16 @@ public class SchedulerController extends BaseController {
@RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup,
@RequestParam(value = "tenantCode", required = false, defaultValue = "default") String tenantCode,
@RequestParam(value = "environmentCode", required = false, defaultValue = "-1") Long environmentCode,
@RequestParam(value = "processInstancePriority", required = false, defaultValue = DEFAULT_PROCESS_INSTANCE_PRIORITY) Priority processInstancePriority) {
@RequestParam(value = "workflowInstancePriority", required = false, defaultValue = DEFAULT_WORKFLOW_INSTANCE_PRIORITY) Priority workflowInstancePriority) {
Map<String, Object> result = schedulerService.insertSchedule(
loginUser,
projectCode,
processDefinitionCode,
workflowDefinitionCode,
schedule,
warningType,
warningGroupId,
failureStrategy,
processInstancePriority,
workflowInstancePriority,
workerGroup,
tenantCode,
environmentCode);
@ -146,7 +146,7 @@ public class SchedulerController extends BaseController {
* @param failureStrategy failure strategy
* @param workerGroup worker group
* @param tenantCode tenant code
* @param processInstancePriority process instance priority
* @param workflowInstancePriority workflow instance priority
* @return update result code
*/
@Operation(summary = "updateSchedule", description = "UPDATE_SCHEDULE_NOTES")
@ -158,7 +158,7 @@ public class SchedulerController extends BaseController {
@Parameter(name = "failureStrategy", description = "FAILURE_STRATEGY", schema = @Schema(implementation = FailureStrategy.class)),
@Parameter(name = "workerGroup", description = "WORKER_GROUP", schema = @Schema(implementation = String.class, example = "default")),
@Parameter(name = "tenantCode", description = "TENANT_CODE", schema = @Schema(implementation = String.class, example = "default")),
@Parameter(name = "processInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", schema = @Schema(implementation = Priority.class)),
@Parameter(name = "workflowInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", schema = @Schema(implementation = Priority.class)),
@Parameter(name = "environmentCode", description = "ENVIRONMENT_CODE", schema = @Schema(implementation = long.class)),
})
@PutMapping("/{id}")
@ -175,10 +175,10 @@ public class SchedulerController extends BaseController {
@RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup,
@RequestParam(value = "tenantCode", required = false, defaultValue = "default") String tenantCode,
@RequestParam(value = "environmentCode", required = false, defaultValue = "-1") Long environmentCode,
@RequestParam(value = "processInstancePriority", required = false, defaultValue = DEFAULT_PROCESS_INSTANCE_PRIORITY) Priority processInstancePriority) {
@RequestParam(value = "workflowInstancePriority", required = false, defaultValue = DEFAULT_WORKFLOW_INSTANCE_PRIORITY) Priority workflowInstancePriority) {
Map<String, Object> result = schedulerService.updateSchedule(loginUser, projectCode, id, schedule,
warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup, tenantCode,
warningType, warningGroupId, failureStrategy, workflowInstancePriority, workerGroup, tenantCode,
environmentCode);
return returnDataList(result);
}
@ -216,7 +216,7 @@ public class SchedulerController extends BaseController {
*
* @param loginUser login user
* @param projectCode project code
* @param processDefinitionCode process definition code
* @param workflowDefinitionCode workflow definition code
* @param pageNo page number
* @param pageSize page size
* @param searchVal search value
@ -224,7 +224,7 @@ public class SchedulerController extends BaseController {
*/
@Operation(summary = "queryScheduleListPaging", description = "QUERY_SCHEDULE_LIST_PAGING_NOTES")
@Parameters({
@Parameter(name = "processDefinitionId", description = "WORKFLOW_DEFINITION_ID", required = true, schema = @Schema(implementation = int.class, example = "100")),
@Parameter(name = "searchVal", description = "SEARCH_VAL", schema = @Schema(implementation = String.class)),
@Parameter(name = "pageNo", description = "PAGE_NO", schema = @Schema(implementation = int.class, example = "1")),
@Parameter(name = "pageSize", description = "PAGE_SIZE", schema = @Schema(implementation = int.class, example = "20"))
@ -233,13 +233,13 @@ public class SchedulerController extends BaseController {
@ApiException(QUERY_SCHEDULE_LIST_PAGING_ERROR)
public Result queryScheduleListPaging(@Parameter(hidden = true) @RequestAttribute(value = SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@RequestParam(value = "processDefinitionCode", required = false, defaultValue = "0") long processDefinitionCode,
@RequestParam(value = "workflowDefinitionCode", required = false, defaultValue = "0") long workflowDefinitionCode,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam("pageNo") Integer pageNo,
@RequestParam("pageSize") Integer pageSize) {
checkPageParams(pageNo, pageSize);
searchVal = ParameterUtils.handleEscapes(searchVal);
return schedulerService.querySchedule(loginUser, projectCode, processDefinitionCode, searchVal, pageNo,
return schedulerService.querySchedule(loginUser, projectCode, workflowDefinitionCode, searchVal, pageNo,
pageSize);
}
@ -304,49 +304,49 @@ public class SchedulerController extends BaseController {
}
/**
* update process definition schedule
* update workflow definition schedule
*
* @param loginUser login user
* @param projectCode project code
* @param processDefinitionCode process definition code
* @param workflowDefinitionCode workflow definition code
* @param schedule scheduler
* @param warningType warning type
* @param warningGroupId warning group id
* @param failureStrategy failure strategy
* @param workerGroup worker group
* @param processInstancePriority process instance priority
* @param workflowInstancePriority workflow instance priority
* @return update result code
*/
@Operation(summary = "updateScheduleByWorkflowDefinitionCode", description = "UPDATE_SCHEDULE_BY_PROCESS_DEFINITION_CODE_NOTES")
@Operation(summary = "updateScheduleByWorkflowDefinitionCode", description = "UPDATE_SCHEDULE_BY_WORKFLOW_DEFINITION_CODE_NOTES")
@Parameters({
@Parameter(name = "processDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class, example = "12345678")),
@Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class, example = "12345678")),
@Parameter(name = "schedule", description = "SCHEDULE", schema = @Schema(implementation = String.class, example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *'}")),
@Parameter(name = "warningType", description = "WARNING_TYPE", schema = @Schema(implementation = WarningType.class)),
@Parameter(name = "warningGroupId", description = "WARNING_GROUP_ID", schema = @Schema(implementation = int.class, example = "100")),
@Parameter(name = "failureStrategy", description = "FAILURE_STRATEGY", schema = @Schema(implementation = FailureStrategy.class)),
@Parameter(name = "workerGroup", description = "WORKER_GROUP", schema = @Schema(implementation = String.class, example = "default")),
@Parameter(name = "tenantCode", description = "TENANT_CODE", schema = @Schema(implementation = String.class, example = "default")),
@Parameter(name = "processInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", schema = @Schema(implementation = Priority.class)),
@Parameter(name = "workflowInstancePriority", description = "WORKFLOW_INSTANCE_PRIORITY", schema = @Schema(implementation = Priority.class)),
@Parameter(name = "environmentCode", description = "ENVIRONMENT_CODE", schema = @Schema(implementation = long.class)),
})
@PutMapping("/update/{code}")
@ResponseStatus(HttpStatus.OK)
@ApiException(UPDATE_SCHEDULE_ERROR)
@OperatorLog(auditType = AuditType.SCHEDULE_UPDATE)
public Result updateScheduleByProcessDefinitionCode(@Parameter(hidden = true) @RequestAttribute(value = SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@PathVariable(value = "code") long processDefinitionCode,
@RequestParam(value = "schedule") String schedule,
@RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType,
@RequestParam(value = "warningGroupId", required = false) int warningGroupId,
@RequestParam(value = "failureStrategy", required = false, defaultValue = "END") FailureStrategy failureStrategy,
@RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup,
@RequestParam(value = "tenantCode", required = false, defaultValue = "default") String tenantCode,
@RequestParam(value = "environmentCode", required = false, defaultValue = "-1") long environmentCode,
@RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) {
public Result updateScheduleByWorkflowDefinitionCode(@Parameter(hidden = true) @RequestAttribute(value = SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@PathVariable(value = "code") long workflowDefinitionCode,
@RequestParam(value = "schedule") String schedule,
@RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType,
@RequestParam(value = "warningGroupId", required = false) int warningGroupId,
@RequestParam(value = "failureStrategy", required = false, defaultValue = "END") FailureStrategy failureStrategy,
@RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup,
@RequestParam(value = "tenantCode", required = false, defaultValue = "default") String tenantCode,
@RequestParam(value = "environmentCode", required = false, defaultValue = "-1") long environmentCode,
@RequestParam(value = "workflowInstancePriority", required = false) Priority workflowInstancePriority) {
Map<String, Object> result = schedulerService.updateScheduleByWorkflowDefinitionCode(loginUser, projectCode,
processDefinitionCode, schedule,
warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup, tenantCode,
workflowDefinitionCode, schedule,
warningType, warningGroupId, failureStrategy, workflowInstancePriority, workerGroup, tenantCode,
environmentCode);
return returnDataList(result);
}

View File

@ -299,7 +299,7 @@ public class TaskGroupController extends BaseController {
*
* @param groupId ID for task group
* @param taskName Task Name
* @param processName Process instance name
* @param workflowInstanceName workflow instance name
* @param status Task queue status
* @param loginUser login user
* @param pageNo page number
@ -310,7 +310,7 @@ public class TaskGroupController extends BaseController {
@Parameters({
@Parameter(name = "groupId", description = "GROUP_ID", required = false, schema = @Schema(implementation = int.class, example = "1", defaultValue = "-1")),
@Parameter(name = "taskInstanceName", description = "TASK_INSTANCE_NAME", required = false, schema = @Schema(implementation = String.class, example = "taskName")),
@Parameter(name = "processInstanceName", description = "PROCESS_INSTANCE_NAME", required = false, schema = @Schema(implementation = String.class, example = "processName")),
@Parameter(name = "workflowInstanceName", description = "WORKFLOW_INSTANCE_NAME", required = false, schema = @Schema(implementation = String.class, example = "workflowInstanceName")),
@Parameter(name = "status", description = "TASK_GROUP_STATUS", required = false, schema = @Schema(implementation = int.class, example = "1")),
@Parameter(name = "pageNo", description = "PAGE_NO", required = true, schema = @Schema(implementation = int.class, example = "1")),
@Parameter(name = "pageSize", description = "PAGE_SIZE", required = true, schema = @Schema(implementation = int.class, example = "20"))
@ -321,14 +321,14 @@ public class TaskGroupController extends BaseController {
public Result queryTaskGroupQueues(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "groupId", required = false, defaultValue = "-1") Integer groupId,
@RequestParam(value = "taskInstanceName", required = false) String taskName,
@RequestParam(value = "processInstanceName", required = false) String processName,
@RequestParam(value = "workflowInstanceName", required = false) String workflowInstanceName,
@RequestParam(value = "status", required = false) Integer status,
@RequestParam("pageNo") Integer pageNo,
@RequestParam("pageSize") Integer pageSize) {
Map<String, Object> result = taskGroupQueueService.queryTasksByGroupId(
loginUser,
taskName,
processName,
workflowInstanceName,
status,
groupId,
pageNo,

View File

@ -69,7 +69,7 @@ public class TaskInstanceController extends BaseController {
*
* @param loginUser login user
* @param projectCode project code
* @param processInstanceId process instance id
* @param workflowInstanceId workflow instance id
* @param searchVal search value
* @param taskName task name
* @param stateType state type
@ -83,8 +83,8 @@ public class TaskInstanceController extends BaseController {
*/
@Operation(summary = "queryTaskListPaging", description = "QUERY_TASK_INSTANCE_LIST_PAGING_NOTES")
@Parameters({
@Parameter(name = "processInstanceId", description = "WORKFLOW_INSTANCE_ID", schema = @Schema(implementation = int.class, example = "100")),
@Parameter(name = "processInstanceName", description = "PROCESS_INSTANCE_NAME", schema = @Schema(implementation = String.class)),
@Parameter(name = "workflowInstanceId", description = "WORKFLOW_INSTANCE_ID", schema = @Schema(implementation = int.class, example = "100")),
@Parameter(name = "workflowInstanceName", description = "WORKFLOW_INSTANCE_NAME", schema = @Schema(implementation = String.class)),
@Parameter(name = "searchVal", description = "SEARCH_VAL", schema = @Schema(implementation = String.class)),
@Parameter(name = "taskName", description = "TASK_NAME", schema = @Schema(implementation = String.class)),
@Parameter(name = "taskCode", description = "TASK_CODE", schema = @Schema(implementation = Long.class)),
@ -102,9 +102,9 @@ public class TaskInstanceController extends BaseController {
@ApiException(QUERY_TASK_LIST_PAGING_ERROR)
public Result queryTaskListPaging(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@RequestParam(value = "processInstanceId", required = false, defaultValue = "0") Integer processInstanceId,
@RequestParam(value = "processInstanceName", required = false) String processInstanceName,
@RequestParam(value = "processDefinitionName", required = false) String processDefinitionName,
@RequestParam(value = "workflowInstanceId", required = false, defaultValue = "0") Integer workflowInstanceId,
@RequestParam(value = "workflowInstanceName", required = false) String workflowInstanceName,
@RequestParam(value = "workflowDefinitionName", required = false) String workflowDefinitionName,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam(value = "taskName", required = false) String taskName,
@RequestParam(value = "taskCode", required = false) Long taskCode,
@ -121,9 +121,9 @@ public class TaskInstanceController extends BaseController {
return taskInstanceService.queryTaskListPaging(
loginUser,
projectCode,
processInstanceId,
processInstanceName,
processDefinitionName,
workflowInstanceId,
workflowInstanceName,
workflowDefinitionName,
taskName,
taskCode,
executorName,

View File

@ -82,7 +82,7 @@ import io.swagger.v3.oas.annotations.tags.Tag;
*/
@Tag(name = "WORKFLOW_DEFINITION_TAG")
@RestController
@RequestMapping("projects/{projectCode}/process-definition")
@RequestMapping("projects/{projectCode}/workflow-definition")
@Slf4j
public class WorkflowDefinitionController extends BaseController {
@ -204,10 +204,10 @@ public class WorkflowDefinitionController extends BaseController {
public Result verifyWorkflowDefinitionName(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@RequestParam(value = "name", required = true) String name,
@RequestParam(value = "code", required = false, defaultValue = "0") long processDefinitionCode) {
@RequestParam(value = "workflowDefinitionCode", required = false, defaultValue = "0") long workflowDefinitionCode) {
Map<String, Object> result =
workflowDefinitionService.verifyWorkflowDefinitionName(loginUser, projectCode, name,
processDefinitionCode);
workflowDefinitionCode);
return returnDataList(result);
}
@ -581,7 +581,7 @@ public class WorkflowDefinitionController extends BaseController {
@Parameters({
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true, schema = @Schema(implementation = long.class, example = "100"))
})
@GetMapping(value = "/query-process-definition-list")
@GetMapping(value = "/query-workflow-definition-list")
@ResponseStatus(HttpStatus.OK)
@ApiException(GET_TASKS_LIST_BY_WORKFLOW_DEFINITION_CODE_ERROR)
public Result getWorkflowListByProjectCode(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ -600,20 +600,20 @@ public class WorkflowDefinitionController extends BaseController {
@Operation(summary = "getTaskListByWorkflowDefinitionCode", description = "GET_TASK_LIST_BY_WORKFLOW_CODE_NOTES")
@Parameters({
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true, schema = @Schema(implementation = long.class, example = "100")),
@Parameter(name = "processDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class, example = "100")),
@Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class, example = "100")),
})
@GetMapping(value = "/query-task-definition-list")
@ResponseStatus(HttpStatus.OK)
@ApiException(GET_TASKS_LIST_BY_WORKFLOW_DEFINITION_CODE_ERROR)
public Result getTaskListByWorkflowDefinitionCode(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@RequestParam(value = "processDefinitionCode") Long processDefinitionCode) {
@RequestParam(value = "workflowDefinitionCode") Long workflowDefinitionCode) {
Map<String, Object> result = workflowDefinitionService
.queryTaskDefinitionListByWorkflowDefinitionCode(projectCode, processDefinitionCode);
.queryTaskDefinitionListByWorkflowDefinitionCode(projectCode, workflowDefinitionCode);
return returnDataList(result);
}
@Operation(summary = "deleteByCode", description = "DELETE_WORKFLOW_DEFINITION_BY_ID_NOTES")
@Operation(summary = "deleteByWorkflowDefinitionCode", description = "DELETE_WORKFLOW_DEFINITION_BY_ID_NOTES")
@Parameters({
@Parameter(name = "code", description = "WORKFLOW_DEFINITION_CODE", schema = @Schema(implementation = int.class, example = "100"))
})

View File

@ -67,7 +67,7 @@ import io.swagger.v3.oas.annotations.tags.Tag;
*/
@Tag(name = "WORKFLOW_INSTANCE_TAG")
@RestController
@RequestMapping("/projects/{projectCode}/process-instances")
@RequestMapping("/projects/{projectCode}/workflow-instances")
@Slf4j
public class WorkflowInstanceController extends BaseController {
@ -81,7 +81,7 @@ public class WorkflowInstanceController extends BaseController {
* @param projectCode project code
* @param pageNo page number
* @param pageSize page size
* @param processDefineCode process definition code
* @param workflowDefinitionCode workflow definition code
* @param searchVal search value
* @param stateType state type
* @param host host
@ -92,7 +92,7 @@ public class WorkflowInstanceController extends BaseController {
*/
@Operation(summary = "queryWorkflowInstanceListPaging", description = "QUERY_WORKFLOW_INSTANCE_LIST_NOTES")
@Parameters({
@Parameter(name = "processDefineCode", description = "WORKFLOW_DEFINITION_CODE", schema = @Schema(implementation = long.class, example = "100")),
@Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", schema = @Schema(implementation = long.class, example = "100")),
@Parameter(name = "searchVal", description = "SEARCH_VAL", schema = @Schema(implementation = String.class)),
@Parameter(name = "executorName", description = "EXECUTOR_NAME", schema = @Schema(implementation = String.class)),
@Parameter(name = "stateType", description = "EXECUTION_STATUS", schema = @Schema(implementation = WorkflowExecutionStatus.class)),
@ -107,7 +107,7 @@ public class WorkflowInstanceController extends BaseController {
@ApiException(Status.QUERY_WORKFLOW_INSTANCE_LIST_PAGING_ERROR)
public Result queryWorkflowInstanceList(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@RequestParam(value = "processDefineCode", required = false, defaultValue = "0") long processDefineCode,
@RequestParam(value = "workflowDefinitionCode", required = false, defaultValue = "0") long workflowDefinitionCode,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam(value = "executorName", required = false) String executorName,
@RequestParam(value = "stateType", required = false) WorkflowExecutionStatus stateType,
@ -120,7 +120,8 @@ public class WorkflowInstanceController extends BaseController {
checkPageParams(pageNo, pageSize);
searchVal = ParameterUtils.handleEscapes(searchVal);
return workflowInstanceService.queryWorkflowInstanceList(loginUser, projectCode, processDefineCode, startTime,
return workflowInstanceService.queryWorkflowInstanceList(loginUser, projectCode, workflowDefinitionCode,
startTime,
endTime,
searchVal, executorName, stateType, host, otherParamsJson, pageNo, pageSize);
}
@ -153,7 +154,7 @@ public class WorkflowInstanceController extends BaseController {
*
* @param loginUser login user
* @param projectCode project code
* @param taskRelationJson process task relation json
* @param taskRelationJson workflow task relation json
* @param taskDefinitionJson taskDefinitionJson
* @param id workflow instance id
* @param scheduleTime schedule time
@ -168,9 +169,9 @@ public class WorkflowInstanceController extends BaseController {
@Parameter(name = "id", description = "WORKFLOW_INSTANCE_ID", required = true, schema = @Schema(implementation = int.class, example = "1")),
@Parameter(name = "scheduleTime", description = "SCHEDULE_TIME", schema = @Schema(implementation = String.class)),
@Parameter(name = "syncDefine", description = "SYNC_DEFINE", required = true, schema = @Schema(implementation = boolean.class, example = "false")),
@Parameter(name = "globalParams", description = "PROCESS_GLOBAL_PARAMS", schema = @Schema(implementation = String.class, example = "[]")),
@Parameter(name = "globalParams", description = "WORKFLOW_GLOBAL_PARAMS", schema = @Schema(implementation = String.class, example = "[]")),
@Parameter(name = "locations", description = "WORKFLOW_INSTANCE_LOCATIONS", schema = @Schema(implementation = String.class)),
@Parameter(name = "timeout", description = "PROCESS_TIMEOUT", schema = @Schema(implementation = int.class, example = "0")),
@Parameter(name = "timeout", description = "WORKFLOW_TIMEOUT", schema = @Schema(implementation = int.class, example = "0")),
})
@PutMapping(value = "/{id}")
@ResponseStatus(HttpStatus.OK)
@ -223,7 +224,7 @@ public class WorkflowInstanceController extends BaseController {
* @param endTime end time
* @return list of workflow instance
*/
@Operation(summary = "queryTopNLongestRunningWorkflowInstance", description = "QUERY_TOPN_LONGEST_RUNNING_PROCESS_INSTANCE_NOTES")
@Operation(summary = "queryTopNLongestRunningWorkflowInstance", description = "QUERY_TOPN_LONGEST_RUNNING_WORKFLOW_INSTANCE_NOTES")
@Parameters({
@Parameter(name = "size", description = "WORKFLOW_INSTANCE_SIZE", required = true, schema = @Schema(implementation = int.class, example = "10")),
@Parameter(name = "startTime", description = "WORKFLOW_INSTANCE_START_TIME", required = true, schema = @Schema(implementation = String.class)),
@ -381,13 +382,13 @@ public class WorkflowInstanceController extends BaseController {
*
* @param loginUser login user
* @param projectCode project code
* @param processInstanceIds workflow instance id
 * @param workflowInstanceIds workflow instance ids
* @return delete result code
*/
@Operation(summary = "batchDeleteWorkflowInstanceByIds", description = "BATCH_DELETE_WORKFLOW_INSTANCE_BY_IDS_NOTES")
@Parameters({
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true, schema = @Schema(implementation = int.class)),
@Parameter(name = "processInstanceIds", description = "PROCESS_INSTANCE_IDS", required = true, schema = @Schema(implementation = String.class)),
@Parameter(name = "workflowInstanceIds", description = "WORKFLOW_INSTANCE_IDS", required = true, schema = @Schema(implementation = String.class)),
})
@PostMapping(value = "/batch-delete")
@ResponseStatus(HttpStatus.OK)
@ -395,21 +396,21 @@ public class WorkflowInstanceController extends BaseController {
@OperatorLog(auditType = AuditType.WORKFLOW_INSTANCE_BATCH_DELETE)
public Result batchDeleteWorkflowInstanceByIds(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@PathVariable long projectCode,
@RequestParam("processInstanceIds") String processInstanceIds) {
@RequestParam("workflowInstanceIds") String workflowInstanceIds) {
// task queue
Map<String, Object> result = new HashMap<>();
List<String> deleteFailedIdList = new ArrayList<>();
if (!StringUtils.isEmpty(processInstanceIds)) {
String[] processInstanceIdArray = processInstanceIds.split(Constants.COMMA);
if (!StringUtils.isEmpty(workflowInstanceIds)) {
String[] workflowInstanceIdArray = workflowInstanceIds.split(Constants.COMMA);
for (String strProcessInstanceId : processInstanceIdArray) {
int processInstanceId = Integer.parseInt(strProcessInstanceId);
for (String strWorkflowInstanceId : workflowInstanceIdArray) {
int workflowInstanceId = Integer.parseInt(strWorkflowInstanceId);
try {
workflowInstanceService.deleteWorkflowInstanceById(loginUser, processInstanceId);
workflowInstanceService.deleteWorkflowInstanceById(loginUser, workflowInstanceId);
} catch (Exception e) {
log.error("Delete workflow instance: {} error", strProcessInstanceId, e);
log.error("Delete workflow instance: {} error", strWorkflowInstanceId, e);
deleteFailedIdList
.add(MessageFormat.format(Status.WORKFLOW_INSTANCE_ERROR.getMsg(), strProcessInstanceId));
.add(MessageFormat.format(Status.WORKFLOW_INSTANCE_ERROR.getMsg(), strWorkflowInstanceId));
}
}
}

View File

@ -75,10 +75,10 @@ public class WorkflowLineageController extends BaseController {
@ApiException(QUERY_WORKFLOW_LINEAGE_ERROR)
public Result<List<WorkFlowRelationDetail>> queryWorkFlowLineageByName(@Parameter(hidden = true) @RequestAttribute(value = SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@RequestParam(value = "processDefinitionName", required = false) String processDefinitionName) {
processDefinitionName = ParameterUtils.handleEscapes(processDefinitionName);
@RequestParam(value = "workflowDefinitionName", required = false) String workflowDefinitionName) {
workflowDefinitionName = ParameterUtils.handleEscapes(workflowDefinitionName);
List<WorkFlowRelationDetail> workFlowLineages =
workflowLineageService.queryWorkFlowLineageByName(projectCode, processDefinitionName);
workflowLineageService.queryWorkFlowLineageByName(projectCode, workflowDefinitionName);
return Result.success(workFlowLineages);
}
@ -117,14 +117,14 @@ public class WorkflowLineageController extends BaseController {
*
* @param loginUser login user
 * @param projectCode project code which taskCode belongs to
* @param processDefinitionCode project code which taskCode belong
 * @param workflowDefinitionCode workflow definition code which taskCode belongs to
* @param taskCode task definition code
* @return Result of task can be deleted or not
*/
@Operation(summary = "verifyTaskCanDelete", description = "VERIFY_TASK_CAN_DELETE")
@Parameters({
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true, schema = @Schema(implementation = long.class)),
@Parameter(name = "processDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class)),
@Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class)),
@Parameter(name = "taskCode", description = "TASK_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class, example = "123456789")),
})
@PostMapping(value = "/tasks/verify-delete")
@ -132,11 +132,11 @@ public class WorkflowLineageController extends BaseController {
@ApiException(TASK_WITH_DEPENDENT_ERROR)
public Result<Map<String, Object>> verifyTaskCanDelete(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@RequestParam(value = "processDefinitionCode", required = true) long processDefinitionCode,
@RequestParam(value = "taskCode", required = true) long taskCode) {
@RequestParam(value = "workflowDefinitionCode") long workflowDefinitionCode,
@RequestParam(value = "taskCode") long taskCode) {
Result<Map<String, Object>> result = new Result<>();
Optional<String> taskDepMsg =
workflowLineageService.taskDependentMsg(projectCode, processDefinitionCode, taskCode);
workflowLineageService.taskDependentMsg(projectCode, workflowDefinitionCode, taskCode);
if (taskDepMsg.isPresent()) {
throw new ServiceException(taskDepMsg.get());
}

View File

@ -55,7 +55,7 @@ import io.swagger.v3.oas.annotations.tags.Tag;
*/
@Tag(name = "WORKFLOW_TASK_RELATION_TAG")
@RestController
@RequestMapping("projects/{projectCode}/process-task-relation")
@RequestMapping("projects/{projectCode}/workflow-task-relation")
public class WorkflowTaskRelationController extends BaseController {
@Autowired
@ -66,7 +66,7 @@ public class WorkflowTaskRelationController extends BaseController {
*
* @param loginUser login user
* @param projectCode project code
* @param processDefinitionCode processDefinitionCode
* @param workflowDefinitionCode workflowDefinitionCode
* @param preTaskCode preTaskCode
* @param postTaskCode postTaskCode
* @return create result code
@ -74,26 +74,26 @@ public class WorkflowTaskRelationController extends BaseController {
@Operation(summary = "save", description = "CREATE_WORKFLOW_TASK_RELATION_NOTES")
@Parameters({
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true, schema = @Schema(implementation = long.class)),
@Parameter(name = "processDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class)),
@Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class)),
@Parameter(name = "preTaskCode", description = "PRE_TASK_CODE", required = true, schema = @Schema(implementation = long.class)),
@Parameter(name = "postTaskCode", description = "POST_TASK_CODE", required = true, schema = @Schema(implementation = long.class))
})
@PostMapping()
@ResponseStatus(HttpStatus.CREATED)
@ApiException(CREATE_WORKFLOW_TASK_RELATION_ERROR)
public Result createProcessTaskRelation(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@RequestParam(name = "processDefinitionCode", required = true) long processDefinitionCode,
@RequestParam(name = "preTaskCode", required = true) long preTaskCode,
@RequestParam(name = "postTaskCode", required = true) long postTaskCode) {
public Result createWorkflowTaskRelation(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@RequestParam(name = "workflowDefinitionCode", required = true) long workflowDefinitionCode,
@RequestParam(name = "preTaskCode", required = true) long preTaskCode,
@RequestParam(name = "postTaskCode", required = true) long postTaskCode) {
Map<String, Object> result = new HashMap<>();
if (postTaskCode == 0L) {
putMsg(result, DATA_IS_NOT_VALID, "postTaskCode");
} else if (processDefinitionCode == 0L) {
putMsg(result, DATA_IS_NOT_VALID, "processDefinitionCode");
} else if (workflowDefinitionCode == 0L) {
putMsg(result, DATA_IS_NOT_VALID, "workflowDefinitionCode");
} else {
result = workflowTaskRelationService.createWorkflowTaskRelation(loginUser, projectCode,
processDefinitionCode,
workflowDefinitionCode,
preTaskCode, postTaskCode);
}
return returnDataList(result);
@ -104,25 +104,25 @@ public class WorkflowTaskRelationController extends BaseController {
*
* @param loginUser login user
* @param projectCode project code
* @param processDefinitionCode process definition code
* @param workflowDefinitionCode workflow definition code
* @param taskCode the post task code
* @return delete result code
*/
@Operation(summary = "deleteRelation", description = "DELETE_WORKFLOW_TASK_RELATION_NOTES")
@Parameters({
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true, schema = @Schema(implementation = long.class)),
@Parameter(name = "processDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class)),
@Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class)),
@Parameter(name = "taskCode", description = "TASK_CODE", required = true, schema = @Schema(implementation = long.class))
})
@DeleteMapping(value = "/{taskCode}")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_TASK_WORKFLOW_RELATION_ERROR)
public Result deleteTaskProcessRelation(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@RequestParam(name = "processDefinitionCode", required = true) long processDefinitionCode,
@PathVariable("taskCode") long taskCode) {
public Result deleteTaskWorkflowRelation(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@RequestParam(name = "workflowDefinitionCode") long workflowDefinitionCode,
@PathVariable("taskCode") long taskCode) {
return returnDataList(workflowTaskRelationService.deleteTaskWorkflowRelation(loginUser, projectCode,
processDefinitionCode, taskCode));
workflowDefinitionCode, taskCode));
}
/**
@ -226,7 +226,7 @@ public class WorkflowTaskRelationController extends BaseController {
*
* @param loginUser login user
* @param projectCode project code
* @param processDefinitionCode process definition code
* @param workflowDefinitionCode workflow definition code
* @param preTaskCode pre task code
* @param postTaskCode post task code
* @return delete result code
@ -234,19 +234,19 @@ public class WorkflowTaskRelationController extends BaseController {
@Operation(summary = "deleteEdge", description = "DELETE_EDGE_NOTES")
@Parameters({
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true, schema = @Schema(implementation = long.class)),
@Parameter(name = "processDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class)),
@Parameter(name = "workflowDefinitionCode", description = "WORKFLOW_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class)),
@Parameter(name = "preTaskCode", description = "PRE_TASK_CODE", required = true, schema = @Schema(implementation = long.class)),
@Parameter(name = "postTaskCode", description = "POST_TASK_CODE", required = true, schema = @Schema(implementation = long.class))
})
@DeleteMapping(value = "/{processDefinitionCode}/{preTaskCode}/{postTaskCode}")
@DeleteMapping(value = "/{workflowDefinitionCode}/{preTaskCode}/{postTaskCode}")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_EDGE_ERROR)
public Result deleteEdge(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@PathVariable long processDefinitionCode,
@PathVariable long workflowDefinitionCode,
@PathVariable long preTaskCode,
@PathVariable long postTaskCode) {
return returnDataList(workflowTaskRelationService.deleteEdge(loginUser, projectCode, processDefinitionCode,
return returnDataList(workflowTaskRelationService.deleteEdge(loginUser, projectCode, workflowDefinitionCode,
preTaskCode, postTaskCode));
}

View File

@ -132,7 +132,7 @@ public class StatisticsV2Controller extends BaseController {
* @param statisticsStateRequest statisticsStateRequest
* @return workflow count in project code
*/
@Operation(summary = "countDefinitionV2ByUserId", description = "COUNT_PROCESS_DEFINITION_V2_BY_USERID_NOTES")
@Operation(summary = "countDefinitionV2ByUserId", description = "COUNT_WORKFLOW_DEFINITION_V2_BY_USERID_NOTES")
@GetMapping(value = "/workflows/users/count")
@ResponseStatus(HttpStatus.OK)
@ApiException(COUNT_WORKFLOW_DEFINITION_USER_ERROR)
@ -149,7 +149,7 @@ public class StatisticsV2Controller extends BaseController {
* @param userId userId
* @return workflow count in project code
*/
@Operation(summary = "countDefinitionV2ByUser", description = "COUNT_PROCESS_DEFINITION_V2_BY_USER_NOTES")
@Operation(summary = "countDefinitionV2ByUser", description = "COUNT_WORKFLOW_DEFINITION_V2_BY_USER_NOTES")
@GetMapping(value = "/workflows/users/{userId}/count")
@ResponseStatus(HttpStatus.OK)
@ApiException(COUNT_WORKFLOW_DEFINITION_USER_ERROR)
@ -167,7 +167,7 @@ public class StatisticsV2Controller extends BaseController {
* @param releaseState releaseState
* @return workflow count in project code
*/
@Operation(summary = "countDefinitionV2ByUser", description = "COUNT_PROCESS_DEFINITION_V2_BY_USER_NOTES")
@Operation(summary = "countDefinitionV2ByUser", description = "COUNT_WORKFLOW_DEFINITION_V2_BY_USER_NOTES")
@GetMapping(value = "/workflows/users/{userId}/{releaseState}/count")
@ResponseStatus(HttpStatus.OK)
@ApiException(COUNT_WORKFLOW_DEFINITION_USER_ERROR)

View File

@ -73,8 +73,8 @@ public class TaskInstanceV2Controller extends BaseController {
*/
@Operation(summary = "queryTaskListPaging", description = "QUERY_TASK_INSTANCE_LIST_PAGING_NOTES")
@Parameters({
@Parameter(name = "processInstanceId", description = "WORKFLOW_INSTANCE_ID", schema = @Schema(implementation = int.class), example = "100"),
@Parameter(name = "processInstanceName", description = "PROCESS_INSTANCE_NAME", schema = @Schema(implementation = String.class)),
@Parameter(name = "workflowInstanceId", description = "WORKFLOW_INSTANCE_ID", schema = @Schema(implementation = int.class), example = "100"),
@Parameter(name = "workflowInstanceName", description = "WORKFLOW_INSTANCE_NAME", schema = @Schema(implementation = String.class)),
@Parameter(name = "searchVal", description = "SEARCH_VAL", schema = @Schema(implementation = String.class)),
@Parameter(name = "taskName", description = "TASK_NAME", schema = @Schema(implementation = String.class)),
@Parameter(name = "taskCode", description = "TASK_CODE", schema = @Schema(implementation = Long.class)),
@ -97,8 +97,8 @@ public class TaskInstanceV2Controller extends BaseController {
String searchVal = ParameterUtils.handleEscapes(taskInstanceQueryReq.getSearchVal());
return taskInstanceService.queryTaskListPaging(loginUser, projectCode,
taskInstanceQueryReq.getProcessInstanceId(), taskInstanceQueryReq.getProcessInstanceName(),
taskInstanceQueryReq.getProcessDefinitionName(),
taskInstanceQueryReq.getWorkflowInstanceId(), taskInstanceQueryReq.getWorkflowInstanceName(),
taskInstanceQueryReq.getWorkflowDefinitionName(),
taskInstanceQueryReq.getTaskName(), taskInstanceQueryReq.getTaskCode(),
taskInstanceQueryReq.getExecutorName(),
taskInstanceQueryReq.getStartTime(), taskInstanceQueryReq.getEndTime(), searchVal,

View File

@ -52,22 +52,19 @@ import io.swagger.v3.oas.annotations.Parameters;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.tags.Tag;
/**
* process task relation controller
*/
@Tag(name = "WORKFLOW_TASK_RELATION_TAG")
@RestController
@RequestMapping("v2/relations")
public class ProcessTaskRelationV2Controller extends BaseController {
public class WorkflowTaskRelationV2Controller extends BaseController {
@Autowired
private WorkflowTaskRelationService workflowTaskRelationService;
/**
* create resource process task relation
* create resource workflow task relation
*
* @param loginUser login user
* @param TaskRelationCreateRequest process task definition json contains the object you want to create
* @param TaskRelationCreateRequest workflow task definition json contains the object you want to create
* @return Result object created
*/
@Operation(summary = "create", description = "CREATE_WORKFLOW_TASK_RELATION_NOTES")
@ -82,7 +79,7 @@ public class ProcessTaskRelationV2Controller extends BaseController {
}
/**
* delete resource process task relation
* delete resource workflow task relation
*
* @param loginUser login user
* @param codePair code pair you want to delete the task relation, use `upstream,downstream` as example, will delete exists relation upstream -> downstream, throw error if not exists
@ -111,7 +108,7 @@ public class ProcessTaskRelationV2Controller extends BaseController {
* @param taskRelationUpdateUpstreamRequest workflowUpdateRequest
* @return ResourceResponse object updated
*/
@Operation(summary = "update", description = "UPDATE_PROCESS_TASK_RELATION_NOTES")
@Operation(summary = "update", description = "UPDATE_WORKFLOW_TASK_RELATION_NOTES")
@Parameters({
@Parameter(name = "code", description = "DOWNSTREAM_TASK_DEFINITION_CODE", schema = @Schema(implementation = long.class, example = "123456", required = true))
})

View File

@ -86,7 +86,7 @@ public class WorkflowV2Controller extends BaseController {
* Delete workflow by code
*
* @param loginUser login user
* @param code process definition code
* @param code workflow definition code
* @return Result result object delete
*/
@Operation(summary = "delete", description = "DELETE_WORKFLOWS_NOTES")
@ -152,8 +152,8 @@ public class WorkflowV2Controller extends BaseController {
@ApiException(QUERY_WORKFLOW_DEFINITION_LIST)
public Result<PageInfo<WorkflowDefinition>> filterWorkflows(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestBody WorkflowFilterRequest workflowFilterRequest) {
PageInfo<WorkflowDefinition> processDefinitions =
PageInfo<WorkflowDefinition> workflowDefinitions =
workflowDefinitionService.filterWorkflowDefinition(loginUser, workflowFilterRequest);
return Result.success(processDefinitions);
return Result.success(workflowDefinitions);
}
}

View File

@ -22,21 +22,18 @@ import java.util.List;
import lombok.Data;
/**
* ClusterDto
*/
@Data
public class ClusterDto {
private int id;
/**
* clluster code
* cluster code
*/
private Long code;
/**
* clluster name
* cluster name
*/
private String name;
@ -47,7 +44,7 @@ public class ClusterDto {
private String description;
private List<String> processDefinitions;
private List<String> workflowDefinitions;
/**
* operator user id

View File

@ -20,14 +20,13 @@ package org.apache.dolphinscheduler.api.dto;
import org.apache.dolphinscheduler.dao.entity.DagData;
import org.apache.dolphinscheduler.dao.entity.Schedule;
/**
* DagDataSchedule
*/
import lombok.Data;
import lombok.EqualsAndHashCode;
@EqualsAndHashCode(callSuper = true)
@Data
public class DagDataSchedule extends DagData {
/**
* schedule
*/
private Schedule schedule;
public DagDataSchedule() {
@ -35,16 +34,9 @@ public class DagDataSchedule extends DagData {
public DagDataSchedule(DagData dagData) {
super();
this.setProcessDefinition(dagData.getProcessDefinition());
this.setWorkflowDefinition(dagData.getWorkflowDefinition());
this.setTaskDefinitionList(dagData.getTaskDefinitionList());
this.setProcessTaskRelationList(dagData.getProcessTaskRelationList());
this.setWorkflowTaskRelationList(dagData.getWorkflowTaskRelationList());
}
public Schedule getSchedule() {
return schedule;
}
public void setSchedule(Schedule schedule) {
this.schedule = schedule;
}
}

View File

@ -21,9 +21,9 @@ import org.apache.dolphinscheduler.dao.model.WorkflowDefinitionCountDto;
import java.util.List;
/**
* user process define dto
*/
import lombok.Data;
@Data
public class DefineUserDto {
private int count;
@ -38,19 +38,4 @@ public class DefineUserDto {
this.userList = defineGroupByUsers;
}
public int getCount() {
return count;
}
public void setCount(int count) {
this.count = count;
}
public List<WorkflowDefinitionCountDto> getUserList() {
return userList;
}
public void setUserList(List<WorkflowDefinitionCountDto> userList) {
this.userList = userList;
}
}

View File

@ -28,7 +28,7 @@ import lombok.NoArgsConstructor;
@NoArgsConstructor
public class DynamicSubWorkflowDto {
private long processInstanceId;
private long workflowInstanceId;
private String name;

View File

@ -1,26 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.dto;
import org.apache.dolphinscheduler.dao.entity.WorkflowDefinition;
/**
* ProcessDefinitionDto
*/
public class WorkflowDefinitionDto extends WorkflowDefinition {
}

View File

@ -1,26 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.dto;
import org.apache.dolphinscheduler.dao.entity.WorkflowInstance;
/**
* ProcessInstanceDto
*/
public class WorkflowInstanceDto extends WorkflowInstance {
}

View File

@ -33,14 +33,11 @@ import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import io.swagger.v3.oas.annotations.media.Schema;
/**
* schedule create request
*/
@Data
public class ScheduleCreateRequest {
@Schema(example = "1234567890123", required = true)
private long processDefinitionCode;
private long workflowDefinitionCode;
@Schema(example = "schedule timezone", required = true)
private String crontab;
@ -67,7 +64,7 @@ public class ScheduleCreateRequest {
private int warningGroupId;
@Schema(allowableValues = "HIGHEST / HIGH / MEDIUM / LOW / LOWEST", example = "MEDIUM", description = "default MEDIUM if value not provide.")
private String processInstancePriority;
private String workflowInstancePriority;
@Schema(example = "worker-group-name")
private String workerGroup;
@ -87,7 +84,7 @@ public class ScheduleCreateRequest {
public Schedule convert2Schedule() {
Schedule schedule = new Schedule();
schedule.setProcessDefinitionCode(this.processDefinitionCode);
schedule.setWorkflowDefinitionCode(this.workflowDefinitionCode);
schedule.setCrontab(this.crontab);
schedule.setStartTime(stringToDate(this.startTime));
schedule.setEndTime(stringToDate(this.endTime));
@ -110,8 +107,9 @@ public class ScheduleCreateRequest {
schedule.setWarningType(newWarningType);
Priority newPriority =
this.processInstancePriority == null ? Priority.MEDIUM : Priority.valueOf(this.processInstancePriority);
schedule.setProcessInstancePriority(newPriority);
this.workflowInstancePriority == null ? Priority.MEDIUM
: Priority.valueOf(this.workflowInstancePriority);
schedule.setWorkflowInstancePriority(newPriority);
Date date = new Date();
schedule.setCreateTime(date);

View File

@ -39,8 +39,8 @@ public class ScheduleFilterRequest extends PageQueryDto {
@Schema(example = "project-name")
private String projectName;
@Schema(example = "process-definition-name")
private String processDefinitionName;
@Schema(example = "workflow-definition-name")
private String workflowDefinitionName;
@Schema(allowableValues = "ONLINE / OFFLINE", example = "OFFLINE", description = "default OFFLINE if value not provide.")
private String releaseState;
@ -50,8 +50,8 @@ public class ScheduleFilterRequest extends PageQueryDto {
if (this.projectName != null) {
schedule.setProjectName(this.projectName);
}
if (this.processDefinitionName != null) {
schedule.setProcessDefinitionName(this.processDefinitionName);
if (this.workflowDefinitionName != null) {
schedule.setWorkflowDefinitionName(this.workflowDefinitionName);
}
if (this.releaseState != null) {
schedule.setReleaseState(ReleaseState.valueOf(this.releaseState));

View File

@ -73,7 +73,7 @@ public class ScheduleUpdateRequest {
private int warningGroupId;
@Schema(allowableValues = "HIGHEST / HIGH / MEDIUM / LOW / LOWEST", example = "MEDIUM", description = "default MEDIUM if value not provide.")
private String processInstancePriority;
private String workflowInstancePriority;
@Schema(example = "worker-group-name")
private String workerGroup;
@ -122,8 +122,8 @@ public class ScheduleUpdateRequest {
if (this.warningGroupId != 0) {
scheduleDeepCopy.setWarningGroupId(this.warningGroupId);
}
if (this.processInstancePriority != null) {
scheduleDeepCopy.setProcessInstancePriority(Priority.valueOf(this.processInstancePriority));
if (this.workflowInstancePriority != null) {
scheduleDeepCopy.setWorkflowInstancePriority(Priority.valueOf(this.workflowInstancePriority));
}
if (this.workerGroup != null) {
scheduleDeepCopy.setWorkerGroup(this.workerGroup);

View File

@ -22,6 +22,7 @@ import org.apache.dolphinscheduler.common.enums.TaskExecuteType;
import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus;
import lombok.Data;
import lombok.EqualsAndHashCode;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
@ -30,19 +31,20 @@ import io.swagger.v3.oas.annotations.media.Schema;
/**
* task instance request
*/
@EqualsAndHashCode(callSuper = true)
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonInclude(JsonInclude.Include.NON_NULL)
@Data
public class TaskInstanceQueryRequest extends PageQueryDto {
@Schema(name = "processInstanceId", example = "WORKFLOW_INSTANCE_ID", defaultValue = "0")
Integer processInstanceId;
@Schema(name = "workflowInstanceId", example = "WORKFLOW_INSTANCE_ID", defaultValue = "0")
Integer workflowInstanceId;
@Schema(name = "processInstanceName", example = "PROCESS-INSTANCE-NAME")
String processInstanceName;
@Schema(name = "workflowInstanceName", example = "WORKFLOW-INSTANCE-NAME")
String workflowInstanceName;
@Schema(name = "processDefinitionName", example = "PROCESS-DEFINITION-NAME")
String processDefinitionName;
@Schema(name = "workflowDefinitionName", example = "WORKFLOW-DEFINITION-NAME")
String workflowDefinitionName;
@Schema(name = "searchVal", example = "SEARCH-VAL")
String searchVal;

View File

@ -24,9 +24,6 @@ import java.util.Date;
import lombok.Data;
import io.swagger.v3.oas.annotations.media.Schema;
/**
* task relation create request
*/
@Data
public class TaskRelationCreateRequest {
@ -42,11 +39,11 @@ public class TaskRelationCreateRequest {
@Schema(example = "54321", required = true)
private long postTaskCode;
public WorkflowTaskRelation convert2ProcessTaskRelation() {
public WorkflowTaskRelation convert2WorkflowTaskRelation() {
WorkflowTaskRelation workflowTaskRelation = new WorkflowTaskRelation();
workflowTaskRelation.setProjectCode(this.projectCode);
workflowTaskRelation.setProcessDefinitionCode(this.workflowCode);
workflowTaskRelation.setWorkflowDefinitionCode(this.workflowCode);
workflowTaskRelation.setPreTaskCode(this.preTaskCode);
workflowTaskRelation.setPostTaskCode(this.postTaskCode);

View File

@ -58,7 +58,7 @@ public class TaskRelationFilterRequest extends PageQueryDto {
public WorkflowTaskRelation convert2TaskDefinition() {
WorkflowTaskRelation workflowTaskRelation = new WorkflowTaskRelation();
if (this.workflowCode != 0L) {
workflowTaskRelation.setProcessDefinitionCode(this.workflowCode);
workflowTaskRelation.setWorkflowDefinitionCode(this.workflowCode);
}
if (this.preTaskCode != 0L) {
workflowTaskRelation.setPreTaskCode(this.preTaskCode);

View File

@ -58,7 +58,7 @@ public class WorkflowCreateRequest {
@Schema(allowableValues = "PARALLEL / SERIAL_WAIT / SERIAL_DISCARD / SERIAL_PRIORITY", example = "PARALLEL", description = "default PARALLEL if not provide.")
private String executionType;
public WorkflowDefinition convert2ProcessDefinition() {
public WorkflowDefinition convert2WorkflowDefinition() {
WorkflowDefinition workflowDefinition = new WorkflowDefinition();
workflowDefinition.setName(this.name);

View File

@ -63,12 +63,12 @@ public class WorkflowUpdateRequest {
private String location;
/**
* Merge workflowUpdateRequest information into exists processDefinition object
* Merge workflowUpdateRequest information into exists workflowDefinition object
*
* @param workflowDefinition exists processDefinition object
* @return process definition
* @param workflowDefinition exists workflowDefinition object
* @return workflow definition
*/
public WorkflowDefinition mergeIntoProcessDefinition(WorkflowDefinition workflowDefinition) {
public WorkflowDefinition mergeIntoWorkflowDefinition(WorkflowDefinition workflowDefinition) {
WorkflowDefinition workflowDefinitionDeepCopy =
JSONUtils.parseObject(JSONUtils.toJsonString(workflowDefinition), WorkflowDefinition.class);
assert workflowDefinitionDeepCopy != null;

View File

@ -52,7 +52,7 @@ public class WorkflowInstanceQueryRequest extends PageQueryDto {
@Schema(name = "state", example = "STATE")
Integer state;
public WorkflowInstance convert2ProcessInstance() {
public WorkflowInstance convert2WorkflowInstance() {
WorkflowInstance workflowInstance = new WorkflowInstance();
if (this.workflowName != null) {
workflowInstance.setName(this.workflowName);

View File

@ -177,7 +177,7 @@ public class PythonGateway {
return taskDefinitionService.genTaskCodeList(genNum);
}
public Map<String, Long> getCodeAndVersion(String projectName, String processDefinitionName,
public Map<String, Long> getCodeAndVersion(String projectName, String workflowDefinitionName,
String taskName) throws CodeGenerateUtils.CodeGenerateException {
Project project = projectMapper.queryByName(projectName);
Map<String, Long> result = new HashMap<>();
@ -189,7 +189,7 @@ public class PythonGateway {
}
WorkflowDefinition workflowDefinition =
workflowDefinitionMapper.queryByDefineName(project.getCode(), processDefinitionName);
workflowDefinitionMapper.queryByDefineName(project.getCode(), workflowDefinitionName);
// In the case project exists, but current workflow still not created, we should also return the init
// version of it
if (workflowDefinition == null) {
@ -259,14 +259,14 @@ public class PythonGateway {
WorkflowDefinition workflowDefinition = getWorkflow(user, projectCode, name);
WorkflowExecutionTypeEnum executionTypeEnum = WorkflowExecutionTypeEnum.valueOf(executionType);
long processDefinitionCode;
long workflowDefinitionCode;
// create or update workflow
if (workflowDefinition != null) {
processDefinitionCode = workflowDefinition.getCode();
workflowDefinitionCode = workflowDefinition.getCode();
// make sure workflow offline which could edit
workflowDefinitionService.offlineWorkflowDefinition(user, projectCode, processDefinitionCode);
workflowDefinitionService.offlineWorkflowDefinition(user, projectCode, workflowDefinitionCode);
workflowDefinitionService.updateWorkflowDefinition(user, projectCode, name,
processDefinitionCode, description, globalParams,
workflowDefinitionCode, description, globalParams,
null, timeout, taskRelationJson, taskDefinitionJson,
executionTypeEnum);
} else {
@ -279,21 +279,21 @@ public class PythonGateway {
throw new ServiceException(result.get(Constants.MSG).toString());
}
workflowDefinition = (WorkflowDefinition) result.get(Constants.DATA_LIST);
processDefinitionCode = workflowDefinition.getCode();
workflowDefinitionCode = workflowDefinition.getCode();
}
// Fresh workflow schedule
if (schedule != null) {
createOrUpdateSchedule(user, projectCode, processDefinitionCode, schedule, onlineSchedule, workerGroup,
createOrUpdateSchedule(user, projectCode, workflowDefinitionCode, schedule, onlineSchedule, workerGroup,
warningType,
warningGroupId);
}
if (ReleaseState.ONLINE.equals(ReleaseState.getEnum(releaseState))) {
workflowDefinitionService.onlineWorkflowDefinition(user, projectCode, processDefinitionCode);
workflowDefinitionService.onlineWorkflowDefinition(user, projectCode, workflowDefinitionCode);
} else if (ReleaseState.OFFLINE.equals(ReleaseState.getEnum(releaseState))) {
workflowDefinitionService.offlineWorkflowDefinition(user, projectCode, processDefinitionCode);
workflowDefinitionService.offlineWorkflowDefinition(user, projectCode, workflowDefinitionCode);
}
return processDefinitionCode;
return workflowDefinitionCode;
}
/**
@ -304,9 +304,9 @@ public class PythonGateway {
* @param workflowName workflow name
*/
private WorkflowDefinition getWorkflow(User user, long projectCode, String workflowName) {
Map<String, Object> verifyProcessDefinitionExists =
Map<String, Object> verifyWorkflowDefinitionExists =
workflowDefinitionService.verifyWorkflowDefinitionName(user, projectCode, workflowName, 0);
Status verifyStatus = (Status) verifyProcessDefinitionExists.get(Constants.STATUS);
Status verifyStatus = (Status) verifyWorkflowDefinitionExists.get(Constants.STATUS);
WorkflowDefinition workflowDefinition = null;
if (verifyStatus == Status.WORKFLOW_DEFINITION_NAME_EXIST) {
@ -343,7 +343,7 @@ public class PythonGateway {
String workerGroup,
String warningType,
int warningGroupId) {
Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(workflowCode);
Schedule scheduleObj = scheduleMapper.queryByWorkflowDefinitionCode(workflowCode);
// create or update schedule
int scheduleId;
if (scheduleObj == null) {
@ -526,7 +526,7 @@ public class PythonGateway {
/**
* Get workflow object by given workflow name. It returns map contain workflow id, name, code.
* Useful in Python API create subProcess task which need workflow information.
* Useful in Python API create sub workflow task which need workflow information.
*
* @param userName user who create or update schedule
* @param projectName project name which workflow belongs to
@ -583,7 +583,7 @@ public class PythonGateway {
log.error(msg);
throw new IllegalArgumentException(msg);
}
result.put("processDefinitionCode", workflowDefinition.getCode());
result.put("workflowDefinitionCode", workflowDefinition.getCode());
if (taskName != null) {
TaskDefinition taskDefinition =

View File

@ -210,7 +210,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
public boolean checkSubWorkflowDefinitionValid(WorkflowDefinition workflowDefinition) {
// query all sub workflows under the current workflow
List<WorkflowTaskRelation> workflowTaskRelations =
workflowTaskRelationMapper.queryDownstreamByProcessDefinitionCode(workflowDefinition.getCode());
workflowTaskRelationMapper.queryDownstreamByWorkflowDefinitionCode(workflowDefinition.getCode());
if (workflowTaskRelations.isEmpty()) {
return true;
}
@ -337,11 +337,11 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
}
WorkflowDefinition workflowDefinition =
processService.findProcessDefinition(workflowInstance.getProcessDefinitionCode(),
workflowInstance.getProcessDefinitionVersion());
processService.findWorkflowDefinition(workflowInstance.getWorkflowDefinitionCode(),
workflowInstance.getWorkflowDefinitionVersion());
workflowDefinition.setReleaseState(ReleaseState.ONLINE);
this.checkWorkflowDefinitionValid(projectCode, workflowDefinition, workflowInstance.getProcessDefinitionCode(),
workflowInstance.getProcessDefinitionVersion());
this.checkWorkflowDefinitionValid(projectCode, workflowDefinition, workflowInstance.getWorkflowDefinitionCode(),
workflowInstance.getWorkflowDefinitionVersion());
// get the startParams user specified at the first starting while repeat running is needed
@ -367,11 +367,11 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
Command command = new Command();
command.setCommandType(CommandType.EXECUTE_TASK);
command.setProcessDefinitionCode(workflowDefinition.getCode());
command.setWorkflowDefinitionCode(workflowDefinition.getCode());
command.setCommandParam(JSONUtils.toJsonString(cmdParam));
command.setExecutorId(loginUser.getId());
command.setProcessDefinitionVersion(workflowDefinition.getVersion());
command.setProcessInstanceId(workflowInstanceId);
command.setWorkflowDefinitionVersion(workflowDefinition.getVersion());
command.setWorkflowInstanceId(workflowInstanceId);
command.setTestFlag(workflowInstance.getTestFlag());
// Add taskDependType
@ -391,13 +391,13 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
if (create > 0) {
log.info("Create {} command complete, workflowDefinitionCode:{}, workflowDefinitionVersion:{}.",
command.getCommandType().getDescp(), command.getProcessDefinitionCode(),
command.getCommandType().getDescp(), command.getWorkflowDefinitionCode(),
workflowDefinition.getVersion());
putMsg(response, Status.SUCCESS);
} else {
log.error(
"Execute workflow instance failed because create {} command error, workflowDefinitionCode:{}, workflowDefinitionVersion:{} workflowInstanceId:{}.",
command.getCommandType().getDescp(), command.getProcessDefinitionCode(),
command.getCommandType().getDescp(), command.getWorkflowDefinitionCode(),
workflowDefinition.getVersion(),
workflowInstanceId);
putMsg(response, Status.EXECUTE_WORKFLOW_INSTANCE_ERROR);
@ -411,9 +411,10 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
Map<String, Object> result = new HashMap<>();
TaskGroupQueue taskGroupQueue = taskGroupQueueMapper.selectById(queueId);
// check workflow instance exist
workflowInstanceDao.queryOptionalById(taskGroupQueue.getProcessId())
workflowInstanceDao.queryOptionalById(taskGroupQueue.getWorkflowInstanceId())
.orElseThrow(
() -> new ServiceException(Status.WORKFLOW_INSTANCE_NOT_EXIST, taskGroupQueue.getProcessId()));
() -> new ServiceException(Status.WORKFLOW_INSTANCE_NOT_EXIST,
taskGroupQueue.getWorkflowInstanceId()));
if (taskGroupQueue.getInQueue() == Flag.NO.getCode()) {
throw new ServiceException(Status.TASK_GROUP_QUEUE_ALREADY_START);
@ -442,20 +443,20 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
if (createCount > 0) {
log.info("Create {} command complete, workflowDefinitionCode:{}",
command.getCommandType().getDescp(), command.getProcessDefinitionCode());
command.getCommandType().getDescp(), command.getWorkflowDefinitionCode());
} else {
log.error("Create {} command error, workflowDefinitionCode:{}",
command.getCommandType().getDescp(), command.getProcessDefinitionCode());
command.getCommandType().getDescp(), command.getWorkflowDefinitionCode());
}
if (schedules.isEmpty() || complementDependentMode == ComplementDependentMode.OFF_MODE) {
log.info(
"Complement dependent mode is off mode or Scheduler is empty, so skip create complement dependent command, workflowDefinitionCode:{}.",
command.getProcessDefinitionCode());
command.getWorkflowDefinitionCode());
} else {
log.info(
"Complement dependent mode is all dependent and Scheduler is not empty, need create complement dependent command, workflowDefinitionCode:{}.",
command.getProcessDefinitionCode());
command.getWorkflowDefinitionCode());
createComplementDependentCommand(schedules, command, allLevelDependent);
}
@ -478,8 +479,8 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
executionOrder = ExecutionOrder.DESC_ORDER;
}
List<Schedule> schedules = processService.queryReleaseSchedulerListByProcessDefinitionCode(
command.getProcessDefinitionCode());
List<Schedule> schedules = processService.queryReleaseSchedulerListByWorkflowDefinitionCode(
command.getWorkflowDefinitionCode());
List<ZonedDateTime> listDate = new ArrayList<>();
if (scheduleParam.containsKey(CMD_PARAM_COMPLEMENT_DATA_START_DATE) && scheduleParam.containsKey(
@ -518,14 +519,14 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
switch (runMode) {
case RUN_MODE_SERIAL: {
log.info("RunMode of {} command is serial run, workflowDefinitionCode:{}.",
command.getCommandType().getDescp(), command.getProcessDefinitionCode());
command.getCommandType().getDescp(), command.getWorkflowDefinitionCode());
createCount = createComplementCommand(triggerCode, command, cmdParam, listDate, schedules,
complementDependentMode, allLevelDependent);
break;
}
case RUN_MODE_PARALLEL: {
log.info("RunMode of {} command is parallel run, workflowDefinitionCode:{}.",
command.getCommandType().getDescp(), command.getProcessDefinitionCode());
command.getCommandType().getDescp(), command.getWorkflowDefinitionCode());
int queueNum = 0;
if (CollectionUtils.isNotEmpty(listDate)) {
@ -573,7 +574,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
}
List<DependentWorkflowDefinition> dependentWorkflowDefinitionList =
getComplementDependentDefinitionList(dependentCommand.getProcessDefinitionCode(),
getComplementDependentDefinitionList(dependentCommand.getWorkflowDefinitionCode(),
CronUtils.getMaxCycle(schedules.get(0).getCrontab()), dependentCommand.getWorkerGroup(),
allLevelDependent);
dependentCommand.setTaskDependType(TaskDependType.TASK_POST);
@ -581,8 +582,8 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
// If the id is Integer, the auto-increment id will be obtained by mybatis-plus
// and causing duplicate when clone it.
dependentCommand.setId(null);
dependentCommand.setProcessDefinitionCode(dependentWorkflowDefinition.getProcessDefinitionCode());
dependentCommand.setProcessDefinitionVersion(dependentWorkflowDefinition.getProcessDefinitionVersion());
dependentCommand.setWorkflowDefinitionCode(dependentWorkflowDefinition.getWorkflowDefinitionCode());
dependentCommand.setWorkflowDefinitionVersion(dependentWorkflowDefinition.getWorkflowDefinitionVersion());
dependentCommand.setWorkerGroup(dependentWorkflowDefinition.getWorkerGroup());
Map<String, String> cmdParam = JSONUtils.toMap(dependentCommand.getCommandParam());
cmdParam.put(CMD_PARAM_START_NODES, String.valueOf(dependentWorkflowDefinition.getTaskDefinitionCode()));
@ -618,10 +619,10 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
.stream()
.flatMap(dependentWorkflowDefinition -> checkDependentWorkflowDefinitionValid(
workflowLineageService.queryDownstreamDependentWorkflowDefinitions(
dependentWorkflowDefinition.getProcessDefinitionCode()),
dependentWorkflowDefinition.getWorkflowDefinitionCode()),
workflowDefinitionCycle,
workerGroup,
dependentWorkflowDefinition.getProcessDefinitionCode()).stream())
dependentWorkflowDefinition.getWorkflowDefinitionCode()).stream())
.collect(Collectors.toList());
if (childDependentList.isEmpty()) {
break;
@ -646,17 +647,17 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
List<DependentWorkflowDefinition> validDependentWorkflowDefinitionList = new ArrayList<>();
List<Long> workflowDefinitionCodeList =
dependentWorkflowDefinitionList.stream().map(DependentWorkflowDefinition::getProcessDefinitionCode)
dependentWorkflowDefinitionList.stream().map(DependentWorkflowDefinition::getWorkflowDefinitionCode)
.collect(Collectors.toList());
Map<Long, String> processDefinitionWorkerGroupMap =
Map<Long, String> workflowDefinitionWorkerGroupMap =
workerGroupService.queryWorkerGroupByWorkflowDefinitionCodes(workflowDefinitionCodeList);
for (DependentWorkflowDefinition dependentWorkflowDefinition : dependentWorkflowDefinitionList) {
if (dependentWorkflowDefinition
.getDependentCycle(upstreamWorkflowDefinitionCode) == workflowDefinitionCycle) {
if (processDefinitionWorkerGroupMap
.get(dependentWorkflowDefinition.getProcessDefinitionCode()) == null) {
if (workflowDefinitionWorkerGroupMap
.get(dependentWorkflowDefinition.getWorkflowDefinitionCode()) == null) {
dependentWorkflowDefinition.setWorkerGroup(workerGroup);
}

View File

@ -395,7 +395,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic
.map(Project::getUserId).distinct().collect(Collectors.toList()));
Map<Integer, String> userMap = userList.stream().collect(Collectors.toMap(User::getId, User::getUserName));
List<ProjectWorkflowDefinitionCount> projectWorkflowDefinitionCountList =
workflowDefinitionMapper.queryProjectProcessDefinitionCountByProjectCodes(
workflowDefinitionMapper.queryProjectWorkflowDefinitionCountByProjectCodes(
projectList.stream().map(Project::getCode).distinct().collect(Collectors.toList()));
Map<Long, Integer> projectWorkflowDefinitionCountMap = projectWorkflowDefinitionCountList.stream()
.collect(Collectors.toMap(ProjectWorkflowDefinitionCount::getProjectCode,

View File

@ -155,7 +155,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
workflowDefinition.getVersion());
Schedule scheduleExists =
scheduleMapper.queryByProcessDefinitionCode(workflowDefinitionCode);
scheduleMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode);
if (scheduleExists != null) {
log.error("Schedule already exist, scheduleId:{}, workflowDefinitionCode:{}", scheduleExists.getId(),
workflowDefinitionCode);
@ -170,8 +170,8 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
scheduleObj.setTenantCode(tenantCode);
scheduleObj.setProjectName(project.getName());
scheduleObj.setProcessDefinitionCode(workflowDefinitionCode);
scheduleObj.setProcessDefinitionName(workflowDefinition.getName());
scheduleObj.setWorkflowDefinitionCode(workflowDefinitionCode);
scheduleObj.setWorkflowDefinitionName(workflowDefinition.getName());
ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class);
if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) {
@ -202,7 +202,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
scheduleObj.setUserId(loginUser.getId());
scheduleObj.setUserName(loginUser.getUserName());
scheduleObj.setReleaseState(ReleaseState.OFFLINE);
scheduleObj.setProcessInstancePriority(workflowInstancePriority);
scheduleObj.setWorkflowInstancePriority(workflowInstancePriority);
scheduleObj.setWorkerGroup(workerGroup);
scheduleObj.setEnvironmentCode(environmentCode);
scheduleMapper.insert(scheduleObj);
@ -259,19 +259,20 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
@Transactional
public Schedule createSchedulesV2(User loginUser,
ScheduleCreateRequest scheduleCreateRequest) {
this.projectPermCheckByWorkflowCode(loginUser, scheduleCreateRequest.getProcessDefinitionCode());
this.projectPermCheckByWorkflowCode(loginUser, scheduleCreateRequest.getWorkflowDefinitionCode());
WorkflowDefinition workflowDefinition =
workflowDefinitionMapper.queryByCode(scheduleCreateRequest.getProcessDefinitionCode());
workflowDefinitionMapper.queryByCode(scheduleCreateRequest.getWorkflowDefinitionCode());
// check workflow define release state
executorService.checkWorkflowDefinitionValid(workflowDefinition.getProjectCode(), workflowDefinition,
workflowDefinition.getCode(), workflowDefinition.getVersion());
Schedule scheduleExists =
scheduleMapper.queryByProcessDefinitionCode(scheduleCreateRequest.getProcessDefinitionCode());
scheduleMapper.queryByWorkflowDefinitionCode(scheduleCreateRequest.getWorkflowDefinitionCode());
if (scheduleExists != null) {
throw new ServiceException(Status.SCHEDULE_ALREADY_EXISTS, scheduleCreateRequest.getProcessDefinitionCode(),
throw new ServiceException(Status.SCHEDULE_ALREADY_EXISTS,
scheduleCreateRequest.getWorkflowDefinitionCode(),
scheduleExists.getId());
}
@ -286,7 +287,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
schedule.setUserId(loginUser.getId());
// give more detail when return schedule object
schedule.setUserName(loginUser.getUserName());
schedule.setProcessDefinitionName(workflowDefinition.getName());
schedule.setWorkflowDefinitionName(workflowDefinition.getName());
this.scheduleParamCheck(scheduleCreateRequest.getScheduleParam());
int create = scheduleMapper.insert(schedule);
@ -348,11 +349,11 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
}
WorkflowDefinition workflowDefinition =
workflowDefinitionMapper.queryByCode(schedule.getProcessDefinitionCode());
workflowDefinitionMapper.queryByCode(schedule.getWorkflowDefinitionCode());
if (workflowDefinition == null || projectCode != workflowDefinition.getProjectCode()) {
log.error("workflow definition does not exist, workflowDefinitionCode:{}.",
schedule.getProcessDefinitionCode());
putMsg(result, Status.WORKFLOW_DEFINITION_NOT_EXIST, String.valueOf(schedule.getProcessDefinitionCode()));
schedule.getWorkflowDefinitionCode());
putMsg(result, Status.WORKFLOW_DEFINITION_NOT_EXIST, String.valueOf(schedule.getWorkflowDefinitionCode()));
return result;
}
@ -389,7 +390,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
throw new ServiceException(Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleUpdateRequest.toString());
}
// check update params
this.projectPermCheckByWorkflowCode(loginUser, scheduleUpdate.getProcessDefinitionCode());
this.projectPermCheckByWorkflowCode(loginUser, scheduleUpdate.getWorkflowDefinitionCode());
if (scheduleUpdate.getEnvironmentCode() != null) {
Environment environment = environmentMapper.queryByEnvironmentCode(scheduleUpdate.getEnvironmentCode());
@ -420,7 +421,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
if (schedule == null) {
throw new ServiceException(Status.SCHEDULE_NOT_EXISTS, scheduleId);
}
this.projectPermCheckByWorkflowCode(loginUser, schedule.getProcessDefinitionCode());
this.projectPermCheckByWorkflowCode(loginUser, schedule.getWorkflowDefinitionCode());
return schedule;
}
@ -460,7 +461,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
Page<Schedule> page = new Page<>(pageNo, pageSize);
IPage<Schedule> schedulePage =
scheduleMapper.queryByProjectAndProcessDefineCodePaging(page, projectCode, workflowDefinitionCode,
scheduleMapper.queryByProjectAndWorkflowDefinitionCodePaging(page, projectCode, workflowDefinitionCode,
searchVal);
List<ScheduleVO> scheduleList = new ArrayList<>();
@ -480,7 +481,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
if (CollectionUtils.isEmpty(workflowDefinitionCodes)) {
return Collections.emptyList();
}
return scheduleMapper.querySchedulesByProcessDefinitionCodes(workflowDefinitionCodes);
return scheduleMapper.querySchedulesByWorkflowDefinitionCodes(workflowDefinitionCodes);
}
/**
@ -578,7 +579,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
throw new ServiceException(Status.USER_NO_OPERATION_PERM);
}
this.projectPermCheckByWorkflowCode(loginUser, schedule.getProcessDefinitionCode());
this.projectPermCheckByWorkflowCode(loginUser, schedule.getWorkflowDefinitionCode());
int delete = scheduleMapper.deleteById(scheduleId);
if (delete <= 0) {
throw new ServiceException(Status.DELETE_SCHEDULE_BY_ID_ERROR);
@ -655,7 +656,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
return result;
}
// check schedule exists
Schedule schedule = scheduleMapper.queryByProcessDefinitionCode(workflowDefinitionCode);
Schedule schedule = scheduleMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode);
if (schedule == null) {
log.error("Schedule of workflow definition does not exist, workflowDefinitionCode:{}.",
workflowDefinitionCode);
@ -686,7 +687,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
@Transactional
@Override
public void onlineSchedulerByWorkflowCode(Long workflowDefinitionCode) {
Schedule schedule = scheduleMapper.queryByProcessDefinitionCode(workflowDefinitionCode);
Schedule schedule = scheduleMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode);
doOnlineScheduler(schedule);
}
@ -699,7 +700,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
return;
}
WorkflowDefinition workflowDefinition =
workflowDefinitionMapper.queryByCode(schedule.getProcessDefinitionCode());
workflowDefinitionMapper.queryByCode(schedule.getWorkflowDefinitionCode());
if (!ReleaseState.ONLINE.equals(workflowDefinition.getReleaseState())) {
throw new ServiceException(Status.WORKFLOW_DEFINITION_NOT_RELEASE, workflowDefinition.getName());
}
@ -722,7 +723,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
@Transactional
@Override
public void offlineSchedulerByWorkflowCode(Long workflowDefinitionCode) {
Schedule schedule = scheduleMapper.queryByProcessDefinitionCode(workflowDefinitionCode);
Schedule schedule = scheduleMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode);
doOfflineScheduler(schedule);
}
@ -737,7 +738,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
schedule.setReleaseState(ReleaseState.OFFLINE);
scheduleMapper.updateById(schedule);
WorkflowDefinition workflowDefinition =
workflowDefinitionMapper.queryByCode(schedule.getProcessDefinitionCode());
workflowDefinitionMapper.queryByCode(schedule.getWorkflowDefinitionCode());
Project project = projectMapper.queryByCode(workflowDefinition.getProjectCode());
schedulerApi.deleteScheduleTask(project.getId(), schedule.getId());
}
@ -802,7 +803,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
schedule.setWorkerGroup(workerGroup);
schedule.setEnvironmentCode(environmentCode);
schedule.setUpdateTime(now);
schedule.setProcessInstancePriority(workflowInstancePriority);
schedule.setWorkflowInstancePriority(workflowInstancePriority);
scheduleMapper.updateById(schedule);
workflowDefinition.setWarningGroupId(warningGroupId);

View File

@ -419,11 +419,11 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
projectCode, taskCode, taskDefinitionToUpdate.getVersion());
// update workflow task relation
List<WorkflowTaskRelation> workflowTaskRelations = workflowTaskRelationMapper
.queryProcessTaskRelationByTaskCodeAndTaskVersion(taskDefinitionToUpdate.getCode(),
.queryWorkflowTaskRelationByTaskCodeAndTaskVersion(taskDefinitionToUpdate.getCode(),
taskDefinition.getVersion());
if (CollectionUtils.isNotEmpty(workflowTaskRelations)) {
Map<Long, List<WorkflowTaskRelation>> workflowTaskRelationGroupList = workflowTaskRelations.stream()
.collect(Collectors.groupingBy(WorkflowTaskRelation::getProcessDefinitionCode));
.collect(Collectors.groupingBy(WorkflowTaskRelation::getWorkflowDefinitionCode));
for (Map.Entry<Long, List<WorkflowTaskRelation>> workflowTaskRelationMap : workflowTaskRelationGroupList
.entrySet()) {
Long workflowDefinitionCode = workflowTaskRelationMap.getKey();
@ -437,9 +437,9 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
} else if (taskCode == workflowTaskRelation.getPostTaskCode()) {
workflowTaskRelation.setPostTaskVersion(version);
}
workflowTaskRelation.setProcessDefinitionVersion(workflowDefinitionVersion);
workflowTaskRelation.setWorkflowDefinitionVersion(workflowDefinitionVersion);
int updateWorkflowDefinitionVersionCount =
workflowTaskRelationMapper.updateProcessTaskRelationTaskVersion(workflowTaskRelation);
workflowTaskRelationMapper.updateWorkflowTaskRelationTaskVersion(workflowTaskRelation);
if (updateWorkflowDefinitionVersionCount != 1) {
log.error("batch update workflow task relation error, projectCode:{}, taskDefinitionCode:{}.",
projectCode, taskCode);
@ -527,11 +527,11 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
if (MapUtils.isNotEmpty(queryUpStreamTaskCodeMap)) {
WorkflowTaskRelation taskRelation = upstreamTaskRelations.get(0);
List<WorkflowTaskRelation> workflowTaskRelations =
workflowTaskRelationMapper.queryByProcessCode(taskRelation.getProcessDefinitionCode());
workflowTaskRelationMapper.queryByWorkflowDefinitionCode(taskRelation.getWorkflowDefinitionCode());
// set upstream code list
updateUpstreamTask(new HashSet<>(queryUpStreamTaskCodeMap.keySet()),
taskCode, projectCode, taskRelation.getProcessDefinitionCode(), loginUser);
taskCode, projectCode, taskRelation.getWorkflowDefinitionCode(), loginUser);
List<WorkflowTaskRelation> workflowTaskRelationList = Lists.newArrayList(workflowTaskRelations);
List<WorkflowTaskRelation> relationList = Lists.newArrayList();
@ -695,9 +695,9 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
log.info(
"Task definition has upstream tasks, start handle them after switch task, taskDefinitionCode:{}.",
taskCode);
long workflowDefinitionCode = taskRelationList.get(0).getProcessDefinitionCode();
long workflowDefinitionCode = taskRelationList.get(0).getWorkflowDefinitionCode();
List<WorkflowTaskRelation> workflowTaskRelations =
workflowTaskRelationMapper.queryByProcessCode(workflowDefinitionCode);
workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode);
updateDag(loginUser, workflowDefinitionCode, workflowTaskRelations,
Lists.newArrayList(taskDefinitionUpdate));
} else {

View File

@ -229,9 +229,9 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst
throw new ServiceException("The task instance is not under the project: " + projectCode);
}
WorkflowInstance workflowInstance = workflowInstanceDao.queryOptionalById(task.getProcessInstanceId())
WorkflowInstance workflowInstance = workflowInstanceDao.queryOptionalById(task.getWorkflowInstanceId())
.orElseThrow(
() -> new ServiceException(Status.WORKFLOW_INSTANCE_NOT_EXIST, task.getProcessInstanceId()));
() -> new ServiceException(Status.WORKFLOW_INSTANCE_NOT_EXIST, task.getWorkflowInstanceId()));
if (!workflowInstance.getState().isFinished()) {
throw new ServiceException("The workflow instance is not finished: " + workflowInstance.getState()
+ " cannot force start task instance");
@ -249,7 +249,7 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst
if (changedNum <= 0) {
throw new ServiceException(Status.FORCE_TASK_SUCCESS_ERROR);
}
processService.forceProcessInstanceSuccessByTaskInstanceId(task);
processService.forceWorkflowInstanceSuccessByTaskInstanceId(task);
log.info("Force success task instance:{} success", taskInstanceId);
}

View File

@ -251,7 +251,7 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService
throw new ServiceException(Status.DELETE_TENANT_BY_ID_ERROR);
}
workflowInstanceMapper.updateProcessInstanceByTenantCode(tenant.getTenantCode(), Constants.DEFAULT);
workflowInstanceMapper.updateWorkflowInstanceByTenantCode(tenant.getTenantCode(), Constants.DEFAULT);
}
private List<WorkflowInstance> getWorkflowInstancesByTenant(Tenant tenant) {

View File

@ -217,7 +217,7 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro
if (CollectionUtils.isNotEmpty(schedules)) {
List<String> workflowDefinitionNames = schedules.stream().limit(3)
.map(schedule -> workflowDefinitionMapper.queryByCode(schedule.getProcessDefinitionCode())
.map(schedule -> workflowDefinitionMapper.queryByCode(schedule.getWorkflowDefinitionCode())
.getName())
.collect(Collectors.toList());
@ -432,8 +432,8 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro
@Override
public Map<Long, String> queryWorkerGroupByWorkflowDefinitionCodes(List<Long> workflowDefinitionCodeList) {
List<Schedule> workflowDefinitionScheduleList =
scheduleMapper.querySchedulesByProcessDefinitionCodes(workflowDefinitionCodeList);
return workflowDefinitionScheduleList.stream().collect(Collectors.toMap(Schedule::getProcessDefinitionCode,
scheduleMapper.querySchedulesByWorkflowDefinitionCodes(workflowDefinitionCodeList);
return workflowDefinitionScheduleList.stream().collect(Collectors.toMap(Schedule::getWorkflowDefinitionCode,
Schedule::getWorkerGroup));
}

View File

@ -346,7 +346,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
@Transactional
public WorkflowDefinition createSingleWorkflowDefinition(User loginUser,
WorkflowCreateRequest workflowCreateRequest) {
WorkflowDefinition workflowDefinition = workflowCreateRequest.convert2ProcessDefinition();
WorkflowDefinition workflowDefinition = workflowCreateRequest.convert2WorkflowDefinition();
this.createWorkflowValid(loginUser, workflowDefinition);
long workflowDefinitionCode;
@ -626,13 +626,13 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
Map<Long, Schedule> scheduleMap =
schedulerService.queryScheduleByWorkflowDefinitionCodes(workflowDefinitionCodes)
.stream()
.collect(Collectors.toMap(Schedule::getProcessDefinitionCode, Function.identity()));
List<UserWithWorkflowDefinitionCode> userWithCodes = userMapper.queryUserWithProcessDefinitionCode(
.collect(Collectors.toMap(Schedule::getWorkflowDefinitionCode, Function.identity()));
List<UserWithWorkflowDefinitionCode> userWithCodes = userMapper.queryUserWithWorkflowDefinitionCode(
workflowDefinitionCodes);
for (WorkflowDefinition pd : workflowDefinitions) {
userWithCodes.stream()
.filter(userWithCode -> userWithCode.getProcessDefinitionCode() == pd.getCode()
&& userWithCode.getProcessDefinitionVersion() == pd.getVersion())
.filter(userWithCode -> userWithCode.getWorkflowDefinitionCode() == pd.getCode()
&& userWithCode.getWorkflowDefinitionVersion() == pd.getVersion())
.findAny().ifPresent(userWithCode -> {
pd.setModifyBy(userWithCode.getModifierName());
pd.setUserName(userWithCode.getCreatorName());
@ -670,7 +670,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
Page<WorkflowDefinition> page =
new Page<>(workflowFilterRequest.getPageNo(), workflowFilterRequest.getPageSize());
IPage<WorkflowDefinition> workflowDefinitionIPage =
workflowDefinitionMapper.filterProcessDefinition(page, workflowDefinition);
workflowDefinitionMapper.filterWorkflowDefinition(page, workflowDefinition);
List<WorkflowDefinition> records = workflowDefinitionIPage.getRecords();
for (WorkflowDefinition pd : records) {
@ -869,7 +869,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
private void taskUsedInOtherTaskValid(WorkflowDefinition workflowDefinition,
List<WorkflowTaskRelationLog> taskRelationList) {
List<WorkflowTaskRelation> oldWorkflowTaskRelationList =
workflowTaskRelationMapper.queryByProcessCode(workflowDefinition.getCode());
workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinition.getCode());
Set<WorkflowTaskRelationLog> oldWorkflowTaskRelationSet =
oldWorkflowTaskRelationList.stream().map(WorkflowTaskRelationLog::new).collect(Collectors.toSet());
StringBuilder sb = new StringBuilder();
@ -878,7 +878,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
.anyMatch(relation -> oldWorkflowTaskRelation.getPostTaskCode() == relation.getPostTaskCode());
if (!oldTaskExists) {
Optional<String> taskDepMsg = workflowLineageService.taskDependentMsg(
workflowDefinition.getProjectCode(), oldWorkflowTaskRelation.getProcessDefinitionCode(),
workflowDefinition.getProjectCode(), oldWorkflowTaskRelation.getWorkflowDefinitionCode(),
oldWorkflowTaskRelation.getPostTaskCode());
taskDepMsg.ifPresent(sb::append);
}
@ -909,7 +909,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
boolean isChange = false;
if (workflowDefinition.equals(workflowDefinitionDeepCopy) && saveTaskResult == Constants.EXIT_CODE_SUCCESS) {
List<WorkflowTaskRelationLog> workflowTaskRelationLogList = workflowTaskRelationLogMapper
.queryByProcessCodeAndVersion(workflowDefinition.getCode(), workflowDefinition.getVersion());
.queryByWorkflowCodeAndVersion(workflowDefinition.getCode(), workflowDefinition.getVersion());
if (taskRelationList.size() == workflowTaskRelationLogList.size()) {
Set<WorkflowTaskRelationLog> taskRelationSet = new HashSet<>(taskRelationList);
Set<WorkflowTaskRelationLog> workflowTaskRelationLogSet = new HashSet<>(workflowTaskRelationLogList);
@ -1095,7 +1095,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
workflowDefinitionUsedInOtherTaskValid(workflowDefinition);
// get the timing according to the workflow definition
Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(code);
Schedule scheduleObj = scheduleMapper.queryByWorkflowDefinitionCode(code);
if (scheduleObj != null) {
if (scheduleObj.getReleaseState() == ReleaseState.OFFLINE) {
int delete = scheduleMapper.deleteById(scheduleObj.getId());
@ -1203,7 +1203,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
* @return DagDataSchedule
*/
public DagDataSchedule exportWorkflowDagData(WorkflowDefinition workflowDefinition) {
Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(workflowDefinition.getCode());
Schedule scheduleObj = scheduleMapper.queryByWorkflowDefinitionCode(workflowDefinition.getCode());
DagDataSchedule dagDataSchedule = new DagDataSchedule(processService.genDagData(workflowDefinition));
if (scheduleObj != null) {
scheduleObj.setReleaseState(ReleaseState.OFFLINE);
@ -1463,7 +1463,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
if (!checkImportanceParams(dagDataSchedule, result)) {
return false;
}
WorkflowDefinition workflowDefinition = dagDataSchedule.getProcessDefinition();
WorkflowDefinition workflowDefinition = dagDataSchedule.getWorkflowDefinition();
// generate import workflowDefinitionName
String workflowDefinitionName = recursionWorkflowDefinitionName(projectCode, workflowDefinition.getName(), 1);
@ -1525,7 +1525,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
throw new ServiceException(Status.CREATE_TASK_DEFINITION_ERROR);
}
List<WorkflowTaskRelation> taskRelationList = dagDataSchedule.getProcessTaskRelationList();
List<WorkflowTaskRelation> taskRelationList = dagDataSchedule.getWorkflowTaskRelationList();
List<WorkflowTaskRelationLog> taskRelationLogList = new ArrayList<>();
for (WorkflowTaskRelation workflowTaskRelation : taskRelationList) {
WorkflowTaskRelationLog workflowTaskRelationLog = new WorkflowTaskRelationLog(workflowTaskRelation);
@ -1572,7 +1572,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
if (null != schedule) {
WorkflowDefinition newWorkflowDefinition =
workflowDefinitionMapper.queryByCode(workflowDefinition.getCode());
schedule.setProcessDefinitionCode(newWorkflowDefinition.getCode());
schedule.setWorkflowDefinitionCode(newWorkflowDefinition.getCode());
schedule.setId(null);
schedule.setUserId(loginUser.getId());
schedule.setCreateTime(now);
@ -1597,7 +1597,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
* check importance params
*/
private boolean checkImportanceParams(DagDataSchedule dagDataSchedule, Map<String, Object> result) {
if (dagDataSchedule.getProcessDefinition() == null) {
if (dagDataSchedule.getWorkflowDefinition() == null) {
log.warn("workflow definition is null.");
putMsg(result, Status.DATA_IS_NULL, "WorkflowDefinition");
return false;
@ -1607,7 +1607,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
putMsg(result, Status.DATA_IS_NULL, "TaskDefinitionList");
return false;
}
if (CollectionUtils.isEmpty(dagDataSchedule.getProcessTaskRelationList())) {
if (CollectionUtils.isEmpty(dagDataSchedule.getWorkflowTaskRelationList())) {
log.warn("workflow task relation list is null.");
putMsg(result, Status.DATA_IS_NULL, "WorkflowTaskRelationList");
return false;
@ -1801,7 +1801,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
public Map<String, Object> queryWorkflowDefinitionListByProjectCode(long projectCode) {
Map<String, Object> result = new HashMap<>();
List<DependentSimplifyDefinition> workflowDefinitions =
workflowDefinitionMapper.queryDefinitionListByProjectCodeAndProcessDefinitionCodes(projectCode, null);
workflowDefinitionMapper.queryDefinitionListByProjectCodeAndWorkflowDefinitionCodes(projectCode, null);
result.put(Constants.DATA_LIST, workflowDefinitions);
putMsg(result, Status.SUCCESS);
return result;
@ -1822,7 +1822,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
Set<Long> definitionCodesSet = new HashSet<>();
definitionCodesSet.add(workflowDefinitionCode);
List<DependentSimplifyDefinition> workflowDefinitions = workflowDefinitionMapper
.queryDefinitionListByProjectCodeAndProcessDefinitionCodes(projectCode, definitionCodesSet);
.queryDefinitionListByProjectCodeAndWorkflowDefinitionCodes(projectCode, definitionCodesSet);
// query task definition log
List<TaskDefinitionLog> taskDefinitionLogsList = taskDefinitionLogDao.queryByWorkflowDefinitionCodeAndVersion(
@ -1901,7 +1901,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
Date endTime = workflowInstance.getEndTime() == null ? new Date() : workflowInstance.getEndTime();
parentTreeViewDto.getInstances()
.add(new Instance(workflowInstance.getId(), workflowInstance.getName(),
workflowInstance.getProcessDefinitionCode(),
workflowInstance.getWorkflowDefinitionCode(),
"", workflowInstance.getState().name(), workflowInstance.getStartTime(), endTime,
workflowInstance.getHost(),
DateUtils.format2Readable(endTime.getTime() - workflowInstance.getStartTime().getTime())));
@ -2110,7 +2110,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
diffCode.forEach(code -> failedWorkflowList.add(code + "[null]"));
for (WorkflowDefinition workflowDefinition : workflowDefinitionList) {
List<WorkflowTaskRelation> workflowTaskRelations =
workflowTaskRelationMapper.queryByProcessCode(workflowDefinition.getCode());
workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinition.getCode());
List<WorkflowTaskRelationLog> taskRelationList =
workflowTaskRelations.stream().map(WorkflowTaskRelationLog::new).collect(Collectors.toList());
workflowDefinition.setProjectCode(targetProjectCode);
@ -2168,11 +2168,11 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
workflowDefinition.setLocations(JSONUtils.toJsonString(jsonNodes));
}
// copy timing configuration
Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(oldWorkflowDefinitionCode);
Schedule scheduleObj = scheduleMapper.queryByWorkflowDefinitionCode(oldWorkflowDefinitionCode);
if (scheduleObj != null) {
scheduleObj.setId(null);
scheduleObj.setUserId(loginUser.getId());
scheduleObj.setProcessDefinitionCode(workflowDefinition.getCode());
scheduleObj.setWorkflowDefinitionCode(workflowDefinition.getCode());
scheduleObj.setReleaseState(ReleaseState.OFFLINE);
scheduleObj.setCreateTime(date);
scheduleObj.setUpdateTime(date);
@ -2282,7 +2282,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
}
List<WorkflowTaskRelation> workflowTaskRelationList = workflowTaskRelationMapper
.queryProcessTaskRelationsByProcessDefinitionCode(workflowDefinitionLog.getCode(),
.queryWorkflowTaskRelationsByWorkflowDefinitionCode(workflowDefinitionLog.getCode(),
workflowDefinitionLog.getVersion());
List<TaskCodeVersionDto> taskDefinitionList = getTaskCodeVersionDtos(workflowTaskRelationList);
List<TaskDefinitionLog> taskDefinitionLogList =
@ -2380,7 +2380,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
PageInfo<WorkflowDefinitionLog> pageInfo = new PageInfo<>(pageNo, pageSize);
Page<WorkflowDefinitionLog> page = new Page<>(pageNo, pageSize);
IPage<WorkflowDefinitionLog> workflowDefinitionLogIPage =
workflowDefinitionLogMapper.queryProcessDefinitionVersionsPaging(page, code, projectCode);
workflowDefinitionLogMapper.queryWorkflowDefinitionVersionsPaging(page, code, projectCode);
List<WorkflowDefinitionLog> workflowDefinitionLogs = workflowDefinitionLogIPage.getRecords();
pageInfo.setTotalList(workflowDefinitionLogs);
@ -2423,7 +2423,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
throw new ServiceException(Status.DELETE_WORKFLOW_DEFINITION_EXECUTING_FAIL, workflowInstances.size());
}
int deleteLog = workflowDefinitionLogMapper.deleteByProcessDefinitionCodeAndVersion(code, version);
int deleteLog = workflowDefinitionLogMapper.deleteByWorkflowDefinitionCodeAndVersion(code, version);
int deleteRelationLog = workflowTaskRelationLogMapper.deleteByCode(code, version);
if (deleteLog == 0 || deleteRelationLog == 0) {
throw new ServiceException(Status.DELETE_WORKFLOW_DEFINE_BY_CODE_ERROR);
@ -2483,7 +2483,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
}
WorkflowDefinition workflowDefinitionUpdate =
workflowUpdateRequest.mergeIntoProcessDefinition(workflowDefinition);
workflowUpdateRequest.mergeIntoWorkflowDefinition(workflowDefinition);
this.updateWorkflowValid(loginUser, workflowDefinition, workflowDefinitionUpdate);
int insertVersion = this.saveWorkflowDefine(loginUser, workflowDefinitionUpdate);
@ -2532,7 +2532,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
long projectCode = workflowDefinition.getProjectCode();
long workflowDefinitionCode = workflowDefinition.getCode();
List<WorkflowTaskRelation> taskRelations =
workflowTaskRelationMapper.queryByProcessCode(workflowDefinitionCode);
workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode);
List<WorkflowTaskRelationLog> taskRelationList =
taskRelations.stream().map(WorkflowTaskRelationLog::new).collect(Collectors.toList());
@ -2554,8 +2554,8 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
Date now = new Date();
for (WorkflowTaskRelationLog workflowTaskRelationLog : taskRelationList) {
workflowTaskRelationLog.setProjectCode(projectCode);
workflowTaskRelationLog.setProcessDefinitionCode(workflowDefinitionCode);
workflowTaskRelationLog.setProcessDefinitionVersion(workflowDefinitionVersion);
workflowTaskRelationLog.setWorkflowDefinitionCode(workflowDefinitionCode);
workflowTaskRelationLog.setWorkflowDefinitionVersion(workflowDefinitionVersion);
if (taskDefinitionLogMap != null) {
TaskDefinitionLog preTaskDefinitionLog =
taskDefinitionLogMap.get(workflowTaskRelationLog.getPreTaskCode());
@ -2585,7 +2585,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
workflowDefinition.getProjectCode(), workflowDefinition.getCode());
return Constants.EXIT_CODE_SUCCESS;
}
workflowTaskRelationMapper.deleteByCode(projectCode, workflowDefinitionCode);
workflowTaskRelationMapper.deleteByWorkflowDefinitionCode(projectCode, workflowDefinitionCode);
}
List<WorkflowTaskRelation> workflowTaskRelations =
taskRelationList.stream().map(WorkflowTaskRelation::new).collect(Collectors.toList());
@ -2690,7 +2690,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
Set<Long> taskCodeSet = new TreeSet<>();
workflowTaskRelationMapper.queryByProcessCode(workflowDefinition.getCode())
workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinition.getCode())
.forEach(processTaskRelation -> {
if (processTaskRelation.getPreTaskCode() > 0) {
taskCodeSet.add(processTaskRelation.getPreTaskCode());
@ -2720,7 +2720,7 @@ public class WorkflowDefinitionServiceImpl extends BaseServiceImpl implements Wo
private void checkWorkflowDefinitionIsValidated(Long workflowDefinitionCode) {
// todo: build dag check if the dag is validated
List<WorkflowTaskRelation> workflowTaskRelations =
workflowTaskRelationMapper.queryByProcessCode(workflowDefinitionCode);
workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode);
if (CollectionUtils.isEmpty(workflowTaskRelations)) {
throw new ServiceException(Status.WORKFLOW_DAG_IS_EMPTY);
}

View File

@ -23,8 +23,8 @@ import static org.apache.dolphinscheduler.api.enums.Status.WORKFLOW_INSTANCE_STA
import static org.apache.dolphinscheduler.common.constants.Constants.DATA_LIST;
import static org.apache.dolphinscheduler.common.constants.Constants.GLOBAL_PARAMS;
import static org.apache.dolphinscheduler.common.constants.Constants.LOCAL_PARAMS;
import static org.apache.dolphinscheduler.common.constants.Constants.PROCESS_INSTANCE_STATE;
import static org.apache.dolphinscheduler.common.constants.Constants.TASK_LIST;
import static org.apache.dolphinscheduler.common.constants.Constants.WORKFLOW_INSTANCE_STATE;
import static org.apache.dolphinscheduler.plugin.task.api.TaskPluginManager.checkTaskParameters;
import org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant;
@ -212,7 +212,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
return result;
}
List<WorkflowInstance> workflowInstances = workflowInstanceMapper.queryTopNProcessInstance(size, start, end,
List<WorkflowInstance> workflowInstances = workflowInstanceMapper.queryTopNWorkflowInstance(size, start, end,
WorkflowExecutionStatus.SUCCESS, projectCode);
result.put(DATA_LIST, workflowInstances);
putMsg(result, Status.SUCCESS);
@ -241,8 +241,8 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
.orElseThrow(() -> new ServiceException(WORKFLOW_INSTANCE_NOT_EXIST, workflowInstanceId));
WorkflowDefinition workflowDefinition =
processService.findProcessDefinition(workflowInstance.getProcessDefinitionCode(),
workflowInstance.getProcessDefinitionVersion());
processService.findWorkflowDefinition(workflowInstance.getWorkflowDefinitionCode(),
workflowInstance.getWorkflowDefinitionVersion());
if (workflowDefinition == null || projectCode != workflowDefinition.getProjectCode()) {
log.error("workflow definition does not exist, projectCode:{}.", projectCode);
@ -277,7 +277,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
public Map<String, Object> queryWorkflowInstanceById(User loginUser, Integer workflowInstanceId) {
WorkflowInstance workflowInstance = workflowInstanceMapper.selectById(workflowInstanceId);
WorkflowDefinition workflowDefinition =
workflowDefinitionMapper.queryByCode(workflowInstance.getProcessDefinitionCode());
workflowDefinitionMapper.queryByCode(workflowInstance.getWorkflowDefinitionCode());
return queryWorkflowInstanceById(loginUser, workflowDefinition.getProjectCode(), workflowInstanceId);
}
@ -328,7 +328,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
Page<WorkflowInstance> page = new Page<>(pageNo, pageSize);
PageInfo<WorkflowInstance> pageInfo = new PageInfo<>(pageNo, pageSize);
IPage<WorkflowInstance> workflowInstanceList = workflowInstanceMapper.queryProcessInstanceListPaging(
IPage<WorkflowInstance> workflowInstanceList = workflowInstanceMapper.queryWorkflowInstanceListPaging(
page,
projectCode,
workflowDefinitionCode,
@ -375,7 +375,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
@Override
public Result queryWorkflowInstanceList(User loginUser, WorkflowInstanceQueryRequest workflowInstanceQueryRequest) {
Result result = new Result();
WorkflowInstance workflowInstance = workflowInstanceQueryRequest.convert2ProcessInstance();
WorkflowInstance workflowInstance = workflowInstanceQueryRequest.convert2WorkflowInstance();
String projectName = workflowInstanceQueryRequest.getProjectName();
if (!StringUtils.isBlank(projectName)) {
Project project = projectMapper.queryByName(projectName);
@ -383,7 +383,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
ApiFuncIdentificationConstant.WORKFLOW_DEFINITION);
WorkflowDefinition workflowDefinition =
workflowDefinitionMapper.queryByDefineName(project.getCode(), workflowInstance.getName());
workflowInstance.setProcessDefinitionCode(workflowDefinition.getCode());
workflowInstance.setWorkflowDefinitionCode(workflowDefinition.getCode());
workflowInstance.setProjectCode(project.getCode());
}
@ -392,10 +392,10 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
PageInfo<WorkflowInstance> pageInfo =
new PageInfo<>(workflowInstanceQueryRequest.getPageNo(), workflowInstanceQueryRequest.getPageSize());
IPage<WorkflowInstance> workflowInstanceList = workflowInstanceMapper.queryProcessInstanceListV2Paging(
IPage<WorkflowInstance> workflowInstanceList = workflowInstanceMapper.queryWorkflowInstanceListV2Paging(
page,
workflowInstance.getProjectCode(),
workflowInstance.getProcessDefinitionCode(),
workflowInstance.getWorkflowDefinitionCode(),
workflowInstance.getName(),
workflowInstanceQueryRequest.getStartTime(),
workflowInstanceQueryRequest.getEndTime(),
@ -450,7 +450,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
WorkflowInstance workflowInstance = processService.findWorkflowInstanceDetailById(workflowInstanceId)
.orElseThrow(() -> new ServiceException(WORKFLOW_INSTANCE_NOT_EXIST, workflowInstanceId));
WorkflowDefinition workflowDefinition =
workflowDefinitionMapper.queryByCode(workflowInstance.getProcessDefinitionCode());
workflowDefinitionMapper.queryByCode(workflowInstance.getWorkflowDefinitionCode());
if (workflowDefinition != null && projectCode != workflowDefinition.getProjectCode()) {
log.error("workflow definition does not exist, projectCode:{}, workflowInstanceId:{}.", projectCode,
workflowInstanceId);
@ -461,7 +461,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
taskInstanceDao.queryValidTaskListByWorkflowInstanceId(workflowInstanceId,
workflowInstance.getTestFlag());
Map<String, Object> resultMap = new HashMap<>();
resultMap.put(PROCESS_INSTANCE_STATE, workflowInstance.getState().toString());
resultMap.put(WORKFLOW_INSTANCE_STATE, workflowInstance.getState().toString());
resultMap.put(TASK_LIST, taskInstanceList);
result.put(DATA_LIST, resultMap);
@ -489,20 +489,20 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
throw new ServiceException(Status.TASK_INSTANCE_NOT_EXISTS, taskId);
}
List<RelationSubWorkflow> relationSubWorkflows = relationSubWorkflowMapper
.queryAllSubProcessInstance((long) taskInstance.getProcessInstanceId(),
.queryAllSubWorkflowInstance((long) taskInstance.getWorkflowInstanceId(),
taskInstance.getTaskCode());
List<Long> allSubProcessInstanceId = relationSubWorkflows.stream()
List<Long> allSubWorkflowInstanceId = relationSubWorkflows.stream()
.map(RelationSubWorkflow::getSubWorkflowInstanceId).collect(java.util.stream.Collectors.toList());
List<WorkflowInstance> allSubWorkflows = workflowInstanceDao.queryByIds(allSubProcessInstanceId);
List<WorkflowInstance> allSubWorkflows = workflowInstanceDao.queryByIds(allSubWorkflowInstanceId);
if (allSubWorkflows == null || allSubWorkflows.isEmpty()) {
putMsg(result, Status.SUB_WORKFLOW_INSTANCE_NOT_EXIST, taskId);
throw new ServiceException(Status.SUB_WORKFLOW_INSTANCE_NOT_EXIST, taskId);
}
Long subWorkflowCode = allSubWorkflows.get(0).getProcessDefinitionCode();
int subWorkflowVersion = allSubWorkflows.get(0).getProcessDefinitionVersion();
Long subWorkflowCode = allSubWorkflows.get(0).getWorkflowDefinitionCode();
int subWorkflowVersion = allSubWorkflows.get(0).getWorkflowDefinitionVersion();
WorkflowDefinition subWorkflowDefinition =
processService.findProcessDefinition(subWorkflowCode, subWorkflowVersion);
processService.findWorkflowDefinition(subWorkflowCode, subWorkflowVersion);
if (subWorkflowDefinition == null) {
putMsg(result, Status.WORKFLOW_DEFINITION_NOT_EXIST, subWorkflowCode);
throw new ServiceException(Status.WORKFLOW_DEFINITION_NOT_EXIST, subWorkflowCode);
@ -514,7 +514,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
int index = 1;
for (WorkflowInstance workflowInstance : allSubWorkflows) {
DynamicSubWorkflowDto dynamicSubWorkflowDto = new DynamicSubWorkflowDto();
dynamicSubWorkflowDto.setProcessInstanceId(workflowInstance.getId());
dynamicSubWorkflowDto.setWorkflowInstanceId(workflowInstance.getId());
dynamicSubWorkflowDto.setIndex(index);
dynamicSubWorkflowDto.setState(workflowInstance.getState());
dynamicSubWorkflowDto.setName(subWorkflowDefinition.getName());
@ -569,7 +569,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
}
WorkflowInstance subWorkflowInstance = processService.findSubWorkflowInstance(
taskInstance.getProcessInstanceId(), taskInstance.getId());
taskInstance.getWorkflowInstanceId(), taskInstance.getId());
if (subWorkflowInstance == null) {
log.error("Sub workflow instance does not exist, projectCode:{}, taskInstanceId:{}.", projectCode,
taskInstance.getId());
@ -577,7 +577,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
return result;
}
Map<String, Object> dataMap = new HashMap<>();
dataMap.put(Constants.SUBPROCESS_INSTANCE_ID, subWorkflowInstance.getId());
dataMap.put(Constants.SUBWORKFLOW_INSTANCE_ID, subWorkflowInstance.getId());
result.put(DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
@ -615,10 +615,10 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
.orElseThrow(() -> new ServiceException(WORKFLOW_INSTANCE_NOT_EXIST, workflowInstanceId));
// check workflow instance exists in project
WorkflowDefinition workflowDefinition0 =
workflowDefinitionMapper.queryByCode(workflowInstance.getProcessDefinitionCode());
workflowDefinitionMapper.queryByCode(workflowInstance.getWorkflowDefinitionCode());
if (workflowDefinition0 != null && projectCode != workflowDefinition0.getProjectCode()) {
log.error("workflow definition does not exist, projectCode:{}, workflowDefinitionCode:{}.", projectCode,
workflowInstance.getProcessDefinitionCode());
workflowInstance.getWorkflowDefinitionCode());
putMsg(result, WORKFLOW_INSTANCE_NOT_EXIST, workflowInstanceId);
return result;
}
@ -662,7 +662,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
throw new ServiceException(Status.UPDATE_TASK_DEFINITION_ERROR);
}
WorkflowDefinition workflowDefinition =
workflowDefinitionMapper.queryByCode(workflowInstance.getProcessDefinitionCode());
workflowDefinitionMapper.queryByCode(workflowInstance.getWorkflowDefinitionCode());
List<WorkflowTaskRelationLog> taskRelationList =
JSONUtils.toList(taskRelationJson, WorkflowTaskRelationLog.class);
// check workflow json is valid
@ -705,7 +705,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
putMsg(result, Status.UPDATE_WORKFLOW_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_WORKFLOW_DEFINITION_ERROR);
}
workflowInstance.setProcessDefinitionVersion(insertVersion);
workflowInstance.setWorkflowDefinitionVersion(insertVersion);
boolean update = workflowInstanceDao.updateById(workflowInstance);
if (!update) {
log.error(
@ -761,7 +761,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
WorkflowInstance subInstance = processService.findWorkflowInstanceDetailById(subId)
.orElseThrow(() -> new ServiceException(WORKFLOW_INSTANCE_NOT_EXIST, subId));
if (subInstance.getIsSubProcess() == Flag.NO) {
if (subInstance.getIsSubWorkflow() == Flag.NO) {
log.warn(
"workflow instance is not sub workflow instance type, workflowInstanceId:{}, workflowInstanceName:{}.",
subId, subInstance.getName());
@ -796,7 +796,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
WorkflowInstance workflowInstance = processService.findWorkflowInstanceDetailById(workflowInstanceId)
.orElseThrow(() -> new ServiceException(WORKFLOW_INSTANCE_NOT_EXIST, workflowInstanceId));
WorkflowDefinition workflowDefinition = workflowDefinitionLogMapper.queryByDefinitionCodeAndVersion(
workflowInstance.getProcessDefinitionCode(), workflowInstance.getProcessDefinitionVersion());
workflowInstance.getWorkflowDefinitionCode(), workflowInstance.getWorkflowDefinitionVersion());
Project project = projectMapper.queryByCode(workflowDefinition.getProjectCode());
// check user access for project
@ -833,10 +833,10 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
}
WorkflowDefinition workflowDefinition =
workflowDefinitionMapper.queryByCode(workflowInstance.getProcessDefinitionCode());
workflowDefinitionMapper.queryByCode(workflowInstance.getWorkflowDefinitionCode());
if (workflowDefinition != null && projectCode != workflowDefinition.getProjectCode()) {
log.error("workflow definition does not exist, projectCode:{}, workflowDefinitionCode:{}.", projectCode,
workflowInstance.getProcessDefinitionCode());
workflowInstance.getWorkflowDefinitionCode());
putMsg(result, WORKFLOW_INSTANCE_NOT_EXIST, workflowInstanceId);
return result;
}
@ -884,7 +884,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
Map<String, String> timeParams) {
Map<String, Map<String, Object>> localUserDefParams = new HashMap<>();
List<TaskInstance> taskInstanceList =
taskInstanceMapper.findValidTaskListByProcessId(workflowInstance.getId(), Flag.YES,
taskInstanceMapper.findValidTaskListByWorkflowInstanceId(workflowInstance.getId(), Flag.YES,
workflowInstance.getTestFlag());
for (TaskInstance taskInstance : taskInstanceList) {
TaskDefinitionLog taskDefinitionLog = taskDefinitionLogMapper.queryByDefinitionCodeAndVersion(
@ -927,11 +927,11 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
}
WorkflowDefinition workflowDefinition = workflowDefinitionLogMapper.queryByDefinitionCodeAndVersion(
workflowInstance.getProcessDefinitionCode(),
workflowInstance.getProcessDefinitionVersion());
workflowInstance.getWorkflowDefinitionCode(),
workflowInstance.getWorkflowDefinitionVersion());
if (workflowDefinition == null || projectCode != workflowDefinition.getProjectCode()) {
log.error("workflow definition does not exist, projectCode:{}, workflowDefinitionCode:{}.", projectCode,
workflowInstance.getProcessDefinitionCode());
workflowInstance.getWorkflowDefinitionCode());
putMsg(result, WORKFLOW_INSTANCE_NOT_EXIST, workflowInstanceId);
return result;
}
@ -944,12 +944,12 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
List<Task> taskList = new ArrayList<>();
if (CollectionUtils.isNotEmpty(nodeList)) {
List<TaskInstance> taskInstances = taskInstanceMapper.queryByProcessInstanceIdsAndTaskCodes(
List<TaskInstance> taskInstances = taskInstanceMapper.queryByWorkflowInstanceIdsAndTaskCodes(
Collections.singletonList(workflowInstanceId), nodeList);
for (Long node : nodeList) {
TaskInstance taskInstance = null;
for (TaskInstance instance : taskInstances) {
if (instance.getProcessInstanceId() == workflowInstanceId && instance.getTaskCode() == node) {
if (instance.getWorkflowInstanceId() == workflowInstanceId && instance.getTaskCode() == node) {
taskInstance = instance;
break;
}
@ -987,7 +987,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
*/
@Override
public List<WorkflowInstance> queryByWorkflowDefinitionCodeAndStatus(Long workflowDefinitionCode, int[] states) {
return workflowInstanceMapper.queryByProcessDefineCodeAndStatus(workflowDefinitionCode, states);
return workflowInstanceMapper.queryByWorkflowDefinitionCodeAndStatus(workflowDefinitionCode, states);
}
@Override
@ -1006,7 +1006,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
*/
@Override
public List<WorkflowInstance> queryByWorkflowDefinitionCode(Long workflowDefinitionCode, int size) {
return workflowInstanceMapper.queryByProcessDefineCode(workflowDefinitionCode, size);
return workflowInstanceMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode, size);
}
/**
@ -1039,7 +1039,7 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl implements Work
public void deleteWorkflowInstanceByWorkflowDefinitionCode(long workflowDefinitionCode) {
while (true) {
List<WorkflowInstance> workflowInstances =
workflowInstanceMapper.queryByProcessDefineCode(workflowDefinitionCode, 100);
workflowInstanceMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode, 100);
if (CollectionUtils.isEmpty(workflowInstances)) {
break;
}

View File

@ -136,35 +136,35 @@ public class WorkflowLineageServiceImpl extends BaseServiceImpl implements Workf
private List<WorkFlowRelation> getWorkFlowRelations(List<WorkflowTaskLineage> workflowTaskLineageList) {
List<WorkFlowRelation> workFlowRelations = new ArrayList<>();
List<Long> processDefinitionCodes = workflowTaskLineageList.stream()
List<Long> workflowDefinitionCodes = workflowTaskLineageList.stream()
.map(WorkflowTaskLineage::getWorkflowDefinitionCode).distinct().collect(Collectors.toList());
for (WorkflowTaskLineage workflowTaskLineage : workflowTaskLineageList) {
workFlowRelations.add(new WorkFlowRelation(workflowTaskLineage.getDeptWorkflowDefinitionCode(),
workflowTaskLineage.getWorkflowDefinitionCode()));
if (!processDefinitionCodes.contains(workflowTaskLineage.getDeptWorkflowDefinitionCode())) {
if (!workflowDefinitionCodes.contains(workflowTaskLineage.getDeptWorkflowDefinitionCode())) {
workFlowRelations.add(new WorkFlowRelation(0, workflowTaskLineage.getWorkflowDefinitionCode()));
}
}
return workFlowRelations;
}
private List<WorkFlowRelationDetail> getWorkflowRelationDetails(List<Long> processDefinitionCodes) {
private List<WorkFlowRelationDetail> getWorkflowRelationDetails(List<Long> workflowDefinitionCodes) {
List<WorkFlowRelationDetail> workFlowRelationDetails = new ArrayList<>();
for (Long processDefinitionCode : processDefinitionCodes) {
for (Long workflowDefinitionCode : workflowDefinitionCodes) {
List<WorkFlowRelationDetail> workFlowRelationDetailList =
workflowTaskLineageDao.queryWorkFlowLineageByCode(processDefinitionCode);
workflowTaskLineageDao.queryWorkFlowLineageByCode(workflowDefinitionCode);
workFlowRelationDetails.addAll(workFlowRelationDetailList);
}
return workFlowRelationDetails;
}
/**
* Query tasks depend on process definition, include upstream or downstream
* Query tasks depend on workflow definition, include upstream or downstream
* and return tasks dependence with string format.
*
* @param projectCode Project code want to query tasks dependence
* @param workflowDefinitionCode Process definition code want to query tasks dependence
* @param workflowDefinitionCode workflow definition code want to query tasks dependence
* @param taskCode Task code want to query tasks dependence
* @return Optional of formatter message
*/
@ -174,15 +174,15 @@ public class WorkflowLineageServiceImpl extends BaseServiceImpl implements Workf
if (taskCode != 0) {
queryTaskCode = taskCode;
}
List<WorkflowTaskLineage> dependentProcessList =
List<WorkflowTaskLineage> dependentWorkflowList =
workflowTaskLineageDao.queryWorkFlowLineageByDept(projectCode, workflowDefinitionCode, queryTaskCode);
if (CollectionUtils.isEmpty(dependentProcessList)) {
if (CollectionUtils.isEmpty(dependentWorkflowList)) {
return Optional.empty();
}
List<String> taskDepStrList = new ArrayList<>();
for (WorkflowTaskLineage workflowTaskLineage : dependentProcessList) {
for (WorkflowTaskLineage workflowTaskLineage : dependentWorkflowList) {
WorkflowDefinition workflowDefinition =
workflowDefinitionMapper.queryByCode(workflowTaskLineage.getDeptWorkflowDefinitionCode());
String taskName = "";
@ -207,10 +207,10 @@ public class WorkflowLineageServiceImpl extends BaseServiceImpl implements Workf
}
/**
* Query downstream tasks depend on a process definition or a task
* Query downstream tasks depend on a workflow definition or a task
*
* @param workflowDefinitionCode Process definition code want to query tasks dependence
* @return downstream dependent process definition list
* @param workflowDefinitionCode workflow definition code want to query tasks dependence
* @return downstream dependent workflow definition list
*/
@Override
public List<DependentWorkflowDefinition> queryDownstreamDependentWorkflowDefinitions(Long workflowDefinitionCode) {
@ -232,21 +232,21 @@ public class WorkflowLineageServiceImpl extends BaseServiceImpl implements Workf
for (TaskDefinition taskDefinition : taskDefinitionList) {
DependentWorkflowDefinition dependentWorkflowDefinition = new DependentWorkflowDefinition();
workflowTaskLineageList.stream()
.filter(processLineage -> processLineage.getDeptTaskDefinitionCode() == taskDefinition.getCode())
.filter(workflowLineage -> workflowLineage.getDeptTaskDefinitionCode() == taskDefinition.getCode())
.findFirst()
.ifPresent(processLineage -> {
.ifPresent(workflowLineage -> {
dependentWorkflowDefinition
.setProcessDefinitionCode(processLineage.getDeptWorkflowDefinitionCode());
.setWorkflowDefinitionCode(workflowLineage.getDeptWorkflowDefinitionCode());
dependentWorkflowDefinition.setTaskDefinitionCode(taskDefinition.getCode());
dependentWorkflowDefinition.setTaskParams(taskDefinition.getTaskParams());
dependentWorkflowDefinition.setWorkerGroup(taskDefinition.getWorkerGroup());
});
workflowDefinitionList.stream()
.filter(processDefinition -> processDefinition.getCode() == dependentWorkflowDefinition
.getProcessDefinitionCode())
.filter(workflowDefinition -> workflowDefinition.getCode() == dependentWorkflowDefinition
.getWorkflowDefinitionCode())
.findFirst()
.ifPresent(processDefinition -> {
dependentWorkflowDefinition.setProcessDefinitionVersion(processDefinition.getVersion());
.ifPresent(workflowDefinition -> {
dependentWorkflowDefinition.setWorkflowDefinitionVersion(workflowDefinition.getVersion());
});
}
@ -280,13 +280,13 @@ public class WorkflowLineageServiceImpl extends BaseServiceImpl implements Workf
dependentLineageTask.setTaskDefinitionName(taskDefinition.getName());
});
workflowDefinitionList.stream()
.filter(processDefinition -> processDefinition.getCode() == workflowTaskLineage
.filter(workflowDefinition -> workflowDefinition.getCode() == workflowTaskLineage
.getWorkflowDefinitionCode())
.findFirst()
.ifPresent(processDefinition -> {
dependentLineageTask.setProcessDefinitionCode(processDefinition.getCode());
dependentLineageTask.setProcessDefinitionName(processDefinition.getName());
dependentLineageTask.setProjectCode(processDefinition.getProjectCode());
.ifPresent(workflowDefinition -> {
dependentLineageTask.setWorkflowDefinitionCode(workflowDefinition.getCode());
dependentLineageTask.setWorkflowDefinitionName(workflowDefinition.getName());
dependentLineageTask.setProjectCode(workflowDefinition.getProjectCode());
});
dependentLineageTaskList.add(dependentLineageTask);
}

View File

@ -132,7 +132,7 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements
}
updateWorkflowDefiniteVersion(loginUser, result, workflowDefinition);
List<WorkflowTaskRelation> workflowTaskRelationList =
workflowTaskRelationMapper.queryByProcessCode(workflowDefinitionCode);
workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode);
List<WorkflowTaskRelation> workflowTaskRelations = Lists.newArrayList(workflowTaskRelationList);
if (!workflowTaskRelations.isEmpty()) {
Map<Long, WorkflowTaskRelation> preTaskCodeMap =
@ -190,8 +190,8 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements
private void updateVersions(WorkflowTaskRelation workflowTaskRelation) {
// workflow
WorkflowDefinition workflowDefinition =
workflowDefinitionMapper.queryByCode(workflowTaskRelation.getProcessDefinitionCode());
workflowTaskRelation.setProcessDefinitionVersion(workflowDefinition.getVersion());
workflowDefinitionMapper.queryByCode(workflowTaskRelation.getWorkflowDefinitionCode());
workflowTaskRelation.setWorkflowDefinitionVersion(workflowDefinition.getVersion());
// tasks
TaskDefinition preTaskDefinition = taskDefinitionMapper.queryByCode(workflowTaskRelation.getPreTaskCode());
@ -211,12 +211,12 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements
@Transactional
public WorkflowTaskRelation createWorkflowTaskRelationV2(User loginUser,
TaskRelationCreateRequest taskRelationCreateRequest) {
WorkflowTaskRelation workflowTaskRelation = taskRelationCreateRequest.convert2ProcessTaskRelation();
WorkflowTaskRelation workflowTaskRelation = taskRelationCreateRequest.convert2WorkflowTaskRelation();
WorkflowDefinition workflowDefinition =
workflowDefinitionMapper.queryByCode(workflowTaskRelation.getProcessDefinitionCode());
workflowDefinitionMapper.queryByCode(workflowTaskRelation.getWorkflowDefinitionCode());
if (workflowDefinition == null) {
throw new ServiceException(Status.WORKFLOW_DEFINITION_NOT_EXIST,
String.valueOf(workflowTaskRelation.getProcessDefinitionCode()));
String.valueOf(workflowTaskRelation.getWorkflowDefinitionCode()));
}
if (workflowTaskRelation.getProjectCode() == 0) {
workflowTaskRelation.setProjectCode(workflowDefinition.getProjectCode());
@ -241,8 +241,8 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements
Date now = new Date();
WorkflowTaskRelation workflowTaskRelation = new WorkflowTaskRelation();
workflowTaskRelation.setProjectCode(workflowDefinition.getProjectCode());
workflowTaskRelation.setProcessDefinitionCode(workflowDefinition.getCode());
workflowTaskRelation.setProcessDefinitionVersion(workflowDefinition.getVersion());
workflowTaskRelation.setWorkflowDefinitionCode(workflowDefinition.getCode());
workflowTaskRelation.setWorkflowDefinitionVersion(workflowDefinition.getVersion());
workflowTaskRelation.setPostTaskCode(taskDefinition.getCode());
workflowTaskRelation.setPostTaskVersion(taskDefinition.getVersion());
workflowTaskRelation.setConditionType(ConditionType.NONE);
@ -307,7 +307,7 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements
return result;
}
List<WorkflowTaskRelation> workflowTaskRelations =
workflowTaskRelationMapper.queryByProcessCode(workflowDefinitionCode);
workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode);
List<WorkflowTaskRelation> workflowTaskRelationList = Lists.newArrayList(workflowTaskRelations);
if (CollectionUtils.isEmpty(workflowTaskRelationList)) {
log.error("workflow task relations are empty, projectCode:{}, workflowDefinitionCode:{}.", projectCode,
@ -369,7 +369,7 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements
new Page<>(new TaskRelationFilterRequest(preTaskCode, postTaskCode).getPageNo(),
new TaskRelationFilterRequest(preTaskCode, postTaskCode).getPageSize());
IPage<WorkflowTaskRelation> workflowTaskRelationIPage =
workflowTaskRelationMapper.filterProcessTaskRelation(page, workflowTaskRelation);
workflowTaskRelationMapper.filterWorkflowTaskRelation(page, workflowTaskRelation);
List<WorkflowTaskRelation> workflowTaskRelations = workflowTaskRelationIPage.getRecords();
if (workflowTaskRelations.size() != 1) {
@ -408,13 +408,13 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements
Page<WorkflowTaskRelation> page = new Page<>(taskRelationUpdateUpstreamRequest.getPageNo(),
taskRelationUpdateUpstreamRequest.getPageSize());
IPage<WorkflowTaskRelation> workflowTaskRelationExistsIPage =
workflowTaskRelationMapper.filterProcessTaskRelation(page, workflowTaskRelation);
workflowTaskRelationMapper.filterWorkflowTaskRelation(page, workflowTaskRelation);
List<WorkflowTaskRelation> workflowTaskRelationExists = workflowTaskRelationExistsIPage.getRecords();
WorkflowDefinition workflowDefinition = null;
if (CollectionUtils.isNotEmpty(workflowTaskRelationExists)) {
workflowDefinition =
workflowDefinitionMapper.queryByCode(workflowTaskRelationExists.get(0).getProcessDefinitionCode());
workflowDefinitionMapper.queryByCode(workflowTaskRelationExists.get(0).getWorkflowDefinitionCode());
} else if (taskRelationUpdateUpstreamRequest.getWorkflowCode() != 0L) {
workflowDefinition =
workflowDefinitionMapper.queryByCode(taskRelationUpdateUpstreamRequest.getWorkflowCode());
@ -487,7 +487,7 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements
log.info(
"Save workflow task relations complete, projectCode:{}, workflowDefinitionCode:{}, workflowDefinitionVersion:{}.",
workflowDefinition.getProjectCode(), workflowDefinition.getCode(), insertVersion);
workflowTaskRelations.get(0).setProcessDefinitionVersion(insertVersion);
workflowTaskRelations.get(0).setWorkflowDefinitionVersion(insertVersion);
return workflowTaskRelations;
}
@ -496,7 +496,7 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements
long projectCode = workflowDefinition.getProjectCode();
long workflowDefinitionCode = workflowDefinition.getCode();
List<WorkflowTaskRelation> taskRelations =
workflowTaskRelationMapper.queryByProcessCode(workflowDefinitionCode);
workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode);
List<WorkflowTaskRelationLog> taskRelationList =
taskRelations.stream().map(WorkflowTaskRelationLog::new).collect(Collectors.toList());
@ -518,8 +518,8 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements
Date now = new Date();
for (WorkflowTaskRelationLog workflowTaskRelationLog : taskRelationList) {
workflowTaskRelationLog.setProjectCode(projectCode);
workflowTaskRelationLog.setProcessDefinitionCode(workflowDefinitionCode);
workflowTaskRelationLog.setProcessDefinitionVersion(workflowDefinitionVersion);
workflowTaskRelationLog.setWorkflowDefinitionCode(workflowDefinitionCode);
workflowTaskRelationLog.setWorkflowDefinitionVersion(workflowDefinitionVersion);
if (taskDefinitionLogMap != null) {
TaskDefinitionLog preTaskDefinitionLog =
taskDefinitionLogMap.get(workflowTaskRelationLog.getPreTaskCode());
@ -547,7 +547,7 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements
if (isSame) {
return Constants.EXIT_CODE_SUCCESS;
}
workflowTaskRelationMapper.deleteByCode(projectCode, workflowDefinitionCode);
workflowTaskRelationMapper.deleteByWorkflowDefinitionCode(projectCode, workflowDefinitionCode);
}
List<WorkflowTaskRelation> workflowTaskRelations =
taskRelationList.stream().map(WorkflowTaskRelation::new).collect(Collectors.toList());
@ -648,16 +648,16 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements
return result;
}
WorkflowDefinition workflowDefinition =
workflowDefinitionMapper.queryByCode(upstreamList.get(0).getProcessDefinitionCode());
workflowDefinitionMapper.queryByCode(upstreamList.get(0).getWorkflowDefinitionCode());
if (workflowDefinition == null) {
log.error("workflow definition does not exist, workflowDefinitionCode:{}.",
upstreamList.get(0).getProcessDefinitionCode());
upstreamList.get(0).getWorkflowDefinitionCode());
putMsg(result, Status.WORKFLOW_DEFINITION_NOT_EXIST,
String.valueOf(upstreamList.get(0).getProcessDefinitionCode()));
String.valueOf(upstreamList.get(0).getWorkflowDefinitionCode()));
return result;
}
List<WorkflowTaskRelation> workflowTaskRelations =
workflowTaskRelationMapper.queryByProcessCode(workflowDefinition.getCode());
workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinition.getCode());
List<WorkflowTaskRelation> workflowTaskRelationList = Lists.newArrayList(workflowTaskRelations);
List<WorkflowTaskRelation> workflowTaskRelationWaitRemove = Lists.newArrayList();
for (WorkflowTaskRelation workflowTaskRelation : workflowTaskRelationList) {
@ -719,16 +719,16 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements
return result;
}
WorkflowDefinition workflowDefinition =
workflowDefinitionMapper.queryByCode(downstreamList.get(0).getProcessDefinitionCode());
workflowDefinitionMapper.queryByCode(downstreamList.get(0).getWorkflowDefinitionCode());
if (workflowDefinition == null) {
log.error("workflow definition does not exist, workflowDefinitionCode:{}.",
downstreamList.get(0).getProcessDefinitionCode());
downstreamList.get(0).getWorkflowDefinitionCode());
putMsg(result, Status.WORKFLOW_DEFINITION_NOT_EXIST,
String.valueOf(downstreamList.get(0).getProcessDefinitionCode()));
String.valueOf(downstreamList.get(0).getWorkflowDefinitionCode()));
return result;
}
List<WorkflowTaskRelation> workflowTaskRelations =
workflowTaskRelationMapper.queryByProcessCode(workflowDefinition.getCode());
workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinition.getCode());
List<WorkflowTaskRelation> workflowTaskRelationList = Lists.newArrayList(workflowTaskRelations);
workflowTaskRelationList
.removeIf(workflowTaskRelation -> postTaskCodeList.contains(workflowTaskRelation.getPostTaskCode())
@ -840,7 +840,7 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements
return result;
}
List<WorkflowTaskRelation> workflowTaskRelations =
workflowTaskRelationMapper.queryByProcessCode(workflowDefinitionCode);
workflowTaskRelationMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode);
List<WorkflowTaskRelation> workflowTaskRelationList = Lists.newArrayList(workflowTaskRelations);
if (CollectionUtils.isEmpty(workflowTaskRelationList)) {
log.error("workflow task relations are empty, projectCode:{}, workflowDefinitionCode:{}.", projectCode,
@ -897,13 +897,14 @@ public class WorkflowTaskRelationServiceImpl extends BaseServiceImpl implements
@Override
public List<WorkflowTaskRelation> queryByWorkflowDefinitionCode(long workflowDefinitionCode,
int workflowDefinitionVersion) {
return workflowTaskRelationMapper.queryProcessTaskRelationsByProcessDefinitionCode(workflowDefinitionCode,
return workflowTaskRelationMapper.queryWorkflowTaskRelationsByWorkflowDefinitionCode(workflowDefinitionCode,
workflowDefinitionVersion);
}
@Override
public void deleteByWorkflowDefinitionCode(long workflowDefinitionCode, int workflowDefinitionVersion) {
workflowTaskRelationMapper.deleteByWorkflowDefinitionCode(workflowDefinitionCode, workflowDefinitionVersion);
workflowTaskRelationMapper.deleteByWorkflowDefinitionCodeAndVersion(workflowDefinitionCode,
workflowDefinitionVersion);
}
/**

View File

@ -105,7 +105,7 @@ public class BackfillWorkflowRequestTransformer implements ITransformer<Workflow
@SneakyThrows
private List<ZonedDateTime> parseBackfillDateList(WorkflowBackFillRequest workflowBackFillRequest) {
final WorkflowBackFillRequest.BackfillTime backfillTime = workflowBackFillRequest.getBackfillTime();
List<Schedule> schedules = processService.queryReleaseSchedulerListByProcessDefinitionCode(
List<Schedule> schedules = processService.queryReleaseSchedulerListByWorkflowDefinitionCode(
workflowBackFillRequest.getWorkflowDefinitionCode());
if (StringUtils.isNotEmpty(backfillTime.getComplementStartDate())

View File

@ -34,34 +34,16 @@ public class ScheduleVO {
private int id;
/**
* process definition code
*/
private long processDefinitionCode;
private long workflowDefinitionCode;
/**
* process definition name
*/
private String processDefinitionName;
private String workflowDefinitionName;
/**
* project name
*/
private String projectName;
/**
* schedule description
*/
private String definitionDescription;
/**
* schedule start time
*/
private String startTime;
/**
* schedule end time
*/
private String endTime;
/**
@ -70,74 +52,32 @@ public class ScheduleVO {
*/
private String timezoneId;
/**
* crontab expression
*/
private String crontab;
/**
* failure strategy
*/
private FailureStrategy failureStrategy;
/**
* warning type
*/
private WarningType warningType;
/**
* create time
*/
private Date createTime;
/**
* update time
*/
private Date updateTime;
/**
* created user id
*/
private int userId;
/**
* created user name
*/
private String userName;
/**
* release state
*/
private ReleaseState releaseState;
/**
* warning group id
*/
private int warningGroupId;
/**
* process instance priority
*/
private Priority processInstancePriority;
private Priority workflowInstancePriority;
/**
* worker group
*/
private String workerGroup;
/**
* tenantCode
*/
private String tenantCode;
/**
* environment code
*/
private Long environmentCode;
/**
* environment name
*/
private String environmentName;
public ScheduleVO(Schedule schedule) {
@ -153,9 +93,9 @@ public class ScheduleVO {
this.setUpdateTime(schedule.getUpdateTime());
this.setTimezoneId(schedule.getTimezoneId());
this.setReleaseState(schedule.getReleaseState());
this.setProcessInstancePriority(schedule.getProcessInstancePriority());
this.setProcessDefinitionName(schedule.getProcessDefinitionName());
this.setProcessDefinitionCode(schedule.getProcessDefinitionCode());
this.setWorkflowInstancePriority(schedule.getWorkflowInstancePriority());
this.setWorkflowDefinitionName(schedule.getWorkflowDefinitionName());
this.setWorkflowDefinitionCode(schedule.getWorkflowDefinitionCode());
this.setFailureStrategy(schedule.getFailureStrategy());
this.setEnvironmentCode(schedule.getEnvironmentCode());
this.setStartTime(DateUtils.dateToString(schedule.getStartTime(), ZoneId.systemDefault().getId()));

View File

@ -24,18 +24,14 @@ import org.apache.dolphinscheduler.dao.entity.WorkflowTaskRelation;
import java.util.List;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.springframework.beans.BeanUtils;
/**
* @author fanwanlong
*/
@EqualsAndHashCode(callSuper = true)
@Data
public class TaskDefinitionVO extends TaskDefinition {
/**
* process task related list
*/
private List<WorkflowTaskRelation> workflowTaskRelationList;
public TaskDefinitionVO() {

View File

@ -21,7 +21,7 @@ dynamic-task:
cloud:
- {name: EMR,icon: shell-icon.png,hover: shell-hover.png}
logic:
- {name: SUB_PROCESS,icon: shell-icon.png,hover: shell-hover.png}
- {name: SUB_WORKFLOW,icon: shell-icon.png,hover: shell-hover.png}
dataIntegration:
- {name: SEATUNNEL,icon: shell-icon.png,hover: shell-hover.png}
dataQuality:

View File

@ -43,7 +43,7 @@ UPDATE_ALERT_PLUGIN_INSTANCE_NOTES=update alert plugin instance operation
CREATE_ALERT_PLUGIN_INSTANCE_NOTES=create alert plugin instance operation
DELETE_ALERT_PLUGIN_INSTANCE_NOTES=delete alert plugin instance operation
QUERY_ALERT_PLUGIN_INSTANCE_LIST_PAGING_NOTES=query alert plugin instance paging
QUERY_TOPN_LONGEST_RUNNING_PROCESS_INSTANCE_NOTES=query topN longest running process instance
QUERY_TOPN_LONGEST_RUNNING_WORKFLOW_INSTANCE_NOTES=query topN longest running workflow instance
ALERT_PLUGIN_INSTANCE_NAME=alert plugin instance name
ALERT_PLUGIN_DEFINE_ID=alert plugin define id
ALERT_PLUGIN_ID=alert plugin id
@ -175,7 +175,7 @@ SEARCH_VAL=search val
USER_ID=user id
FORCE_TASK_SUCCESS=force task success
QUERY_TASK_INSTANCE_LIST_PAGING_NOTES=query task instance list paging
PROCESS_INSTANCE_NAME=process instance name
WORKFLOW_INSTANCE_NAME=workflow instance name
TASK_INSTANCE_ID=task instance id
VERIFY_TENANT_CODE_NOTES=verify tenant code
QUERY_UI_PLUGIN_DETAIL_BY_ID=query ui plugin detail by id
@ -201,7 +201,7 @@ BATCH_DELETE_WORKFLOW_INSTANCE_BY_IDS_NOTES=batch delete workflow instance by wo
QUERY_WORKFLOW_INSTANCE_BY_ID_NOTES=query process instance by process instance id
DELETE_WORKFLOW_INSTANCE_BY_ID_NOTES=delete process instance by process instance id
TASK_ID=task instance id
PROCESS_INSTANCE_IDS=process_instance ids, delimiter by "," if more than one id
WORKFLOW_INSTANCE_IDS=workflow instance ids, delimiter by "," if more than one id
SKIP_LINE_NUM=skip line num
QUERY_TASK_INSTANCE_LOG_NOTES=query task instance log
DOWNLOAD_TASK_INSTANCE_LOG_NOTES=download task instance log

View File

@ -60,7 +60,7 @@ CREATE_DATA_SOURCE_NOTES=\u521B\u5EFA\u6570\u636E\u6E90
DATA_SOURCE_NAME=\u6570\u636E\u6E90\u540D\u79F0
DB_TYPE=\u6570\u636E\u6E90\u7C7B\u578B
QUEUE_TAG=\u961F\u5217\u76F8\u5173\u64CD\u4F5C
QUERY_TOPN_LONGEST_RUNNING_PROCESS_INSTANCE_NOTES=\u67E5\u8BE2topN\u6700\u957F\u8FD0\u884C\u6D41\u7A0B\u5B9E\u4F8B
QUERY_TOPN_LONGEST_RUNNING_WORKFLOW_INSTANCE_NOTES=\u67E5\u8BE2topN\u6700\u957F\u8FD0\u884C\u5DE5\u4F5C\u6D41\u5B9E\u4F8B
QUERY_QUEUE_LIST_NOTES=\u67E5\u8BE2\u961F\u5217\u5217\u8868
QUERY_QUEUE_LIST_PAGING_NOTES=\u5206\u9875\u67E5\u8BE2\u961F\u5217\u5217\u8868
CREATE_QUEUE_NOTES=\u521B\u5EFA\u961F\u5217
@ -150,15 +150,15 @@ QUERY_WORKFLOW_DEFINITION_LIST_NOTES=\u67E5\u8BE2\u6D41\u7A0B\u5B9A\u4E49\u5217\
QUERY_WORKFLOW_DEFINITION_LIST_PAGING_NOTES=\u5206\u9875\u67E5\u8BE2\u6D41\u7A0B\u5B9A\u4E49\u5217\u8868
QUERY_WORKFLOW_DEFINITION_BY_CODE_NOTES=\u901A\u8FC7\u5DE5\u4F5C\u6D41\u5B9A\u4E49code\u67E5\u8BE2\u5DE5\u4F5C\u6D41\u5B9A\u4E49
PAGE_NO=\u9875\u7801\u53F7
WORKFLOW_INSTANCE_ID=\u6D41\u7A0B\u5B9E\u4F8BID
PROCESS_INSTANCE_IDS=\u6D41\u7A0B\u5B9E\u4F8BID\u96C6\u5408\uFF0C\u5982\u679C\u6709\u591A\u4E2A\u6D41\u7A0B\u5B9E\u4F8B\u5219\u7528 "," \u5206\u9694
WORKFLOW_INSTANCE_ID=\u5DE5\u4F5C\u6D41\u5B9E\u4F8BID
WORKFLOW_INSTANCE_IDS=\u5DE5\u4F5C\u6D41\u5B9E\u4F8BID\u96C6\u5408\uFF0C\u5982\u679C\u6709\u591A\u4E2A\u5DE5\u4F5C\u6D41\u5B9E\u4F8B\u5219\u7528 "," \u5206\u9694
PREVIEW_SCHEDULE_NOTES=\u5B9A\u65F6\u8C03\u5EA6\u9884\u89C8
SCHEDULE_TIME=\u5B9A\u65F6\u65F6\u95F4,\u7A7A\u5B57\u7B26\u4E32\u8868\u793A\u5F53\u524D\u5929
SYNC_DEFINE=\u66F4\u65B0\u6D41\u7A0B\u5B9E\u4F8B\u7684\u4FE1\u606F\u662F\u5426\u540C\u6B65\u5230\u6D41\u7A0B\u5B9A\u4E49
SEARCH_VAL=\u641C\u7D22\u503C
FORCE_TASK_SUCCESS=\u5F3A\u5236TASK\u6210\u529F
QUERY_TASK_INSTANCE_LIST_PAGING_NOTES=\u5206\u9875\u67E5\u8BE2\u4EFB\u52A1\u5B9E\u4F8B\u5217\u8868
PROCESS_INSTANCE_NAME=\u6D41\u7A0B\u5B9E\u4F8B\u540D\u79F0
WORKFLOW_INSTANCE_NAME=\u6D41\u7A0B\u5B9E\u4F8B\u540D\u79F0
TASK_INSTANCE_ID=\u4EFB\u52A1\u5B9E\u4F8BID
VERIFY_TENANT_CODE_NOTES=\u9A8C\u8BC1\u79DF\u6237
QUERY_UI_PLUGIN_DETAIL_BY_ID=\u901A\u8FC7ID\u67E5\u8BE2UI\u63D2\u4EF6\u8BE6\u60C5

View File

@ -37,7 +37,7 @@ task:
- 'DATA_FACTORY'
- 'ALIYUN_SERVERLESS_SPARK'
logic:
- 'SUB_PROCESS'
- 'SUB_WORKFLOW'
- 'DEPENDENT'
- 'CONDITIONS'
- 'SWITCH'

View File

@ -43,9 +43,6 @@ import org.springframework.util.MultiValueMap;
import com.fasterxml.jackson.core.type.TypeReference;
/**
* data analysis controller test
*/
public class DataAnalysisControllerTest extends AbstractControllerTest {
private static final Logger logger = LoggerFactory.getLogger(DataAnalysisControllerTest.class);
@ -95,7 +92,7 @@ public class DataAnalysisControllerTest extends AbstractControllerTest {
paramsMap.add("endDate", "2019-12-28 00:00:00");
paramsMap.add("projectCode", "16");
MvcResult mvcResult = mockMvc.perform(get("/projects/analysis/process-state-count")
MvcResult mvcResult = mockMvc.perform(get("/projects/analysis/workflow-state-count")
.header("sessionId", sessionId)
.params(paramsMap))
.andExpect(status().isOk())

View File

@ -72,7 +72,7 @@ public class SchedulerControllerTest extends AbstractControllerTest {
@Test
public void testCreateSchedule() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("processDefinitionCode", "40");
paramsMap.add("workflowDefinitionCode", "40");
paramsMap.add("schedule",
"{'startTime':'2019-12-16 00:00:00','endTime':'2019-12-17 00:00:00','crontab':'0 0 6 * * ? *'}");
paramsMap.add("warningType", String.valueOf(WarningType.NONE));
@ -82,7 +82,7 @@ public class SchedulerControllerTest extends AbstractControllerTest {
paramsMap.add("receiversCc", "");
paramsMap.add("workerGroupId", "1");
paramsMap.add("tenantCode", "root");
paramsMap.add("processInstancePriority", String.valueOf(Priority.HIGH));
paramsMap.add("workflowInstancePriority", String.valueOf(Priority.HIGH));
Mockito.when(schedulerService.insertSchedule(isA(User.class), isA(Long.class), isA(Long.class),
isA(String.class), isA(WarningType.class), isA(int.class), isA(FailureStrategy.class),
@ -113,7 +113,7 @@ public class SchedulerControllerTest extends AbstractControllerTest {
paramsMap.add("receiversCc", "");
paramsMap.add("workerGroupId", "1");
paramsMap.add("tenantCode", "root");
paramsMap.add("processInstancePriority", String.valueOf(Priority.HIGH));
paramsMap.add("workflowInstancePriority", String.valueOf(Priority.HIGH));
Mockito.when(schedulerService.updateSchedule(isA(User.class), isA(Long.class), isA(Integer.class),
isA(String.class), isA(WarningType.class), isA(Integer.class), isA(FailureStrategy.class),
@ -172,7 +172,7 @@ public class SchedulerControllerTest extends AbstractControllerTest {
@Test
public void testQueryScheduleListPaging() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("processDefinitionCode", "40");
paramsMap.add("workflowDefinitionCode", "40");
paramsMap.add("searchVal", "test");
paramsMap.add("pageNo", "1");
paramsMap.add("pageSize", "30");

View File

@ -137,7 +137,7 @@ public class WorkerGroupControllerTest extends AbstractControllerTest {
org.apache.dolphinscheduler.service.utils.Constants.NOT_TERMINATED_STATES))
.thenReturn(null);
Mockito.when(workerGroupMapper.deleteById(12)).thenReturn(1);
Mockito.when(workflowInstanceMapper.updateProcessInstanceByWorkerGroupName("测试", "")).thenReturn(1);
Mockito.when(workflowInstanceMapper.updateWorkflowInstanceByWorkerGroupName("测试", "")).thenReturn(1);
MvcResult mvcResult = mockMvc.perform(delete("/worker-groups/{id}", "12")
.header("sessionId", sessionId))

View File

@ -43,9 +43,6 @@ import org.springframework.test.web.servlet.MvcResult;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
/**
* process instance controller test
*/
public class WorkflowInstanceControllerTest extends AbstractControllerTest {
@MockBean
@ -62,7 +59,7 @@ public class WorkflowInstanceControllerTest extends AbstractControllerTest {
.thenReturn(mockResult);
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("processDefineCode", "91");
paramsMap.add("workflowDefinitionCode", "91");
paramsMap.add("searchVal", "cxc");
paramsMap.add("stateType", WorkflowExecutionStatus.SUCCESS.name());
paramsMap.add("host", "192.168.1.13");
@ -71,7 +68,7 @@ public class WorkflowInstanceControllerTest extends AbstractControllerTest {
paramsMap.add("pageNo", "2");
paramsMap.add("pageSize", "2");
MvcResult mvcResult = mockMvc.perform(get("/projects/1113/process-instances")
MvcResult mvcResult = mockMvc.perform(get("/projects/1113/workflow-instances")
.header("sessionId", sessionId)
.params(paramsMap))
.andExpect(status().isOk())
@ -91,8 +88,9 @@ public class WorkflowInstanceControllerTest extends AbstractControllerTest {
Mockito.any()))
.thenReturn(mockResult);
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectCode}/process-instances/{id}/tasks", "1113", "123")
.header(SESSION_ID, sessionId))
MvcResult mvcResult = mockMvc
.perform(get("/projects/{projectCode}/workflow-instances/{id}/tasks", "1113", "123")
.header(SESSION_ID, sessionId))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON))
.andReturn();
@ -122,13 +120,13 @@ public class WorkflowInstanceControllerTest extends AbstractControllerTest {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("taskRelationJson", json);
paramsMap.add("taskDefinitionJson", "");
paramsMap.add("processInstanceId", "91");
paramsMap.add("workflowInstanceId", "91");
paramsMap.add("scheduleTime", "2019-12-15 00:00:00");
paramsMap.add("syncDefine", "false");
paramsMap.add("locations", locations);
paramsMap.add("tenantCode", "123");
MvcResult mvcResult = mockMvc.perform(put("/projects/{projectCode}/process-instances/{id}", "1113", "123")
MvcResult mvcResult = mockMvc.perform(put("/projects/{projectCode}/workflow-instances/{id}", "1113", "123")
.header("sessionId", sessionId)
.params(paramsMap))
.andExpect(status().isOk())
@ -146,7 +144,7 @@ public class WorkflowInstanceControllerTest extends AbstractControllerTest {
Mockito.when(
workflowInstanceService.queryWorkflowInstanceById(Mockito.any(), Mockito.anyLong(), Mockito.anyInt()))
.thenReturn(mockResult);
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectCode}/process-instances/{id}", "1113", "123")
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectCode}/workflow-instances/{id}", "1113", "123")
.header(SESSION_ID, sessionId))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON))
@ -165,7 +163,7 @@ public class WorkflowInstanceControllerTest extends AbstractControllerTest {
Mockito.anyInt())).thenReturn(mockResult);
MvcResult mvcResult = mockMvc
.perform(get("/projects/{projectCode}/process-instances/query-sub-by-parent", "1113")
.perform(get("/projects/{projectCode}/workflow-instances/query-sub-by-parent", "1113")
.header(SESSION_ID, sessionId)
.param("taskId", "1203"))
.andExpect(status().isOk())
@ -186,7 +184,7 @@ public class WorkflowInstanceControllerTest extends AbstractControllerTest {
.thenReturn(mockResult);
MvcResult mvcResult = mockMvc
.perform(get("/projects/{projectCode}/process-instances/query-parent-by-sub", "1113")
.perform(get("/projects/{projectCode}/workflow-instances/query-parent-by-sub", "1113")
.header(SESSION_ID, sessionId)
.param("subId", "1204"))
.andExpect(status().isOk())
@ -205,7 +203,7 @@ public class WorkflowInstanceControllerTest extends AbstractControllerTest {
mockResult.put(Constants.STATUS, Status.SUCCESS);
Mockito.when(workflowInstanceService.viewVariables(1113L, 123)).thenReturn(mockResult);
MvcResult mvcResult = mockMvc
.perform(get("/projects/{projectCode}/process-instances/{id}/view-variables", "1113", "123")
.perform(get("/projects/{projectCode}/workflow-instances/{id}/view-variables", "1113", "123")
.header(SESSION_ID, sessionId))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON))
@ -221,7 +219,7 @@ public class WorkflowInstanceControllerTest extends AbstractControllerTest {
mockResult.put(Constants.STATUS, Status.SUCCESS);
Mockito.doNothing().when(workflowInstanceService).deleteWorkflowInstanceById(Mockito.any(), Mockito.anyInt());
MvcResult mvcResult = mockMvc.perform(delete("/projects/{projectCode}/process-instances/{id}", "1113", "123")
MvcResult mvcResult = mockMvc.perform(delete("/projects/{projectCode}/workflow-instances/{id}", "1113", "123")
.header(SESSION_ID, sessionId))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON))
@ -238,9 +236,9 @@ public class WorkflowInstanceControllerTest extends AbstractControllerTest {
mockResult.put(Constants.STATUS, Status.WORKFLOW_INSTANCE_NOT_EXIST);
Mockito.doNothing().when(workflowInstanceService).deleteWorkflowInstanceById(Mockito.any(), Mockito.anyInt());
MvcResult mvcResult = mockMvc.perform(post("/projects/{projectCode}/process-instances/batch-delete", "1113")
MvcResult mvcResult = mockMvc.perform(post("/projects/{projectCode}/workflow-instances/batch-delete", "1113")
.header(SESSION_ID, sessionId)
.param("processInstanceIds", "1205,1206"))
.param("workflowInstanceIds", "1205,1206"))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON))
.andReturn();
@ -259,7 +257,7 @@ public class WorkflowInstanceControllerTest extends AbstractControllerTest {
.queryByTriggerCode(Mockito.any(), Mockito.anyLong(), Mockito.anyLong()))
.thenReturn(mockResult);
MvcResult mvcResult = mockMvc.perform(get("/projects/1113/process-instances/trigger")
MvcResult mvcResult = mockMvc.perform(get("/projects/1113/workflow-instances/trigger")
.header("sessionId", sessionId)
.param("triggerCode", "12051206"))
.andExpect(status().isOk())

View File

@ -58,14 +58,11 @@ import com.google.common.collect.ImmutableMap;
import com.google.gson.Gson;
import com.google.gson.JsonObject;
/**
* executor controller test
*/
public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractControllerTest {
final Gson gson = new Gson();
final long projectCode = 1L;
final long processDefinitionCode = 2L;
final long workflowDefinitionCode = 2L;
final String scheduleTime = "scheduleTime";
final FailureStrategy failureStrategy = FailureStrategy.END;
final String startNodeList = "startNodeList";
@ -75,7 +72,7 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr
final int warningGroupId = 3;
final RunMode runMode = RunMode.RUN_MODE_SERIAL;
final ExecutionOrder executionOrder = ExecutionOrder.DESC_ORDER;
final Priority processInstancePriority = Priority.HIGH;
final Priority workflowInstancePriority = Priority.HIGH;
final String workerGroup = "workerGroup";
final String tenantCode = "root";
final Long environmentCode = 4L;
@ -99,10 +96,10 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr
private ExecutorService executorService;
@Test
public void testStartProcessInstanceWithFullParams() throws Exception {
public void testStartWorkflowInstanceWithFullParams() throws Exception {
// Given
final MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("processDefinitionCode", String.valueOf(processDefinitionCode));
paramsMap.add("workflowDefinitionCode", String.valueOf(workflowDefinitionCode));
paramsMap.add("scheduleTime", scheduleTime);
paramsMap.add("failureStrategy", String.valueOf(failureStrategy));
paramsMap.add("startNodeList", startNodeList);
@ -111,7 +108,7 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr
paramsMap.add("warningType", String.valueOf(warningType));
paramsMap.add("warningGroupId", String.valueOf(warningGroupId));
paramsMap.add("runMode", String.valueOf(runMode));
paramsMap.add("processInstancePriority", String.valueOf(processInstancePriority));
paramsMap.add("workflowInstancePriority", String.valueOf(workflowInstancePriority));
paramsMap.add("workerGroup", workerGroup);
paramsMap.add("tenantCode", tenantCode);
paramsMap.add("environmentCode", String.valueOf(environmentCode));
@ -127,7 +124,7 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr
// When
final MvcResult mvcResult = mockMvc
.perform(post("/projects/{projectCode}/executors/start-process-instance", projectCode)
.perform(post("/projects/{projectCode}/executors/start-workflow-instance", projectCode)
.header("sessionId", sessionId)
.params(paramsMap))
.andExpect(status().isOk())
@ -140,10 +137,10 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr
}
@Test
public void testStartProcessInstanceWithoutTimeout() throws Exception {
public void testStartWorkflowInstanceWithoutTimeout() throws Exception {
// Given
final MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("processDefinitionCode", String.valueOf(processDefinitionCode));
paramsMap.add("workflowDefinitionCode", String.valueOf(workflowDefinitionCode));
paramsMap.add("scheduleTime", scheduleTime);
paramsMap.add("failureStrategy", String.valueOf(failureStrategy));
paramsMap.add("startNodeList", startNodeList);
@ -152,7 +149,7 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr
paramsMap.add("warningType", String.valueOf(warningType));
paramsMap.add("warningGroupId", String.valueOf(warningGroupId));
paramsMap.add("runMode", String.valueOf(runMode));
paramsMap.add("processInstancePriority", String.valueOf(processInstancePriority));
paramsMap.add("workflowInstancePriority", String.valueOf(workflowInstancePriority));
paramsMap.add("workerGroup", workerGroup);
paramsMap.add("tenantCode", tenantCode);
paramsMap.add("environmentCode", String.valueOf(environmentCode));
@ -167,7 +164,7 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr
// When
final MvcResult mvcResult = mockMvc
.perform(post("/projects/{projectCode}/executors/start-process-instance", projectCode)
.perform(post("/projects/{projectCode}/executors/start-workflow-instance", projectCode)
.header("sessionId", sessionId)
.params(paramsMap))
.andExpect(status().isOk())
@ -180,10 +177,10 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr
}
@Test
public void testStartProcessInstanceWithoutStartParams() throws Exception {
public void testStartWorkflowInstanceWithoutStartParams() throws Exception {
// Given
final MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("processDefinitionCode", String.valueOf(processDefinitionCode));
paramsMap.add("workflowDefinitionCode", String.valueOf(workflowDefinitionCode));
paramsMap.add("scheduleTime", scheduleTime);
paramsMap.add("failureStrategy", String.valueOf(failureStrategy));
paramsMap.add("startNodeList", startNodeList);
@ -192,7 +189,7 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr
paramsMap.add("warningType", String.valueOf(warningType));
paramsMap.add("warningGroupId", String.valueOf(warningGroupId));
paramsMap.add("runMode", String.valueOf(runMode));
paramsMap.add("processInstancePriority", String.valueOf(processInstancePriority));
paramsMap.add("workflowInstancePriority", String.valueOf(workflowInstancePriority));
paramsMap.add("workerGroup", workerGroup);
paramsMap.add("tenantCode", tenantCode);
paramsMap.add("environmentCode", String.valueOf(environmentCode));
@ -207,7 +204,7 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr
// When
final MvcResult mvcResult = mockMvc
.perform(post("/projects/{projectCode}/executors/start-process-instance", projectCode)
.perform(post("/projects/{projectCode}/executors/start-workflow-instance", projectCode)
.header("sessionId", sessionId)
.params(paramsMap))
.andExpect(status().isOk())
@ -220,10 +217,10 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr
}
@Test
public void testStartProcessInstanceWithRequiredParams() throws Exception {
public void testStartWorkflowInstanceWithRequiredParams() throws Exception {
// Given
final MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("processDefinitionCode", String.valueOf(processDefinitionCode));
paramsMap.add("workflowDefinitionCode", String.valueOf(workflowDefinitionCode));
paramsMap.add("failureStrategy", String.valueOf(failureStrategy));
paramsMap.add("warningType", String.valueOf(warningType));
paramsMap.add("scheduleTime", scheduleTime);
@ -232,7 +229,7 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr
when(executorService.triggerWorkflowDefinition(Mockito.any())).thenReturn(1);
final MvcResult mvcResult = mockMvc
.perform(post("/projects/{projectCode}/executors/start-process-instance", projectCode)
.perform(post("/projects/{projectCode}/executors/start-workflow-instance", projectCode)
.header("sessionId", sessionId)
.params(paramsMap))
.andExpect(status().isOk())
@ -247,17 +244,17 @@ public class WorkflowInstanceExecuteFunctionControllerTest extends AbstractContr
public void testExecuteWithSuccessStatus() throws Exception {
// Given
final ExecuteType executeType = ExecuteType.NONE;
final int processInstanceId = 40;
final int workflowInstanceId = 40;
final long projectCode = 1113;
final MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("processInstanceId", Integer.toString(processInstanceId));
paramsMap.add("workflowInstanceId", Integer.toString(workflowInstanceId));
paramsMap.add("executeType", String.valueOf(executeType));
final JsonObject expectResponseContent = gson
.fromJson("{\"code\":0,\"msg\":\"success\",\"data\":null,\"success\":true,\"failed\":false}",
JsonObject.class);
doNothing().when(executorService).controlWorkflowInstance(any(User.class), eq(processInstanceId),
doNothing().when(executorService).controlWorkflowInstance(any(User.class), eq(workflowInstanceId),
eq(ExecuteType.NONE));
// When

View File

@ -37,9 +37,6 @@ import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MvcResult;
/**
* process task relation controller test
*/
public class WorkflowTaskRelationControllerTest extends AbstractControllerTest {
@MockBean
@ -55,7 +52,7 @@ public class WorkflowTaskRelationControllerTest extends AbstractControllerTest {
.thenReturn(mockResult);
MvcResult mvcResult = mockMvc
.perform(get("/projects/{projectCode}/process-task-relation/{taskCode}/downstream", "1113", "123")
.perform(get("/projects/{projectCode}/workflow-task-relation/{taskCode}/downstream", "1113", "123")
.header(SESSION_ID, sessionId))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON))
@ -75,7 +72,7 @@ public class WorkflowTaskRelationControllerTest extends AbstractControllerTest {
.thenReturn(mockResult);
MvcResult mvcResult = mockMvc
.perform(get("/projects/{projectCode}/process-task-relation/{taskCode}/upstream", "1113", "123")
.perform(get("/projects/{projectCode}/workflow-task-relation/{taskCode}/upstream", "1113", "123")
.header(SESSION_ID, sessionId))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON))

Some files were not shown because too many files have changed in this diff Show More