[DSIP-78][Data Quality] Remove data quality module (#16794)
1
.github/CODEOWNERS
vendored
@ -26,7 +26,6 @@
|
||||
/dolphinscheduler-common/ @SbloodyS
|
||||
/dolphinscheduler-dao/ @SbloodyS @ruanwenjun
|
||||
/dolphinscheduler-dao-plugin/ @SbloodyS @ruanwenjun
|
||||
/dolphinscheduler-data-quality/ @SbloodyS
|
||||
/dolphinscheduler-datasource-plugin/ @SbloodyS
|
||||
/dolphinscheduler-dist/ @SbloodyS
|
||||
/dolphinscheduler-e2e/ @SbloodyS
|
||||
|
1
.github/actions/labeler/labeler.yml
vendored
@ -23,7 +23,6 @@ backend:
|
||||
- 'dolphinscheduler-common/**/*'
|
||||
- 'dolphinscheduler-dao/**/*'
|
||||
- 'dolphinscheduler-dao-plugin/**/*'
|
||||
- 'dolphinscheduler-data-quality/**/*'
|
||||
- 'dolphinscheduler-datasource-plugin/**/*'
|
||||
- 'dolphinscheduler-dist/**/*'
|
||||
- 'dolphinscheduler-extract/**/*'
|
||||
|
@ -77,7 +77,6 @@ dolphinscheduler-storage-s3
|
||||
dolphinscheduler-task-aliyunserverlessspark
|
||||
dolphinscheduler-task-chunjun
|
||||
dolphinscheduler-task-datafactory
|
||||
dolphinscheduler-task-dataquality
|
||||
dolphinscheduler-task-datasync
|
||||
dolphinscheduler-task-datax
|
||||
dolphinscheduler-task-dinky
|
||||
|
@ -131,7 +131,6 @@ Please refer to the [Quick Start in Kubernetes](../../../docs/docs/en/guide/inst
|
||||
| conf.common."aws.s3.endpoint" | string | `"http://minio:9000"` | You need to set this parameter when private cloud s3. If S3 uses public cloud, you only need to set resource.aws.region or set to the endpoint of a public cloud such as S3.cn-north-1.amazonaws.com.cn |
|
||||
| conf.common."aws.s3.region" | string | `"ca-central-1"` | The AWS Region to use. if resource.storage.type=S3, This configuration is required |
|
||||
| conf.common."conda.path" | string | `"/opt/anaconda3/etc/profile.d/conda.sh"` | set path of conda.sh |
|
||||
| conf.common."data-quality.jar.dir" | string | `nil` | data quality option |
|
||||
| conf.common."data.basedir.path" | string | `"/tmp/dolphinscheduler"` | user data local directory path, please make sure the directory exists and have read write permissions |
|
||||
| conf.common."datasource.encryption.enable" | bool | `false` | datasource encryption enable |
|
||||
| conf.common."datasource.encryption.salt" | string | `"!@#$%^&*"` | datasource encryption salt |
|
||||
|
@ -336,9 +336,6 @@ conf:
|
||||
# -- datasource encryption salt
|
||||
datasource.encryption.salt: '!@#$%^&*'
|
||||
|
||||
# -- data quality option
|
||||
data-quality.jar.dir:
|
||||
|
||||
# -- Whether hive SQL is executed in the same session
|
||||
support.hive.oneSession: false
|
||||
|
||||
@ -987,7 +984,6 @@ api:
|
||||
# cloud: []
|
||||
# logic: []
|
||||
# dataIntegration: []
|
||||
# dataQuality: []
|
||||
# machineLearning: []
|
||||
# other: []
|
||||
|
||||
|
@ -457,10 +457,6 @@ export default {
|
||||
}
|
||||
],
|
||||
},
|
||||
{
|
||||
title: 'Data Quality',
|
||||
link: '/en-us/docs/dev/user_doc/guide/data-quality.html',
|
||||
},
|
||||
{
|
||||
title: 'Remote Logging',
|
||||
link: '/en-us/docs/dev/user_doc/guide/remote-logging.html',
|
||||
@ -1160,10 +1156,6 @@ export default {
|
||||
}
|
||||
],
|
||||
},
|
||||
{
|
||||
title: '数据质量',
|
||||
link: '/zh-cn/docs/dev/user_doc/guide/data-quality.html',
|
||||
},
|
||||
{
|
||||
title: '远程日志存储',
|
||||
link: '/zh-cn/docs/dev/user_doc/guide/remote-logging.html',
|
||||
|
@ -224,7 +224,6 @@ The default configuration is as follows:
|
||||
| yarn.job.history.status.address | http://ds1:19888/ws/v1/history/mapreduce/jobs/%s | job history status url of yarn |
|
||||
| datasource.encryption.enable | false | whether to enable datasource encryption |
|
||||
| datasource.encryption.salt | !@#$%^&* | the salt of the datasource encryption |
|
||||
| data-quality.jar.dir | | the jar of data quality |
|
||||
| support.hive.oneSession | false | specify whether hive SQL is executed in the same session |
|
||||
| sudo.enable | true | whether to enable sudo |
|
||||
| alert.rpc.port | 50052 | the RPC port of Alert Server |
|
||||
|
@ -1,313 +0,0 @@
|
||||
# Data Quality
|
||||
|
||||
## Introduction
|
||||
|
||||
The data quality task is used to check the data accuracy during the integration and processing of data. Data quality tasks in this release include single-table checking, single-table custom SQL checking, multi-table accuracy, and two-table value comparisons. The running environment of the data quality task is Spark 2.4.0, and other versions have not been verified, and users can verify by themselves.
|
||||
|
||||
The execution logic of the data quality task is as follows:
|
||||
|
||||
- The user defines the task in the interface, and the user input value is stored in `TaskParam`.
|
||||
- When running a task, `Master` will parse `TaskParam`, encapsulate the parameters required by `DataQualityTask` and send it to `Worker`.
|
||||
- Worker runs the data quality task. After the data quality task finishes running, it writes the statistical results to the specified storage engine.
|
||||
- The current data quality task result is stored in the `t_ds_dq_execute_result` table of `dolphinscheduler`
|
||||
`Worker` sends the task result to `Master`, after `Master` receives `TaskResponse`, it will judge whether the task type is `DataQualityTask`, if so, it will read the corresponding result from `t_ds_dq_execute_result` according to `taskInstanceId`, and then the result is judged according to the check mode, operator and threshold configured by the user.
|
||||
- If the result is a failure, the corresponding operation, alarm or interruption will be performed according to the failure policy configured by the user.
|
||||
- If you package `data-quality` separately, remember to modify the package name so that it is consistent with the value of the `data-quality.jar.dir` property configured in `common.properties`
|
||||
- If the old version is upgraded and used, you need to execute the `sql` update script to initialize the database before running.
|
||||
- `dolphinscheduler-data-quality-dev-SNAPSHOT.jar` was built with no dependencies. If a `JDBC` driver is required, you can set the `--jars` parameter in the `node settings` `Option Parameters`, e.g. `--jars /lib/jars/mysql-connector-java-8.0.16.jar`.
|
||||
- Currently only `MySQL`, `PostgreSQL` and `HIVE` data sources have been tested, other data sources have not been tested yet.
|
||||
- `Spark` needs to be configured to read `Hive` metadata, `Spark` does not use `jdbc` to read `Hive`.
|
||||
|
||||
## Detailed Inspection Logic
|
||||
|
||||
| **Parameter** | **Description** |
|
||||
|---------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| CheckMethod | [CheckFormula][Operator][Threshold], if the result is true, it indicates that the data does not meet expectations, and the failure strategy is executed. |
|
||||
| CheckFormula | <ul><li>Expected-Actual</li><li>Actual-Expected</li><li>(Actual/Expected)x100%</li><li>(Expected-Actual)/Expected x100%</li></ul> |
|
||||
| Operator | =, >, >=, <, <=, != |
|
||||
| ExpectedValue | <ul><li>FixValue</li><li>DailyAvg</li><li>WeeklyAvg</li><li>MonthlyAvg</li><li>Last7DayAvg</li><li>Last30DayAvg</li><li>SrcTableTotalRows</li><li>TargetTableTotalRows</li></ul> |
|
||||
| Example | <ul><li>CheckFormula:Actual-Expected</li><li>Operator:></li><li>Threshold:0</li><li>ExpectedValue:FixValue=9</li></ul> |
|
||||
|
||||
In the example, assuming that the actual value is 10, the operator is >, and the expected value is 9, then the result 10 - 9 > 0 is true, which means that the number of rows where the column is empty has exceeded the threshold, and the task is judged to fail.
|
||||
|
||||
# Task Operation Guide
|
||||
|
||||
## Null Value Check for Single Table Check
|
||||
|
||||
### Inspection Introduction
|
||||
|
||||
The goal of the null value check is to check the number of empty rows in the specified column. The number of empty rows can be compared with the total number of rows or a specified threshold. If it is greater than a certain threshold, it will be judged as failure.
|
||||
|
||||
- The SQL statement that calculates the null of the specified column is as follows:
|
||||
|
||||
```sql
|
||||
SELECT COUNT(*) AS miss FROM ${src_table} WHERE (${src_field} is null or ${src_field} = '') AND (${src_filter})
|
||||
```
|
||||
- The SQL to calculate the total number of rows in the table is as follows:
|
||||
|
||||
```sql
|
||||
SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})
|
||||
```
|
||||
|
||||
### Interface Operation Guide
|
||||
|
||||
![dataquality_null_check](../../../img/tasks/demo/null_check.png)
|
||||
|
||||
| **Parameter** | **Description** |
|
||||
|------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Source data type | Select MySQL, PostgreSQL, etc. |
|
||||
| Source data source | The corresponding data source under the source data type. |
|
||||
| Source data table | Drop-down to select the table where the validation data is located. |
|
||||
| Src filter conditions | Such as the title, it will also be used when counting the total number of rows in the table, optional. |
|
||||
| Src table check column | Drop-down to select the check column name. |
|
||||
| Check method | <ul><li>[Expected-Actual]</li><li>[Actual-Expected]</li><li>[Actual/Expected]x100%</li><li>[(Expected-Actual)/Expected]x100%</li></ul> |
|
||||
| Check operators | =, >, >=, <, <=, ! = |
|
||||
| Threshold | The value used in the formula for comparison. |
|
||||
| Failure strategy | <ul><li>Alert: The data quality task failed, the DolphinScheduler task result is successful, and an alert is sent.</li><li>Blocking: The data quality task fails, the DolphinScheduler task result is failed, and an alarm is sent.</li></ul> |
|
||||
| Expected value type | Select the desired type from the drop-down menu. |
|
||||
|
||||
## Timeliness Check of Single Table Check
|
||||
|
||||
### Inspection Introduction
|
||||
|
||||
The timeliness check is used to check whether the data is processed within the expected time. The start time and end time can be specified to define the time range. If the amount of data within the time range does not reach the set threshold, the check task will be judged as fail.
|
||||
|
||||
### Interface Operation Guide
|
||||
|
||||
![dataquality_timeliness_check](../../../img/tasks/demo/timeliness_check.png)
|
||||
|
||||
| **Parameter** | **Description** |
|
||||
|------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Source data type | Select MySQL, PostgreSQL, etc. |
|
||||
| Source data source | The corresponding data source under the source data type. |
|
||||
| Source data table | Drop-down to select the table where the validation data is located. |
|
||||
| Src filter conditions | Such as the title, it will also be used when counting the total number of rows in the table, optional. |
|
||||
| Src table check column | Drop-down to select check column name. |
|
||||
| Start time | The start time of a time range. |
|
||||
| End time               | The end time of a time range.                                                                                                                                                                                                                 |
|
||||
| Time Format | Set the corresponding time format. |
|
||||
| Check method | <ul><li>[Expected-Actual]</li><li>[Actual-Expected]</li><li>[Actual/Expected]x100%</li><li>[(Expected-Actual)/Expected]x100%</li></ul> |
|
||||
| Check operators | =, >, >=, <, <=, ! = |
|
||||
| Threshold | The value used in the formula for comparison. |
|
||||
| Failure strategy | <ul><li>Alert: The data quality task failed, the DolphinScheduler task result is successful, and an alert is sent.</li><li>Blocking: The data quality task fails, the DolphinScheduler task result is failed, and an alarm is sent.</li></ul> |
|
||||
| Expected value type | Select the desired type from the drop-down menu. |
|
||||
|
||||
## Field Length Check for Single Table Check
|
||||
|
||||
### Inspection Introduction
|
||||
|
||||
The goal of field length verification is to check whether the length of the selected field meets the expectations. If there is data that does not meet the requirements, and the number of rows exceeds the threshold, the task will be judged to fail.
|
||||
|
||||
### Interface Operation Guide
|
||||
|
||||
![dataquality_length_check](../../../img/tasks/demo/field_length_check.png)
|
||||
|
||||
| **Parameter** | **Description** |
|
||||
|------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Source data type | Select MySQL, PostgreSQL, etc. |
|
||||
| Source data source | The corresponding data source under the source data type. |
|
||||
| Source data table | Drop-down to select the table where the validation data is located. |
|
||||
| Src filter conditions | Such as the title, it will also be used when counting the total number of rows in the table, optional. |
|
||||
| Src table check column | Drop-down to select the check column name. |
|
||||
| Logical operators | =, >, >=, <, <=, ! = |
|
||||
| Field length limit | Like the title. |
|
||||
| Check method | <ul><li>[Expected-Actual]</li><li>[Actual-Expected]</li><li>[Actual/Expected]x100%</li><li>[(Expected-Actual)/Expected]x100%</li></ul> |
|
||||
| Check operators | =, >, >=, <, <=, ! = |
|
||||
| Threshold | The value used in the formula for comparison. |
|
||||
| Failure strategy | <ul><li>Alert: The data quality task failed, the DolphinScheduler task result is successful, and an alert is sent.</li><li>Blocking: The data quality task fails, the DolphinScheduler task result is failed, and an alarm is sent.</li></ul> |
|
||||
| Expected value type | Select the desired type from the drop-down menu. |
|
||||
|
||||
## Uniqueness Check for Single Table Check
|
||||
|
||||
### Inspection Introduction
|
||||
|
||||
The goal of the uniqueness check is to check whether the fields are duplicated. It is generally used to check whether the primary key is duplicated. If there are duplicates and the threshold is reached, the check task will be judged to be failed.
|
||||
|
||||
### Interface Operation Guide
|
||||
|
||||
![dataquality_uniqueness_check](../../../img/tasks/demo/uniqueness_check.png)
|
||||
|
||||
| **Parameter** | **Description** |
|
||||
|------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Source data type | Select MySQL, PostgreSQL, etc. |
|
||||
| Source data source | The corresponding data source under the source data type. |
|
||||
| Source data table | Drop-down to select the table where the validation data is located. |
|
||||
| Src filter conditions | Such as the title, it will also be used when counting the total number of rows in the table, optional. |
|
||||
| Src table check column | Drop-down to select the check column name. |
|
||||
| Check method | <ul><li>[Expected-Actual]</li><li>[Actual-Expected]</li><li>[Actual/Expected]x100%</li><li>[(Expected-Actual)/Expected]x100%</li></ul> |
|
||||
| Check operators | =, >, >=, <, <=, ! = |
|
||||
| Threshold | The value used in the formula for comparison. |
|
||||
| Failure strategy | <ul><li>Alert: The data quality task failed, the DolphinScheduler task result is successful, and an alert is sent.</li><li>Blocking: The data quality task fails, the DolphinScheduler task result is failed, and an alarm is sent.</li></ul> |
|
||||
| Expected value type | Select the desired type from the drop-down menu. |
|
||||
|
||||
## Regular Expression Check for Single Table Check
|
||||
|
||||
### Inspection Introduction
|
||||
|
||||
The goal of regular expression verification is to check whether the format of the value of a field meets the requirements, such as time format, email format, ID card format, etc. If there is data that does not meet the format and exceeds the threshold, the task will be judged as failed.
|
||||
|
||||
### Interface Operation Guide
|
||||
|
||||
![dataquality_regex_check](../../../img/tasks/demo/regexp_check.png)
|
||||
|
||||
| **Parameter** | **Description** |
|
||||
|------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Source data type | Select MySQL, PostgreSQL, etc. |
|
||||
| Source data source | The corresponding data source under the source data type. |
|
||||
| Source data table | Drop-down to select the table where the validation data is located. |
|
||||
| Src filter conditions | Such as the title, it will also be used when counting the total number of rows in the table, optional. |
|
||||
| Src table check column | Drop-down to select check column name. |
|
||||
| Regular expression | As title. |
|
||||
| Check method | <ul><li>[Expected-Actual]</li><li>[Actual-Expected]</li><li>[Actual/Expected]x100%</li><li>[(Expected-Actual)/Expected]x100%</li></ul> |
|
||||
| Check operators | =, >, >=, <, <=, ! = |
|
||||
| Threshold | The value used in the formula for comparison. |
|
||||
| Failure strategy | <ul><li>Alert: The data quality task failed, the DolphinScheduler task result is successful, and an alert is sent.</li><li>Blocking: The data quality task fails, the DolphinScheduler task result is failed, and an alarm is sent.</li></ul> |
|
||||
| Expected value type | Select the desired type from the drop-down menu. |
|
||||
|
||||
## Enumeration Value Validation for Single Table Check
|
||||
|
||||
### Inspection Introduction
|
||||
|
||||
The goal of enumeration value verification is to check whether the value of a field is within the range of the enumeration value. If there is data that is not in the range of the enumeration value and exceeds the threshold, the task will be judged to fail.
|
||||
|
||||
### Interface Operation Guide
|
||||
|
||||
![dataquality_enum_check](../../../img/tasks/demo/enumeration_check.png)
|
||||
|
||||
| **Parameter** | **Description** |
|
||||
|-----------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Source data type | Select MySQL, PostgreSQL, etc. |
|
||||
| Source data source | The corresponding data source under the source data type. |
|
||||
| Source data table | Drop-down to select the table where the validation data is located. |
|
||||
| Src table filter conditions | Such as title, also used when counting the total number of rows in the table, optional. |
|
||||
| Src table check column | Drop-down to select the check column name. |
|
||||
| List of enumeration values | Separated by commas. |
|
||||
| Check method | <ul><li>[Expected-Actual]</li><li>[Actual-Expected]</li><li>[Actual/Expected]x100%</li><li>[(Expected-Actual)/Expected]x100%</li></ul> |
|
||||
| Check operators | =, >, >=, <, <=, ! = |
|
||||
| Threshold | The value used in the formula for comparison. |
|
||||
| Failure strategy | <ul><li>Alert: The data quality task failed, the DolphinScheduler task result is successful, and an alert is sent.</li><li>Blocking: The data quality task fails, the DolphinScheduler task result is failed, and an alarm is sent.</li></ul> |
|
||||
| Expected value type | Select the desired type from the drop-down menu. |
|
||||
|
||||
## Table Row Number Verification for Single Table Check
|
||||
|
||||
### Inspection Introduction
|
||||
|
||||
The goal of table row number verification is to check whether the number of rows in the table reaches the expected value. If the number of rows does not meet the standard, the task will be judged as failed.
|
||||
|
||||
### Interface Operation Guide
|
||||
|
||||
![dataquality_count_check](../../../img/tasks/demo/table_count_check.png)
|
||||
|
||||
| **Parameter** | **Description** |
|
||||
|------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Source data type | Select MySQL, PostgreSQL, etc. |
|
||||
| Source data source | The corresponding data source under the source data type. |
|
||||
| Source data table | Drop-down to select the table where the validation data is located. |
|
||||
| Src filter conditions | Such as the title, it will also be used when counting the total number of rows in the table, optional. |
|
||||
| Src table check column | Drop-down to select the check column name. |
|
||||
| Check method | <ul><li>[Expected-Actual]</li><li>[Actual-Expected]</li><li>[Actual/Expected]x100%</li><li>[(Expected-Actual)/Expected]x100%</li></ul> |
|
||||
| Check operators | =, >, >=, <, <=, ! = |
|
||||
| Threshold | The value used in the formula for comparison. |
|
||||
| Failure strategy | <ul><li>Alert: The data quality task failed, the DolphinScheduler task result is successful, and an alert is sent.</li><li>Blocking: The data quality task fails, the DolphinScheduler task result is failed, and an alarm is sent.</li></ul> |
|
||||
| Expected value type | Select the desired type from the drop-down menu. |
|
||||
|
||||
## Custom SQL Check for Single Table Check
|
||||
|
||||
### Interface Operation Guide
|
||||
|
||||
![dataquality_custom_sql_check](../../../img/tasks/demo/custom_sql_check.png)
|
||||
|
||||
| **Parameter** | **Description** |
|
||||
|------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Source data type | Select MySQL, PostgreSQL, etc. |
|
||||
| Source data source | The corresponding data source under the source data type. |
|
||||
| Source data table | Drop-down to select the table where the data to be verified is located. |
|
||||
| Actual value name | Alias in SQL for statistical value calculation, such as max_num. |
|
||||
| Actual value calculation SQL | SQL for outputting actual values. Note:<ul><li>The SQL must be statistical SQL, such as counting the number of rows, calculating the maximum value, minimum value, etc.</li><li>Select max(a) as max_num from ${src_table}, the table name must be filled like this.</li></ul> |
|
||||
| Src filter conditions | Such as the title, it will also be used when counting the total number of rows in the table, optional. |
|
||||
| Check method | <ul><li>[Expected-Actual]</li><li>[Actual-Expected]</li><li>[Actual/Expected]x100%</li><li>[(Expected-Actual)/Expected]x100%</li></ul> |
|
||||
| Check operators | =, >, >=, <, <=, ! = |
|
||||
| Threshold | The value used in the formula for comparison. |
|
||||
| Failure strategy | <ul><li>Alert: The data quality task failed, the DolphinScheduler task result is successful, and an alert is sent.</li><li>Blocking: The data quality task fails, the DolphinScheduler task result is failed, and an alarm is sent.</li></ul> |
|
||||
| Expected value type | Select the desired type from the drop-down menu. |
|
||||
|
||||
## Accuracy Check of Multi-table
|
||||
|
||||
### Inspection Introduction
|
||||
|
||||
Accuracy checks are performed by comparing the accuracy differences of data records for selected fields between two tables, examples are as follows
|
||||
- table test1
|
||||
|
||||
| c1 | c2 |
|
||||
|:--:|:--:|
|
||||
| a | 1 |
|
||||
| b | 2 |
|
||||
|
||||
- table test2
|
||||
|
||||
| c21 | c22 |
|
||||
|:---:|:---:|
|
||||
| a | 1 |
|
||||
| b | 3 |
|
||||
|
||||
If you compare the data in c1 and c21, the tables test1 and test2 are exactly the same. If you compare c2 and c22, the data in table test1 and table test2 are inconsistent.
|
||||
|
||||
### Interface Operation Guide
|
||||
|
||||
![dataquality_multi_table_accuracy_check](../../../img/tasks/demo/multi_table_accuracy_check.png)
|
||||
|
||||
| **Parameter** | **Description** |
|
||||
|--------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Source data type | Select MySQL, PostgreSQL, etc. |
|
||||
| Source data source | The corresponding data source under the source data type. |
|
||||
| Source data table | Drop-down to select the table where the data to be verified is located. |
|
||||
| Src filter conditions | Such as the title, it will also be used when counting the total number of rows in the table, optional. |
|
||||
| Target data type | Choose MySQL, PostgreSQL, etc. |
|
||||
| Target data source | The corresponding data source under the source data type. |
|
||||
| Target data table | Drop-down to select the table where the data to be verified is located. |
|
||||
| Target filter conditions | Such as the title, it will also be used when counting the total number of rows in the table, optional. |
|
||||
| Check column | Fill in the source data column, operator and target data column respectively. |
|
||||
| Verification method | Select the desired verification method. |
|
||||
| Operators | =, >, >=, <, <=, ! = |
|
||||
| Failure strategy         | <ul><li>Alert: The data quality task failed, the DolphinScheduler task result is successful, and an alert is sent.</li><li>Blocking: The data quality task fails, the DolphinScheduler task result is failed, and an alarm is sent.</li></ul> |
|
||||
| Expected value type | Select the desired type in the drop-down menu, only `SrcTableTotalRow`, `TargetTableTotalRow` and fixed value are suitable for selection here. |
|
||||
|
||||
## Comparison of the values checked by the two tables
|
||||
|
||||
### Inspection Introduction
|
||||
|
||||
Two-table value comparison allows users to customize different SQL statistics for two tables and compare the corresponding values. For example, for source table A the total amount of a certain column is calculated as the value sum1, and for target table B the total amount of a certain column is calculated as the value sum2; sum1 and sum2 are then compared to determine the check result.
|
||||
|
||||
### Interface Operation Guide
|
||||
|
||||
![dataquality_multi_table_comparison_check](../../../img/tasks/demo/multi_table_comparison_check.png)
|
||||
|
||||
| **Parameter** | **Description** |
|
||||
|--------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Source data type | Select MySQL, PostgreSQL, etc. |
|
||||
| Source data source | The corresponding data source under the source data type. |
|
||||
| Source data table | The table where the data is to be verified. |
|
||||
| Actual value name | Calculate the alias in SQL for the actual value, such as max_age1. |
|
||||
| Actual value calculation SQL | SQL for outputting actual values. Note: <ul><li>The SQL must be statistical SQL, such as counting the number of rows, calculating the maximum value, minimum value, etc.</li><li>Select max(age) as max_age1 from ${src_table} The table name must be filled like this.</li></ul> |
|
||||
| Target data type | Choose MySQL, PostgreSQL, etc. |
|
||||
| Target data source | The corresponding data source under the source data type. |
|
||||
| Target data table | The table where the data is to be verified. |
|
||||
| Expected value name | Calculate the alias in SQL for the expected value, such as max_age2. |
|
||||
| Expected value calculation SQL | SQL for outputting expected value. Note: <ul><li>The SQL must be statistical SQL, such as counting the number of rows, calculating the maximum value, minimum value, etc.</li><li>Select max(age) as max_age2 from ${target_table} The table name must be filled like this.</li></ul> |
|
||||
| Verification method | Select the desired verification method. |
|
||||
| Operators | =, >, >=, <, <=, ! = |
|
||||
| Failure strategy | <ul><li>Alert: The data quality task failed, the DolphinScheduler task result is successful, and an alert is sent.</li><li>Blocking: The data quality task fails, the DolphinScheduler task result is failed, and an alarm is sent.</li></ul> |
|
||||
|
||||
## Task result view
|
||||
|
||||
![dataquality_result](../../../img/tasks/demo/result.png)
|
||||
|
||||
## Rule View
|
||||
|
||||
### List of rules
|
||||
|
||||
![dataquality_rule_list](../../../img/tasks/demo/rule_list.png)
|
||||
|
||||
### Rules Details
|
||||
|
||||
![dataquality_rule_detail](../../../img/tasks/demo/rule_detail.png)
|
@ -31,5 +31,6 @@ This document records the incompatible updates between each version. You need to
|
||||
* Remove the `udf-manage` function from the `resource center` ([#16209](https://github.com/apache/dolphinscheduler/pull/16209))
|
||||
* Remove the `Pigeon` from the `Task Plugin` ([#16218](https://github.com/apache/dolphinscheduler/pull/16218))
|
||||
* Uniformly name `process` in code as `workflow` ([#16515](https://github.com/apache/dolphinscheduler/pull/16515))
|
||||
* Deprecated upgrade code of 1.x and 2.x in 3.3.0-release ([#16543](https://github.com/apache/dolphinscheduler/pull/16543))
|
||||
* Deprecated upgrade code of 1.x and 2.x ([#16543](https://github.com/apache/dolphinscheduler/pull/16543))
|
||||
* Remove the `Data Quality` module ([#16794](https://github.com/apache/dolphinscheduler/pull/16794))
|
||||
|
||||
|
@ -224,7 +224,6 @@ common.properties配置文件目前主要是配置hadoop/s3/yarn/applicationId
|
||||
| yarn.job.history.status.address | http://ds1:19888/ws/v1/history/mapreduce/jobs/%s | yarn的作业历史状态URL |
|
||||
| datasource.encryption.enable | false | 是否启用datasource 加密 |
|
||||
| datasource.encryption.salt | !@#$%^&* | datasource加密使用的salt |
|
||||
| data-quality.jar.dir | | 配置数据质量使用的jar包 |
|
||||
| support.hive.oneSession | false | 设置hive SQL是否在同一个session中执行 |
|
||||
| sudo.enable | true | 是否开启sudo |
|
||||
| alert.rpc.port | 50052 | Alert Server的RPC端口 |
|
||||
|
@ -1,357 +0,0 @@
|
||||
# 概述
|
||||
|
||||
## 任务类型介绍
|
||||
|
||||
数据质量任务是用于检查数据在集成、处理过程中的数据准确性。本版本的数据质量任务包括单表检查、单表自定义SQL检查、多表准确性以及两表值比对。数据质量任务的运行环境为Spark2.4.0,其他版本尚未进行过验证,用户可自行验证。
|
||||
|
||||
- 数据质量任务的执行逻辑如下:
|
||||
|
||||
> 用户在界面定义任务,用户输入值保存在`TaskParam`中
|
||||
> 运行任务时,`Master`会解析`TaskParam`,封装`DataQualityTask`所需要的参数下发至`Worker`。
|
||||
> `Worker`运行数据质量任务,数据质量任务在运行结束之后将统计结果写入到指定的存储引擎中,当前数据质量任务结果存储在`dolphinscheduler`的`t_ds_dq_execute_result`表中
|
||||
> `Worker`发送任务结果给`Master`,`Master`收到`TaskResponse`之后会判断任务类型是否为`DataQualityTask`,如果是的话会根据`taskInstanceId`从`t_ds_dq_execute_result`中读取相应的结果,然后根据用户配置好的检查方式,操作符和阈值进行结果判断,如果结果为失败的话,会根据用户配置好的的失败策略进行相应的操作,告警或者中断
|
||||
>
|
||||
## 注意事项
|
||||
|
||||
- 如果单独打包`data-quality`的话,记得修改包路径和`data-quality.jar.dir`一致,配置内容在 `common.properties` 中的 `data-quality.jar.dir`
|
||||
- 如果是老版本升级使用,运行之前需要先执行`SQL`更新脚本进行数据库初始化。
|
||||
- 当前 `dolphinscheduler-data-quality-dev-SNAPSHOT.jar` 是瘦包,不包含任何 `JDBC` 驱动。
|
||||
如果有 `JDBC` 驱动需要,可以在`节点设置` `选项参数`处设置 `--jars` 参数,
|
||||
如:`--jars /lib/jars/mysql-connector-java-8.0.16.jar`。
|
||||
- 当前只测试了`MySQL`、`PostgreSQL`和`HIVE`数据源,其他数据源暂时未测试过。
|
||||
- `Spark`需要配置好读取`Hive`元数据,`Spark`不是采用`JDBC`的方式读取`Hive`。
|
||||
|
||||
## 检查逻辑详解
|
||||
|
||||
- 校验公式:[校验方式][操作符][阈值],如果结果为真,则表明数据不符合期望,执行失败策略
|
||||
- 校验方式:
|
||||
- [Expected-Actual][期望值-实际值]
|
||||
- [Actual-Expected][实际值-期望值]
|
||||
- [Actual/Expected][实际值/期望值]x100%
|
||||
- [(Expected-Actual)/Expected][(期望值-实际值)/期望值]x100%
|
||||
- 操作符:=、>、>=、<、<=、!=
|
||||
- 期望值类型
|
||||
- 固定值
|
||||
- 日均值
|
||||
- 周均值
|
||||
- 月均值
|
||||
- 最近7天均值
|
||||
- 最近30天均值
|
||||
- 源表总行数
|
||||
- 目标表总行数
|
||||
- 例子
|
||||
- 校验方式为:[Actual-Expected][实际值-期望值]
|
||||
- [操作符]:>
|
||||
- [阈值]:0
|
||||
- 期望值类型:固定值=9。
|
||||
|
||||
假设实际值为10,操作符为 >, 期望值为9,那么结果 10 -9 > 0 为真,那就意味列为空的行数据已经超过阈值,任务被判定为失败
|
||||
|
||||
# 任务操作指南
|
||||
|
||||
## 单表检查之空值检查
|
||||
|
||||
### 检查介绍
|
||||
|
||||
空值检查的目标是检查出指定列为空的行数,可将为空的行数与总行数或者指定阈值进行比较,如果大于某个阈值则判定为失败
|
||||
- 计算指定列为空的SQL语句如下:
|
||||
|
||||
```sql
|
||||
SELECT COUNT(*) AS miss FROM ${src_table} WHERE (${src_field} is null or ${src_field} = '') AND (${src_filter})
|
||||
```
|
||||
|
||||
- 计算表总行数的SQL如下:
|
||||
|
||||
```sql
|
||||
SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})
|
||||
```
|
||||
|
||||
### 界面操作指南
|
||||
|
||||
![dataquality_null_check](../../../img/tasks/demo/null_check.png)
|
||||
- 源数据类型:选择MySQL、PostgreSQL等
|
||||
- 源数据源:源数据类型下对应的数据源
|
||||
- 源数据表:下拉选择验证数据所在表
|
||||
- 源过滤条件:如标题,统计表总行数的时候也会用到,选填
|
||||
- 源表检查列:下拉选择检查列名
|
||||
- 校验方式:
|
||||
- [Expected-Actual][期望值-实际值]
|
||||
- [Actual-Expected][实际值-期望值]
|
||||
- [Actual/Expected][实际值/期望值]x100%
|
||||
- [(Expected-Actual)/Expected][(期望值-实际值)/期望值]x100%
|
||||
- 校验操作符:=,>、>=、<、<=、!=
|
||||
- 阈值:公式中用于比较的值
|
||||
- 失败策略
|
||||
- 告警:数据质量任务失败了,DolphinScheduler任务结果为成功,发送告警
|
||||
- 阻断:数据质量任务失败了,DolphinScheduler任务结果为失败,发送告警
|
||||
- 期望值类型:在下拉菜单中选择所要的类型
|
||||
|
||||
## 单表检查之及时性检查
|
||||
|
||||
### 检查介绍
|
||||
|
||||
及时性检查用于检查数据是否在预期时间内处理完成,可指定开始时间、结束时间来界定时间范围,如果在该时间范围内的数据量没有达到设定的阈值,那么会判断该检查任务为失败
|
||||
|
||||
### 界面操作指南
|
||||
|
||||
![dataquality_timeliness_check](../../../img/tasks/demo/timeliness_check.png)
|
||||
- 源数据类型:选择MySQL、PostgreSQL等
|
||||
- 源数据源:源数据类型下对应的数据源
|
||||
- 源数据表:下拉选择验证数据所在表
|
||||
- 源过滤条件:如标题,统计表总行数的时候也会用到,选填
|
||||
- 源表检查列:下拉选择检查列名
|
||||
- 起始时间:某个时间范围的开始时间
|
||||
- 结束时间:某个时间范围的结束时间
|
||||
- 时间格式:设置对应的时间格式
|
||||
- 校验方式:
|
||||
- [Expected-Actual][期望值-实际值]
|
||||
- [Actual-Expected][实际值-期望值]
|
||||
- [Actual/Expected][实际值/期望值]x100%
|
||||
- [(Expected-Actual)/Expected][(期望值-实际值)/期望值]x100%
|
||||
- 校验操作符:=,>、>=、<、<=、!=
|
||||
- 阈值:公式中用于比较的值
|
||||
- 失败策略
|
||||
- 告警:数据质量任务失败了,DolphinScheduler任务结果为成功,发送告警
|
||||
- 阻断:数据质量任务失败了,DolphinScheduler任务结果为失败,发送告警
|
||||
- 期望值类型:在下拉菜单中选择所要的类型
|
||||
|
||||
## 单表检查之字段长度校验
|
||||
|
||||
### 检查介绍
|
||||
|
||||
字段长度校验的目标是检查所选字段的长度是否满足预期,如果有存在不满足要求的数据,并且行数超过阈值则会判断任务为失败
|
||||
|
||||
### 界面操作指南
|
||||
|
||||
![dataquality_length_check](../../../img/tasks/demo/field_length_check.png)
|
||||
- 源数据类型:选择MySQL、PostgreSQL等
|
||||
- 源数据源:源数据类型下对应的数据源
|
||||
- 源数据表:下拉选择验证数据所在表
|
||||
- 源过滤条件:如标题,统计表总行数的时候也会用到,选填
|
||||
- 源表检查列:下拉选择检查列名
|
||||
- 逻辑操作符:=,>、>=、<、<=、!=
|
||||
- 字段长度限制:如标题
|
||||
- 校验方式:
|
||||
- [Expected-Actual][期望值-实际值]
|
||||
- [Actual-Expected][实际值-期望值]
|
||||
- [Actual/Expected][实际值/期望值]x100%
|
||||
- [(Expected-Actual)/Expected][(期望值-实际值)/期望值]x100%
|
||||
- 校验操作符:=,>、>=、<、<=、!=
|
||||
- 阈值:公式中用于比较的值
|
||||
- 失败策略
|
||||
- 告警:数据质量任务失败了,DolphinScheduler任务结果为成功,发送告警
|
||||
- 阻断:数据质量任务失败了,DolphinScheduler任务结果为失败,发送告警
|
||||
- 期望值类型:在下拉菜单中选择所要的类型
|
||||
|
||||
## 单表检查之唯一性校验
|
||||
|
||||
### 检查介绍
|
||||
|
||||
唯一性校验的目标是检查字段是否存在重复的情况,一般用于检验primary key是否有重复,如果存在重复且达到阈值,则会判断检查任务为失败
|
||||
|
||||
### 界面操作指南
|
||||
|
||||
![dataquality_uniqueness_check](../../../img/tasks/demo/uniqueness_check.png)
|
||||
- 源数据类型:选择MySQL、PostgreSQL等
|
||||
- 源数据源:源数据类型下对应的数据源
|
||||
- 源数据表:下拉选择验证数据所在表
|
||||
- 源过滤条件:如标题,统计表总行数的时候也会用到,选填
|
||||
- 源表检查列:下拉选择检查列名
|
||||
- 校验方式:
|
||||
- [Expected-Actual][期望值-实际值]
|
||||
- [Actual-Expected][实际值-期望值]
|
||||
- [Actual/Expected][实际值/期望值]x100%
|
||||
- [(Expected-Actual)/Expected][(期望值-实际值)/期望值]x100%
|
||||
- 校验操作符:=,>、>=、<、<=、!=
|
||||
- 阈值:公式中用于比较的值
|
||||
- 失败策略
|
||||
- 告警:数据质量任务失败了,DolphinScheduler任务结果为成功,发送告警
|
||||
- 阻断:数据质量任务失败了,DolphinScheduler任务结果为失败,发送告警
|
||||
- 期望值类型:在下拉菜单中选择所要的类型
|
||||
|
||||
## 单表检查之正则表达式校验
|
||||
|
||||
### 检查介绍
|
||||
|
||||
正则表达式校验的目标是检查某字段的值的格式是否符合要求,例如时间格式、邮箱格式、身份证格式等等,如果存在不符合格式的数据并超过阈值,则会判断任务为失败
|
||||
|
||||
### 界面操作指南
|
||||
|
||||
![dataquality_regex_check](../../../img/tasks/demo/regexp_check.png)
|
||||
- 源数据类型:选择MySQL、PostgreSQL等
|
||||
- 源数据源:源数据类型下对应的数据源
|
||||
- 源数据表:下拉选择验证数据所在表
|
||||
- 源过滤条件:如标题,统计表总行数的时候也会用到,选填
|
||||
- 源表检查列:下拉选择检查列名
|
||||
- 正则表达式:如标题
|
||||
- 校验方式:
|
||||
- [Expected-Actual][期望值-实际值]
|
||||
- [Actual-Expected][实际值-期望值]
|
||||
- [Actual/Expected][实际值/期望值]x100%
|
||||
- [(Expected-Actual)/Expected][(期望值-实际值)/期望值]x100%
|
||||
- 校验操作符:=,>、>=、<、<=、!=
|
||||
- 阈值:公式中用于比较的值
|
||||
- 失败策略
|
||||
- 告警:数据质量任务失败了,DolphinScheduler任务结果为成功,发送告警
|
||||
- 阻断:数据质量任务失败了,DolphinScheduler任务结果为失败,发送告警
|
||||
- 期望值类型:在下拉菜单中选择所要的类型
|
||||
|
||||
## 单表检查之枚举值校验
|
||||
|
||||
### 检查介绍
|
||||
|
||||
枚举值校验的目标是检查某字段的值是否在枚举值的范围内,如果存在不在枚举值范围里的数据并超过阈值,则会判断任务为失败
|
||||
|
||||
### 界面操作指南
|
||||
|
||||
![dataquality_enum_check](../../../img/tasks/demo/enumeration_check.png)
|
||||
- 源数据类型:选择MySQL、PostgreSQL等
|
||||
- 源数据源:源数据类型下对应的数据源
|
||||
- 源数据表:下拉选择验证数据所在表
|
||||
- 源表过滤条件:如标题,统计表总行数的时候也会用到,选填
|
||||
- 源表检查列:下拉选择检查列名
|
||||
- 枚举值列表:用英文逗号,隔开
|
||||
- 校验方式:
|
||||
- [Expected-Actual][期望值-实际值]
|
||||
- [Actual-Expected][实际值-期望值]
|
||||
- [Actual/Expected][实际值/期望值]x100%
|
||||
- [(Expected-Actual)/Expected][(期望值-实际值)/期望值]x100%
|
||||
- 校验操作符:=,>、>=、<、<=、!=
|
||||
- 阈值:公式中用于比较的值
|
||||
- 失败策略
|
||||
- 告警:数据质量任务失败了,DolphinScheduler任务结果为成功,发送告警
|
||||
- 阻断:数据质量任务失败了,DolphinScheduler任务结果为失败,发送告警
|
||||
- 期望值类型:在下拉菜单中选择所要的类型
|
||||
|
||||
## 单表检查之表行数校验
|
||||
|
||||
### 检查介绍
|
||||
|
||||
表行数校验的目标是检查表的行数是否达到预期的值,如果行数未达标,则会判断任务为失败
|
||||
|
||||
### 界面操作指南
|
||||
|
||||
![dataquality_count_check](../../../img/tasks/demo/table_count_check.png)
|
||||
- 源数据类型:选择MySQL、PostgreSQL等
|
||||
- 源数据源:源数据类型下对应的数据源
|
||||
- 源数据表:下拉选择验证数据所在表
|
||||
- 源过滤条件:如标题,统计表总行数的时候也会用到,选填
|
||||
- 源表检查列:下拉选择检查列名
|
||||
- 校验方式:
|
||||
- [Expected-Actual][期望值-实际值]
|
||||
- [Actual-Expected][实际值-期望值]
|
||||
- [Actual/Expected][实际值/期望值]x100%
|
||||
- [(Expected-Actual)/Expected][(期望值-实际值)/期望值]x100%
|
||||
- 校验操作符:=,>、>=、<、<=、!=
|
||||
- 阈值:公式中用于比较的值
|
||||
- 失败策略
|
||||
- 告警:数据质量任务失败了,DolphinScheduler任务结果为成功,发送告警
|
||||
- 阻断:数据质量任务失败了,DolphinScheduler任务结果为失败,发送告警
|
||||
- 期望值类型:在下拉菜单中选择所要的类型
|
||||
|
||||
## 单表检查之自定义SQL检查
|
||||
|
||||
### 检查介绍
|
||||
|
||||
### 界面操作指南
|
||||
|
||||
![dataquality_custom_sql_check](../../../img/tasks/demo/custom_sql_check.png)
|
||||
- 源数据类型:选择MySQL、PostgreSQL等
|
||||
- 源数据源:源数据类型下对应的数据源
|
||||
- 源数据表:下拉选择要验证数据所在表
|
||||
- 实际值名:为统计值计算SQL中的别名,如max_num
|
||||
- 实际值计算SQL: 用于输出实际值的SQL、
|
||||
- 注意点:该SQL必须为统计SQL,例如统计行数,计算最大值、最小值等
|
||||
- select max(a) as max_num from ${src_table},表名必须这么填
|
||||
- 源过滤条件:如标题,统计表总行数的时候也会用到,选填
|
||||
- 校验方式:
|
||||
- 校验操作符:=,>、>=、<、<=、!=
|
||||
- 阈值:公式中用于比较的值
|
||||
- 失败策略
|
||||
- 告警:数据质量任务失败了,DolphinScheduler任务结果为成功,发送告警
|
||||
- 阻断:数据质量任务失败了,DolphinScheduler任务结果为失败,发送告警
|
||||
- 期望值类型:在下拉菜单中选择所要的类型
|
||||
|
||||
## 多表检查之准确性检查
|
||||
|
||||
### 检查介绍
|
||||
|
||||
准确性检查是通过比较两个表之间所选字段的数据记录的准确性差异,例子如下
|
||||
- 表test1
|
||||
|
||||
| c1 | c2 |
|
||||
|:--:|:--:|
|
||||
| a | 1 |
|
||||
| b | 2 |
|
||||
|
||||
- 表test2
|
||||
|
||||
| c21 | c22 |
|
||||
|:---:|:---:|
|
||||
| a | 1 |
|
||||
| b | 3 |
|
||||
|
||||
如果对比c1和c21中的数据,则表test1和test2完全一致。 如果对比c2和c22则表test1和表test2中的数据则存在不一致了。
|
||||
|
||||
### 界面操作指南
|
||||
|
||||
![dataquality_multi_table_accuracy_check](../../../img/tasks/demo/multi_table_accuracy_check.png)
|
||||
- 源数据类型:选择MySQL、PostgreSQL等
|
||||
- 源数据源:源数据类型下对应的数据源
|
||||
- 源数据表:下拉选择要验证数据所在表
|
||||
- 源过滤条件:如标题,统计表总行数的时候也会用到,选填
|
||||
- 目标数据类型:选择MySQL、PostgreSQL等
|
||||
- 目标数据源:源数据类型下对应的数据源
|
||||
- 目标数据表:下拉选择要验证数据所在表
|
||||
- 目标过滤条件:如标题,统计表总行数的时候也会用到,选填
|
||||
- 检查列:
|
||||
- 分别填写 源数据列,操作符,目标数据列
|
||||
- 校验方式:选择想要的校验方式
|
||||
- 操作符:=,>、>=、<、<=、!=
|
||||
- 失败策略
|
||||
- 告警:数据质量任务失败了,DolphinScheduler任务结果为成功,发送告警
|
||||
- 阻断:数据质量任务失败了,DolphinScheduler任务结果为失败,发送告警
|
||||
- 期望值类型:在下拉菜单中选择所要的类型,这里只适合选择SrcTableTotalRow、TargetTableTotalRow和固定值
|
||||
|
||||
## 两表检查之值比对
|
||||
|
||||
### 检查介绍
|
||||
|
||||
两表值比对允许用户对两张表自定义不同的SQL统计出相应的值进行比对,例如针对源表A统计出某一列的金额总值sum1,针对目标表统计出某一列的金额总值sum2,将sum1和sum2进行比较来判定检查结果
|
||||
|
||||
### 界面操作指南
|
||||
|
||||
![dataquality_multi_table_comparison_check](../../../img/tasks/demo/multi_table_comparison_check.png)
|
||||
- 源数据类型:选择MySQL、PostgreSQL等
|
||||
- 源数据源:源数据类型下对应的数据源
|
||||
- 源数据表:要验证数据所在表
|
||||
- 实际值名:为实际值计算SQL中的别名,如max_age1
|
||||
- 实际值计算SQL: 用于输出实际值的SQL、
|
||||
- 注意点:该SQL必须为统计SQL,例如统计行数,计算最大值、最小值等
|
||||
- select max(age) as max_age1 from ${src_table} 表名必须这么填
|
||||
- 目标数据类型:选择MySQL、PostgreSQL等
|
||||
- 目标数据源:源数据类型下对应的数据源
|
||||
- 目标数据表:要验证数据所在表
|
||||
- 期望值名:为期望值计算SQL中的别名,如max_age2
|
||||
- 期望值计算SQL: 用于输出期望值的SQL、
|
||||
- 注意点:该SQL必须为统计SQL,例如统计行数,计算最大值、最小值等
|
||||
- select max(age) as max_age2 from ${target_table} 表名必须这么填
|
||||
- 校验方式:选择想要的校验方式
|
||||
- 操作符:=,>、>=、<、<=、!=
|
||||
- 失败策略
|
||||
- 告警:数据质量任务失败了,DolphinScheduler任务结果为成功,发送告警
|
||||
- 阻断:数据质量任务失败了,DolphinScheduler任务结果为失败,发送告警
|
||||
|
||||
## 任务结果查看
|
||||
|
||||
![dataquality_result](../../../img/tasks/demo/result.png)
|
||||
|
||||
## 规则查看
|
||||
|
||||
### 规则列表
|
||||
|
||||
![dataquality_rule_list](../../../img/tasks/demo/rule_list.png)
|
||||
|
||||
### 规则详情
|
||||
|
||||
![dataquality_rule_detail](../../../img/tasks/demo/rule_detail.png)
|
@ -26,8 +26,9 @@
|
||||
|
||||
## 3.3.0
|
||||
|
||||
* 从 `资源中心` 中移除了 `udf-manage` 功能 ([#16209](https://github.com/apache/dolphinscheduler/pull/16209))
|
||||
* 从 `任务插件` 中移除了 `Pigeon` 类型 ([#16218](https://github.com/apache/dolphinscheduler/pull/16218))
|
||||
* 从 `资源中心` 中移除 `udf-manage` 功能 ([#16209](https://github.com/apache/dolphinscheduler/pull/16209))
|
||||
* 从 `任务插件` 中移除 `Pigeon` 类型 ([#16218](https://github.com/apache/dolphinscheduler/pull/16218))
|
||||
* 统一代码中的 `process` 为 `workflow` ([#16515](https://github.com/apache/dolphinscheduler/pull/16515))
|
||||
* 在 3.3.0-release 中废弃了从 1.x 至 2.x 的升级代码 ([#16543](https://github.com/apache/dolphinscheduler/pull/16543))
|
||||
* 废弃从 1.x 至 2.x 的升级代码 ([#16543](https://github.com/apache/dolphinscheduler/pull/16543))
|
||||
* 移除 `数据质量` 模块 ([#16794](https://github.com/apache/dolphinscheduler/pull/16794))
|
||||
|
||||
|
Before Width: | Height: | Size: 20 KiB |
Before Width: | Height: | Size: 20 KiB |
Before Width: | Height: | Size: 18 KiB |
Before Width: | Height: | Size: 21 KiB |
Before Width: | Height: | Size: 20 KiB |
Before Width: | Height: | Size: 21 KiB |
Before Width: | Height: | Size: 20 KiB |
Before Width: | Height: | Size: 62 KiB |
Before Width: | Height: | Size: 72 KiB |
Before Width: | Height: | Size: 54 KiB |
Before Width: | Height: | Size: 20 KiB |
Before Width: | Height: | Size: 21 KiB |
Before Width: | Height: | Size: 20 KiB |
@ -84,13 +84,6 @@ datasource.encryption.enable=false
|
||||
# datasource encryption salt
|
||||
datasource.encryption.salt=!@#$%^&*
|
||||
|
||||
# data quality jar directory path, it would auto discovery data quality jar from this given dir. You should keep it empty if you do not change anything in
|
||||
# data-quality, it will auto discovery by dolphinscheduler itself. Change it only if you want to use your own data-quality jar and it is not in worker-server
|
||||
# libs directory (but make sure your jar name starts with `dolphinscheduler-data-quality`).
|
||||
data-quality.jar.dir=
|
||||
|
||||
#data-quality.error.output.path=/tmp/data-quality-error-data
|
||||
|
||||
# Network IP gets priority, default inner outer
|
||||
|
||||
# Whether hive SQL is executed in the same session
|
||||
@ -126,4 +119,4 @@ ml.mlflow.preset_repository=https://github.com/apache/dolphinscheduler-mlflow
|
||||
ml.mlflow.preset_repository_version="main"
|
||||
|
||||
# way to collect applicationId: log(original regex match), aop
|
||||
appId.collect: log
|
||||
appId.collect: log
|
||||
|
@ -61,11 +61,6 @@
|
||||
<artifactId>dolphinscheduler-meter</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.dolphinscheduler</groupId>
|
||||
<artifactId>dolphinscheduler-data-quality</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.dolphinscheduler</groupId>
|
||||
<artifactId>dolphinscheduler-datasource-all</artifactId>
|
||||
|
@ -45,13 +45,12 @@ public class DynamicTaskTypeConfiguration {
|
||||
|
||||
private static final List<String> defaultTaskCategories =
|
||||
Arrays.asList(Constants.TYPE_UNIVERSAL, Constants.TYPE_DATA_INTEGRATION, Constants.TYPE_CLOUD,
|
||||
Constants.TYPE_LOGIC, Constants.TYPE_DATA_QUALITY, Constants.TYPE_OTHER,
|
||||
Constants.TYPE_LOGIC, Constants.TYPE_OTHER,
|
||||
Constants.TYPE_MACHINE_LEARNING);
|
||||
private List<DynamicTaskInfo> universal;
|
||||
private List<DynamicTaskInfo> cloud;
|
||||
private List<DynamicTaskInfo> logic;
|
||||
private List<DynamicTaskInfo> dataIntegration;
|
||||
private List<DynamicTaskInfo> dataQuality;
|
||||
private List<DynamicTaskInfo> other;
|
||||
private List<DynamicTaskInfo> machineLearning;
|
||||
|
||||
@ -69,8 +68,6 @@ public class DynamicTaskTypeConfiguration {
|
||||
return logic;
|
||||
case Constants.TYPE_LOGIC:
|
||||
return dataIntegration;
|
||||
case Constants.TYPE_DATA_QUALITY:
|
||||
return dataQuality;
|
||||
case Constants.TYPE_OTHER:
|
||||
return other;
|
||||
case Constants.TYPE_MACHINE_LEARNING:
|
||||
@ -86,7 +83,6 @@ public class DynamicTaskTypeConfiguration {
|
||||
log.info("support default cloud dynamic task types: {}", cloud);
|
||||
log.info("support default logic dynamic task types: {}", logic);
|
||||
log.info("support default dataIntegration dynamic task types: {}", dataIntegration);
|
||||
log.info("support default dataQuality dynamic task types: {}", dataQuality);
|
||||
log.info("support default machineLearning dynamic task types: {}", machineLearning);
|
||||
log.info("support default other dynamic task types: {}", other);
|
||||
}
|
||||
|
@ -48,7 +48,6 @@ public class TaskTypeConfiguration {
|
||||
private List<String> cloud;
|
||||
private List<String> logic;
|
||||
private List<String> dataIntegration;
|
||||
private List<String> dataQuality;
|
||||
private List<String> other;
|
||||
private List<String> machineLearning;
|
||||
|
||||
@ -64,7 +63,6 @@ public class TaskTypeConfiguration {
|
||||
logic.forEach(task -> defaultTaskTypes.add(new FavTaskDto(task, false, Constants.TYPE_LOGIC)));
|
||||
dataIntegration
|
||||
.forEach(task -> defaultTaskTypes.add(new FavTaskDto(task, false, Constants.TYPE_DATA_INTEGRATION)));
|
||||
dataQuality.forEach(task -> defaultTaskTypes.add(new FavTaskDto(task, false, Constants.TYPE_DATA_QUALITY)));
|
||||
machineLearning
|
||||
.forEach(task -> defaultTaskTypes.add(new FavTaskDto(task, false, Constants.TYPE_MACHINE_LEARNING)));
|
||||
other.forEach(task -> defaultTaskTypes.add(new FavTaskDto(task, false, Constants.TYPE_OTHER)));
|
||||
@ -76,7 +74,6 @@ public class TaskTypeConfiguration {
|
||||
log.info("support default cloud task types: {}", cloud);
|
||||
log.info("support default logic task types: {}", logic);
|
||||
log.info("support default dataIntegration task types: {}", dataIntegration);
|
||||
log.info("support default dataQuality task types: {}", dataQuality);
|
||||
log.info("support default machineLearning task types: {}", machineLearning);
|
||||
log.info("support default other task types: {}", other);
|
||||
}
|
||||
|
@ -1,195 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.api.controller;
|
||||
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.GET_DATASOURCE_OPTIONS_ERROR;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.GET_RULE_FORM_CREATE_JSON_ERROR;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_EXECUTE_RESULT_LIST_PAGING_ERROR;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_RULE_LIST_ERROR;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_RULE_LIST_PAGING_ERROR;
|
||||
|
||||
import org.apache.dolphinscheduler.api.exceptions.ApiException;
|
||||
import org.apache.dolphinscheduler.api.service.DqExecuteResultService;
|
||||
import org.apache.dolphinscheduler.api.service.DqRuleService;
|
||||
import org.apache.dolphinscheduler.api.utils.PageInfo;
|
||||
import org.apache.dolphinscheduler.api.utils.Result;
|
||||
import org.apache.dolphinscheduler.common.constants.Constants;
|
||||
import org.apache.dolphinscheduler.dao.entity.DqExecuteResult;
|
||||
import org.apache.dolphinscheduler.dao.entity.DqRule;
|
||||
import org.apache.dolphinscheduler.dao.entity.User;
|
||||
import org.apache.dolphinscheduler.plugin.task.api.utils.ParameterUtils;
|
||||
import org.apache.dolphinscheduler.spi.params.base.ParamsOptions;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
import org.springframework.web.bind.annotation.RequestAttribute;
|
||||
import org.springframework.web.bind.annotation.RequestMapping;
|
||||
import org.springframework.web.bind.annotation.RequestParam;
|
||||
import org.springframework.web.bind.annotation.ResponseStatus;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.Parameter;
|
||||
import io.swagger.v3.oas.annotations.Parameters;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
|
||||
/**
|
||||
* data quality controller
|
||||
*/
|
||||
@Tag(name = "DATA_QUALITY_TAG")
|
||||
@RestController
|
||||
@RequestMapping("/data-quality")
|
||||
public class DataQualityController extends BaseController {
|
||||
|
||||
@Autowired
|
||||
private DqRuleService dqRuleService;
|
||||
|
||||
@Autowired
|
||||
private DqExecuteResultService dqExecuteResultService;
|
||||
|
||||
/**
|
||||
* get rule from-create json
|
||||
* @param ruleId ruleId
|
||||
* @return from-create json
|
||||
*/
|
||||
@Operation(summary = "getRuleFormCreateJson", description = "GET_RULE_FORM_CREATE_JSON_NOTES")
|
||||
@Parameters({
|
||||
@Parameter(name = "ruleId", description = "RULE_ID", schema = @Schema(implementation = int.class, example = "1"))
|
||||
})
|
||||
@GetMapping(value = "/getRuleFormCreateJson")
|
||||
@ResponseStatus(HttpStatus.OK)
|
||||
@ApiException(GET_RULE_FORM_CREATE_JSON_ERROR)
|
||||
public Result<String> getRuleFormCreateJsonById(@RequestParam(value = "ruleId") int ruleId) {
|
||||
String ruleFormCreateJsonById = dqRuleService.getRuleFormCreateJsonById(ruleId);
|
||||
return Result.success(ruleFormCreateJsonById);
|
||||
}
|
||||
|
||||
/**
|
||||
* query rule list paging
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param searchVal search value
|
||||
* @param pageNo page number
|
||||
* @param pageSize page size
|
||||
* @return rule page
|
||||
*/
|
||||
@Operation(summary = "queryRuleListPaging", description = "QUERY_RULE_LIST_PAGING_NOTES")
|
||||
@Parameters({
|
||||
@Parameter(name = "searchVal", description = "SEARCH_VAL", schema = @Schema(implementation = String.class)),
|
||||
@Parameter(name = "ruleType", description = "RULE_TYPE", schema = @Schema(implementation = int.class, example = "1")),
|
||||
@Parameter(name = "startDate", description = "START_DATE", schema = @Schema(implementation = String.class)),
|
||||
@Parameter(name = "endDate", description = "END_DATE", schema = @Schema(implementation = String.class)),
|
||||
@Parameter(name = "pageNo", description = "PAGE_NO", schema = @Schema(implementation = int.class, example = "1")),
|
||||
@Parameter(name = "pageSize", description = "PAGE_SIZE", schema = @Schema(implementation = int.class, example = "10"))
|
||||
})
|
||||
@GetMapping(value = "/rule/page")
|
||||
@ResponseStatus(HttpStatus.OK)
|
||||
@ApiException(QUERY_RULE_LIST_PAGING_ERROR)
|
||||
public Result<PageInfo<DqRule>> queryRuleListPaging(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
|
||||
@RequestParam(value = "searchVal", required = false) String searchVal,
|
||||
@RequestParam(value = "ruleType", required = false) Integer ruleType,
|
||||
@RequestParam(value = "startDate", required = false) String startTime,
|
||||
@RequestParam(value = "endDate", required = false) String endTime,
|
||||
@RequestParam("pageNo") Integer pageNo,
|
||||
@RequestParam("pageSize") Integer pageSize) {
|
||||
checkPageParams(pageNo, pageSize);
|
||||
searchVal = ParameterUtils.handleEscapes(searchVal);
|
||||
|
||||
PageInfo<DqRule> dqRulePageInfo =
|
||||
dqRuleService.queryRuleListPaging(loginUser, searchVal, ruleType, startTime, endTime, pageNo, pageSize);
|
||||
return Result.success(dqRulePageInfo);
|
||||
}
|
||||
|
||||
/**
|
||||
* query all rule list
|
||||
*
|
||||
* @return rule list
|
||||
*/
|
||||
@Operation(summary = "queryRuleList", description = "QUERY_RULE_LIST_NOTES")
|
||||
@GetMapping(value = "/ruleList")
|
||||
@ResponseStatus(HttpStatus.OK)
|
||||
@ApiException(QUERY_RULE_LIST_ERROR)
|
||||
public Result<List<DqRule>> queryRuleList() {
|
||||
List<DqRule> dqRules = dqRuleService.queryAllRuleList();
|
||||
return Result.success(dqRules);
|
||||
}
|
||||
|
||||
/**
|
||||
* query task execute result list paging
|
||||
*
|
||||
* @param loginUser loginUser
|
||||
* @param searchVal searchVal
|
||||
* @param ruleType ruleType
|
||||
* @param state state
|
||||
* @param startTime startTime
|
||||
* @param endTime endTime
|
||||
* @param pageNo pageNo
|
||||
* @param pageSize pageSize
|
||||
* @return
|
||||
*/
|
||||
@Operation(summary = "queryExecuteResultListPaging", description = "QUERY_EXECUTE_RESULT_LIST_PAGING_NOTES")
|
||||
@Parameters({
|
||||
@Parameter(name = "searchVal", description = "SEARCH_VAL", schema = @Schema(implementation = String.class)),
|
||||
@Parameter(name = "ruleType", description = "RULE_TYPE", schema = @Schema(implementation = int.class, example = "1")),
|
||||
@Parameter(name = "state", description = "STATE", schema = @Schema(implementation = int.class, example = "1")),
|
||||
@Parameter(name = "startDate", description = "START_DATE", schema = @Schema(implementation = String.class)),
|
||||
@Parameter(name = "endDate", description = "END_DATE", schema = @Schema(implementation = String.class)),
|
||||
@Parameter(name = "pageNo", description = "PAGE_NO", schema = @Schema(implementation = int.class, example = "1")),
|
||||
@Parameter(name = "pageSize", description = "PAGE_SIZE", schema = @Schema(implementation = int.class, example = "10"))
|
||||
})
|
||||
@GetMapping(value = "/result/page")
|
||||
@ResponseStatus(HttpStatus.OK)
|
||||
@ApiException(QUERY_EXECUTE_RESULT_LIST_PAGING_ERROR)
|
||||
public Result<PageInfo<DqExecuteResult>> queryExecuteResultListPaging(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
|
||||
@RequestParam(value = "searchVal", required = false) String searchVal,
|
||||
@RequestParam(value = "ruleType", required = false) Integer ruleType,
|
||||
@RequestParam(value = "state", required = false) Integer state,
|
||||
@RequestParam(value = "startDate", required = false) String startTime,
|
||||
@RequestParam(value = "endDate", required = false) String endTime,
|
||||
@RequestParam("pageNo") Integer pageNo,
|
||||
@RequestParam("pageSize") Integer pageSize) {
|
||||
|
||||
checkPageParams(pageNo, pageSize);
|
||||
searchVal = ParameterUtils.handleEscapes(searchVal);
|
||||
|
||||
PageInfo<DqExecuteResult> dqExecuteResultPageInfo = dqExecuteResultService.queryResultListPaging(loginUser,
|
||||
searchVal, state, ruleType, startTime, endTime, pageNo, pageSize);
|
||||
return Result.success(dqExecuteResultPageInfo);
|
||||
}
|
||||
|
||||
/**
|
||||
* get datasource options by id
|
||||
* @param datasourceId datasourceId
|
||||
* @return result
|
||||
*/
|
||||
@Operation(summary = "getDatasourceOptionsById", description = "GET_DATASOURCE_OPTIONS_NOTES")
|
||||
@Parameters({
|
||||
@Parameter(name = "datasourceId", description = "DATA_SOURCE_ID", schema = @Schema(implementation = int.class, example = "1"))
|
||||
})
|
||||
@GetMapping(value = "/getDatasourceOptionsById")
|
||||
@ResponseStatus(HttpStatus.OK)
|
||||
@ApiException(GET_DATASOURCE_OPTIONS_ERROR)
|
||||
public Result<List<ParamsOptions>> getDatasourceOptionsById(@RequestParam(value = "datasourceId") int datasourceId) {
|
||||
List<ParamsOptions> paramsOptions = dqRuleService.getDatasourceOptionsById(datasourceId);
|
||||
return Result.success(paramsOptions);
|
||||
}
|
||||
}
|
@ -1,63 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.api.dto;
|
||||
|
||||
import org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql;
|
||||
import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* RuleDefinition
|
||||
*/
|
||||
public class RuleDefinition {
|
||||
|
||||
/**
|
||||
* rule input entry list
|
||||
*/
|
||||
private List<DqRuleInputEntry> ruleInputEntryList;
|
||||
|
||||
/**
|
||||
* rule execute sql list
|
||||
*/
|
||||
private List<DqRuleExecuteSql> executeSqlList;
|
||||
|
||||
public RuleDefinition() {
|
||||
}
|
||||
|
||||
public RuleDefinition(List<DqRuleInputEntry> ruleInputEntryList, List<DqRuleExecuteSql> executeSqlList) {
|
||||
this.ruleInputEntryList = ruleInputEntryList;
|
||||
this.executeSqlList = executeSqlList;
|
||||
}
|
||||
|
||||
public List<DqRuleInputEntry> getRuleInputEntryList() {
|
||||
return ruleInputEntryList;
|
||||
}
|
||||
|
||||
public void setRuleInputEntryList(List<DqRuleInputEntry> ruleInputEntryList) {
|
||||
this.ruleInputEntryList = ruleInputEntryList;
|
||||
}
|
||||
|
||||
public List<DqRuleExecuteSql> getExecuteSqlList() {
|
||||
return executeSqlList;
|
||||
}
|
||||
|
||||
public void setExecuteSqlList(List<DqRuleExecuteSql> executeSqlList) {
|
||||
this.executeSqlList = executeSqlList;
|
||||
}
|
||||
}
|
@ -485,12 +485,6 @@ public enum Status {
|
||||
QUERY_ENVIRONMENT_BY_CODE_ERROR(1200009, "not found environment code [{0}] ", "查询环境编码[{0}]不存在"),
|
||||
QUERY_ENVIRONMENT_ERROR(1200010, "login user query environment error", "分页查询环境列表错误"),
|
||||
VERIFY_ENVIRONMENT_ERROR(1200011, "verify environment error", "验证环境信息错误"),
|
||||
GET_RULE_FORM_CREATE_JSON_ERROR(1200012, "get rule form create json error", "获取规则 FROM-CREATE-JSON 错误"),
|
||||
QUERY_RULE_LIST_PAGING_ERROR(1200013, "query rule list paging error", "获取规则分页列表错误"),
|
||||
QUERY_RULE_LIST_ERROR(1200014, "query rule list error", "获取规则列表错误"),
|
||||
QUERY_RULE_INPUT_ENTRY_LIST_ERROR(1200015, "query rule list error", "获取规则列表错误"),
|
||||
QUERY_EXECUTE_RESULT_LIST_PAGING_ERROR(1200016, "query execute result list paging error", "获取数据质量任务结果分页错误"),
|
||||
GET_DATASOURCE_OPTIONS_ERROR(1200017, "get datasource options error", "获取数据源Options错误"),
|
||||
GET_DATASOURCE_TABLES_ERROR(1200018, "get datasource tables error", "获取数据源表列表错误"),
|
||||
GET_DATASOURCE_TABLE_COLUMNS_ERROR(1200019, "get datasource table columns error", "获取数据源表列名错误"),
|
||||
GET_DATASOURCE_DATABASES_ERROR(1200035, "get datasource databases error", "获取数据库列表错误"),
|
||||
|
@ -1,36 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.api.service;
|
||||
|
||||
import org.apache.dolphinscheduler.api.utils.PageInfo;
|
||||
import org.apache.dolphinscheduler.dao.entity.DqExecuteResult;
|
||||
import org.apache.dolphinscheduler.dao.entity.User;
|
||||
|
||||
/**
 * DqExecuteResultService — queries data quality task execution results.
 */
public interface DqExecuteResultService {

    /**
     * Query data quality task execution results, paged and filtered.
     *
     * @param loginUser login user performing the query
     * @param searchVal search keyword; optional — may be null
     * @param state     execution state filter; optional — may be null
     * @param ruleType  rule type filter; optional — may be null
     * @param startTime start of the time-range filter; optional — may be null
     * @param endTime   end of the time-range filter; optional — may be null
     * @param pageNo    page number
     * @param pageSize  page size
     * @return a page of matching execution results
     */
    PageInfo<DqExecuteResult> queryResultListPaging(User loginUser,
                                                    String searchVal,
                                                    Integer state,
                                                    Integer ruleType,
                                                    String startTime,
                                                    String endTime,
                                                    Integer pageNo, Integer pageSize);
}
|
@ -1,44 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.api.service;
|
||||
|
||||
import org.apache.dolphinscheduler.api.utils.PageInfo;
|
||||
import org.apache.dolphinscheduler.dao.entity.DqRule;
|
||||
import org.apache.dolphinscheduler.dao.entity.User;
|
||||
import org.apache.dolphinscheduler.spi.params.base.ParamsOptions;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* DqsRuleService
|
||||
*/
|
||||
public interface DqRuleService {
|
||||
|
||||
String getRuleFormCreateJsonById(int id);
|
||||
|
||||
List<DqRule> queryAllRuleList();
|
||||
|
||||
PageInfo<DqRule> queryRuleListPaging(User loginUser,
|
||||
String searchVal,
|
||||
Integer ruleType,
|
||||
String startTime,
|
||||
String endTime,
|
||||
Integer pageNo, Integer pageSize);
|
||||
|
||||
List<ParamsOptions> getDatasourceOptionsById(int datasourceId);
|
||||
}
|
@ -1,101 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.api.service.impl;
|
||||
|
||||
import org.apache.dolphinscheduler.api.enums.Status;
|
||||
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
|
||||
import org.apache.dolphinscheduler.api.service.DqExecuteResultService;
|
||||
import org.apache.dolphinscheduler.api.utils.PageInfo;
|
||||
import org.apache.dolphinscheduler.common.utils.DateUtils;
|
||||
import org.apache.dolphinscheduler.dao.entity.DqExecuteResult;
|
||||
import org.apache.dolphinscheduler.dao.entity.User;
|
||||
import org.apache.dolphinscheduler.dao.mapper.DqExecuteResultMapper;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
|
||||
/**
|
||||
* DqExecuteResultServiceImpl
|
||||
*/
|
||||
@Service
|
||||
@Slf4j
|
||||
public class DqExecuteResultServiceImpl extends BaseServiceImpl implements DqExecuteResultService {
|
||||
|
||||
@Autowired
|
||||
private DqExecuteResultMapper dqExecuteResultMapper;
|
||||
|
||||
@Override
|
||||
public PageInfo<DqExecuteResult> queryResultListPaging(User loginUser,
|
||||
String searchVal,
|
||||
Integer state,
|
||||
Integer ruleType,
|
||||
String startTime,
|
||||
String endTime,
|
||||
Integer pageNo,
|
||||
Integer pageSize) {
|
||||
|
||||
int[] statusArray = null;
|
||||
// filter by state
|
||||
if (state != null) {
|
||||
statusArray = new int[]{state};
|
||||
}
|
||||
|
||||
Date start = null;
|
||||
Date end = null;
|
||||
try {
|
||||
if (StringUtils.isNotEmpty(startTime)) {
|
||||
start = DateUtils.stringToDate(startTime);
|
||||
}
|
||||
if (StringUtils.isNotEmpty(endTime)) {
|
||||
end = DateUtils.stringToDate(endTime);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
throw new ServiceException(Status.REQUEST_PARAMS_NOT_VALID_ERROR, "startTime,endTime");
|
||||
}
|
||||
|
||||
Page<DqExecuteResult> page = new Page<>(pageNo, pageSize);
|
||||
PageInfo<DqExecuteResult> pageInfo = new PageInfo<>(pageNo, pageSize);
|
||||
|
||||
if (ruleType == null) {
|
||||
ruleType = -1;
|
||||
}
|
||||
|
||||
IPage<DqExecuteResult> dqsResultPage =
|
||||
dqExecuteResultMapper.queryResultListPaging(
|
||||
page,
|
||||
searchVal,
|
||||
loginUser,
|
||||
statusArray,
|
||||
ruleType,
|
||||
start,
|
||||
end);
|
||||
|
||||
pageInfo.setTotal((int) dqsResultPage.getTotal());
|
||||
pageInfo.setTotalList(dqsResultPage.getRecords());
|
||||
return pageInfo;
|
||||
}
|
||||
}
|
@ -1,314 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.api.service.impl;
|
||||
|
||||
import static org.apache.dolphinscheduler.common.constants.Constants.CHANGE;
|
||||
import static org.apache.dolphinscheduler.common.constants.Constants.SMALL;
|
||||
|
||||
import org.apache.dolphinscheduler.api.dto.RuleDefinition;
|
||||
import org.apache.dolphinscheduler.api.enums.Status;
|
||||
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
|
||||
import org.apache.dolphinscheduler.api.service.DqRuleService;
|
||||
import org.apache.dolphinscheduler.api.utils.PageInfo;
|
||||
import org.apache.dolphinscheduler.common.utils.DateUtils;
|
||||
import org.apache.dolphinscheduler.common.utils.JSONUtils;
|
||||
import org.apache.dolphinscheduler.dao.entity.DataSource;
|
||||
import org.apache.dolphinscheduler.dao.entity.DqComparisonType;
|
||||
import org.apache.dolphinscheduler.dao.entity.DqRule;
|
||||
import org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql;
|
||||
import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry;
|
||||
import org.apache.dolphinscheduler.dao.entity.User;
|
||||
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
|
||||
import org.apache.dolphinscheduler.dao.mapper.DqComparisonTypeMapper;
|
||||
import org.apache.dolphinscheduler.dao.mapper.DqRuleExecuteSqlMapper;
|
||||
import org.apache.dolphinscheduler.dao.mapper.DqRuleInputEntryMapper;
|
||||
import org.apache.dolphinscheduler.dao.mapper.DqRuleMapper;
|
||||
import org.apache.dolphinscheduler.dao.utils.DqRuleUtils;
|
||||
import org.apache.dolphinscheduler.plugin.task.api.enums.dp.OptionSourceType;
|
||||
import org.apache.dolphinscheduler.spi.enums.DbType;
|
||||
import org.apache.dolphinscheduler.spi.params.base.FormType;
|
||||
import org.apache.dolphinscheduler.spi.params.base.ParamsOptions;
|
||||
import org.apache.dolphinscheduler.spi.params.base.PluginParams;
|
||||
import org.apache.dolphinscheduler.spi.params.base.PropsType;
|
||||
import org.apache.dolphinscheduler.spi.params.base.Validate;
|
||||
import org.apache.dolphinscheduler.spi.params.group.GroupParam;
|
||||
import org.apache.dolphinscheduler.spi.params.group.GroupParamsProps;
|
||||
import org.apache.dolphinscheduler.spi.params.input.InputParam;
|
||||
import org.apache.dolphinscheduler.spi.params.input.InputParamProps;
|
||||
import org.apache.dolphinscheduler.spi.params.select.SelectParam;
|
||||
|
||||
import org.apache.commons.collections4.CollectionUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
|
||||
/**
|
||||
* DqRuleServiceImpl
|
||||
*/
|
||||
@Service
|
||||
@Slf4j
|
||||
public class DqRuleServiceImpl extends BaseServiceImpl implements DqRuleService {
|
||||
|
||||
@Autowired
|
||||
private DqRuleMapper dqRuleMapper;
|
||||
|
||||
@Autowired
|
||||
private DqRuleInputEntryMapper dqRuleInputEntryMapper;
|
||||
|
||||
@Autowired
|
||||
private DqRuleExecuteSqlMapper dqRuleExecuteSqlMapper;
|
||||
|
||||
@Autowired
|
||||
private DataSourceMapper dataSourceMapper;
|
||||
|
||||
@Autowired
|
||||
private DqComparisonTypeMapper dqComparisonTypeMapper;
|
||||
|
||||
@Override
|
||||
public String getRuleFormCreateJsonById(int id) {
|
||||
|
||||
List<DqRuleInputEntry> ruleInputEntryList = dqRuleInputEntryMapper.getRuleInputEntryList(id);
|
||||
if (ruleInputEntryList == null || ruleInputEntryList.isEmpty()) {
|
||||
throw new ServiceException(Status.QUERY_RULE_INPUT_ENTRY_LIST_ERROR);
|
||||
}
|
||||
return getRuleFormCreateJson(DqRuleUtils.transformInputEntry(ruleInputEntryList));
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<DqRule> queryAllRuleList() {
|
||||
return dqRuleMapper.selectList(new QueryWrapper<>());
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ParamsOptions> getDatasourceOptionsById(int datasourceId) {
|
||||
|
||||
List<DataSource> dataSourceList = dataSourceMapper.listAllDataSourceByType(datasourceId);
|
||||
if (CollectionUtils.isEmpty(dataSourceList)) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
List<ParamsOptions> options = new ArrayList<>();
|
||||
for (DataSource dataSource : dataSourceList) {
|
||||
ParamsOptions childrenOption = new ParamsOptions(dataSource.getName(), dataSource.getId(), false);
|
||||
options.add(childrenOption);
|
||||
}
|
||||
return options;
|
||||
}
|
||||
|
||||
@Override
|
||||
public PageInfo<DqRule> queryRuleListPaging(User loginUser,
|
||||
String searchVal,
|
||||
Integer ruleType,
|
||||
String startTime,
|
||||
String endTime,
|
||||
Integer pageNo,
|
||||
Integer pageSize) {
|
||||
|
||||
Date start = null;
|
||||
Date end = null;
|
||||
try {
|
||||
if (StringUtils.isNotEmpty(startTime)) {
|
||||
start = DateUtils.stringToDate(startTime);
|
||||
}
|
||||
if (StringUtils.isNotEmpty(endTime)) {
|
||||
end = DateUtils.stringToDate(endTime);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
throw new ServiceException(Status.REQUEST_PARAMS_NOT_VALID_ERROR, "startTime,endTime");
|
||||
}
|
||||
|
||||
Page<DqRule> page = new Page<>(pageNo, pageSize);
|
||||
PageInfo<DqRule> pageInfo = new PageInfo<>(pageNo, pageSize);
|
||||
|
||||
if (ruleType == null) {
|
||||
ruleType = -1;
|
||||
}
|
||||
|
||||
IPage<DqRule> dqRulePage =
|
||||
dqRuleMapper.queryRuleListPaging(
|
||||
page,
|
||||
searchVal,
|
||||
ruleType,
|
||||
start,
|
||||
end);
|
||||
if (dqRulePage != null) {
|
||||
List<DqRule> dataList = dqRulePage.getRecords();
|
||||
dataList.forEach(dqRule -> {
|
||||
List<DqRuleInputEntry> ruleInputEntryList =
|
||||
DqRuleUtils.transformInputEntry(dqRuleInputEntryMapper.getRuleInputEntryList(dqRule.getId()));
|
||||
List<DqRuleExecuteSql> ruleExecuteSqlList = dqRuleExecuteSqlMapper.getExecuteSqlList(dqRule.getId());
|
||||
|
||||
RuleDefinition ruleDefinition = new RuleDefinition(ruleInputEntryList, ruleExecuteSqlList);
|
||||
dqRule.setRuleJson(JSONUtils.toJsonString(ruleDefinition));
|
||||
});
|
||||
|
||||
pageInfo.setTotal((int) dqRulePage.getTotal());
|
||||
pageInfo.setTotalList(dataList);
|
||||
}
|
||||
|
||||
return pageInfo;
|
||||
}
|
||||
|
||||
private String getRuleFormCreateJson(List<DqRuleInputEntry> ruleInputEntryList) {
|
||||
List<PluginParams> params = new ArrayList<>();
|
||||
|
||||
for (DqRuleInputEntry inputEntry : ruleInputEntryList) {
|
||||
if (Boolean.TRUE.equals(inputEntry.getIsShow())) {
|
||||
switch (Objects.requireNonNull(FormType.of(inputEntry.getType()))) {
|
||||
case INPUT:
|
||||
params.add(getInputParam(inputEntry));
|
||||
break;
|
||||
case SELECT:
|
||||
params.add(getSelectParam(inputEntry));
|
||||
break;
|
||||
case TEXTAREA:
|
||||
params.add(getTextareaParam(inputEntry));
|
||||
break;
|
||||
case GROUP:
|
||||
params.add(getGroupParam(inputEntry));
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ObjectMapper mapper = new ObjectMapper();
|
||||
mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
|
||||
String result = null;
|
||||
|
||||
try {
|
||||
result = mapper.writeValueAsString(params);
|
||||
} catch (JsonProcessingException e) {
|
||||
log.error("Json parse error.", e);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private InputParam getTextareaParam(DqRuleInputEntry inputEntry) {
|
||||
|
||||
InputParamProps paramProps =
|
||||
new InputParamProps();
|
||||
paramProps.setDisabled(!inputEntry.getCanEdit());
|
||||
paramProps.setSize(SMALL);
|
||||
paramProps.setType(PropsType.TEXTAREA.getPropsType());
|
||||
paramProps.setRows(1);
|
||||
|
||||
return InputParam
|
||||
.newBuilder(inputEntry.getField(), inputEntry.getTitle())
|
||||
.addValidate(Validate.newBuilder()
|
||||
.setRequired(inputEntry.getIsValidate())
|
||||
.build())
|
||||
.setProps(paramProps)
|
||||
.setValue(inputEntry.getData())
|
||||
.setPlaceholder(inputEntry.getPlaceholder())
|
||||
.setEmit(Boolean.TRUE.equals(inputEntry.getIsEmit()) ? Collections.singletonList(CHANGE) : null)
|
||||
.build();
|
||||
}
|
||||
|
||||
private SelectParam getSelectParam(DqRuleInputEntry inputEntry) {
|
||||
List<ParamsOptions> options = null;
|
||||
|
||||
switch (OptionSourceType.of(inputEntry.getOptionSourceType())) {
|
||||
case DEFAULT:
|
||||
String optionStr = inputEntry.getOptions();
|
||||
if (StringUtils.isNotEmpty(optionStr)) {
|
||||
options = JSONUtils.toList(optionStr, ParamsOptions.class);
|
||||
}
|
||||
break;
|
||||
case DATASOURCE_TYPE:
|
||||
options = new ArrayList<>();
|
||||
ParamsOptions paramsOptions = null;
|
||||
for (DbType dbtype : DbType.values()) {
|
||||
paramsOptions = new ParamsOptions(dbtype.name(), dbtype.getCode(), false);
|
||||
options.add(paramsOptions);
|
||||
}
|
||||
break;
|
||||
case COMPARISON_TYPE:
|
||||
options = new ArrayList<>();
|
||||
ParamsOptions comparisonOptions = null;
|
||||
List<DqComparisonType> list =
|
||||
dqComparisonTypeMapper.selectList(new QueryWrapper<DqComparisonType>().orderByAsc("id"));
|
||||
|
||||
for (DqComparisonType type : list) {
|
||||
comparisonOptions = new ParamsOptions(type.getType(), type.getId(), false);
|
||||
options.add(comparisonOptions);
|
||||
}
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
return SelectParam
|
||||
.newBuilder(inputEntry.getField(), inputEntry.getTitle())
|
||||
.setOptions(options)
|
||||
.setValue(inputEntry.getData())
|
||||
.setSize(SMALL)
|
||||
.setPlaceHolder(inputEntry.getPlaceholder())
|
||||
.setEmit(Boolean.TRUE.equals(inputEntry.getIsEmit()) ? Collections.singletonList(CHANGE) : null)
|
||||
.build();
|
||||
}
|
||||
|
||||
private InputParam getInputParam(DqRuleInputEntry inputEntry) {
|
||||
InputParamProps paramProps =
|
||||
new InputParamProps();
|
||||
paramProps.setDisabled(!inputEntry.getCanEdit());
|
||||
paramProps.setSize(SMALL);
|
||||
paramProps.setRows(2);
|
||||
|
||||
return InputParam
|
||||
.newBuilder(inputEntry.getField(), inputEntry.getTitle())
|
||||
.addValidate(Validate.newBuilder()
|
||||
.setRequired(inputEntry.getIsValidate())
|
||||
.build())
|
||||
.setProps(paramProps)
|
||||
.setValue(inputEntry.getData())
|
||||
.setPlaceholder(inputEntry.getPlaceholder())
|
||||
.setEmit(Boolean.TRUE.equals(inputEntry.getIsEmit()) ? Collections.singletonList(CHANGE) : null)
|
||||
.build();
|
||||
}
|
||||
|
||||
private GroupParam getGroupParam(DqRuleInputEntry inputEntry) {
|
||||
return GroupParam
|
||||
.newBuilder(inputEntry.getField(), inputEntry.getTitle())
|
||||
.addValidate(Validate.newBuilder()
|
||||
.setRequired(inputEntry.getIsValidate())
|
||||
.build())
|
||||
.setProps(new GroupParamsProps().setRules(JSONUtils.toList(inputEntry.getOptions(), PluginParams.class))
|
||||
.setFontSize(20))
|
||||
.setEmit(Boolean.TRUE.equals(inputEntry.getIsEmit()) ? Collections.singletonList(CHANGE) : null)
|
||||
.build();
|
||||
}
|
||||
}
|
@ -41,7 +41,6 @@ import org.apache.dolphinscheduler.dao.entity.WorkflowInstance;
|
||||
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
|
||||
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper;
|
||||
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
|
||||
import org.apache.dolphinscheduler.dao.repository.DqExecuteResultDao;
|
||||
import org.apache.dolphinscheduler.dao.repository.TaskInstanceDao;
|
||||
import org.apache.dolphinscheduler.dao.repository.WorkflowInstanceDao;
|
||||
import org.apache.dolphinscheduler.dao.utils.TaskCacheUtils;
|
||||
@ -100,9 +99,6 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst
|
||||
@Autowired
|
||||
TaskDefinitionMapper taskDefinitionMapper;
|
||||
|
||||
@Autowired
|
||||
private DqExecuteResultDao dqExecuteResultDao;
|
||||
|
||||
@Autowired
|
||||
private TaskGroupQueueService taskGroupQueueService;
|
||||
|
||||
@ -375,7 +371,6 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst
|
||||
}
|
||||
}
|
||||
|
||||
dqExecuteResultDao.deleteByWorkflowInstanceId(workflowInstanceId);
|
||||
taskGroupQueueService.deleteByWorkflowInstanceId(workflowInstanceId);
|
||||
taskInstanceDao.deleteByWorkflowInstanceId(workflowInstanceId);
|
||||
}
|
||||
|
@ -24,7 +24,5 @@ dynamic-task:
|
||||
- {name: SUB_WORKFLOW,icon: shell-icon.png,hover: shell-hover.png}
|
||||
dataIntegration:
|
||||
- {name: SEATUNNEL,icon: shell-icon.png,hover: shell-hover.png}
|
||||
dataQuality:
|
||||
- {name: DATA_QUALITY,icon: shell-icon.png,hover: shell-hover.png}
|
||||
machineLearning:
|
||||
- {name: JUPYTER,icon: shell-icon.png,hover: shell-hover.png}
|
||||
|
@ -233,14 +233,7 @@ DELETE_WORKFLOW_DEFINITION_VERSION_NOTES=delete workflow definition version
|
||||
QUERY_WORKFLOW_DEFINITION_VERSIONS_NOTES=query workflow definition versions
|
||||
SWITCH_WORKFLOW_DEFINITION_VERSION_NOTES=switch workflow definition version
|
||||
VERSION=version
|
||||
GET_RULE_FORM_CREATE_JSON_NOTES=get rule form-create json
|
||||
QUERY_RULE_LIST_PAGING_NOTES=query rule list paging
|
||||
QUERY_RULE_LIST_NOTES=query rule list
|
||||
QUERY_EXECUTE_RESULT_LIST_PAGING_NOTES=query execute result list paging
|
||||
RULE_ID=rule id
|
||||
RULE_TYPE=rule type
|
||||
STATE=state
|
||||
GET_DATASOURCE_OPTIONS_NOTES=get datasource options
|
||||
GET_DATASOURCE_TABLES_NOTES=get datasource table
|
||||
GET_DATASOURCE_TABLE_COLUMNS_NOTES=get datasource table columns
|
||||
TABLE_NAME=table name
|
||||
@ -278,7 +271,6 @@ DELETE_CLUSTER_BY_CODE_NOTES=delete cluster by code
|
||||
QUERY_ALL_CLUSTER_LIST_NOTES=query all cluster list
|
||||
VERIFY_CLUSTER_NOTES=verify cluster
|
||||
|
||||
DATA_QUALITY_TAG=data quality related operation
|
||||
EXPECTED_PARALLELISM_NUMBER=custom parallelism to set the complement task threads
|
||||
DRY_RUN=dry run
|
||||
TEST_FLAG=test flag
|
||||
|
@ -269,13 +269,6 @@ QUERY_WORKFLOW_DEFINITION_VERSIONS_NOTES=query process definition versions
|
||||
SWITCH_WORKFLOW_DEFINITION_VERSION_NOTES=switch process definition version
|
||||
VERSION=version
|
||||
TASK_GROUP_QUEUE_PRIORITY=task group queue priority
|
||||
GET_RULE_FORM_CREATE_JSON_NOTES=get rule form-create json
|
||||
QUERY_RULE_LIST_PAGING_NOTES=query rule list paging
|
||||
QUERY_RULE_LIST_NOTES=query rule list
|
||||
QUERY_EXECUTE_RESULT_LIST_PAGING_NOTES=query execute result list paging
|
||||
RULE_ID=rule id
|
||||
RULE_TYPE=rule type
|
||||
GET_DATASOURCE_OPTIONS_NOTES=get datasource options
|
||||
GET_DATASOURCE_TABLES_NOTES=get datasource table
|
||||
GET_DATASOURCE_TABLE_COLUMNS_NOTES=get datasource table columns
|
||||
TABLE_NAME=table name
|
||||
@ -313,7 +306,6 @@ DELETE_CLUSTER_BY_CODE_NOTES=delete cluster by code
|
||||
QUERY_ALL_CLUSTER_LIST_NOTES=query all cluster list
|
||||
VERIFY_CLUSTER_NOTES=verify cluster
|
||||
|
||||
DATA_QUALITY_TAG=data quality service
|
||||
TEST_FLAG=test flag
|
||||
RUN_TASK_INSTANCE_NOTES=run task instance
|
||||
|
||||
|
@ -267,13 +267,6 @@ QUERY_WORKFLOW_DEFINITION_VERSIONS_NOTES=\u67E5\u8BE2\u6D41\u7A0B\u5386\u53F2\u7
|
||||
SWITCH_WORKFLOW_DEFINITION_VERSION_NOTES=\u5207\u6362\u6D41\u7A0B\u7248\u672C
|
||||
VERSION=\u7248\u672C\u53F7
|
||||
TASK_GROUP_QUEUE_PRIORITY=\u4EFB\u52A1\u961F\u5217\u4F18\u5148\u7EA7
|
||||
GET_RULE_FORM_CREATE_JSON_NOTES=\u83B7\u53D6\u89C4\u5219form-create json
|
||||
QUERY_RULE_LIST_PAGING_NOTES=\u67E5\u8BE2\u89C4\u5219\u5206\u9875\u5217\u8868
|
||||
QUERY_RULE_LIST_NOTES=\u67E5\u8BE2\u89C4\u5219\u5217\u8868
|
||||
QUERY_EXECUTE_RESULT_LIST_PAGING_NOTES=\u67E5\u8BE2\u6570\u636E\u8D28\u91CF\u4EFB\u52A1\u7ED3\u679C\u5206\u9875\u5217\u8868
|
||||
RULE_ID=\u89C4\u5219ID
|
||||
RULE_TYPE=\u89C4\u5219\u7C7B\u578B
|
||||
GET_DATASOURCE_OPTIONS_NOTES=\u83B7\u53D6\u6570\u636E\u6E90OPTIONS
|
||||
GET_DATASOURCE_TABLES_NOTES=\u83B7\u53D6\u6570\u636E\u6E90\u8868\u5217\u8868
|
||||
GET_DATASOURCE_TABLE_COLUMNS_NOTES=\u83B7\u53D6\u6570\u636E\u6E90\u8868\u5217\u540D
|
||||
TABLE_NAME=\u8868\u540D
|
||||
@ -311,7 +304,6 @@ DELETE_CLUSTER_BY_CODE_NOTES=\u901A\u8FC7\u96C6\u7FA4\u4EE3\u7801\u5220\u9664\u9
|
||||
QUERY_ALL_CLUSTER_LIST_NOTES=\u67E5\u8BE2\u6240\u6709\u96C6\u7FA4\u5217\u8868
|
||||
VERIFY_CLUSTER_NOTES=\u6821\u9A8C\u96C6\u7FA4
|
||||
|
||||
DATA_QUALITY_TAG=\u6570\u636E\u8D28\u91CF\u76F8\u5173\u64CD\u4F5C
|
||||
TEST_FLAG=\u6D4B\u8BD5\u6807\u8BC6
|
||||
RUN_TASK_INSTANCE_NOTES=\u8FD0\u884C\u4EFB\u52A1\u5B9E\u4F8B
|
||||
|
||||
|
@ -46,8 +46,6 @@ task:
|
||||
- 'SEATUNNEL'
|
||||
- 'DATAX'
|
||||
- 'SQOOP'
|
||||
dataQuality:
|
||||
- 'DATA_QUALITY'
|
||||
machineLearning:
|
||||
- 'JUPYTER'
|
||||
- 'MLFLOW'
|
||||
|
@ -1,164 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.api.controller;
|
||||
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
import org.apache.dolphinscheduler.api.AssertionsHelper;
|
||||
import org.apache.dolphinscheduler.api.enums.Status;
|
||||
import org.apache.dolphinscheduler.api.service.impl.DqExecuteResultServiceImpl;
|
||||
import org.apache.dolphinscheduler.api.service.impl.DqRuleServiceImpl;
|
||||
import org.apache.dolphinscheduler.api.utils.PageInfo;
|
||||
import org.apache.dolphinscheduler.api.utils.Result;
|
||||
import org.apache.dolphinscheduler.common.constants.Constants;
|
||||
import org.apache.dolphinscheduler.common.enums.UserType;
|
||||
import org.apache.dolphinscheduler.dao.entity.DqExecuteResult;
|
||||
import org.apache.dolphinscheduler.dao.entity.DqRule;
|
||||
import org.apache.dolphinscheduler.dao.entity.User;
|
||||
import org.apache.dolphinscheduler.plugin.task.api.enums.dp.RuleType;
|
||||
|
||||
import java.text.MessageFormat;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.junit.jupiter.api.Assertions;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.Mockito;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
|
||||
/**
|
||||
* process definition controller test
|
||||
*/
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
public class DataQualityControllerTest {
|
||||
|
||||
@InjectMocks
|
||||
private DataQualityController dataQualityController;
|
||||
|
||||
@Mock
|
||||
private DqRuleServiceImpl dqRuleService;
|
||||
|
||||
@Mock
|
||||
private DqExecuteResultServiceImpl dqExecuteResultService;
|
||||
|
||||
protected User user;
|
||||
|
||||
@BeforeEach
|
||||
public void before() {
|
||||
User loginUser = new User();
|
||||
loginUser.setId(1);
|
||||
loginUser.setUserType(UserType.GENERAL_USER);
|
||||
loginUser.setUserName("admin");
|
||||
|
||||
user = loginUser;
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetRuleFormCreateJsonById() {
|
||||
|
||||
Mockito.when(dqRuleService.getRuleFormCreateJsonById(1)).thenReturn("");
|
||||
AssertionsHelper.assertDoesNotThrow(() -> dataQualityController.getRuleFormCreateJsonById(1));
|
||||
}
|
||||
|
||||
private void putMsg(Map<String, Object> result, Status status, Object... statusParams) {
|
||||
result.put(Constants.STATUS, status);
|
||||
if (statusParams != null && statusParams.length > 0) {
|
||||
result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams));
|
||||
} else {
|
||||
result.put(Constants.MSG, status.getMsg());
|
||||
}
|
||||
}
|
||||
|
||||
public void putMsg(Result result, Status status, Object... statusParams) {
|
||||
result.setCode(status.getCode());
|
||||
if (statusParams != null && statusParams.length > 0) {
|
||||
result.setMsg(MessageFormat.format(status.getMsg(), statusParams));
|
||||
} else {
|
||||
result.setMsg(status.getMsg());
|
||||
}
|
||||
}
|
||||
|
||||
private List<DqRule> getRuleList() {
|
||||
List<DqRule> list = new ArrayList<>();
|
||||
DqRule rule = new DqRule();
|
||||
rule.setId(1);
|
||||
rule.setName("空值检测");
|
||||
rule.setType(RuleType.SINGLE_TABLE.getCode());
|
||||
rule.setUserId(1);
|
||||
rule.setUserName("admin");
|
||||
rule.setCreateTime(new Date());
|
||||
rule.setUpdateTime(new Date());
|
||||
|
||||
list.add(rule);
|
||||
|
||||
return list;
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testQueryRuleListPaging() throws Exception {
|
||||
|
||||
String searchVal = "";
|
||||
int ruleType = 0;
|
||||
String start = "2020-01-01 00:00:00";
|
||||
String end = "2020-01-02 00:00:00";
|
||||
|
||||
PageInfo<DqRule> pageInfo = new PageInfo<>(1, 10);
|
||||
pageInfo.setTotal(10);
|
||||
pageInfo.setTotalList(getRuleList());
|
||||
|
||||
when(dqRuleService.queryRuleListPaging(user, searchVal, ruleType, start, end, 1, 10)).thenReturn(pageInfo);
|
||||
|
||||
Result<PageInfo<DqRule>> response =
|
||||
dataQualityController.queryRuleListPaging(user, searchVal, ruleType, start, end, 1, 10);
|
||||
Assertions.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testQueryRuleList() {
|
||||
|
||||
when(dqRuleService.queryAllRuleList()).thenReturn(getRuleList());
|
||||
|
||||
Result<List<DqRule>> listResult = dataQualityController.queryRuleList();
|
||||
Assertions.assertEquals(Status.SUCCESS.getCode(), listResult.getCode().intValue());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testQueryResultListPaging() {
|
||||
|
||||
String searchVal = "";
|
||||
int ruleType = 0;
|
||||
String start = "2020-01-01 00:00:00";
|
||||
String end = "2020-01-02 00:00:00";
|
||||
|
||||
PageInfo<DqExecuteResult> pageInfo = new PageInfo<>(1, 10);
|
||||
pageInfo.setTotal(10);
|
||||
|
||||
when(dqExecuteResultService.queryResultListPaging(user, searchVal, 0, ruleType, start, end, 1, 10))
|
||||
.thenReturn(pageInfo);
|
||||
|
||||
Result<PageInfo<DqExecuteResult>> pageInfoResult =
|
||||
dataQualityController.queryExecuteResultListPaging(user, searchVal, ruleType, 0, start, end, 1, 10);
|
||||
Assertions.assertEquals(Status.SUCCESS.getCode(), pageInfoResult.getCode().intValue());
|
||||
}
|
||||
}
|
@ -1,249 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.api.service;
|
||||
|
||||
import static org.apache.dolphinscheduler.api.AssertionsHelper.assertDoesNotThrow;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.eq;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
import org.apache.dolphinscheduler.api.ApiApplicationServer;
|
||||
import org.apache.dolphinscheduler.api.permission.ResourcePermissionCheckService;
|
||||
import org.apache.dolphinscheduler.api.service.impl.BaseServiceImpl;
|
||||
import org.apache.dolphinscheduler.api.service.impl.DqRuleServiceImpl;
|
||||
import org.apache.dolphinscheduler.common.enums.AuthorizationType;
|
||||
import org.apache.dolphinscheduler.common.enums.UserType;
|
||||
import org.apache.dolphinscheduler.common.utils.DateUtils;
|
||||
import org.apache.dolphinscheduler.dao.entity.DataSource;
|
||||
import org.apache.dolphinscheduler.dao.entity.DqRule;
|
||||
import org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql;
|
||||
import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry;
|
||||
import org.apache.dolphinscheduler.dao.entity.User;
|
||||
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
|
||||
import org.apache.dolphinscheduler.dao.mapper.DqRuleExecuteSqlMapper;
|
||||
import org.apache.dolphinscheduler.dao.mapper.DqRuleInputEntryMapper;
|
||||
import org.apache.dolphinscheduler.dao.mapper.DqRuleMapper;
|
||||
import org.apache.dolphinscheduler.plugin.task.api.enums.dp.DataType;
|
||||
import org.apache.dolphinscheduler.plugin.task.api.enums.dp.ExecuteSqlType;
|
||||
import org.apache.dolphinscheduler.plugin.task.api.enums.dp.InputType;
|
||||
import org.apache.dolphinscheduler.plugin.task.api.enums.dp.OptionSourceType;
|
||||
import org.apache.dolphinscheduler.plugin.task.api.enums.dp.RuleType;
|
||||
import org.apache.dolphinscheduler.spi.enums.DbType;
|
||||
import org.apache.dolphinscheduler.spi.params.base.FormType;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
import org.junit.jupiter.api.Assertions;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.Mockito;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.mockito.junit.jupiter.MockitoSettings;
|
||||
import org.mockito.quality.Strictness;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.boot.test.context.SpringBootTest;
|
||||
|
||||
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
|
||||
/**
 * Unit tests for {@link DqRuleServiceImpl}: rule form-JSON generation, rule listing,
 * datasource option lookup, and paged rule queries. All DAO mappers and the
 * permission-check service are Mockito mocks; lenient strictness is used because
 * some tests intentionally leave stubs unconsumed.
 */
@ExtendWith(MockitoExtension.class)
@MockitoSettings(strictness = Strictness.LENIENT)
@SpringBootTest(classes = ApiApplicationServer.class)
public class DqRuleServiceTest {

    // Logger handed to the permission-check stubs (matches BaseServiceImpl's own logger).
    private static final Logger baseServiceLogger = LoggerFactory.getLogger(BaseServiceImpl.class);

    @InjectMocks
    private DqRuleServiceImpl dqRuleService;

    @Mock
    DqRuleMapper dqRuleMapper;

    @Mock
    DqRuleInputEntryMapper dqRuleInputEntryMapper;

    @Mock
    DqRuleExecuteSqlMapper dqRuleExecuteSqlMapper;

    @Mock
    DataSourceMapper dataSourceMapper;

    @Mock
    private ResourcePermissionCheckService resourcePermissionCheckService;

    /**
     * The form-create JSON rendered for rule id 1 must exactly match the expected
     * serialized form definition built from the three input entries below.
     */
    @Test
    public void testGetRuleFormCreateJsonById() {
        String json = "[{\"field\":\"src_connector_type\",\"name\":\"源数据类型\",\"props\":{\"placeholder\":"
                + "\"Please select the source connector type\",\"size\":\"small\"},\"type\":\"select\",\"title\":"
                + "\"源数据类型\",\"value\":\"JDBC\",\"emit\":[\"change\"],\"options\":[{\"label\":\"HIVE\",\"value\":"
                + "\"HIVE\",\"disabled\":false},{\"label\":\"JDBC\",\"value\":\"JDBC\",\"disabled\":false}]},{\"props\":"
                + "{\"disabled\":false,\"rows\":2,\"placeholder\":\"Please enter statistics name, the alias in "
                + "statistics execute sql\",\"size\":\"small\"},\"field\":\"statistics_name\",\"name\":"
                + "\"统计值名\",\"type\":\"input\",\"title\":\"统计值名\",\"validate\":[{\"required\":true,\"type\":"
                + "\"string\",\"trigger\":\"blur\"}]},{\"props\":{\"disabled\":false,\"type\":\"textarea\",\"rows\":"
                + "1,\"placeholder\":\"Please enter the statistics execute sql\",\"size\":\"small\"},\"field\":"
                + "\"statistics_execute_sql\",\"name\":\"统计值计算SQL\",\"type\":\"input\",\"title\":"
                + "\"统计值计算SQL\",\"validate\":[{\"required\":true,\"type\":\"string\",\"trigger\":\"blur\"}]}]";
        when(dqRuleInputEntryMapper.getRuleInputEntryList(1)).thenReturn(getRuleInputEntryList());
        String ruleFormCreateJsonById = dqRuleService.getRuleFormCreateJsonById(1);
        Assertions.assertEquals(json, ruleFormCreateJsonById);
    }

    /**
     * Listing all rules must not throw when the mapper returns one rule.
     */
    @Test
    public void testQueryAllRuleList() {
        when(dqRuleMapper.selectList(new QueryWrapper<>())).thenReturn(getRuleList());
        assertDoesNotThrow(() -> dqRuleService.queryAllRuleList());
    }

    /**
     * Datasource option lookup must not throw.
     *
     * NOTE(review): the dataSourceMapper stub below is never exercised — the test
     * invokes queryAllRuleList() instead of the datasource-options service method,
     * which looks like a copy-paste from testQueryAllRuleList. Consider calling the
     * getDatasourceOptionsById API here instead — confirm the service method name.
     */
    @Test
    public void testGetDatasourceOptionsById() {
        when(dataSourceMapper.listAllDataSourceByType(DbType.MYSQL.getCode())).thenReturn(dataSourceList());
        assertDoesNotThrow(() -> dqRuleService.queryAllRuleList());
    }

    /**
     * A paged rule query by an admin user with both permission checks granted must
     * complete without throwing; the mapper returns a single-record page.
     */
    @Test
    public void testQueryRuleListPaging() {

        String searchVal = "";
        int ruleType = 0;
        Date start = DateUtils.stringToDate("2020-01-01 00:00:00");
        Date end = DateUtils.stringToDate("2020-01-02 00:00:00");

        User loginUser = new User();
        loginUser.setId(1);
        loginUser.setUserType(UserType.ADMIN_USER);
        // Grant both the operation-level and resource-level DATA_QUALITY checks.
        Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.DATA_QUALITY,
                loginUser.getId(), null, baseServiceLogger)).thenReturn(true);
        Mockito.when(resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.DATA_QUALITY, null, 0,
                baseServiceLogger)).thenReturn(true);
        Page<DqRule> page = new Page<>(1, 10);
        page.setTotal(1);
        page.setRecords(getRuleList());

        when(dqRuleMapper.queryRuleListPaging(any(IPage.class), eq(""), eq(ruleType), eq(start), eq(end)))
                .thenReturn(page);

        // Each returned rule gets its input entries and execute SQL resolved.
        when(dqRuleInputEntryMapper.getRuleInputEntryList(1)).thenReturn(getRuleInputEntryList());
        when(dqRuleExecuteSqlMapper.getExecuteSqlList(1)).thenReturn(getRuleExecuteSqlList());

        assertDoesNotThrow(() -> dqRuleService.queryRuleListPaging(loginUser, searchVal, 0, "2020-01-01 00:00:00",
                "2020-01-02 00:00:00", 1, 10));
    }

    /** Builds a single MySQL datasource fixture owned by user 1. */
    private List<DataSource> dataSourceList() {
        List<DataSource> dataSourceList = new ArrayList<>();
        DataSource dataSource = new DataSource();
        dataSource.setId(1);
        dataSource.setName("dolphinscheduler");
        dataSource.setType(DbType.MYSQL);
        dataSource.setUserId(1);
        dataSource.setUserName("admin");
        dataSource.setConnectionParams("");
        dataSource.setCreateTime(new Date());
        dataSource.setUpdateTime(new Date());
        dataSourceList.add(dataSource);

        return dataSourceList;
    }

    /** Builds a single SINGLE_TABLE rule fixture with id 1. */
    private List<DqRule> getRuleList() {
        List<DqRule> list = new ArrayList<>();
        DqRule rule = new DqRule();
        rule.setId(1);
        rule.setName("空值检测");
        rule.setType(RuleType.SINGLE_TABLE.getCode());
        rule.setUserId(1);
        rule.setUserName("admin");
        rule.setCreateTime(new Date());
        rule.setUpdateTime(new Date());

        list.add(rule);

        return list;
    }

    /**
     * Builds the three form input entries (connector-type select, statistics-name
     * input, statistics-SQL textarea) that back the expected JSON in
     * {@link #testGetRuleFormCreateJsonById()}.
     */
    private List<DqRuleInputEntry> getRuleInputEntryList() {
        List<DqRuleInputEntry> list = new ArrayList<>();

        DqRuleInputEntry srcConnectorType = new DqRuleInputEntry();
        srcConnectorType.setTitle("源数据类型");
        srcConnectorType.setField("src_connector_type");
        srcConnectorType.setType(FormType.SELECT.getFormType());
        srcConnectorType.setCanEdit(true);
        srcConnectorType.setIsShow(true);
        srcConnectorType.setData("JDBC");
        srcConnectorType.setPlaceholder("Please select the source connector type");
        srcConnectorType.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
        srcConnectorType
                .setOptions("[{\"label\":\"HIVE\",\"value\":\"HIVE\"},{\"label\":\"JDBC\",\"value\":\"JDBC\"}]");
        srcConnectorType.setInputType(InputType.DEFAULT.getCode());
        srcConnectorType.setDataType(DataType.NUMBER.getCode());
        srcConnectorType.setIsEmit(true);
        srcConnectorType.setIsValidate(true);

        DqRuleInputEntry statisticsName = new DqRuleInputEntry();
        statisticsName.setTitle("统计值名");
        statisticsName.setField("statistics_name");
        statisticsName.setType(FormType.INPUT.getFormType());
        statisticsName.setCanEdit(true);
        statisticsName.setIsShow(true);
        statisticsName.setPlaceholder("Please enter statistics name, the alias in statistics execute sql");
        statisticsName.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
        statisticsName.setInputType(InputType.DEFAULT.getCode());
        statisticsName.setDataType(DataType.STRING.getCode());
        statisticsName.setIsEmit(false);
        statisticsName.setIsValidate(true);

        DqRuleInputEntry statisticsExecuteSql = new DqRuleInputEntry();
        statisticsExecuteSql.setTitle("统计值计算SQL");
        statisticsExecuteSql.setField("statistics_execute_sql");
        statisticsExecuteSql.setType(FormType.TEXTAREA.getFormType());
        statisticsExecuteSql.setCanEdit(true);
        statisticsExecuteSql.setIsShow(true);
        statisticsExecuteSql.setPlaceholder("Please enter the statistics execute sql");
        statisticsExecuteSql.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
        statisticsExecuteSql.setDataType(DataType.LIKE_SQL.getCode());
        statisticsExecuteSql.setIsEmit(false);
        statisticsExecuteSql.setIsValidate(true);

        list.add(srcConnectorType);
        list.add(statisticsName);
        list.add(statisticsExecuteSql);

        return list;
    }

    /** Builds a single COMPARISON execute-SQL fixture counting rows of the source table. */
    private List<DqRuleExecuteSql> getRuleExecuteSqlList() {
        List<DqRuleExecuteSql> list = new ArrayList<>();

        DqRuleExecuteSql executeSqlDefinition = new DqRuleExecuteSql();
        executeSqlDefinition.setIndex(0);
        executeSqlDefinition.setSql("SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})");
        executeSqlDefinition.setTableAlias("total_count");
        executeSqlDefinition.setType(ExecuteSqlType.COMPARISON.getCode());
        list.add(executeSqlDefinition);

        return list;
    }
}
|
@ -1,108 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.api.service;
|
||||
|
||||
import static org.apache.dolphinscheduler.api.AssertionsHelper.assertDoesNotThrow;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.eq;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
import org.apache.dolphinscheduler.api.ApiApplicationServer;
|
||||
import org.apache.dolphinscheduler.api.permission.ResourcePermissionCheckService;
|
||||
import org.apache.dolphinscheduler.api.service.impl.BaseServiceImpl;
|
||||
import org.apache.dolphinscheduler.api.service.impl.DqExecuteResultServiceImpl;
|
||||
import org.apache.dolphinscheduler.common.enums.AuthorizationType;
|
||||
import org.apache.dolphinscheduler.common.enums.UserType;
|
||||
import org.apache.dolphinscheduler.common.utils.DateUtils;
|
||||
import org.apache.dolphinscheduler.dao.entity.DqExecuteResult;
|
||||
import org.apache.dolphinscheduler.dao.entity.User;
|
||||
import org.apache.dolphinscheduler.dao.mapper.DqExecuteResultMapper;
|
||||
import org.apache.dolphinscheduler.plugin.task.api.enums.dp.DqTaskState;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.Mockito;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.mockito.junit.jupiter.MockitoSettings;
|
||||
import org.mockito.quality.Strictness;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.boot.test.context.SpringBootTest;
|
||||
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
@MockitoSettings(strictness = Strictness.LENIENT)
|
||||
@SpringBootTest(classes = ApiApplicationServer.class)
|
||||
public class DqWorkflowInstanceExecuteResultServiceTest {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(DqWorkflowInstanceExecuteResultServiceTest.class);
|
||||
private static final Logger baseServiceLogger = LoggerFactory.getLogger(BaseServiceImpl.class);
|
||||
|
||||
@InjectMocks
|
||||
private DqExecuteResultServiceImpl dqExecuteResultService;
|
||||
|
||||
@Mock
|
||||
DqExecuteResultMapper dqExecuteResultMapper;
|
||||
|
||||
@Mock
|
||||
private ResourcePermissionCheckService resourcePermissionCheckService;
|
||||
|
||||
@Test
|
||||
public void testQueryResultListPaging() {
|
||||
|
||||
String searchVal = "";
|
||||
int ruleType = 0;
|
||||
Date start = DateUtils.stringToDate("2020-01-01 00:00:00");
|
||||
Date end = DateUtils.stringToDate("2020-01-02 00:00:00");
|
||||
|
||||
User loginUser = new User();
|
||||
loginUser.setId(1);
|
||||
loginUser.setUserType(UserType.ADMIN_USER);
|
||||
Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.DATA_QUALITY,
|
||||
loginUser.getId(), null, baseServiceLogger)).thenReturn(true);
|
||||
Mockito.when(resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.DATA_QUALITY, null, 0,
|
||||
baseServiceLogger)).thenReturn(true);
|
||||
Page<DqExecuteResult> page = new Page<>(1, 10);
|
||||
page.setTotal(1);
|
||||
page.setRecords(getExecuteResultList());
|
||||
when(dqExecuteResultMapper.queryResultListPaging(any(IPage.class), eq(""), eq(loginUser), any(), eq(ruleType),
|
||||
eq(start), eq(end))).thenReturn(page);
|
||||
|
||||
assertDoesNotThrow(() -> dqExecuteResultService.queryResultListPaging(loginUser, searchVal, 1, 0,
|
||||
"2020-01-01 00:00:00", "2020-01-02 00:00:00", 1, 10));
|
||||
}
|
||||
|
||||
public List<DqExecuteResult> getExecuteResultList() {
|
||||
|
||||
List<DqExecuteResult> list = new ArrayList<>();
|
||||
DqExecuteResult dqExecuteResult = new DqExecuteResult();
|
||||
dqExecuteResult.setId(1);
|
||||
dqExecuteResult.setState(DqTaskState.FAILURE.getCode());
|
||||
list.add(dqExecuteResult);
|
||||
|
||||
return list;
|
||||
}
|
||||
}
|
@ -94,7 +94,6 @@
|
||||
<gson.version>2.9.1</gson.version>
|
||||
<dropwizard.metrics-version>4.2.11</dropwizard.metrics-version>
|
||||
<snappy.version>1.1.10.1</snappy.version>
|
||||
<spark.version>3.2.2</spark.version>
|
||||
<janino.version>3.0.16</janino.version>
|
||||
<snakeyaml.version>1.33</snakeyaml.version>
|
||||
<htrace.version>4.1.1</htrace.version>
|
||||
@ -769,18 +768,6 @@
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.spark</groupId>
|
||||
<artifactId>spark-core_2.12</artifactId>
|
||||
<version>${spark.version}</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.module</groupId>
|
||||
<artifactId>jackson-module-scala_2.11</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.sshd</groupId>
|
||||
<artifactId>sshd-sftp</artifactId>
|
||||
@ -792,42 +779,6 @@
|
||||
<version>${sshd.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.spark</groupId>
|
||||
<artifactId>spark-sql_2.12</artifactId>
|
||||
<version>${spark.version}</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-core</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.spark</groupId>
|
||||
<artifactId>spark-hive_2.12</artifactId>
|
||||
<version>${spark.version}</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>commons-httpclient</groupId>
|
||||
<artifactId>commons-httpclient</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.httpcomponents</groupId>
|
||||
<artifactId>httpclient</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.codehaus.jackson</groupId>
|
||||
<artifactId>jackson-core-asl</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.codehaus.jackson</groupId>
|
||||
<artifactId>jackson-mapper-asl</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.codehaus.janino</groupId>
|
||||
<artifactId>janino</artifactId>
|
||||
|
@ -535,11 +535,6 @@ public final class Constants {
|
||||
public static final int DRY_RUN_FLAG_NO = 0;
|
||||
public static final int DRY_RUN_FLAG_YES = 1;
|
||||
|
||||
/**
|
||||
* data.quality.error.output.path
|
||||
*/
|
||||
public static final String DATA_QUALITY_ERROR_OUTPUT_PATH = "data-quality.error.output.path";
|
||||
|
||||
/**
|
||||
* use for k8s
|
||||
*/
|
||||
@ -596,7 +591,6 @@ public final class Constants {
|
||||
public static final String TYPE_DATA_INTEGRATION = "DataIntegration";
|
||||
public static final String TYPE_CLOUD = "Cloud";
|
||||
public static final String TYPE_LOGIC = "Logic";
|
||||
public static final String TYPE_DATA_QUALITY = "DataQuality";
|
||||
public static final String TYPE_OTHER = "Other";
|
||||
public static final String TYPE_MACHINE_LEARNING = "MachineLearning";
|
||||
|
||||
@ -691,9 +685,6 @@ public final class Constants {
|
||||
|
||||
public static final String REMOTE_LOGGING_COS_REGION = "remote.logging.cos.region";
|
||||
|
||||
/**
|
||||
* data quality
|
||||
*/
|
||||
public static final String DATABASES_QUERY = "show databases";
|
||||
public static final String DATABASES_QUERY_PG = "SELECT datname FROM pg_database";
|
||||
|
||||
|
@ -17,11 +17,11 @@
|
||||
|
||||
package org.apache.dolphinscheduler.common.enums;
|
||||
|
||||
import lombok.Getter;
|
||||
|
||||
import com.baomidou.mybatisplus.annotation.EnumValue;
|
||||
|
||||
/**
|
||||
* Authorization type
|
||||
*/
|
||||
@Getter
|
||||
public enum AuthorizationType {
|
||||
|
||||
/**
|
||||
@ -62,7 +62,6 @@ public enum AuthorizationType {
|
||||
MONITOR(13, "monitor"),
|
||||
ALERT_PLUGIN_INSTANCE(14, "alert plugin instance"),
|
||||
TENANT(15, "tenant"),
|
||||
DATA_QUALITY(16, "data quality"),
|
||||
TASK_GROUP(17, "task group"),
|
||||
;
|
||||
|
||||
@ -75,11 +74,4 @@ public enum AuthorizationType {
|
||||
private final int code;
|
||||
private final String descp;
|
||||
|
||||
public int getCode() {
|
||||
return code;
|
||||
}
|
||||
|
||||
public String getDescp() {
|
||||
return descp;
|
||||
}
|
||||
}
|
||||
|
@ -66,13 +66,6 @@ datasource.encryption.enable=false
|
||||
# datasource encryption salt
|
||||
datasource.encryption.salt=!@#$%^&*
|
||||
|
||||
# Data quality jar directory path; the data quality jar is auto-discovered from this directory. Keep it empty if you have
# not changed anything in data-quality — DolphinScheduler will discover the jar by itself. Change it only if you want to
# use your own data-quality jar and it is not in the worker-server libs directory (but make sure your jar name starts
# with `dolphinscheduler-data-quality`).
|
||||
data-quality.jar.dir=
|
||||
|
||||
#data-quality.error.output.path=/tmp/data-quality-error-data
|
||||
|
||||
# Network IP gets priority, default inner outer
|
||||
|
||||
# Whether hive SQL is executed in the same session
|
||||
|
@ -79,34 +79,6 @@ public class SensitiveDataConverterTest {
|
||||
" }\n" +
|
||||
"}");
|
||||
|
||||
// data quality
|
||||
tcs.put("\"readers\" : [ {\n" +
|
||||
" \"type\" : \"JDBC\",\n" +
|
||||
" \"config\" : {\n" +
|
||||
" \"database\" : \"dolphinscheduler\",\n" +
|
||||
" \"password\" : \"view1\",\n" +
|
||||
" \"driver\" : \"com.mysql.cj.jdbc.Driver\",\n" +
|
||||
" \"user\" : \"root\",\n" +
|
||||
" \"output_table\" : \"dolphinscheduler_users\",\n" +
|
||||
" \"table\" : \"users\",\n" +
|
||||
" \"url\" : \"jdbc:mysql://127.0.0.1:3307/dolphinscheduler?userSSL=true&enabledTLSProtocols=TLSv1.2\"\n"
|
||||
+
|
||||
" }\n" +
|
||||
" } ]",
|
||||
"\"readers\" : [ {\n" +
|
||||
" \"type\" : \"JDBC\",\n" +
|
||||
" \"config\" : {\n" +
|
||||
" \"database\" : \"dolphinscheduler\",\n" +
|
||||
" \"password\" : \"*****\",\n" +
|
||||
" \"driver\" : \"com.mysql.cj.jdbc.Driver\",\n" +
|
||||
" \"user\" : \"root\",\n" +
|
||||
" \"output_table\" : \"dolphinscheduler_users\",\n" +
|
||||
" \"table\" : \"users\",\n" +
|
||||
" \"url\" : \"jdbc:mysql://127.0.0.1:3307/dolphinscheduler?userSSL=true&enabledTLSProtocols=TLSv1.2\"\n"
|
||||
+
|
||||
" }\n" +
|
||||
" } ]");
|
||||
|
||||
for (String logMsg : tcs.keySet()) {
|
||||
String maskedLog = SensitiveDataConverter.maskSensitiveData(logMsg);
|
||||
logger.info("original parameter : {}", logMsg);
|
||||
|
@ -130,13 +130,6 @@ datasource.encryption.enable=false
|
||||
# datasource encryption salt
|
||||
datasource.encryption.salt=!@#$%^&*
|
||||
|
||||
# Data quality jar directory path; the data quality jar is auto-discovered from this directory. Keep it empty if you have
# not changed anything in data-quality — DolphinScheduler will discover the jar by itself. Change it only if you want to
# use your own data-quality jar and it is not in the worker-server libs directory (but make sure your jar name starts
# with `dolphinscheduler-data-quality`).
|
||||
data-quality.jar.dir=
|
||||
|
||||
#data-quality.error.output.path=/tmp/data-quality-error-data
|
||||
|
||||
# Network IP gets priority, default inner outer
|
||||
|
||||
# Whether hive SQL is executed in the same session
|
||||
|
@ -38,47 +38,3 @@ DROP PROCEDURE dolphin_t_ds_tenant_insert_default;
|
||||
UPDATE t_ds_schedules t1 JOIN t_ds_workflow_definition t2 ON t1.workflow_definition_code = t2.code LEFT JOIN t_ds_tenant t3 ON t2.tenant_id = t3.id SET t1.tenant_code = COALESCE(t3.tenant_code, 'default');
|
||||
UPDATE `t_ds_workflow_instance` SET `tenant_code` = 'default' WHERE `tenant_code` IS NULL;
|
||||
|
||||
-- data quality support choose database
|
||||
INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(30, 'src_database', 'select', '$t(src_database)', NULL, NULL, 'please select source database', 0, 0, 0, 1, 1, 1, 1, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(31, 'target_database', 'select', '$t(target_database)', NULL, NULL, 'please select target database', 0, 0, 0, 1, 1, 1, 1, current_timestamp, current_timestamp);
|
||||
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(151, 1, 30, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(152, 2, 30, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(153, 3, 30, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(154, 4, 30, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(155, 5, 30, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(156, 6, 30, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(157, 7, 30, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(158, 8, 30, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(159, 9, 30, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(160, 10, 30, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(161, 3, 31, NULL, 6, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(162, 4, 31, NULL, 7, current_timestamp, current_timestamp);
|
||||
|
@ -1,74 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.entity;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Date;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import com.baomidou.mybatisplus.annotation.IdType;
|
||||
import com.baomidou.mybatisplus.annotation.TableField;
|
||||
import com.baomidou.mybatisplus.annotation.TableId;
|
||||
import com.baomidou.mybatisplus.annotation.TableName;
|
||||
|
||||
/**
 * Entity mapped to the {@code t_ds_dq_comparison_type} table: a named comparison
 * definition whose {@code executeSql} produces the comparison value written to
 * {@code outputTable}. Getters/setters/equals/hashCode are generated by Lombok.
 */
@Data
@TableName("t_ds_dq_comparison_type")
public class DqComparisonType implements Serializable {

    /**
     * primary key (auto-increment)
     */
    @TableId(value = "id", type = IdType.AUTO)
    private Integer id;
    /**
     * type
     */
    @TableField(value = "type")
    private String type;
    /**
     * execute sql — the statement that computes the comparison value
     */
    @TableField(value = "execute_sql")
    private String executeSql;
    /**
     * output table that the execute sql writes its result to
     */
    @TableField(value = "output_table")
    private String outputTable;
    /**
     * comparison name
     */
    @TableField(value = "name")
    private String name;
    /**
     * is inner source — presumably whether the comparison value comes from the
     * source data itself rather than an external query; confirm against callers
     */
    @TableField(value = "is_inner_source")
    private Boolean isInnerSource;
    /**
     * create_time
     */
    @TableField(value = "create_time")
    private Date createTime;
    /**
     * update_time
     */
    @TableField(value = "update_time")
    private Date updateTime;
}
|
@ -1,108 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.entity;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Date;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import com.baomidou.mybatisplus.annotation.IdType;
|
||||
import com.baomidou.mybatisplus.annotation.TableField;
|
||||
import com.baomidou.mybatisplus.annotation.TableId;
|
||||
import com.baomidou.mybatisplus.annotation.TableName;
|
||||
|
||||
/**
 * Entity mapped to the {@code t_ds_dq_execute_result} table: the outcome of one
 * data quality check run (statistics value vs. comparison value, check/operator
 * configuration, and final state). Fields annotated {@code exist = false} are
 * join-resolved display values, not table columns. Accessors are Lombok-generated.
 *
 * NOTE(review): column naming mixes "workflow_*" and "process_*" prefixes
 * (e.g. workflow_definition_id vs. process_instance_id) — this mirrors the table
 * schema mid-rename; do not "fix" one side without a matching migration.
 */
@Data
@TableName("t_ds_dq_execute_result")
public class DqExecuteResult implements Serializable {

    // Primary key (auto-increment).
    @TableId(value = "id", type = IdType.AUTO)
    private Integer id;

    @TableField(value = "workflow_definition_id")
    private long workflowDefinitionId;

    // Display-only: resolved via join, not a column of this table.
    @TableField(exist = false)
    private String workflowDefinitionName;

    @TableField(exist = false)
    private long processDefinitionCode;

    @TableField(value = "process_instance_id")
    private long processInstanceId;

    @TableField(exist = false)
    private String processInstanceName;

    @TableField(exist = false)
    private long projectCode;

    @TableField(value = "task_instance_id")
    private long taskInstanceId;

    @TableField(exist = false)
    private String taskName;

    // Rule configuration captured at execution time.
    @TableField(value = "rule_type")
    private int ruleType;

    @TableField(value = "rule_name")
    private String ruleName;

    // Measured value produced by the rule's statistics SQL.
    @TableField(value = "statistics_value")
    private double statisticsValue;

    // Baseline value the statistics value is checked against.
    @TableField(value = "comparison_value")
    private double comparisonValue;

    @TableField(value = "comparison_type")
    private int comparisonType;

    @TableField(exist = false)
    private String comparisonTypeName;

    // Check configuration: how threshold/operator combine the two values.
    @TableField(value = "check_type")
    private int checkType;

    @TableField(value = "threshold")
    private double threshold;

    @TableField(value = "operator")
    private int operator;

    @TableField(value = "failure_strategy")
    private int failureStrategy;

    @TableField(value = "user_id")
    private int userId;

    @TableField(exist = false)
    private String userName;

    // Result state code (e.g. success/failure); see DqTaskState for values.
    @TableField(value = "state")
    private int state;

    // Path where rows failing the check were written, if any.
    @TableField(value = "error_output_path")
    private String errorOutputPath;

    @TableField(value = "create_time")
    private Date createTime;

    @TableField(value = "update_time")
    private Date updateTime;
}
|
@ -1,258 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.entity;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
import com.fasterxml.jackson.annotation.JsonInclude.Include;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
@JsonInclude(Include.NON_NULL)
public class DqExecuteResultAlertContent implements Serializable {

    /**
     * process definition id
     */
    @JsonProperty(value = "processDefinitionId")
    private long processDefinitionId;
    /**
     * process definition name
     */
    @JsonProperty("processDefinitionName")
    private String processDefinitionName;
    /**
     * process instance id
     */
    @JsonProperty(value = "processInstanceId")
    private long processInstanceId;
    /**
     * process instance name
     */
    @JsonProperty("processInstanceName")
    private String processInstanceName;
    /**
     * task instance id
     */
    @JsonProperty(value = "taskInstanceId")
    private long taskInstanceId;
    /**
     * task name
     */
    @JsonProperty("taskName")
    private String taskName;
    /**
     * rule type code
     */
    @JsonProperty(value = "ruleType")
    private int ruleType;
    /**
     * rule name
     */
    @JsonProperty(value = "ruleName")
    private String ruleName;
    /**
     * statistics value
     */
    @JsonProperty(value = "statisticsValue")
    private double statisticsValue;
    /**
     * comparison value
     */
    @JsonProperty(value = "comparisonValue")
    private double comparisonValue;
    /**
     * check type code
     */
    @JsonProperty(value = "checkType")
    private int checkType;
    /**
     * check threshold
     */
    @JsonProperty(value = "threshold")
    private double threshold;
    /**
     * comparison operator code
     */
    @JsonProperty(value = "operator")
    private int operator;
    /**
     * failure strategy code
     */
    @JsonProperty(value = "failureStrategy")
    private int failureStrategy;
    /**
     * user id
     */
    @JsonProperty(value = "userId")
    private int userId;
    /**
     * user name
     */
    @JsonProperty("userName")
    private String userName;
    /**
     * execution result state code
     */
    @JsonProperty(value = "state")
    private int state;

    /**
     * path where error (bad) rows were written
     */
    @JsonProperty(value = "errorDataPath")
    private String errorDataPath;

    /**
     * Copies every value from the given builder; use {@link #newBuilder()} to construct instances.
     */
    public DqExecuteResultAlertContent(Builder builder) {
        this.processDefinitionId = builder.processDefinitionId;
        this.processDefinitionName = builder.processDefinitionName;
        this.processInstanceId = builder.processInstanceId;
        this.processInstanceName = builder.processInstanceName;
        this.taskInstanceId = builder.taskInstanceId;
        this.taskName = builder.taskName;
        this.ruleType = builder.ruleType;
        this.ruleName = builder.ruleName;
        this.statisticsValue = builder.statisticsValue;
        this.comparisonValue = builder.comparisonValue;
        this.checkType = builder.checkType;
        this.threshold = builder.threshold;
        this.operator = builder.operator;
        this.failureStrategy = builder.failureStrategy;
        this.userId = builder.userId;
        this.userName = builder.userName;
        this.state = builder.state;
        this.errorDataPath = builder.errorDataPath;
    }

    /**
     * @return a fresh builder for {@link DqExecuteResultAlertContent}
     */
    public static Builder newBuilder() {
        return new Builder();
    }

    /**
     * Fluent builder mirroring every field of the enclosing class.
     */
    public static class Builder {

        private long processDefinitionId;
        private String processDefinitionName;
        private long processInstanceId;
        private String processInstanceName;
        private long taskInstanceId;
        private String taskName;
        private int ruleType;
        private String ruleName;
        private double statisticsValue;
        private double comparisonValue;
        private int checkType;
        private double threshold;
        private int operator;
        private int failureStrategy;
        private int userId;
        private String userName;
        private int state;
        private String errorDataPath;

        public Builder processDefinitionId(long processDefinitionId) {
            this.processDefinitionId = processDefinitionId;
            return this;
        }

        public Builder processDefinitionName(String processDefinitionName) {
            this.processDefinitionName = processDefinitionName;
            return this;
        }

        public Builder processInstanceId(long processInstanceId) {
            this.processInstanceId = processInstanceId;
            return this;
        }

        public Builder processInstanceName(String processInstanceName) {
            this.processInstanceName = processInstanceName;
            return this;
        }

        public Builder taskInstanceId(long taskInstanceId) {
            this.taskInstanceId = taskInstanceId;
            return this;
        }

        public Builder taskName(String taskName) {
            this.taskName = taskName;
            return this;
        }

        public Builder ruleType(int ruleType) {
            this.ruleType = ruleType;
            return this;
        }

        public Builder ruleName(String ruleName) {
            this.ruleName = ruleName;
            return this;
        }

        public Builder statisticsValue(double statisticsValue) {
            this.statisticsValue = statisticsValue;
            return this;
        }

        public Builder comparisonValue(double comparisonValue) {
            this.comparisonValue = comparisonValue;
            return this;
        }

        public Builder checkType(int checkType) {
            this.checkType = checkType;
            return this;
        }

        public Builder threshold(double threshold) {
            this.threshold = threshold;
            return this;
        }

        public Builder operator(int operator) {
            this.operator = operator;
            return this;
        }

        public Builder failureStrategy(int failureStrategy) {
            this.failureStrategy = failureStrategy;
            return this;
        }

        public Builder userId(int userId) {
            this.userId = userId;
            return this;
        }

        public Builder userName(String userName) {
            this.userName = userName;
            return this;
        }

        public Builder state(int state) {
            this.state = state;
            return this;
        }

        public Builder errorDataPath(String errorDataPath) {
            this.errorDataPath = errorDataPath;
            return this;
        }

        public DqExecuteResultAlertContent build() {
            return new DqExecuteResultAlertContent(this);
        }
    }
}
|
@ -1,74 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.entity;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Date;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import com.baomidou.mybatisplus.annotation.IdType;
|
||||
import com.baomidou.mybatisplus.annotation.TableField;
|
||||
import com.baomidou.mybatisplus.annotation.TableId;
|
||||
import com.baomidou.mybatisplus.annotation.TableName;
|
||||
|
||||
@Data
@TableName("t_ds_dq_rule")
public class DqRule implements Serializable {

    /**
     * primary key
     */
    @TableId(value = "id", type = IdType.AUTO)
    private Integer id;
    /**
     * rule name
     */
    @TableField(value = "name")
    private String name;
    /**
     * rule type code
     */
    @TableField(value = "type")
    private int type;
    /**
     * rule definition as JSON, assembled at query time (not a table column)
     */
    @TableField(exist = false)
    private String ruleJson;
    /**
     * id of the user owning this rule
     */
    @TableField(value = "user_id")
    private int userId;
    /**
     * user name, joined at query time (not a table column)
     */
    @TableField(exist = false)
    private String userName;
    /**
     * create time
     */
    @TableField(value = "create_time")
    private Date createTime;
    /**
     * update time
     */
    @TableField(value = "update_time")
    private Date updateTime;
}
|
@ -1,76 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.entity;
|
||||
|
||||
import org.apache.dolphinscheduler.plugin.task.api.enums.dp.ExecuteSqlType;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Date;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import com.baomidou.mybatisplus.annotation.IdType;
|
||||
import com.baomidou.mybatisplus.annotation.TableField;
|
||||
import com.baomidou.mybatisplus.annotation.TableId;
|
||||
import com.baomidou.mybatisplus.annotation.TableName;
|
||||
|
||||
@Data
@TableName("t_ds_dq_rule_execute_sql")
public class DqRuleExecuteSql implements Serializable {

    /**
     * primary key
     */
    @TableId(value = "id", type = IdType.AUTO)
    private Integer id;
    /**
     * index, ensures the execution order of the sql statements
     */
    @TableField(value = "index")
    private int index;
    /**
     * SQL statement to execute
     */
    @TableField(value = "sql")
    private String sql;
    /**
     * table alias name the statement's result is registered under
     */
    @TableField(value = "table_alias")
    private String tableAlias;
    /**
     * execute sql type code (see ExecuteSqlType); defaults to MIDDLE
     */
    @TableField(value = "type")
    private int type = ExecuteSqlType.MIDDLE.getCode();
    /**
     * whether this statement produces the error (bad row) output
     */
    @TableField(value = "is_error_output_sql")
    private boolean isErrorOutputSql;
    /**
     * create_time
     */
    @TableField(value = "create_time")
    private Date createTime;
    /**
     * update_time
     */
    @TableField(value = "update_time")
    private Date updateTime;
}
|
@ -1,130 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.entity;
|
||||
|
||||
import org.apache.dolphinscheduler.plugin.task.api.enums.dp.DataType;
|
||||
import org.apache.dolphinscheduler.plugin.task.api.enums.dp.InputType;
|
||||
import org.apache.dolphinscheduler.plugin.task.api.enums.dp.OptionSourceType;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Date;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import com.baomidou.mybatisplus.annotation.IdType;
|
||||
import com.baomidou.mybatisplus.annotation.TableField;
|
||||
import com.baomidou.mybatisplus.annotation.TableId;
|
||||
import com.baomidou.mybatisplus.annotation.TableName;
|
||||
|
||||
@Data
@TableName("t_ds_dq_rule_input_entry")
public class DqRuleInputEntry implements Serializable {

    /**
     * primary key
     */
    @TableId(value = "id", type = IdType.AUTO)
    private Integer id;
    /**
     * form field name
     */
    @TableField(value = "field")
    private String field;
    /**
     * form type
     */
    @TableField(value = "type")
    private String type;
    /**
     * form title
     */
    @TableField(value = "title")
    private String title;
    /**
     * default data, can be null
     */
    @TableField(value = "data")
    private String data;
    /**
     * default options, can be null
     * [{label:"",value:""}]
     */
    @TableField(value = "options")
    private String options;
    /**
     * ${field}
     */
    @TableField(value = "placeholder")
    private String placeholder;
    /**
     * the source type of options, use default options or other (see OptionSourceType)
     */
    @TableField(value = "option_source_type")
    private int optionSourceType = OptionSourceType.DEFAULT.getCode();
    /**
     * data type of the entry value: string, array, number .etc (see DataType)
     */
    @TableField(value = "data_type")
    private int dataType = DataType.NUMBER.getCode();
    /**
     * input entry type: default, statistics, comparison (see InputType)
     */
    @TableField(value = "input_type")
    private int inputType = InputType.DEFAULT.getCode();
    /**
     * whether to display on the front end
     */
    @TableField(value = "is_show")
    private Boolean isShow;
    /**
     * whether to edit on the front end
     */
    @TableField(value = "can_edit")
    private Boolean canEdit;
    /**
     * whether changing this entry emits an event
     */
    @TableField(value = "is_emit")
    private Boolean isEmit;
    /**
     * whether this entry is validated
     */
    @TableField(value = "is_validate")
    private Boolean isValidate;
    /**
     * values map, assembled at query time (not a table column)
     */
    @TableField(exist = false)
    private String valuesMap;

    /**
     * display order index, assembled at query time (not a table column)
     */
    @TableField(exist = false)
    private Integer index;
    /**
     * create_time
     */
    @TableField(value = "create_time")
    private Date createTime;
    /**
     * update_time
     */
    @TableField(value = "update_time")
    private Date updateTime;
}
|
@ -1,72 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.entity;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Date;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import com.baomidou.mybatisplus.annotation.IdType;
|
||||
import com.baomidou.mybatisplus.annotation.TableField;
|
||||
import com.baomidou.mybatisplus.annotation.TableId;
|
||||
import com.baomidou.mybatisplus.annotation.TableName;
|
||||
|
||||
@Data
@TableName("t_ds_dq_task_statistics_value")
public class DqTaskStatisticsValue implements Serializable {

    /**
     * primary key
     */
    @TableId(value = "id", type = IdType.AUTO)
    private Integer id;

    /**
     * workflow definition id
     */
    @TableField(value = "workflow_definition_id")
    private long workflowDefinitionId;

    /**
     * workflow definition name, joined at query time (not a table column)
     */
    @TableField(exist = false)
    private String workflowDefinitionName;

    /**
     * task instance id
     */
    @TableField(value = "task_instance_id")
    private long taskInstanceId;

    /**
     * task name, joined at query time (not a table column)
     */
    @TableField(exist = false)
    private String taskName;

    /**
     * rule id
     */
    @TableField(value = "rule_id")
    private long ruleId;

    /**
     * rule type code, joined at query time (not a table column)
     */
    @TableField(exist = false)
    private int ruleType;

    /**
     * rule name, joined at query time (not a table column)
     */
    @TableField(exist = false)
    private String ruleName;

    /**
     * statistics value produced by the data quality task
     */
    @TableField(value = "statistics_value")
    private double statisticsValue;

    /**
     * name of the statistics value
     */
    @TableField(value = "statistics_name")
    private String statisticsName;

    /**
     * business time the statistics value belongs to
     */
    @TableField(value = "data_time")
    private Date dataTime;

    /**
     * create time
     */
    @TableField(value = "create_time")
    private Date createTime;

    /**
     * update time
     */
    @TableField(value = "update_time")
    private Date updateTime;
}
|
@ -1,29 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.mapper;
|
||||
|
||||
import org.apache.dolphinscheduler.dao.entity.DqComparisonType;
|
||||
|
||||
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
|
||||
|
||||
/**
 * Mapper for {@link DqComparisonType}; only the inherited MyBatis-Plus CRUD operations are used.
 */
public interface DqComparisonTypeMapper extends BaseMapper<DqComparisonType> {

}
|
@ -1,62 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.mapper;
|
||||
|
||||
import org.apache.dolphinscheduler.dao.entity.DqExecuteResult;
|
||||
import org.apache.dolphinscheduler.dao.entity.User;
|
||||
|
||||
import org.apache.ibatis.annotations.Param;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
|
||||
/**
 * Mapper for {@link DqExecuteResult} with paging, lookup and cleanup queries.
 */
public interface DqExecuteResultMapper extends BaseMapper<DqExecuteResult> {

    /**
     * data quality task execute result page
     *
     * @param page page
     * @param searchVal searchVal
     * @param user user
     * @param statusArray states
     * @param ruleType ruleType
     * @param startTime startTime
     * @param endTime endTime
     * @return page of matching execute results
     */
    IPage<DqExecuteResult> queryResultListPaging(IPage<DqExecuteResult> page,
                                                 @Param("searchVal") String searchVal,
                                                 @Param("user") User user,
                                                 @Param("states") int[] statusArray,
                                                 @Param("ruleType") int ruleType,
                                                 @Param("startTime") Date startTime,
                                                 @Param("endTime") Date endTime);

    /**
     * get execute result by task instance id
     * @param taskInstanceId taskInstanceId
     * @return DqExecuteResult
     */
    DqExecuteResult getExecuteResultById(@Param("taskInstanceId") int taskInstanceId);

    /**
     * delete all execute results belonging to the given workflow instance
     * @param workflowInstanceId workflowInstanceId
     */
    void deleteByWorkflowInstanceId(@Param("workflowInstanceId") Integer workflowInstanceId);
}
|
@ -1,39 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.mapper;
|
||||
|
||||
import org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql;
|
||||
|
||||
import org.apache.ibatis.annotations.Param;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
|
||||
|
||||
/**
 * Mapper for {@link DqRuleExecuteSql}.
 */
public interface DqRuleExecuteSqlMapper extends BaseMapper<DqRuleExecuteSql> {

    /**
     * get execute sql list by rule id
     *
     * @param ruleId Integer
     * @return execute sql entries belonging to the rule
     */
    List<DqRuleExecuteSql> getExecuteSqlList(@Param("ruleId") Integer ruleId);
}
|
@ -1,39 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.mapper;
|
||||
|
||||
import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry;
|
||||
|
||||
import org.apache.ibatis.annotations.Param;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
|
||||
|
||||
/**
 * Mapper for {@link DqRuleInputEntry}.
 */
public interface DqRuleInputEntryMapper extends BaseMapper<DqRuleInputEntry> {

    /**
     * get rule input entry list by rule id
     *
     * @param ruleId Integer
     * @return input entries belonging to the rule
     */
    List<DqRuleInputEntry> getRuleInputEntryList(@Param("ruleId") Integer ruleId);
}
|
@ -1,48 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.mapper;
|
||||
|
||||
import org.apache.dolphinscheduler.dao.entity.DqRule;
|
||||
|
||||
import org.apache.ibatis.annotations.Param;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
|
||||
/**
 * Mapper for {@link DqRule} with a paging query.
 */
public interface DqRuleMapper extends BaseMapper<DqRule> {

    /**
     * data quality rule page
     *
     * @param page page
     * @param searchVal searchVal
     * @param ruleType ruleType
     * @param startTime startTime
     * @param endTime endTime
     * @return page of matching rules
     */
    IPage<DqRule> queryRuleListPaging(IPage<DqRule> page,
                                      @Param("searchVal") String searchVal,
                                      @Param("ruleType") int ruleType,
                                      @Param("startTime") Date startTime,
                                      @Param("endTime") Date endTime);
}
|
@ -1,29 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.mapper;
|
||||
|
||||
import org.apache.dolphinscheduler.dao.entity.DqTaskStatisticsValue;
|
||||
|
||||
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
|
||||
|
||||
/**
 * Mapper for {@link DqTaskStatisticsValue}; only the inherited MyBatis-Plus CRUD operations are used.
 */
public interface DqTaskStatisticsValueMapper extends BaseMapper<DqTaskStatisticsValue> {

}
|
@ -1,25 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.repository;
|
||||
|
||||
import org.apache.dolphinscheduler.dao.entity.DqExecuteResult;
|
||||
|
||||
/**
 * DAO for {@link DqExecuteResult} adding workflow-instance scoped cleanup on top of {@code IDao}.
 */
public interface DqExecuteResultDao extends IDao<DqExecuteResult> {

    /**
     * delete all execute results belonging to the given workflow instance
     *
     * @param workflowInstanceId workflow instance id
     */
    void deleteByWorkflowInstanceId(Integer workflowInstanceId);
}
|
@ -1,42 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.repository.impl;
|
||||
|
||||
import org.apache.dolphinscheduler.dao.entity.DqExecuteResult;
|
||||
import org.apache.dolphinscheduler.dao.mapper.DqExecuteResultMapper;
|
||||
import org.apache.dolphinscheduler.dao.repository.BaseDao;
|
||||
import org.apache.dolphinscheduler.dao.repository.DqExecuteResultDao;
|
||||
|
||||
import lombok.NonNull;
|
||||
|
||||
import org.springframework.stereotype.Repository;
|
||||
|
||||
/**
 * MyBatis-backed implementation of {@link DqExecuteResultDao}; generic CRUD is
 * inherited from {@link BaseDao}, which wraps the injected
 * {@link DqExecuteResultMapper} (exposed as {@code mybatisMapper}).
 */
@Repository
public class DqExecuteResultDaoImpl extends BaseDao<DqExecuteResult, DqExecuteResultMapper>
        implements
            DqExecuteResultDao {

    public DqExecuteResultDaoImpl(@NonNull DqExecuteResultMapper dqExecuteResultMapper) {
        super(dqExecuteResultMapper);
    }

    /**
     * Delegates straight to the mapper's {@code deleteByWorkflowInstanceId}
     * statement, which removes all matching rows from {@code t_ds_dq_execute_result}.
     */
    @Override
    public void deleteByWorkflowInstanceId(Integer workflowInstanceId) {
        mybatisMapper.deleteByWorkflowInstanceId(workflowInstanceId);
    }
}
|
@ -1,58 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.utils;
|
||||
|
||||
import org.apache.dolphinscheduler.common.utils.JSONUtils;
|
||||
import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* DqRuleUtils
|
||||
*/
|
||||
public class DqRuleUtils {
|
||||
|
||||
private DqRuleUtils() {
|
||||
throw new IllegalStateException("Utility class");
|
||||
}
|
||||
|
||||
public static List<DqRuleInputEntry> transformInputEntry(List<DqRuleInputEntry> ruleInputEntryList) {
|
||||
for (DqRuleInputEntry dqRuleInputEntry : ruleInputEntryList) {
|
||||
Map<String, Object> valuesMap =
|
||||
JSONUtils.toMap(dqRuleInputEntry.getValuesMap(), String.class, Object.class);
|
||||
if (valuesMap != null) {
|
||||
|
||||
if (valuesMap.get(dqRuleInputEntry.getField()) != null) {
|
||||
String value = String.valueOf(valuesMap.get(dqRuleInputEntry.getField()));
|
||||
dqRuleInputEntry.setData(value);
|
||||
}
|
||||
|
||||
if (valuesMap.get("is_show") != null) {
|
||||
dqRuleInputEntry.setIsShow(Boolean.parseBoolean(String.valueOf(valuesMap.get("is_show"))));
|
||||
}
|
||||
|
||||
if (valuesMap.get("can_edit") != null) {
|
||||
dqRuleInputEntry.setCanEdit(Boolean.parseBoolean(String.valueOf(valuesMap.get("can_edit"))));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return ruleInputEntryList;
|
||||
}
|
||||
}
|
@ -1,22 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8" ?>
|
||||
<!--
|
||||
~ Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
~ contributor license agreements. See the NOTICE file distributed with
|
||||
~ this work for additional information regarding copyright ownership.
|
||||
~ The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
~ (the "License"); you may not use this file except in compliance with
|
||||
~ the License. You may obtain a copy of the License at
|
||||
~
|
||||
~ http://www.apache.org/licenses/LICENSE-2.0
|
||||
~
|
||||
~ Unless required by applicable law or agreed to in writing, software
|
||||
~ distributed under the License is distributed on an "AS IS" BASIS,
|
||||
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
~ See the License for the specific language governing permissions and
|
||||
~ limitations under the License.
|
||||
-->
|
||||
|
||||
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
|
||||
<mapper namespace="org.apache.dolphinscheduler.dao.mapper.DqComparisonTypeMapper">
    <!-- No custom statements: presumably this interface relies entirely on the
         generic CRUD of its base mapper — confirm against the Java interface. -->
</mapper>
|
@ -1,111 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8" ?>
|
||||
<!--
|
||||
~ Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
~ contributor license agreements. See the NOTICE file distributed with
|
||||
~ this work for additional information regarding copyright ownership.
|
||||
~ The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
~ (the "License"); you may not use this file except in compliance with
|
||||
~ the License. You may obtain a copy of the License at
|
||||
~
|
||||
~ http://www.apache.org/licenses/LICENSE-2.0
|
||||
~
|
||||
~ Unless required by applicable law or agreed to in writing, software
|
||||
~ distributed under the License is distributed on an "AS IS" BASIS,
|
||||
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
~ See the License for the specific language governing permissions and
|
||||
~ limitations under the License.
|
||||
-->
|
||||
|
||||
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
|
||||
<mapper namespace="org.apache.dolphinscheduler.dao.mapper.DqExecuteResultMapper">
    <!-- Paged listing of data-quality execution results, enriched with the owning
         workflow definition/instance names, task name, comparison-type label and
         user name via left joins. Non-admin users (userType.code != 0) only see
         their own rows; ruleType = -1 disables the rule-type filter. -->
    <select id="queryResultListPaging" resultType="org.apache.dolphinscheduler.dao.entity.DqExecuteResult">
        SELECT a.id,
               a.workflow_definition_id,
               b.name as workflow_definition_name,
               b.code as workflow_definition_code,
               a.workflow_instance_id,
               e.name as workflow_instance_name,
               b.project_code,
               a.task_instance_id,
               c.name as task_name,
               a.rule_type,
               a.rule_name,
               a.statistics_value,
               a.comparison_value,
               a.check_type,
               a.threshold,
               cp.type as comparison_type_name,
               a.operator,
               a.failure_strategy,
               a.state,
               a.user_id,
               d.user_name,
               a.error_output_path,
               a.create_time,
               a.update_time
        FROM t_ds_dq_execute_result a
            left join t_ds_workflow_definition b on a.workflow_definition_id = b.id
            left join t_ds_task_instance c on a.task_instance_id = c.id
            left join t_ds_workflow_instance e on a.workflow_instance_id = e.id
            left join t_ds_user d on d.id = a.user_id
            left join t_ds_dq_comparison_type cp on cp.id = a.comparison_type
        <where>
            <if test=" searchVal != null and searchVal != ''">
                and c.name like concat('%', #{searchVal}, '%')
            </if>
            <if test="startTime != null ">
                and a.update_time > #{startTime} and a.update_time <![CDATA[ <=]]> #{endTime}
            </if>
            <!-- NOTE(review): "states" is iterated by <foreach> (a collection) yet
                 compared to '' here — OGNL tolerates this, but confirm intent. -->
            <if test="states != null and states != ''">
                and a.state in
                <foreach collection="states" index="index" item="i" open="(" separator="," close=")">
                    #{i}
                </foreach>
            </if>
            <if test=" user.userType.code != 0">
                and a.user_id = #{user.id}
            </if>
            <if test=" ruleType != -1">
                and a.rule_type = #{ruleType}
            </if>
        </where>
        order by a.update_time desc
    </select>

    <!-- Single-row lookup of the execution result attached to one task instance,
         with the same display-name joins as the paged query. -->
    <select id="getExecuteResultById" resultType="org.apache.dolphinscheduler.dao.entity.DqExecuteResult">
        SELECT a.id,
               a.workflow_definition_id,
               a.workflow_instance_id,
               a.task_instance_id,
               a.rule_type,
               a.rule_name,
               a.statistics_value,
               a.comparison_value,
               a.check_type,
               a.threshold,
               a.operator,
               a.failure_strategy,
               a.state,
               a.user_id,
               a.comparison_type,
               a.error_output_path,
               b.name as workflow_definition_name,
               e.name as workflow_instance_name,
               c.name as task_name,
               cp.type as comparison_type_name,
               d.user_name
        FROM t_ds_dq_execute_result a
            left join t_ds_workflow_definition b on a.workflow_definition_id = b.id
            left join t_ds_task_instance c on a.task_instance_id = c.id
            left join t_ds_workflow_instance e on a.workflow_instance_id = e.id
            left join t_ds_user d on d.id = a.user_id
            left join t_ds_dq_comparison_type cp on cp.id = a.comparison_type
        where task_instance_id = #{taskInstanceId}
    </select>

    <!-- Bulk cleanup used when a workflow instance is deleted
         (see DqExecuteResultDao.deleteByWorkflowInstanceId). -->
    <delete id="deleteByWorkflowInstanceId">
        delete
        from t_ds_dq_execute_result
        where workflow_instance_id = #{workflowInstanceId}
    </delete>
</mapper>
|
@ -1,27 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8" ?>
|
||||
<!--
|
||||
~ Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
~ contributor license agreements. See the NOTICE file distributed with
|
||||
~ this work for additional information regarding copyright ownership.
|
||||
~ The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
~ (the "License"); you may not use this file except in compliance with
|
||||
~ the License. You may obtain a copy of the License at
|
||||
~
|
||||
~ http://www.apache.org/licenses/LICENSE-2.0
|
||||
~
|
||||
~ Unless required by applicable law or agreed to in writing, software
|
||||
~ distributed under the License is distributed on an "AS IS" BASIS,
|
||||
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
~ See the License for the specific language governing permissions and
|
||||
~ limitations under the License.
|
||||
-->
|
||||
|
||||
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
|
||||
<mapper namespace="org.apache.dolphinscheduler.dao.mapper.DqRuleExecuteSqlMapper">

    <!-- Fetches the execute-SQL definitions bound to one rule by joining the
         SQL table with the rule↔SQL relation table filtered on rule_id. -->
    <select id="getExecuteSqlList" resultType="org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql">
        SELECT * FROM t_ds_dq_rule_execute_sql a join ( SELECT *
        FROM t_ds_relation_rule_execute_sql where rule_id = #{ruleId}) b
        on a.id = b.execute_sql_id
    </select>
</mapper>
|
@ -1,43 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8" ?>
|
||||
<!--
|
||||
~ Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
~ contributor license agreements. See the NOTICE file distributed with
|
||||
~ this work for additional information regarding copyright ownership.
|
||||
~ The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
~ (the "License"); you may not use this file except in compliance with
|
||||
~ the License. You may obtain a copy of the License at
|
||||
~
|
||||
~ http://www.apache.org/licenses/LICENSE-2.0
|
||||
~
|
||||
~ Unless required by applicable law or agreed to in writing, software
|
||||
~ distributed under the License is distributed on an "AS IS" BASIS,
|
||||
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
~ See the License for the specific language governing permissions and
|
||||
~ limitations under the License.
|
||||
-->
|
||||
|
||||
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
|
||||
<mapper namespace="org.apache.dolphinscheduler.dao.mapper.DqRuleInputEntryMapper">

    <!-- Fetches the form input entries bound to one rule: joins the entry table
         with the rule↔entry relation table, which contributes the per-rule
         values_map override and the display "index" used for ordering.
         NOTE(review): "index"/"sql" are reserved-ish column names — quoting is
         dialect-dependent; confirm against the target databases. -->
    <select id="getRuleInputEntryList" resultType="org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry">
        SELECT a.id,
               a.field,
               a.type,
               a.title,
               a.data,
               a.options,
               a.placeholder,
               a.option_source_type,
               a.data_type,
               a.input_type,
               a.is_show,
               a.can_edit,
               a.is_emit,
               a.is_validate,
               b.values_map,
               b.index
        FROM t_ds_dq_rule_input_entry a join ( SELECT *
        FROM t_ds_relation_rule_input_entry where rule_id = #{ruleId} ) b
        on a.id = b.rule_input_entry_id order by b.index
    </select>
</mapper>
|
@ -1,37 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8" ?>
|
||||
<!--
|
||||
~ Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
~ contributor license agreements. See the NOTICE file distributed with
|
||||
~ this work for additional information regarding copyright ownership.
|
||||
~ The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
~ (the "License"); you may not use this file except in compliance with
|
||||
~ the License. You may obtain a copy of the License at
|
||||
~
|
||||
~ http://www.apache.org/licenses/LICENSE-2.0
|
||||
~
|
||||
~ Unless required by applicable law or agreed to in writing, software
|
||||
~ distributed under the License is distributed on an "AS IS" BASIS,
|
||||
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
~ See the License for the specific language governing permissions and
|
||||
~ limitations under the License.
|
||||
-->
|
||||
|
||||
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
|
||||
<mapper namespace="org.apache.dolphinscheduler.dao.mapper.DqRuleMapper">
    <!-- Paged listing of data-quality rules with the owner's user name;
         ruleType = -1 skips the type filter (i.e. "all types"). -->
    <select id="queryRuleListPaging" resultType="org.apache.dolphinscheduler.dao.entity.DqRule">
        SELECT a.id, a.name, a.type, b.user_name, a.create_time, a.update_time
        FROM t_ds_dq_rule a left join t_ds_user b on a.user_id = b.id
        <where>
            <if test=" searchVal != null and searchVal != ''">
                and a.name like concat('%', #{searchVal}, '%')
            </if>
            <if test="startTime != null ">
                and a.update_time > #{startTime} and a.update_time <![CDATA[ <=]]> #{endTime}
            </if>
            <if test=" ruleType != -1">
                and a.type = #{ruleType}
            </if>
        </where>
        order by a.update_time desc
    </select>
</mapper>
|
@ -1,22 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8" ?>
|
||||
<!--
|
||||
~ Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
~ contributor license agreements. See the NOTICE file distributed with
|
||||
~ this work for additional information regarding copyright ownership.
|
||||
~ The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
~ (the "License"); you may not use this file except in compliance with
|
||||
~ the License. You may obtain a copy of the License at
|
||||
~
|
||||
~ http://www.apache.org/licenses/LICENSE-2.0
|
||||
~
|
||||
~ Unless required by applicable law or agreed to in writing, software
|
||||
~ distributed under the License is distributed on an "AS IS" BASIS,
|
||||
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
~ See the License for the specific language governing permissions and
|
||||
~ limitations under the License.
|
||||
-->
|
||||
|
||||
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
|
||||
<mapper namespace="org.apache.dolphinscheduler.dao.mapper.DqTaskStatisticsValueMapper">
    <!-- No custom statements: presumably this interface relies entirely on the
         generic CRUD of its base mapper — confirm against the Java interface. -->
</mapper>
|
@ -1103,825 +1103,6 @@ CREATE TABLE t_ds_alert_plugin_instance
|
||||
PRIMARY KEY (id)
|
||||
);
|
||||
|
||||
--
|
||||
-- Table structure for table `t_ds_dq_comparison_type`
|
||||
--
|
||||
DROP TABLE IF EXISTS `t_ds_dq_comparison_type`;
|
||||
CREATE TABLE `t_ds_dq_comparison_type` (
|
||||
`id` int(11) NOT NULL AUTO_INCREMENT,
|
||||
`type` varchar(255) NOT NULL,
|
||||
`execute_sql` text DEFAULT NULL,
|
||||
`output_table` varchar(100) DEFAULT NULL,
|
||||
`name` varchar(255) DEFAULT NULL,
|
||||
`create_time` datetime DEFAULT NULL,
|
||||
`update_time` datetime DEFAULT NULL,
|
||||
`is_inner_source` tinyint(1) DEFAULT '0',
|
||||
PRIMARY KEY (`id`)
|
||||
)ENGINE=InnoDB DEFAULT CHARSET=utf8;
|
||||
|
||||
INSERT INTO `t_ds_dq_comparison_type`
|
||||
(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
|
||||
VALUES(1, 'FixValue', NULL, NULL, NULL, '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false);
|
||||
INSERT INTO `t_ds_dq_comparison_type`
|
||||
(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
|
||||
VALUES(2, 'DailyAvg', 'select round(avg(statistics_value),2) as day_avg from t_ds_dq_task_statistics_value where data_time >=date_trunc(''DAY'', ${data_time}) and data_time < date_add(date_trunc(''day'', ${data_time}),1) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'day_range', 'day_range.day_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
|
||||
INSERT INTO `t_ds_dq_comparison_type`
|
||||
(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
|
||||
VALUES(3, 'WeeklyAvg', 'select round(avg(statistics_value),2) as week_avg from t_ds_dq_task_statistics_value where data_time >= date_trunc(''WEEK'', ${data_time}) and data_time <date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'week_range', 'week_range.week_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
|
||||
INSERT INTO `t_ds_dq_comparison_type`
|
||||
(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
|
||||
VALUES(4, 'MonthlyAvg', 'select round(avg(statistics_value),2) as month_avg from t_ds_dq_task_statistics_value where data_time >= date_trunc(''MONTH'', ${data_time}) and data_time <date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'month_range', 'month_range.month_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
|
||||
INSERT INTO `t_ds_dq_comparison_type`
|
||||
(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
|
||||
VALUES(5, 'Last7DayAvg', 'select round(avg(statistics_value),2) as last_7_avg from t_ds_dq_task_statistics_value where data_time >= date_add(date_trunc(''day'', ${data_time}),-7) and data_time <date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'last_seven_days', 'last_seven_days.last_7_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
|
||||
INSERT INTO `t_ds_dq_comparison_type`
|
||||
(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
|
||||
VALUES(6, 'Last30DayAvg', 'select round(avg(statistics_value),2) as last_30_avg from t_ds_dq_task_statistics_value where data_time >= date_add(date_trunc(''day'', ${data_time}),-30) and data_time < date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'last_thirty_days', 'last_thirty_days.last_30_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
|
||||
INSERT INTO `t_ds_dq_comparison_type`
|
||||
(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
|
||||
VALUES(7, 'SrcTableTotalRows', 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'total_count', 'total_count.total', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false);
|
||||
INSERT INTO `t_ds_dq_comparison_type`
|
||||
(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
|
||||
VALUES(8, 'TargetTableTotalRows', 'SELECT COUNT(*) AS total FROM ${target_table} WHERE (${target_filter})', 'total_count', 'total_count.total', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false);
|
||||
|
||||
--
|
||||
-- Table structure for table `t_ds_dq_execute_result`
|
||||
--
|
||||
DROP TABLE IF EXISTS `t_ds_dq_execute_result`;
|
||||
CREATE TABLE `t_ds_dq_execute_result` (
|
||||
`id` int(11) NOT NULL AUTO_INCREMENT,
|
||||
`workflow_definition_id` int(11) DEFAULT NULL,
|
||||
`workflow_instance_id` int(11) DEFAULT NULL,
|
||||
`task_instance_id` int(11) DEFAULT NULL,
|
||||
`rule_type` int(11) DEFAULT NULL,
|
||||
`rule_name` varchar(255) DEFAULT NULL,
|
||||
`statistics_value` double DEFAULT NULL,
|
||||
`comparison_value` double DEFAULT NULL,
|
||||
`check_type` int(11) DEFAULT NULL,
|
||||
`threshold` double DEFAULT NULL,
|
||||
`operator` int(11) DEFAULT NULL,
|
||||
`failure_strategy` int(11) DEFAULT NULL,
|
||||
`state` int(11) DEFAULT NULL,
|
||||
`user_id` int(11) DEFAULT NULL,
|
||||
`comparison_type` int(11) DEFAULT NULL,
|
||||
`error_output_path` text DEFAULT NULL,
|
||||
`create_time` datetime DEFAULT NULL,
|
||||
`update_time` datetime DEFAULT NULL,
|
||||
PRIMARY KEY (`id`)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
|
||||
|
||||
--
|
||||
-- Table structure for table t_ds_dq_rule
|
||||
--
|
||||
DROP TABLE IF EXISTS `t_ds_dq_rule`;
|
||||
CREATE TABLE `t_ds_dq_rule` (
|
||||
`id` int(11) NOT NULL AUTO_INCREMENT,
|
||||
`name` varchar(255) DEFAULT NULL,
|
||||
`type` int(11) DEFAULT NULL,
|
||||
`user_id` int(11) DEFAULT NULL,
|
||||
`create_time` datetime DEFAULT NULL,
|
||||
`update_time` datetime DEFAULT NULL,
|
||||
PRIMARY KEY (`id`)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
|
||||
|
||||
INSERT INTO `t_ds_dq_rule`
|
||||
(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
|
||||
VALUES(1, '$t(null_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
|
||||
INSERT INTO `t_ds_dq_rule`
|
||||
(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
|
||||
VALUES(2, '$t(custom_sql)', 1, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
|
||||
INSERT INTO `t_ds_dq_rule`
|
||||
(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
|
||||
VALUES(3, '$t(multi_table_accuracy)', 2, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
|
||||
INSERT INTO `t_ds_dq_rule`
|
||||
(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
|
||||
VALUES(4, '$t(multi_table_value_comparison)', 3, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
|
||||
INSERT INTO `t_ds_dq_rule`
|
||||
(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
|
||||
VALUES(5, '$t(field_length_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
|
||||
INSERT INTO `t_ds_dq_rule`
|
||||
(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
|
||||
VALUES(6, '$t(uniqueness_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
|
||||
INSERT INTO `t_ds_dq_rule`
|
||||
(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
|
||||
VALUES(7, '$t(regexp_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
|
||||
INSERT INTO `t_ds_dq_rule`
|
||||
(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
|
||||
VALUES(8, '$t(timeliness_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
|
||||
INSERT INTO `t_ds_dq_rule`
|
||||
(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
|
||||
VALUES(9, '$t(enumeration_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
|
||||
INSERT INTO `t_ds_dq_rule`
|
||||
(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
|
||||
VALUES(10, '$t(table_count_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
|
||||
|
||||
--
|
||||
-- Table structure for table `t_ds_dq_rule_execute_sql`
|
||||
--
|
||||
DROP TABLE IF EXISTS `t_ds_dq_rule_execute_sql`;
|
||||
CREATE TABLE `t_ds_dq_rule_execute_sql` (
|
||||
`id` int(11) NOT NULL AUTO_INCREMENT,
|
||||
`index` int(11) DEFAULT NULL,
|
||||
`sql` text DEFAULT NULL,
|
||||
`table_alias` varchar(255) DEFAULT NULL,
|
||||
`type` int(11) DEFAULT NULL,
|
||||
`is_error_output_sql` tinyint(1) DEFAULT '0',
|
||||
`create_time` datetime DEFAULT NULL,
|
||||
`update_time` datetime DEFAULT NULL,
|
||||
PRIMARY KEY (`id`)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
|
||||
|
||||
INSERT INTO `t_ds_dq_rule_execute_sql`
|
||||
(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
|
||||
VALUES(1, 1, 'SELECT COUNT(*) AS nulls FROM null_items', 'null_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_execute_sql`
|
||||
(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
|
||||
VALUES(2, 1, 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'total_count', 2, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_execute_sql`
|
||||
(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
|
||||
VALUES(3, 1, 'SELECT COUNT(*) AS miss from miss_items', 'miss_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_execute_sql`
|
||||
(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
|
||||
VALUES(4, 1, 'SELECT COUNT(*) AS valids FROM invalid_length_items', 'invalid_length_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_execute_sql`
|
||||
(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
|
||||
VALUES(5, 1, 'SELECT COUNT(*) AS total FROM ${target_table} WHERE (${target_filter})', 'total_count', 2, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_execute_sql`
|
||||
(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
|
||||
VALUES(6, 1, 'SELECT ${src_field} FROM ${src_table} group by ${src_field} having count(*) > 1', 'duplicate_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_execute_sql`
|
||||
(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
|
||||
VALUES(7, 1, 'SELECT COUNT(*) AS duplicates FROM duplicate_items', 'duplicate_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_execute_sql`
|
||||
(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
|
||||
VALUES(8, 1, 'SELECT ${src_table}.* FROM (SELECT * FROM ${src_table} WHERE (${src_filter})) ${src_table} LEFT JOIN (SELECT * FROM ${target_table} WHERE (${target_filter})) ${target_table} ON ${on_clause} WHERE ${where_clause}', 'miss_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_execute_sql`
|
||||
(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
|
||||
VALUES(9, 1, 'SELECT * FROM ${src_table} WHERE (${src_field} not regexp ''${regexp_pattern}'') AND (${src_filter}) ', 'regexp_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_execute_sql`
|
||||
(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
|
||||
VALUES(10, 1, 'SELECT COUNT(*) AS regexps FROM regexp_items', 'regexp_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_execute_sql`
|
||||
(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
|
||||
VALUES(11, 1, 'SELECT * FROM ${src_table} WHERE (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${deadline}'', ''${datetime_format}'') <= 0) AND (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${begin_time}'', ''${datetime_format}'') >= 0) AND (${src_filter}) ', 'timeliness_items', 0, 1, '2021-03-03 11:31:24.0', '2021-03-03 11:31:24.0');
|
||||
INSERT INTO `t_ds_dq_rule_execute_sql`
|
||||
(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
|
||||
VALUES(12, 1, 'SELECT COUNT(*) AS timeliness FROM timeliness_items', 'timeliness_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_execute_sql`
|
||||
(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
|
||||
VALUES(13, 1, 'SELECT * FROM ${src_table} where (${src_field} not in ( ${enum_list} ) or ${src_field} is null) AND (${src_filter}) ', 'enum_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_execute_sql`
|
||||
(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
|
||||
VALUES(14, 1, 'SELECT COUNT(*) AS enums FROM enum_items', 'enum_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_execute_sql`
|
||||
(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
|
||||
VALUES(15, 1, 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'table_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_execute_sql`
|
||||
(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
|
||||
VALUES(16, 1, 'SELECT * FROM ${src_table} WHERE (${src_field} is null or ${src_field} = '''') AND (${src_filter})', 'null_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_execute_sql`
|
||||
(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
|
||||
VALUES(17, 1, 'SELECT * FROM ${src_table} WHERE (length(${src_field}) ${logic_operator} ${field_length}) AND (${src_filter})', 'invalid_length_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
|
||||
--
|
||||
-- Table structure for table `t_ds_dq_rule_input_entry`
|
||||
--
|
||||
DROP TABLE IF EXISTS `t_ds_dq_rule_input_entry`;
|
||||
CREATE TABLE `t_ds_dq_rule_input_entry` (
|
||||
`id` int(11) NOT NULL AUTO_INCREMENT,
|
||||
`field` varchar(255) DEFAULT NULL,
|
||||
`type` varchar(255) DEFAULT NULL,
|
||||
`title` varchar(255) DEFAULT NULL,
|
||||
`data` varchar(255) DEFAULT NULL,
|
||||
`options` text DEFAULT NULL,
|
||||
`placeholder` varchar(255) DEFAULT NULL,
|
||||
`option_source_type` int(11) DEFAULT NULL,
|
||||
`data_type` int(11) DEFAULT NULL,
|
||||
`input_type` int(11) DEFAULT NULL,
|
||||
`is_show` tinyint(1) DEFAULT '1',
|
||||
`can_edit` tinyint(1) DEFAULT '1',
|
||||
`is_emit` tinyint(1) DEFAULT '0',
|
||||
`is_validate` tinyint(1) DEFAULT '1',
|
||||
`create_time` datetime DEFAULT NULL,
|
||||
`update_time` datetime DEFAULT NULL,
|
||||
PRIMARY KEY (`id`)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
|
||||
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(1, 'src_connector_type', 'select', '$t(src_connector_type)', '', '[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]', 'please select source connector type', 2, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(2, 'src_datasource_id', 'select', '$t(src_datasource_id)', '', NULL, 'please select source datasource id', 1, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(3, 'src_table', 'select', '$t(src_table)', NULL, NULL, 'Please enter source table name', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(4, 'src_filter', 'input', '$t(src_filter)', NULL, NULL, 'Please enter filter expression', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(5, 'src_field', 'select', '$t(src_field)', NULL, NULL, 'Please enter column, only single column is supported', 0, 0, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(6, 'statistics_name', 'input', '$t(statistics_name)', NULL, NULL, 'Please enter statistics name, the alias in statistics execute sql', 0, 0, 1, 0, 0, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(7, 'check_type', 'select', '$t(check_type)', '0', '[{"label":"Expected - Actual","value":"0"},{"label":"Actual - Expected","value":"1"},{"label":"Actual / Expected","value":"2"},{"label":"(Expected - Actual) / Expected","value":"3"}]', 'please select check type', 0, 0, 3, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(8, 'operator', 'select', '$t(operator)', '0', '[{"label":"=","value":"0"},{"label":"<","value":"1"},{"label":"<=","value":"2"},{"label":">","value":"3"},{"label":">=","value":"4"},{"label":"!=","value":"5"}]', 'please select operator', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(9, 'threshold', 'input', '$t(threshold)', NULL, NULL, 'Please enter threshold, number is needed', 0, 2, 3, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(10, 'failure_strategy', 'select', '$t(failure_strategy)', '0', '[{"label":"Alert","value":"0"},{"label":"Block","value":"1"}]', 'please select failure strategy', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(11, 'target_connector_type', 'select', '$t(target_connector_type)', '', '[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]', 'Please select target connector type', 2, 0, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(12, 'target_datasource_id', 'select', '$t(target_datasource_id)', '', NULL, 'Please select target datasource', 1, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(13, 'target_table', 'select', '$t(target_table)', NULL, NULL, 'Please enter target table', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(14, 'target_filter', 'input', '$t(target_filter)', NULL, NULL, 'Please enter target filter expression', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(15, 'mapping_columns', 'group', '$t(mapping_columns)', NULL, '[{"field":"src_field","props":{"placeholder":"Please input src field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"src_field"},{"field":"operator","props":{"placeholder":"Please input operator","rows":0,"disabled":false,"size":"small"},"type":"input","title":"operator"},{"field":"target_field","props":{"placeholder":"Please input target field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"target_field"}]', 'please enter mapping columns', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(16, 'statistics_execute_sql', 'textarea', '$t(statistics_execute_sql)', NULL, NULL, 'Please enter statistics execute sql', 0, 3, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(17, 'comparison_name', 'input', '$t(comparison_name)', NULL, NULL, 'Please enter comparison name, the alias in comparison execute sql', 0, 0, 0, 0, 0, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(18, 'comparison_execute_sql', 'textarea', '$t(comparison_execute_sql)', NULL, NULL, 'Please enter comparison execute sql', 0, 3, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(19, 'comparison_type', 'select', '$t(comparison_type)', '', NULL, 'Please enter comparison title', 3, 0, 2, 1, 0, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(20, 'writer_connector_type', 'select', '$t(writer_connector_type)', '', '[{"label":"MYSQL","value":"0"},{"label":"POSTGRESQL","value":"1"}]', 'please select writer connector type', 0, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(21, 'writer_datasource_id', 'select', '$t(writer_datasource_id)', '', NULL, 'please select writer datasource id', 1, 2, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(22, 'target_field', 'select', '$t(target_field)', NULL, NULL, 'Please enter column, only single column is supported', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(23, 'field_length', 'input', '$t(field_length)', NULL, NULL, 'Please enter length limit', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(24, 'logic_operator', 'select', '$t(logic_operator)', '=', '[{"label":"=","value":"="},{"label":"<","value":"<"},{"label":"<=","value":"<="},{"label":">","value":">"},{"label":">=","value":">="},{"label":"<>","value":"<>"}]', 'please select logic operator', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(25, 'regexp_pattern', 'input', '$t(regexp_pattern)', NULL, NULL, 'Please enter regexp pattern', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(26, 'deadline', 'input', '$t(deadline)', NULL, NULL, 'Please enter deadline', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(27, 'datetime_format', 'input', '$t(datetime_format)', NULL, NULL, 'Please enter datetime format', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(28, 'enum_list', 'input', '$t(enum_list)', NULL, NULL, 'Please enter enumeration', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(29, 'begin_time', 'input', '$t(begin_time)', NULL, NULL, 'Please enter begin time', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.0', '2021-03-03 11:31:24.0');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(30, 'src_database', 'select', '$t(src_database)', NULL, NULL, 'Please select source database', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.0', '2021-03-03 11:31:24.0');
|
||||
INSERT INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(31, 'target_database', 'select', '$t(target_database)', NULL, NULL, 'Please select target database', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.0', '2021-03-03 11:31:24.0');
|
||||
|
||||
--
|
||||
-- Table structure for table `t_ds_dq_task_statistics_value`
|
||||
--
|
||||
DROP TABLE IF EXISTS `t_ds_dq_task_statistics_value`;
|
||||
CREATE TABLE `t_ds_dq_task_statistics_value` (
|
||||
`id` int(11) NOT NULL AUTO_INCREMENT,
|
||||
`workflow_definition_id` int(11) DEFAULT NULL,
|
||||
`task_instance_id` int(11) DEFAULT NULL,
|
||||
`rule_id` int(11) NOT NULL,
|
||||
`unique_code` varchar(255) NULL,
|
||||
`statistics_name` varchar(255) NULL,
|
||||
`statistics_value` double NULL,
|
||||
`data_time` datetime DEFAULT NULL,
|
||||
`create_time` datetime DEFAULT NULL,
|
||||
`update_time` datetime DEFAULT NULL,
|
||||
PRIMARY KEY (`id`)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
|
||||
|
||||
--
|
||||
-- Table structure for table `t_ds_relation_rule_execute_sql`
|
||||
--
|
||||
DROP TABLE IF EXISTS `t_ds_relation_rule_execute_sql`;
|
||||
CREATE TABLE `t_ds_relation_rule_execute_sql` (
|
||||
`id` int(11) NOT NULL AUTO_INCREMENT,
|
||||
`rule_id` int(11) DEFAULT NULL,
|
||||
`execute_sql_id` int(11) DEFAULT NULL,
|
||||
`create_time` datetime NULL,
|
||||
`update_time` datetime NULL,
|
||||
PRIMARY KEY (`id`)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
|
||||
|
||||
INSERT INTO `t_ds_relation_rule_execute_sql`
|
||||
(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
|
||||
VALUES(1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_execute_sql`
|
||||
(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
|
||||
VALUES(3, 5, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_execute_sql`
|
||||
(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
|
||||
VALUES(2, 3, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_execute_sql`
|
||||
(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
|
||||
VALUES(4, 3, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_execute_sql`
|
||||
(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
|
||||
VALUES(5, 6, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_execute_sql`
|
||||
(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
|
||||
VALUES(6, 6, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_execute_sql`
|
||||
(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
|
||||
VALUES(7, 7, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_execute_sql`
|
||||
(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
|
||||
VALUES(8, 7, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_execute_sql`
|
||||
(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
|
||||
VALUES(9, 8, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_execute_sql`
|
||||
(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
|
||||
VALUES(10, 8, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_execute_sql`
|
||||
(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
|
||||
VALUES(11, 9, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_execute_sql`
|
||||
(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
|
||||
VALUES(12, 9, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_execute_sql`
|
||||
(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
|
||||
VALUES(13, 10, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_execute_sql`
|
||||
(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
|
||||
VALUES(14, 1, 16, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_execute_sql`
|
||||
(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
|
||||
VALUES(15, 5, 17, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
|
||||
--
|
||||
-- Table structure for table `t_ds_relation_rule_input_entry`
|
||||
--
|
||||
DROP TABLE IF EXISTS `t_ds_relation_rule_input_entry`;
|
||||
CREATE TABLE `t_ds_relation_rule_input_entry` (
|
||||
`id` int(11) NOT NULL AUTO_INCREMENT,
|
||||
`rule_id` int(11) DEFAULT NULL,
|
||||
`rule_input_entry_id` int(11) DEFAULT NULL,
|
||||
`values_map` text DEFAULT NULL,
|
||||
`index` int(11) DEFAULT NULL,
|
||||
`create_time` datetime DEFAULT NULL,
|
||||
`update_time` datetime DEFAULT NULL,
|
||||
PRIMARY KEY (`id`)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
|
||||
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(1, 1, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(2, 1, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(3, 1, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(4, 1, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(5, 1, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(6, 1, 6, '{"statistics_name":"null_count.nulls"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(7, 1, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(8, 1, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(9, 1, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(10, 1, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(11, 1, 17, '', 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(12, 1, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(13, 2, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(14, 2, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(15, 2, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(16, 2, 6, '{"is_show":"true","can_edit":"true"}', 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(17, 2, 16, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(18, 2, 4, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(19, 2, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(20, 2, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(21, 2, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(22, 2, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(24, 2, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(25, 3, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(26, 3, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(27, 3, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(28, 3, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(29, 3, 11, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(30, 3, 12, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(31, 3, 13, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(32, 3, 14, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(33, 3, 15, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(34, 3, 7, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(35, 3, 8, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(36, 3, 9, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(37, 3, 10, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(38, 3, 17, '{"comparison_name":"total_count.total"}', 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(39, 3, 19, NULL, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(40, 4, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(41, 4, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(42, 4, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(43, 4, 6, '{"is_show":"true","can_edit":"true"}', 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(44, 4, 16, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(45, 4, 11, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(46, 4, 12, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(47, 4, 13, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(48, 4, 17, '{"is_show":"true","can_edit":"true"}', 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(49, 4, 18, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(50, 4, 7, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(51, 4, 8, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(52, 4, 9, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(53, 4, 10, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(62, 3, 6, '{"statistics_name":"miss_count.miss"}', 18, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(63, 5, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(64, 5, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(65, 5, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(66, 5, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(67, 5, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(68, 5, 6, '{"statistics_name":"invalid_length_count.valids"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(69, 5, 24, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(70, 5, 23, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(71, 5, 7, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(72, 5, 8, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(73, 5, 9, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(74, 5, 10, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(75, 5, 17, '', 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(76, 5, 19, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(79, 6, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(80, 6, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(81, 6, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(82, 6, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(83, 6, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(84, 6, 6, '{"statistics_name":"duplicate_count.duplicates"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(85, 6, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(86, 6, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(87, 6, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(88, 6, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(89, 6, 17, '', 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(90, 6, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(93, 7, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(94, 7, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(95, 7, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(96, 7, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(97, 7, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(98, 7, 6, '{"statistics_name":"regexp_count.regexps"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(99, 7, 25, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(100, 7, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(101, 7, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(102, 7, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(103, 7, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(104, 7, 17, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(105, 7, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(108, 8, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(109, 8, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(110, 8, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(111, 8, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(112, 8, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(113, 8, 6, '{"statistics_name":"timeliness_count.timeliness"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(114, 8, 26, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(115, 8, 27, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(116, 8, 7, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(117, 8, 8, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(118, 8, 9, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(119, 8, 10, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(120, 8, 17, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(121, 8, 19, NULL, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(124, 9, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(125, 9, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(126, 9, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(127, 9, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(128, 9, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(129, 9, 6, '{"statistics_name":"enum_count.enums"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(130, 9, 28, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(131, 9, 7, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(132, 9, 8, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(133, 9, 9, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(134, 9, 10, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(135, 9, 17, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(136, 9, 19, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(139, 10, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(140, 10, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(141, 10, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(142, 10, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(143, 10, 6, '{"statistics_name":"table_count.total"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(144, 10, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(145, 10, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(146, 10, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(147, 10, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(148, 10, 17, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(149, 10, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(150, 8, 29, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(151, 1, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(152, 2, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(153, 3, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(154, 4, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(155, 5, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(156, 6, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(157, 7, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(158, 8, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(159, 9, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(160, 10, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(161, 3, 31, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(162, 4, 31, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
|
||||
--
|
||||
-- Table structure for table t_ds_environment
|
||||
|
@ -1093,827 +1093,6 @@ CREATE TABLE `t_ds_relation_project_worker_group` (
|
||||
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COLLATE = utf8_bin;
|
||||
|
||||
|
||||
--
|
||||
-- Table structure for table `t_ds_dq_comparison_type`
|
||||
--
|
||||
DROP TABLE IF EXISTS `t_ds_dq_comparison_type`;
|
||||
CREATE TABLE `t_ds_dq_comparison_type` (
|
||||
`id` int(11) NOT NULL AUTO_INCREMENT,
|
||||
`type` varchar(255) NOT NULL,
|
||||
`execute_sql` text DEFAULT NULL,
|
||||
`output_table` varchar(100) DEFAULT NULL,
|
||||
`name` varchar(255) DEFAULT NULL,
|
||||
`create_time` datetime DEFAULT NULL,
|
||||
`update_time` datetime DEFAULT NULL,
|
||||
`is_inner_source` tinyint(1) DEFAULT '0',
|
||||
PRIMARY KEY (`id`)
|
||||
)ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE = utf8_bin;
|
||||
|
||||
|
||||
INSERT IGNORE INTO `t_ds_dq_comparison_type`
|
||||
(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
|
||||
VALUES(1, 'FixValue', NULL, NULL, NULL, current_timestamp, current_timestamp, false);
|
||||
INSERT IGNORE INTO `t_ds_dq_comparison_type`
|
||||
(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
|
||||
VALUES(2, 'DailyAvg', 'select round(avg(statistics_value),2) as day_avg from t_ds_dq_task_statistics_value where data_time >=date_trunc(''DAY'', ${data_time}) and data_time < date_add(date_trunc(''day'', ${data_time}),1) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'day_range', 'day_range.day_avg', current_timestamp, current_timestamp, true);
|
||||
INSERT IGNORE INTO `t_ds_dq_comparison_type`
|
||||
(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
|
||||
VALUES(3, 'WeeklyAvg', 'select round(avg(statistics_value),2) as week_avg from t_ds_dq_task_statistics_value where data_time >= date_trunc(''WEEK'', ${data_time}) and data_time <date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'week_range', 'week_range.week_avg', current_timestamp, current_timestamp, true);
|
||||
INSERT IGNORE INTO `t_ds_dq_comparison_type`
|
||||
(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
|
||||
VALUES(4, 'MonthlyAvg', 'select round(avg(statistics_value),2) as month_avg from t_ds_dq_task_statistics_value where data_time >= date_trunc(''MONTH'', ${data_time}) and data_time <date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'month_range', 'month_range.month_avg', current_timestamp, current_timestamp, true);
|
||||
INSERT IGNORE INTO `t_ds_dq_comparison_type`
|
||||
(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
|
||||
VALUES(5, 'Last7DayAvg', 'select round(avg(statistics_value),2) as last_7_avg from t_ds_dq_task_statistics_value where data_time >= date_add(date_trunc(''day'', ${data_time}),-7) and data_time <date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'last_seven_days', 'last_seven_days.last_7_avg', current_timestamp, current_timestamp, true);
|
||||
INSERT IGNORE INTO `t_ds_dq_comparison_type`
|
||||
(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
|
||||
VALUES(6, 'Last30DayAvg', 'select round(avg(statistics_value),2) as last_30_avg from t_ds_dq_task_statistics_value where data_time >= date_add(date_trunc(''day'', ${data_time}),-30) and data_time < date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'last_thirty_days', 'last_thirty_days.last_30_avg', current_timestamp, current_timestamp, true);
|
||||
INSERT IGNORE INTO `t_ds_dq_comparison_type`
|
||||
(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
|
||||
VALUES(7, 'SrcTableTotalRows', 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'total_count', 'total_count.total', current_timestamp, current_timestamp, false);
|
||||
INSERT IGNORE INTO `t_ds_dq_comparison_type`
|
||||
(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
|
||||
VALUES(8, 'TargetTableTotalRows', 'SELECT COUNT(*) AS total FROM ${target_table} WHERE (${target_filter})', 'total_count', 'total_count.total', current_timestamp, current_timestamp, false);
|
||||
|
||||
--
|
||||
-- Table structure for table `t_ds_dq_execute_result`
|
||||
--
|
||||
DROP TABLE IF EXISTS `t_ds_dq_execute_result`;
|
||||
CREATE TABLE `t_ds_dq_execute_result` (
|
||||
`id` int(11) NOT NULL AUTO_INCREMENT,
|
||||
`workflow_definition_id` int(11) DEFAULT NULL,
|
||||
`workflow_instance_id` int(11) DEFAULT NULL,
|
||||
`task_instance_id` int(11) DEFAULT NULL,
|
||||
`rule_type` int(11) DEFAULT NULL,
|
||||
`rule_name` varchar(255) DEFAULT NULL,
|
||||
`statistics_value` double DEFAULT NULL,
|
||||
`comparison_value` double DEFAULT NULL,
|
||||
`check_type` int(11) DEFAULT NULL,
|
||||
`threshold` double DEFAULT NULL,
|
||||
`operator` int(11) DEFAULT NULL,
|
||||
`failure_strategy` int(11) DEFAULT NULL,
|
||||
`state` int(11) DEFAULT NULL,
|
||||
`user_id` int(11) DEFAULT NULL,
|
||||
`comparison_type` int(11) DEFAULT NULL,
|
||||
`error_output_path` text DEFAULT NULL,
|
||||
`create_time` datetime DEFAULT NULL,
|
||||
`update_time` datetime DEFAULT NULL,
|
||||
PRIMARY KEY (`id`)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE = utf8_bin;
|
||||
|
||||
--
|
||||
-- Table structure for table t_ds_dq_rule
|
||||
--
|
||||
DROP TABLE IF EXISTS `t_ds_dq_rule`;
|
||||
CREATE TABLE `t_ds_dq_rule` (
|
||||
`id` int(11) NOT NULL AUTO_INCREMENT,
|
||||
`name` varchar(255) DEFAULT NULL,
|
||||
`type` int(11) DEFAULT NULL,
|
||||
`user_id` int(11) DEFAULT NULL,
|
||||
`create_time` datetime DEFAULT NULL,
|
||||
`update_time` datetime DEFAULT NULL,
|
||||
PRIMARY KEY (`id`)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE = utf8_bin;
|
||||
|
||||
INSERT IGNORE INTO `t_ds_dq_rule`
|
||||
(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
|
||||
VALUES(1, '$t(null_check)', 0, 1, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_dq_rule`
|
||||
(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
|
||||
VALUES(2, '$t(custom_sql)', 1, 1, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_dq_rule`
|
||||
(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
|
||||
VALUES(3, '$t(multi_table_accuracy)', 2, 1, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_dq_rule`
|
||||
(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
|
||||
VALUES(4, '$t(multi_table_value_comparison)', 3, 1, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_dq_rule`
|
||||
(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
|
||||
VALUES(5, '$t(field_length_check)', 0, 1, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_dq_rule`
|
||||
(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
|
||||
VALUES(6, '$t(uniqueness_check)', 0, 1, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_dq_rule`
|
||||
(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
|
||||
VALUES(7, '$t(regexp_check)', 0, 1, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_dq_rule`
|
||||
(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
|
||||
VALUES(8, '$t(timeliness_check)', 0, 1, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_dq_rule`
|
||||
(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
|
||||
VALUES(9, '$t(enumeration_check)', 0, 1, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_dq_rule`
|
||||
(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
|
||||
VALUES(10, '$t(table_count_check)', 0, 1, current_timestamp, current_timestamp);
|
||||
|
||||
--
|
||||
-- Table structure for table `t_ds_dq_rule_execute_sql`
|
||||
--
|
||||
DROP TABLE IF EXISTS `t_ds_dq_rule_execute_sql`;
|
||||
CREATE TABLE `t_ds_dq_rule_execute_sql` (
|
||||
`id` int(11) NOT NULL AUTO_INCREMENT,
|
||||
`index` int(11) DEFAULT NULL,
|
||||
`sql` text DEFAULT NULL,
|
||||
`table_alias` varchar(255) DEFAULT NULL,
|
||||
`type` int(11) DEFAULT NULL,
|
||||
`is_error_output_sql` tinyint(1) DEFAULT '0',
|
||||
`create_time` datetime DEFAULT NULL,
|
||||
`update_time` datetime DEFAULT NULL,
|
||||
PRIMARY KEY (`id`)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE = utf8_bin;
|
||||
|
||||
INSERT IGNORE INTO `t_ds_dq_rule_execute_sql`
|
||||
(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
|
||||
VALUES(1, 1, 'SELECT COUNT(*) AS nulls FROM null_items', 'null_count', 1, false, current_timestamp, current_timestamp);
|
||||
-- Seed rule execute-sql fragments (ids 2-10) for the data-quality rules.
-- Consolidated into one multi-row INSERT IGNORE: MySQL skips any row whose
-- primary key already exists, so the net effect equals the original
-- one-row-per-statement form.
INSERT IGNORE INTO `t_ds_dq_rule_execute_sql`
    (`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
VALUES
    (2, 1, 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'total_count', 2, false, current_timestamp, current_timestamp),
    (3, 1, 'SELECT COUNT(*) AS miss from miss_items', 'miss_count', 1, false, current_timestamp, current_timestamp),
    (4, 1, 'SELECT COUNT(*) AS valids FROM invalid_length_items', 'invalid_length_count', 1, false, current_timestamp, current_timestamp),
    (5, 1, 'SELECT COUNT(*) AS total FROM ${target_table} WHERE (${target_filter})', 'total_count', 2, false, current_timestamp, current_timestamp),
    (6, 1, 'SELECT ${src_field} FROM ${src_table} group by ${src_field} having count(*) > 1', 'duplicate_items', 0, true, current_timestamp, current_timestamp),
    (7, 1, 'SELECT COUNT(*) AS duplicates FROM duplicate_items', 'duplicate_count', 1, false, current_timestamp, current_timestamp),
    (8, 1, 'SELECT ${src_table}.* FROM (SELECT * FROM ${src_table} WHERE (${src_filter})) ${src_table} LEFT JOIN (SELECT * FROM ${target_table} WHERE (${target_filter})) ${target_table} ON ${on_clause} WHERE ${where_clause}', 'miss_items', 0, true, current_timestamp, current_timestamp),
    (9, 1, 'SELECT * FROM ${src_table} WHERE (${src_field} not regexp ''${regexp_pattern}'') AND (${src_filter}) ', 'regexp_items', 0, true, current_timestamp, current_timestamp),
    (10, 1, 'SELECT COUNT(*) AS regexps FROM regexp_items', 'regexp_count', 1, false, current_timestamp, current_timestamp);
-- Timeliness rule middle statement (id 11): keeps rows whose ${src_field},
-- parsed with ${datetime_format}, lies between ${begin_time} and ${deadline}
-- (inclusive), subject to ${src_filter}.
-- Fix: every sibling row in this seed uses the boolean literals true/false
-- for `is_error_output_sql`; this row alone used the bare integer 1.
-- Normalized to `true` (identical value in MySQL, consistent style).
INSERT IGNORE INTO `t_ds_dq_rule_execute_sql`
    (`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
VALUES(11, 1, 'SELECT * FROM ${src_table} WHERE (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${deadline}'', ''${datetime_format}'') <= 0) AND (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${begin_time}'', ''${datetime_format}'') >= 0) AND (${src_filter}) ', 'timeliness_items', 0, true, current_timestamp, current_timestamp);
-- Seed rule execute-sql fragments (ids 12-17), multi-row form.
-- INSERT IGNORE keeps re-runs idempotent: existing ids are skipped.
INSERT IGNORE INTO `t_ds_dq_rule_execute_sql`
    (`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
VALUES
    (12, 1, 'SELECT COUNT(*) AS timeliness FROM timeliness_items', 'timeliness_count', 1, false, current_timestamp, current_timestamp),
    (13, 1, 'SELECT * FROM ${src_table} where (${src_field} not in ( ${enum_list} ) or ${src_field} is null) AND (${src_filter}) ', 'enum_items', 0, true, current_timestamp, current_timestamp),
    (14, 1, 'SELECT COUNT(*) AS enums FROM enum_items', 'enum_count', 1, false, current_timestamp, current_timestamp),
    (15, 1, 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'table_count', 1, false, current_timestamp, current_timestamp),
    (16, 1, 'SELECT * FROM ${src_table} WHERE (${src_field} is null or ${src_field} = '''') AND (${src_filter})', 'null_items', 0, true, current_timestamp, current_timestamp),
    (17, 1, 'SELECT * FROM ${src_table} WHERE (length(${src_field}) ${logic_operator} ${field_length}) AND (${src_filter})', 'invalid_length_items', 0, true, current_timestamp, current_timestamp);
|
||||
--
-- Table structure for table `t_ds_dq_rule_input_entry`
--
-- One row per form-input definition used by the data-quality rule UI; the
-- seed rows below supply `select`/`input`/`textarea`/`group` widgets.
DROP TABLE IF EXISTS `t_ds_dq_rule_input_entry`;
CREATE TABLE `t_ds_dq_rule_input_entry` (
    `id` int(11) NOT NULL AUTO_INCREMENT,
    `field` varchar(255) DEFAULT NULL,            -- form field key (e.g. src_table, threshold)
    `type` varchar(255) DEFAULT NULL,             -- widget type: select / input / textarea / group
    `title` varchar(255) DEFAULT NULL,            -- i18n title, e.g. $t(src_table)
    `data` varchar(255) DEFAULT NULL,             -- default value shown in the widget
    `options` text DEFAULT NULL,                  -- JSON option list for select/group widgets
    `placeholder` varchar(255) DEFAULT NULL,
    `option_source_type` int(11) DEFAULT NULL,
    `data_type` int(11) DEFAULT NULL,
    `input_type` int(11) DEFAULT NULL,
    `is_show` tinyint(1) DEFAULT '1',
    `can_edit` tinyint(1) DEFAULT '1',
    `is_emit` tinyint(1) DEFAULT '0',
    `is_validate` tinyint(1) DEFAULT '1',
    `create_time` datetime DEFAULT NULL,
    `update_time` datetime DEFAULT NULL,
    PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE = utf8_bin;
-- Seed rule input entries (ids 1-9): source-side connector/table/field
-- widgets plus the check-type, operator and threshold controls.
-- Rewritten as one multi-row INSERT IGNORE (idempotent on re-run).
INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
    (`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
VALUES
    (1, 'src_connector_type', 'select', '$t(src_connector_type)', '', '[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]', 'please select source connector type', 2, 2, 0, 1, 1, 1, 0, current_timestamp, current_timestamp),
    (2, 'src_datasource_id', 'select', '$t(src_datasource_id)', '', NULL, 'please select source datasource id', 1, 2, 0, 1, 1, 1, 0, current_timestamp, current_timestamp),
    (3, 'src_table', 'select', '$t(src_table)', NULL, NULL, 'Please enter source table name', 0, 0, 0, 1, 1, 1, 1, current_timestamp, current_timestamp),
    (4, 'src_filter', 'input', '$t(src_filter)', NULL, NULL, 'Please enter filter expression', 0, 3, 0, 1, 1, 0, 0, current_timestamp, current_timestamp),
    (5, 'src_field', 'select', '$t(src_field)', NULL, NULL, 'Please enter column, only single column is supported', 0, 0, 0, 1, 1, 0, 1, current_timestamp, current_timestamp),
    (6, 'statistics_name', 'input', '$t(statistics_name)', NULL, NULL, 'Please enter statistics name, the alias in statistics execute sql', 0, 0, 1, 0, 0, 0, 1, current_timestamp, current_timestamp),
    (7, 'check_type', 'select', '$t(check_type)', '0', '[{"label":"Expected - Actual","value":"0"},{"label":"Actual - Expected","value":"1"},{"label":"Actual / Expected","value":"2"},{"label":"(Expected - Actual) / Expected","value":"3"}]', 'please select check type', 0, 0, 3, 1, 1, 1, 0, current_timestamp, current_timestamp),
    (8, 'operator', 'select', '$t(operator)', '0', '[{"label":"=","value":"0"},{"label":"<","value":"1"},{"label":"<=","value":"2"},{"label":">","value":"3"},{"label":">=","value":"4"},{"label":"!=","value":"5"}]', 'please select operator', 0, 0, 3, 1, 1, 0, 0, current_timestamp, current_timestamp),
    (9, 'threshold', 'input', '$t(threshold)', NULL, NULL, 'Please enter threshold, number is needed', 0, 2, 3, 1, 1, 0, 1, current_timestamp, current_timestamp);
-- Seed rule input entry id 10: failure-strategy selector (Alert / Block).
-- Fix: this statement alone was written as `INSERT Ignore`; normalized the
-- keyword casing to `INSERT IGNORE` to match every other statement in this
-- file (MySQL keywords are case-insensitive, so behavior is unchanged).
INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
    (`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
VALUES(10, 'failure_strategy', 'select', '$t(failure_strategy)', '0', '[{"label":"Alert","value":"0"},{"label":"Block","value":"1"}]', 'please select failure strategy', 0, 0, 3, 1, 1, 0, 0, current_timestamp, current_timestamp);
-- Seed rule input entries (ids 11-31): target-side widgets, writer widgets,
-- comparison controls and the rule-specific inputs (regexp, timeliness,
-- enumeration, length). Rewritten as one multi-row INSERT IGNORE.
INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
    (`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
VALUES
    (11, 'target_connector_type', 'select', '$t(target_connector_type)', '', '[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]', 'Please select target connector type', 2, 0, 0, 1, 1, 1, 0, current_timestamp, current_timestamp),
    (12, 'target_datasource_id', 'select', '$t(target_datasource_id)', '', NULL, 'Please select target datasource', 1, 2, 0, 1, 1, 1, 0, current_timestamp, current_timestamp),
    (13, 'target_table', 'select', '$t(target_table)', NULL, NULL, 'Please enter target table', 0, 0, 0, 1, 1, 1, 1, current_timestamp, current_timestamp),
    (14, 'target_filter', 'input', '$t(target_filter)', NULL, NULL, 'Please enter target filter expression', 0, 3, 0, 1, 1, 0, 0, current_timestamp, current_timestamp),
    (15, 'mapping_columns', 'group', '$t(mapping_columns)', NULL, '[{"field":"src_field","props":{"placeholder":"Please input src field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"src_field"},{"field":"operator","props":{"placeholder":"Please input operator","rows":0,"disabled":false,"size":"small"},"type":"input","title":"operator"},{"field":"target_field","props":{"placeholder":"Please input target field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"target_field"}]', 'please enter mapping columns', 0, 0, 0, 1, 1, 0, 0, current_timestamp, current_timestamp),
    (16, 'statistics_execute_sql', 'textarea', '$t(statistics_execute_sql)', NULL, NULL, 'Please enter statistics execute sql', 0, 3, 0, 1, 1, 0, 1, current_timestamp, current_timestamp),
    (17, 'comparison_name', 'input', '$t(comparison_name)', NULL, NULL, 'Please enter comparison name, the alias in comparison execute sql', 0, 0, 0, 0, 0, 0, 1, current_timestamp, current_timestamp),
    (18, 'comparison_execute_sql', 'textarea', '$t(comparison_execute_sql)', NULL, NULL, 'Please enter comparison execute sql', 0, 3, 0, 1, 1, 0, 1, current_timestamp, current_timestamp),
    (19, 'comparison_type', 'select', '$t(comparison_type)', '', NULL, 'Please enter comparison title', 3, 0, 2, 1, 0, 1, 0, current_timestamp, current_timestamp),
    (20, 'writer_connector_type', 'select', '$t(writer_connector_type)', '', '[{"label":"MYSQL","value":"0"},{"label":"POSTGRESQL","value":"1"}]', 'please select writer connector type', 0, 2, 0, 1, 1, 1, 0, current_timestamp, current_timestamp),
    (21, 'writer_datasource_id', 'select', '$t(writer_datasource_id)', '', NULL, 'please select writer datasource id', 1, 2, 0, 1, 1, 0, 0, current_timestamp, current_timestamp),
    (22, 'target_field', 'select', '$t(target_field)', NULL, NULL, 'Please enter column, only single column is supported', 0, 0, 0, 1, 1, 0, 0, current_timestamp, current_timestamp),
    (23, 'field_length', 'input', '$t(field_length)', NULL, NULL, 'Please enter length limit', 0, 3, 0, 1, 1, 0, 0, current_timestamp, current_timestamp),
    (24, 'logic_operator', 'select', '$t(logic_operator)', '=', '[{"label":"=","value":"="},{"label":"<","value":"<"},{"label":"<=","value":"<="},{"label":">","value":">"},{"label":">=","value":">="},{"label":"<>","value":"<>"}]', 'please select logic operator', 0, 0, 3, 1, 1, 0, 0, current_timestamp, current_timestamp),
    (25, 'regexp_pattern', 'input', '$t(regexp_pattern)', NULL, NULL, 'Please enter regexp pattern', 0, 0, 0, 1, 1, 0, 0, current_timestamp, current_timestamp),
    (26, 'deadline', 'input', '$t(deadline)', NULL, NULL, 'Please enter deadline', 0, 0, 0, 1, 1, 0, 0, current_timestamp, current_timestamp),
    (27, 'datetime_format', 'input', '$t(datetime_format)', NULL, NULL, 'Please enter datetime format', 0, 0, 0, 1, 1, 0, 0, current_timestamp, current_timestamp),
    (28, 'enum_list', 'input', '$t(enum_list)', NULL, NULL, 'Please enter enumeration', 0, 0, 0, 1, 1, 0, 0, current_timestamp, current_timestamp),
    (29, 'begin_time', 'input', '$t(begin_time)', NULL, NULL, 'Please enter begin time', 0, 0, 0, 1, 1, 0, 0, current_timestamp, current_timestamp),
    (30, 'src_database', 'select', '$t(src_database)', NULL, NULL, 'Please select source database', 0, 0, 0, 1, 1, 1, 1, current_timestamp, current_timestamp),
    (31, 'target_database', 'select', '$t(target_database)', NULL, NULL, 'Please select target database', 0, 0, 0, 1, 1, 1, 1, current_timestamp, current_timestamp);
|
||||
--
-- Table structure for table `t_ds_dq_task_statistics_value`
--
-- Stores one computed statistics value per data-quality task execution,
-- keyed back to the workflow definition, task instance and rule.
DROP TABLE IF EXISTS `t_ds_dq_task_statistics_value`;
CREATE TABLE `t_ds_dq_task_statistics_value` (
    `id` int(11) NOT NULL AUTO_INCREMENT,
    `workflow_definition_id` int(11) DEFAULT NULL,
    `task_instance_id` int(11) DEFAULT NULL,
    `rule_id` int(11) NOT NULL,
    `unique_code` varchar(255) NULL,
    `statistics_name` varchar(255) NULL,   -- alias produced by the rule's statistics SQL
    `statistics_value` double NULL,
    `data_time` datetime DEFAULT NULL,
    `create_time` datetime DEFAULT NULL,
    `update_time` datetime DEFAULT NULL,
    PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE = utf8_bin;
||||
--
-- Table structure for table `t_ds_relation_rule_execute_sql`
--
-- Many-to-many link between a data-quality rule and the execute-sql
-- fragments (t_ds_dq_rule_execute_sql) it runs.
DROP TABLE IF EXISTS `t_ds_relation_rule_execute_sql`;
CREATE TABLE `t_ds_relation_rule_execute_sql` (
    `id` int(11) NOT NULL AUTO_INCREMENT,
    `rule_id` int(11) DEFAULT NULL,
    `execute_sql_id` int(11) DEFAULT NULL,
    `create_time` datetime NULL,
    `update_time` datetime NULL,
    PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE = utf8_bin;
||||
-- Seed rule -> execute-sql links (ids 1-15). Original row order (including
-- the out-of-sequence ids 3 and 2) is preserved; consolidated into a single
-- multi-row INSERT IGNORE with identical effect.
INSERT IGNORE INTO `t_ds_relation_rule_execute_sql`
    (`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
VALUES
    (1, 1, 1, current_timestamp, current_timestamp),
    (3, 5, 4, current_timestamp, current_timestamp),
    (2, 3, 3, current_timestamp, current_timestamp),
    (4, 3, 8, current_timestamp, current_timestamp),
    (5, 6, 6, current_timestamp, current_timestamp),
    (6, 6, 7, current_timestamp, current_timestamp),
    (7, 7, 9, current_timestamp, current_timestamp),
    (8, 7, 10, current_timestamp, current_timestamp),
    (9, 8, 11, current_timestamp, current_timestamp),
    (10, 8, 12, current_timestamp, current_timestamp),
    (11, 9, 13, current_timestamp, current_timestamp),
    (12, 9, 14, current_timestamp, current_timestamp),
    (13, 10, 15, current_timestamp, current_timestamp),
    (14, 1, 16, current_timestamp, current_timestamp),
    (15, 5, 17, current_timestamp, current_timestamp);
||||
--
-- Table structure for table `t_ds_relation_rule_input_entry`
--
-- Many-to-many link between a data-quality rule and its form input entries
-- (t_ds_dq_rule_input_entry); `values_map` can override entry defaults with
-- a JSON object, `index` orders the inputs within the rule's form.
DROP TABLE IF EXISTS `t_ds_relation_rule_input_entry`;
CREATE TABLE `t_ds_relation_rule_input_entry` (
    `id` int(11) NOT NULL AUTO_INCREMENT,
    `rule_id` int(11) DEFAULT NULL,
    `rule_input_entry_id` int(11) DEFAULT NULL,
    `values_map` text DEFAULT NULL,
    `index` int(11) DEFAULT NULL,
    `create_time` datetime DEFAULT NULL,
    `update_time` datetime DEFAULT NULL,
    PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE = utf8_bin;
-- Seed rule -> input-entry links, ids 1-12 (all for rule 1).
-- Consolidated into one multi-row INSERT IGNORE (idempotent on re-run).
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
    (`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
VALUES
    (1, 1, 1, NULL, 1, current_timestamp, current_timestamp),
    (2, 1, 2, NULL, 2, current_timestamp, current_timestamp),
    (3, 1, 3, NULL, 3, current_timestamp, current_timestamp),
    (4, 1, 4, NULL, 4, current_timestamp, current_timestamp),
    (5, 1, 5, NULL, 5, current_timestamp, current_timestamp),
    (6, 1, 6, '{"statistics_name":"null_count.nulls"}', 6, current_timestamp, current_timestamp),
    (7, 1, 7, NULL, 7, current_timestamp, current_timestamp),
    (8, 1, 8, NULL, 8, current_timestamp, current_timestamp),
    (9, 1, 9, NULL, 9, current_timestamp, current_timestamp),
    (10, 1, 10, NULL, 10, current_timestamp, current_timestamp),
    (11, 1, 17, '', 11, current_timestamp, current_timestamp),
    (12, 1, 19, NULL, 12, current_timestamp, current_timestamp);
-- Seed rule -> input-entry links, ids 13-24 (all for rule 2).
-- NOTE(review): id 23 (and index 11) is absent in the original seed;
-- preserved as-is rather than invented.
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
    (`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
VALUES
    (13, 2, 1, NULL, 1, current_timestamp, current_timestamp),
    (14, 2, 2, NULL, 2, current_timestamp, current_timestamp),
    (15, 2, 3, NULL, 3, current_timestamp, current_timestamp),
    (16, 2, 6, '{"is_show":"true","can_edit":"true"}', 4, current_timestamp, current_timestamp),
    (17, 2, 16, NULL, 5, current_timestamp, current_timestamp),
    (18, 2, 4, NULL, 6, current_timestamp, current_timestamp),
    (19, 2, 7, NULL, 7, current_timestamp, current_timestamp),
    (20, 2, 8, NULL, 8, current_timestamp, current_timestamp),
    (21, 2, 9, NULL, 9, current_timestamp, current_timestamp),
    (22, 2, 10, NULL, 10, current_timestamp, current_timestamp),
    (24, 2, 19, NULL, 12, current_timestamp, current_timestamp);
-- Seed rule -> input-entry links, ids 25-35 (all for rule 3).
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
    (`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
VALUES
    (25, 3, 1, NULL, 1, current_timestamp, current_timestamp),
    (26, 3, 2, NULL, 2, current_timestamp, current_timestamp),
    (27, 3, 3, NULL, 3, current_timestamp, current_timestamp),
    (28, 3, 4, NULL, 4, current_timestamp, current_timestamp),
    (29, 3, 11, NULL, 5, current_timestamp, current_timestamp),
    (30, 3, 12, NULL, 6, current_timestamp, current_timestamp),
    (31, 3, 13, NULL, 7, current_timestamp, current_timestamp),
    (32, 3, 14, NULL, 8, current_timestamp, current_timestamp),
    (33, 3, 15, NULL, 9, current_timestamp, current_timestamp),
    (34, 3, 7, NULL, 10, current_timestamp, current_timestamp),
    (35, 3, 8, NULL, 11, current_timestamp, current_timestamp);
-- Seed rule -> input-entry links, ids 36-44 (tail of rule 3, start of rule 4).
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
    (`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
VALUES
    (36, 3, 9, NULL, 12, current_timestamp, current_timestamp),
    (37, 3, 10, NULL, 13, current_timestamp, current_timestamp),
    (38, 3, 17, '{"comparison_name":"total_count.total"}', 14, current_timestamp, current_timestamp),
    (39, 3, 19, NULL, 15, current_timestamp, current_timestamp),
    (40, 4, 1, NULL, 1, current_timestamp, current_timestamp),
    (41, 4, 2, NULL, 2, current_timestamp, current_timestamp),
    (42, 4, 3, NULL, 3, current_timestamp, current_timestamp),
    (43, 4, 6, '{"is_show":"true","can_edit":"true"}', 4, current_timestamp, current_timestamp),
    (44, 4, 16, NULL, 5, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(45, 4, 11, NULL, 6, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(46, 4, 12, NULL, 7, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(47, 4, 13, NULL, 8, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(48, 4, 17, '{"is_show":"true","can_edit":"true"}', 9, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(49, 4, 18, NULL, 10, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(50, 4, 7, NULL, 11, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(51, 4, 8, NULL, 12, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(52, 4, 9, NULL, 13, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(53, 4, 10, NULL, 14, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(62, 3, 6, '{"statistics_name":"miss_count.miss"}', 18, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(63, 5, 1, NULL, 1, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(64, 5, 2, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(65, 5, 3, NULL, 3, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(66, 5, 4, NULL, 4, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(67, 5, 5, NULL, 5, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(68, 5, 6, '{"statistics_name":"invalid_length_count.valids"}', 6, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(69, 5, 24, NULL, 7, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(70, 5, 23, NULL, 8, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(71, 5, 7, NULL, 9, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(72, 5, 8, NULL, 10, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(73, 5, 9, NULL, 11, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(74, 5, 10, NULL, 12, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(75, 5, 17, '', 13, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(76, 5, 19, NULL, 14, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(79, 6, 1, NULL, 1, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(80, 6, 2, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(81, 6, 3, NULL, 3, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(82, 6, 4, NULL, 4, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(83, 6, 5, NULL, 5, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(84, 6, 6, '{"statistics_name":"duplicate_count.duplicates"}', 6, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(85, 6, 7, NULL, 7, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(86, 6, 8, NULL, 8, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(87, 6, 9, NULL, 9, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(88, 6, 10, NULL, 10, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(89, 6, 17, '', 11, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(90, 6, 19, NULL, 12, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(93, 7, 1, NULL, 1, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(94, 7, 2, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(95, 7, 3, NULL, 3, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(96, 7, 4, NULL, 4, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(97, 7, 5, NULL, 5, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(98, 7, 6, '{"statistics_name":"regexp_count.regexps"}', 6, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(99, 7, 25, NULL, 5, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(100, 7, 7, NULL, 7, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(101, 7, 8, NULL, 8, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(102, 7, 9, NULL, 9, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(103, 7, 10, NULL, 10, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(104, 7, 17, NULL, 11, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(105, 7, 19, NULL, 12, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(108, 8, 1, NULL, 1, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(109, 8, 2, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(110, 8, 3, NULL, 3, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(111, 8, 4, NULL, 4, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(112, 8, 5, NULL, 5, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(113, 8, 6, '{"statistics_name":"timeliness_count.timeliness"}', 6, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(114, 8, 26, NULL, 8, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(115, 8, 27, NULL, 9, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(116, 8, 7, NULL, 10, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(117, 8, 8, NULL, 11, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(118, 8, 9, NULL, 12, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(119, 8, 10, NULL, 13, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(120, 8, 17, NULL, 14, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(121, 8, 19, NULL, 15, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(124, 9, 1, NULL, 1, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(125, 9, 2, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(126, 9, 3, NULL, 3, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(127, 9, 4, NULL, 4, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(128, 9, 5, NULL, 5, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(129, 9, 6, '{"statistics_name":"enum_count.enums"}', 6, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(130, 9, 28, NULL, 7, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(131, 9, 7, NULL, 8, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(132, 9, 8, NULL, 9, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(133, 9, 9, NULL, 10, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(134, 9, 10, NULL, 11, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(135, 9, 17, NULL, 12, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(136, 9, 19, NULL, 13, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(139, 10, 1, NULL, 1, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(140, 10, 2, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(141, 10, 3, NULL, 3, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(142, 10, 4, NULL, 4, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(143, 10, 6, '{"statistics_name":"table_count.total"}', 6, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(144, 10, 7, NULL, 7, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(145, 10, 8, NULL, 8, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(146, 10, 9, NULL, 9, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(147, 10, 10, NULL, 10, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(148, 10, 17, NULL, 11, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(149, 10, 19, NULL, 12, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO t_ds_relation_rule_input_entry
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(150, 8, 29, NULL, 7, current_timestamp, current_timestamp);
|
||||
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(151, 1, 30, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(152, 2, 30, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(153, 3, 30, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(154, 4, 30, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(155, 5, 30, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(156, 6, 30, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(157, 7, 30, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(158, 8, 30, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(159, 9, 30, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(160, 10, 30, NULL, 2, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(161, 3, 31, NULL, 6, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(162, 4, 31, NULL, 7, current_timestamp, current_timestamp);
|
||||
-- ----------------------------
|
||||
-- Table structure for t_ds_environment
|
||||
-- ----------------------------
|
||||
|
@ -1095,819 +1095,7 @@ CREATE TABLE t_ds_alert_plugin_instance (
|
||||
CONSTRAINT t_ds_alert_plugin_instance_pk PRIMARY KEY (id)
|
||||
);
|
||||
|
||||
--
|
||||
-- Table structure for table t_ds_dq_comparison_type
|
||||
--
|
||||
DROP TABLE IF EXISTS t_ds_dq_comparison_type;
|
||||
CREATE TABLE t_ds_dq_comparison_type (
|
||||
id serial NOT NULL,
|
||||
"type" varchar NOT NULL,
|
||||
execute_sql varchar NULL,
|
||||
output_table varchar NULL,
|
||||
"name" varchar NULL,
|
||||
create_time timestamp NULL,
|
||||
update_time timestamp NULL,
|
||||
is_inner_source bool NULL,
|
||||
CONSTRAINT t_ds_dq_comparison_type_pk PRIMARY KEY (id)
|
||||
);
|
||||
INSERT INTO t_ds_dq_comparison_type
|
||||
(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
|
||||
VALUES(1, 'FixValue', NULL, NULL, NULL, '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false);
|
||||
INSERT INTO t_ds_dq_comparison_type
|
||||
(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
|
||||
VALUES(2, 'DailyAvg', 'select round(avg(statistics_value),2) as day_avg from t_ds_dq_task_statistics_value where data_time >=date_trunc(''DAY'', ${data_time}) and data_time < date_add(date_trunc(''day'', ${data_time}),1) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'day_range', 'day_range.day_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
|
||||
INSERT INTO t_ds_dq_comparison_type
|
||||
(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
|
||||
VALUES(3, 'WeeklyAvg', 'select round(avg(statistics_value),2) as week_avg from t_ds_dq_task_statistics_value where data_time >= date_trunc(''WEEK'', ${data_time}) and data_time <date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'week_range', 'week_range.week_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
|
||||
INSERT INTO t_ds_dq_comparison_type
|
||||
(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
|
||||
VALUES(4, 'MonthlyAvg', 'select round(avg(statistics_value),2) as month_avg from t_ds_dq_task_statistics_value where data_time >= date_trunc(''MONTH'', ${data_time}) and data_time <date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'month_range', 'month_range.month_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
|
||||
INSERT INTO t_ds_dq_comparison_type
|
||||
(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
|
||||
VALUES(5, 'Last7DayAvg', 'select round(avg(statistics_value),2) as last_7_avg from t_ds_dq_task_statistics_value where data_time >= date_add(date_trunc(''day'', ${data_time}),-7) and data_time <date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'last_seven_days', 'last_seven_days.last_7_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
|
||||
INSERT INTO t_ds_dq_comparison_type
|
||||
(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
|
||||
VALUES(6, 'Last30DayAvg', 'select round(avg(statistics_value),2) as last_30_avg from t_ds_dq_task_statistics_value where data_time >= date_add(date_trunc(''day'', ${data_time}),-30) and data_time < date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'last_thirty_days', 'last_thirty_days.last_30_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
|
||||
INSERT INTO t_ds_dq_comparison_type
|
||||
(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
|
||||
VALUES(7, 'SrcTableTotalRows', 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'total_count', 'total_count.total', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false);
|
||||
INSERT INTO t_ds_dq_comparison_type
|
||||
(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
|
||||
VALUES(8, 'TargetTableTotalRows', 'SELECT COUNT(*) AS total FROM ${target_table} WHERE (${target_filter})', 'total_count', 'total_count.total', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false);
|
||||
|
||||
--
|
||||
-- Table structure for table t_ds_dq_execute_result
|
||||
--
|
||||
DROP TABLE IF EXISTS t_ds_dq_execute_result;
|
||||
CREATE TABLE t_ds_dq_execute_result (
|
||||
id serial NOT NULL,
|
||||
workflow_definition_id int4 NULL,
|
||||
workflow_instance_id int4 NULL,
|
||||
task_instance_id int4 NULL,
|
||||
rule_type int4 NULL,
|
||||
rule_name varchar(255) DEFAULT NULL,
|
||||
statistics_value float8 NULL,
|
||||
comparison_value float8 NULL,
|
||||
check_type int4 NULL,
|
||||
threshold float8 NULL,
|
||||
"operator" int4 NULL,
|
||||
failure_strategy int4 NULL,
|
||||
state int4 NULL,
|
||||
user_id int4 NULL,
|
||||
create_time timestamp NULL,
|
||||
update_time timestamp NULL,
|
||||
comparison_type int4 NULL,
|
||||
error_output_path text NULL,
|
||||
CONSTRAINT t_ds_dq_execute_result_pk PRIMARY KEY (id)
|
||||
);
|
||||
|
||||
--
|
||||
-- Table structure for table t_ds_dq_rule
|
||||
--
|
||||
DROP TABLE IF EXISTS t_ds_dq_rule;
|
||||
CREATE TABLE t_ds_dq_rule (
|
||||
id serial NOT NULL,
|
||||
"name" varchar(255) DEFAULT NULL,
|
||||
"type" int4 NULL,
|
||||
user_id int4 NULL,
|
||||
create_time timestamp NULL,
|
||||
update_time timestamp NULL,
|
||||
CONSTRAINT t_ds_dq_rule_pk PRIMARY KEY (id)
|
||||
);
|
||||
INSERT INTO t_ds_dq_rule
|
||||
(id, "name", "type", user_id, create_time, update_time)
|
||||
VALUES(1, '$t(null_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
|
||||
INSERT INTO t_ds_dq_rule
|
||||
(id, "name", "type", user_id, create_time, update_time)
|
||||
VALUES(2, '$t(custom_sql)', 1, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
|
||||
INSERT INTO t_ds_dq_rule
|
||||
(id, "name", "type", user_id, create_time, update_time)
|
||||
VALUES(3, '$t(multi_table_accuracy)', 2, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
|
||||
INSERT INTO t_ds_dq_rule
|
||||
(id, "name", "type", user_id, create_time, update_time)
|
||||
VALUES(4, '$t(multi_table_value_comparison)', 3, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
|
||||
INSERT INTO t_ds_dq_rule
|
||||
(id, "name", "type", user_id, create_time, update_time)
|
||||
VALUES(5, '$t(field_length_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
|
||||
INSERT INTO t_ds_dq_rule
|
||||
(id, "name", "type", user_id, create_time, update_time)
|
||||
VALUES(6, '$t(uniqueness_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
|
||||
INSERT INTO t_ds_dq_rule
|
||||
(id, "name", "type", user_id, create_time, update_time)
|
||||
VALUES(7, '$t(regexp_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
|
||||
INSERT INTO t_ds_dq_rule
|
||||
(id, "name", "type", user_id, create_time, update_time)
|
||||
VALUES(8, '$t(timeliness_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
|
||||
INSERT INTO t_ds_dq_rule
|
||||
(id, "name", "type", user_id, create_time, update_time)
|
||||
VALUES(9, '$t(enumeration_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
|
||||
INSERT INTO t_ds_dq_rule
|
||||
(id, "name", "type", user_id, create_time, update_time)
|
||||
VALUES(10, '$t(table_count_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
|
||||
|
||||
--
|
||||
-- Table structure for table t_ds_dq_rule_execute_sql
|
||||
--
|
||||
DROP TABLE IF EXISTS t_ds_dq_rule_execute_sql;
|
||||
CREATE TABLE t_ds_dq_rule_execute_sql (
|
||||
id serial NOT NULL,
|
||||
"index" int4 NULL,
|
||||
"sql" text NULL,
|
||||
table_alias varchar(255) DEFAULT NULL,
|
||||
"type" int4 NULL,
|
||||
create_time timestamp NULL,
|
||||
update_time timestamp NULL,
|
||||
is_error_output_sql bool NULL,
|
||||
CONSTRAINT t_ds_dq_rule_execute_sql_pk PRIMARY KEY (id)
|
||||
);
|
||||
INSERT INTO t_ds_dq_rule_execute_sql
|
||||
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
|
||||
VALUES(1, 1, 'SELECT COUNT(*) AS nulls FROM null_items', 'null_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_execute_sql
|
||||
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
|
||||
VALUES(2, 1, 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'total_count', 2, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_execute_sql
|
||||
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
|
||||
VALUES(3, 1, 'SELECT COUNT(*) AS miss from miss_items', 'miss_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_execute_sql
|
||||
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
|
||||
VALUES(4, 1, 'SELECT COUNT(*) AS valids FROM invalid_length_items', 'invalid_length_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_execute_sql
|
||||
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
|
||||
VALUES(5, 1, 'SELECT COUNT(*) AS total FROM ${target_table} WHERE (${target_filter})', 'total_count', 2, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_execute_sql
|
||||
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
|
||||
VALUES(6, 1, 'SELECT ${src_field} FROM ${src_table} group by ${src_field} having count(*) > 1', 'duplicate_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_execute_sql
|
||||
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
|
||||
VALUES(7, 1, 'SELECT COUNT(*) AS duplicates FROM duplicate_items', 'duplicate_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_execute_sql
|
||||
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
|
||||
VALUES(8, 1, 'SELECT ${src_table}.* FROM (SELECT * FROM ${src_table} WHERE (${src_filter})) ${src_table} LEFT JOIN (SELECT * FROM ${target_table} WHERE (${target_filter})) ${target_table} ON ${on_clause} WHERE ${where_clause}', 'miss_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_execute_sql
|
||||
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
|
||||
VALUES(9, 1, 'SELECT * FROM ${src_table} WHERE (${src_field} not regexp ''${regexp_pattern}'') AND (${src_filter}) ', 'regexp_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_execute_sql
|
||||
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
|
||||
VALUES(10, 1, 'SELECT COUNT(*) AS regexps FROM regexp_items', 'regexp_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_execute_sql
|
||||
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
|
||||
VALUES(11, 1, 'SELECT * FROM ${src_table} WHERE (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${deadline}'', ''${datetime_format}'') <= 0) AND (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${begin_time}'', ''${datetime_format}'') >= 0) AND (${src_filter}) ', 'timeliness_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_execute_sql
|
||||
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
|
||||
VALUES(12, 1, 'SELECT COUNT(*) AS timeliness FROM timeliness_items', 'timeliness_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_execute_sql
|
||||
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
|
||||
VALUES(13, 1, 'SELECT * FROM ${src_table} where (${src_field} not in ( ${enum_list} ) or ${src_field} is null) AND (${src_filter}) ', 'enum_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_execute_sql
|
||||
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
|
||||
VALUES(14, 1, 'SELECT COUNT(*) AS enums FROM enum_items', 'enum_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_execute_sql
|
||||
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
|
||||
VALUES(15, 1, 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'table_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_execute_sql
|
||||
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
|
||||
VALUES(16, 1, 'SELECT * FROM ${src_table} WHERE (${src_field} is null or ${src_field} = '''') AND (${src_filter})', 'null_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_execute_sql
|
||||
(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
|
||||
VALUES(17, 1, 'SELECT * FROM ${src_table} WHERE (length(${src_field}) ${logic_operator} ${field_length}) AND (${src_filter})', 'invalid_length_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
|
||||
--
|
||||
-- Table structure for table t_ds_dq_rule_input_entry
|
||||
--
|
||||
DROP TABLE IF EXISTS t_ds_dq_rule_input_entry;
|
||||
CREATE TABLE t_ds_dq_rule_input_entry (
|
||||
id serial NOT NULL,
|
||||
field varchar(255) DEFAULT NULL,
|
||||
"type" varchar(255) DEFAULT NULL,
|
||||
title varchar(255) DEFAULT NULL,
|
||||
data varchar(255) DEFAULT NULL,
|
||||
"options" text DEFAULT NULL,
|
||||
placeholder varchar(255) DEFAULT NULL,
|
||||
option_source_type int4 NULL,
|
||||
data_type int4 NULL,
|
||||
input_type int4 NULL,
|
||||
is_show int2 NULL DEFAULT '1'::smallint,
|
||||
can_edit int2 NULL DEFAULT '1'::smallint,
|
||||
is_emit int2 NULL DEFAULT '0'::smallint,
|
||||
is_validate int2 NULL DEFAULT '0'::smallint,
|
||||
create_time timestamp NULL,
|
||||
update_time timestamp NULL,
|
||||
CONSTRAINT t_ds_dq_rule_input_entry_pk PRIMARY KEY (id)
|
||||
);
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(1, 'src_connector_type', 'select', '$t(src_connector_type)', '', '[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]', 'please select source connector type', 2, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(2, 'src_datasource_id', 'select', '$t(src_datasource_id)', '', NULL, 'please select source datasource id', 1, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(3, 'src_table', 'select', '$t(src_table)', NULL, NULL, 'Please enter source table name', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(4, 'src_filter', 'input', '$t(src_filter)', NULL, NULL, 'Please enter filter expression', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(5, 'src_field', 'select', '$t(src_field)', NULL, NULL, 'Please enter column, only single column is supported', 0, 0, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(6, 'statistics_name', 'input', '$t(statistics_name)', NULL, NULL, 'Please enter statistics name, the alias in statistics execute sql', 0, 0, 1, 0, 0, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(7, 'check_type', 'select', '$t(check_type)', '0', '[{"label":"Expected - Actual","value":"0"},{"label":"Actual - Expected","value":"1"},{"label":"Actual / Expected","value":"2"},{"label":"(Expected - Actual) / Expected","value":"3"}]', 'please select check type', 0, 0, 3, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(8, 'operator', 'select', '$t(operator)', '0', '[{"label":"=","value":"0"},{"label":"<","value":"1"},{"label":"<=","value":"2"},{"label":">","value":"3"},{"label":">=","value":"4"},{"label":"!=","value":"5"}]', 'please select operator', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(9, 'threshold', 'input', '$t(threshold)', NULL, NULL, 'Please enter threshold, number is needed', 0, 2, 3, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(10, 'failure_strategy', 'select', '$t(failure_strategy)', '0', '[{"label":"Alert","value":"0"},{"label":"Block","value":"1"}]', 'please select failure strategy', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(11, 'target_connector_type', 'select', '$t(target_connector_type)', '', '[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]', 'Please select target connector type', 2, 0, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(12, 'target_datasource_id', 'select', '$t(target_datasource_id)', '', NULL, 'Please select target datasource', 1, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(13, 'target_table', 'select', '$t(target_table)', NULL, NULL, 'Please enter target table', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(14, 'target_filter', 'input', '$t(target_filter)', NULL, NULL, 'Please enter target filter expression', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(15, 'mapping_columns', 'group', '$t(mapping_columns)', NULL, '[{"field":"src_field","props":{"placeholder":"Please input src field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"src_field"},{"field":"operator","props":{"placeholder":"Please input operator","rows":0,"disabled":false,"size":"small"},"type":"input","title":"operator"},{"field":"target_field","props":{"placeholder":"Please input target field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"target_field"}]', 'please enter mapping columns', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(16, 'statistics_execute_sql', 'textarea', '$t(statistics_execute_sql)', NULL, NULL, 'Please enter statistics execute sql', 0, 3, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(17, 'comparison_name', 'input', '$t(comparison_name)', NULL, NULL, 'Please enter comparison name, the alias in comparison execute sql', 0, 0, 0, 0, 0, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(18, 'comparison_execute_sql', 'textarea', '$t(comparison_execute_sql)', NULL, NULL, 'Please enter comparison execute sql', 0, 3, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(19, 'comparison_type', 'select', '$t(comparison_type)', '', NULL, 'Please enter comparison title', 3, 0, 2, 1, 0, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(20, 'writer_connector_type', 'select', '$t(writer_connector_type)', '', '[{"label":"MYSQL","value":"0"},{"label":"POSTGRESQL","value":"1"}]', 'please select writer connector type', 0, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(21, 'writer_datasource_id', 'select', '$t(writer_datasource_id)', '', NULL, 'please select writer datasource id', 1, 2, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(22, 'target_field', 'select', '$t(target_field)', NULL, NULL, 'Please enter column, only single column is supported', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(23, 'field_length', 'input', '$t(field_length)', NULL, NULL, 'Please enter length limit', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(24, 'logic_operator', 'select', '$t(logic_operator)', '=', '[{"label":"=","value":"="},{"label":"<","value":"<"},{"label":"<=","value":"<="},{"label":">","value":">"},{"label":">=","value":">="},{"label":"<>","value":"<>"}]', 'please select logic operator', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(25, 'regexp_pattern', 'input', '$t(regexp_pattern)', NULL, NULL, 'Please enter regexp pattern', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(26, 'deadline', 'input', '$t(deadline)', NULL, NULL, 'Please enter deadline', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(27, 'datetime_format', 'input', '$t(datetime_format)', NULL, NULL, 'Please enter datetime format', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(28, 'enum_list', 'input', '$t(enum_list)', NULL, NULL, 'Please enter enumeration', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(29, 'begin_time', 'input', '$t(begin_time)', NULL, NULL, 'Please enter begin time', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(30, 'src_database', 'select', '$t(src_database)', NULL, NULL, 'Please select source database', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(31, 'target_database', 'select', '$t(target_database)', NULL, NULL, 'Please select target database', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
|
||||
--
|
||||
-- Table structure for table t_ds_dq_task_statistics_value
|
||||
--
|
||||
DROP TABLE IF EXISTS t_ds_dq_task_statistics_value;
|
||||
CREATE TABLE t_ds_dq_task_statistics_value (
|
||||
id serial NOT NULL,
|
||||
workflow_definition_id int4 NOT NULL,
|
||||
task_instance_id int4 NULL,
|
||||
rule_id int4 NOT NULL,
|
||||
unique_code varchar NOT NULL,
|
||||
statistics_name varchar NULL,
|
||||
statistics_value float8 NULL,
|
||||
data_time timestamp(0) NULL,
|
||||
create_time timestamp(0) NULL,
|
||||
update_time timestamp(0) NULL,
|
||||
CONSTRAINT t_ds_dq_task_statistics_value_pk PRIMARY KEY (id)
|
||||
);
|
||||
|
||||
--
|
||||
-- Table structure for table t_ds_relation_rule_execute_sql
|
||||
--
|
||||
DROP TABLE IF EXISTS t_ds_relation_rule_execute_sql;
|
||||
CREATE TABLE t_ds_relation_rule_execute_sql (
|
||||
id serial NOT NULL,
|
||||
rule_id int4 NULL,
|
||||
execute_sql_id int4 NULL,
|
||||
create_time timestamp NULL,
|
||||
update_time timestamp NULL,
|
||||
CONSTRAINT t_ds_relation_rule_execute_sql_pk PRIMARY KEY (id)
|
||||
);
|
||||
INSERT INTO t_ds_relation_rule_execute_sql
|
||||
(id, rule_id, execute_sql_id, create_time, update_time)
|
||||
VALUES(1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_execute_sql
|
||||
(id, rule_id, execute_sql_id, create_time, update_time)
|
||||
VALUES(3, 5, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_execute_sql
|
||||
(id, rule_id, execute_sql_id, create_time, update_time)
|
||||
VALUES(2, 3, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_execute_sql
|
||||
(id, rule_id, execute_sql_id, create_time, update_time)
|
||||
VALUES(4, 3, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_execute_sql
|
||||
(id, rule_id, execute_sql_id, create_time, update_time)
|
||||
VALUES(5, 6, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_execute_sql
|
||||
(id, rule_id, execute_sql_id, create_time, update_time)
|
||||
VALUES(6, 6, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_execute_sql
|
||||
(id, rule_id, execute_sql_id, create_time, update_time)
|
||||
VALUES(7, 7, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_execute_sql
|
||||
(id, rule_id, execute_sql_id, create_time, update_time)
|
||||
VALUES(8, 7, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_execute_sql
|
||||
(id, rule_id, execute_sql_id, create_time, update_time)
|
||||
VALUES(9, 8, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_execute_sql
|
||||
(id, rule_id, execute_sql_id, create_time, update_time)
|
||||
VALUES(10, 8, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_execute_sql
|
||||
(id, rule_id, execute_sql_id, create_time, update_time)
|
||||
VALUES(11, 9, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_execute_sql
|
||||
(id, rule_id, execute_sql_id, create_time, update_time)
|
||||
VALUES(12, 9, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_execute_sql
|
||||
(id, rule_id, execute_sql_id, create_time, update_time)
|
||||
VALUES(13, 10, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_execute_sql
|
||||
(id, rule_id, execute_sql_id, create_time, update_time)
|
||||
VALUES(14, 1, 16, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_execute_sql
|
||||
(id, rule_id, execute_sql_id, create_time, update_time)
|
||||
VALUES(15, 5, 17, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
|
||||
--
|
||||
-- Table structure for table t_ds_relation_rule_input_entry
|
||||
--
|
||||
DROP TABLE IF EXISTS t_ds_relation_rule_input_entry;
|
||||
CREATE TABLE t_ds_relation_rule_input_entry (
|
||||
id serial NOT NULL,
|
||||
rule_id int4 NULL,
|
||||
rule_input_entry_id int4 NULL,
|
||||
values_map text NULL,
|
||||
"index" int4 NULL,
|
||||
create_time timestamp NULL,
|
||||
update_time timestamp NULL,
|
||||
CONSTRAINT t_ds_relation_rule_input_entry_pk PRIMARY KEY (id)
|
||||
);
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(1, 1, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(2, 1, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(3, 1, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(4, 1, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(5, 1, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(6, 1, 6, '{"statistics_name":"null_count.nulls"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(7, 1, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(8, 1, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(9, 1, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(10, 1, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(11, 1, 17, '', 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(12, 1, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(13, 2, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(14, 2, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(15, 2, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(16, 2, 6, '{"is_show":"true","can_edit":"true"}', 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(17, 2, 16, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(18, 2, 4, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(19, 2, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(20, 2, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(21, 2, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(22, 2, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(24, 2, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(25, 3, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(26, 3, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(27, 3, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(28, 3, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(29, 3, 11, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(30, 3, 12, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(31, 3, 13, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(32, 3, 14, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(33, 3, 15, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(34, 3, 7, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(35, 3, 8, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(36, 3, 9, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(37, 3, 10, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(38, 3, 17, '{"comparison_name":"total_count.total"}', 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(39, 3, 19, NULL, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(40, 4, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(41, 4, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(42, 4, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(43, 4, 6, '{"is_show":"true","can_edit":"true"}', 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(44, 4, 16, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(45, 4, 11, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(46, 4, 12, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(47, 4, 13, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(48, 4, 17, '{"is_show":"true","can_edit":"true"}', 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(49, 4, 18, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(50, 4, 7, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(51, 4, 8, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(52, 4, 9, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(53, 4, 10, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(62, 3, 6, '{"statistics_name":"miss_count.miss"}', 18, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(63, 5, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(64, 5, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(65, 5, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(66, 5, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(67, 5, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(68, 5, 6, '{"statistics_name":"invalid_length_count.valids"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(69, 5, 24, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(70, 5, 23, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(71, 5, 7, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(72, 5, 8, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(73, 5, 9, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(74, 5, 10, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(75, 5, 17, '', 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(76, 5, 19, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(79, 6, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(80, 6, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(81, 6, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(82, 6, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(83, 6, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(84, 6, 6, '{"statistics_name":"duplicate_count.duplicates"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(85, 6, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(86, 6, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(87, 6, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(88, 6, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(89, 6, 17, '', 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(90, 6, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(93, 7, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(94, 7, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(95, 7, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(96, 7, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(97, 7, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(98, 7, 6, '{"statistics_name":"regexp_count.regexps"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(99, 7, 25, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(100, 7, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(101, 7, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(102, 7, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(103, 7, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(104, 7, 17, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(105, 7, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(108, 8, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(109, 8, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(110, 8, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(111, 8, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(112, 8, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(113, 8, 6, '{"statistics_name":"timeliness_count.timeliness"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(114, 8, 26, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(115, 8, 27, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(116, 8, 7, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(117, 8, 8, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(118, 8, 9, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(119, 8, 10, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(120, 8, 17, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(121, 8, 19, NULL, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(124, 9, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(125, 9, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(126, 9, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(127, 9, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(128, 9, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(129, 9, 6, '{"statistics_name":"enum_count.enums"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(130, 9, 28, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(131, 9, 7, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(132, 9, 8, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(133, 9, 9, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(134, 9, 10, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(135, 9, 17, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(136, 9, 19, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(139, 10, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(140, 10, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(141, 10, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(142, 10, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(143, 10, 6, '{"statistics_name":"table_count.total"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(144, 10, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(145, 10, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(146, 10, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(147, 10, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(148, 10, 17, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(149, 10, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(150, 8, 29, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(151, 1, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(152, 2, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(153, 3, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(154, 4, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(155, 5, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(156, 6, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(157, 7, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(158, 8, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(159, 9, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(160, 10, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(161, 3, 31, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(162, 4, 31, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
|
||||
--
|
||||
-- Table structure for table t_ds_environment
|
||||
--
|
||||
|
@ -41,14 +41,6 @@ DROP PROCEDURE dolphin_t_ds_tenant_insert_default;
|
||||
UPDATE t_ds_schedules t1 JOIN t_ds_process_definition t2 ON t1.process_definition_code = t2.code LEFT JOIN t_ds_tenant t3 ON t2.tenant_id = t3.id SET t1.tenant_code = COALESCE(t3.tenant_code, 'default');
|
||||
UPDATE `t_ds_process_instance` SET `tenant_code` = 'default' WHERE `tenant_code` IS NULL;
|
||||
|
||||
-- data quality support choose database
|
||||
INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(30, 'src_database', 'select', '$t(src_database)', NULL, NULL, 'please select source database', 0, 0, 0, 1, 1, 1, 1, current_timestamp, current_timestamp);
|
||||
INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
|
||||
(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
|
||||
VALUES(31, 'target_database', 'select', '$t(target_database)', NULL, NULL, 'please select target database', 0, 0, 0, 1, 1, 1, 1, current_timestamp, current_timestamp);
|
||||
|
||||
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
|
||||
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
|
||||
VALUES(151, 1, 30, NULL, 2, current_timestamp, current_timestamp);
|
||||
|
@ -24,14 +24,6 @@ UPDATE t_ds_process_instance SET tenant_code = 'default' WHERE tenant_code IS NU
|
||||
-- If the admin account is not associated with a tenant, the admin's tenant will be set to the default tenant.
|
||||
UPDATE t_ds_user SET tenant_id = '-1' WHERE (user_name = 'admin') AND (tenant_id = '0');
|
||||
|
||||
-- data quality support choose database
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(30, 'src_database', 'select', '$t(src_database)', NULL, NULL, 'Please select source database', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000') ON CONFLICT (id) DO NOTHING;
|
||||
INSERT INTO t_ds_dq_rule_input_entry
|
||||
(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
|
||||
VALUES(31, 'target_database', 'select', '$t(target_database)', NULL, NULL, 'Please select target database', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000') ON CONFLICT (id) DO NOTHING;
|
||||
|
||||
INSERT INTO t_ds_relation_rule_input_entry
|
||||
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
|
||||
VALUES(151, 1, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000') ON CONFLICT (id) DO NOTHING;
|
||||
|
@ -202,3 +202,23 @@ delimiter ;
|
||||
CALL rename_tables_and_fields_from_process_to_workflow;
|
||||
DROP PROCEDURE rename_tables_and_fields_from_process_to_workflow;
|
||||
|
||||
-- Drop data quality tables
|
||||
DROP PROCEDURE if EXISTS drop_data_quality_tables;
|
||||
delimiter d//
|
||||
CREATE PROCEDURE drop_data_quality_tables()
|
||||
BEGIN
|
||||
|
||||
DROP TABLE IF EXISTS t_ds_dq_comparison_type;
|
||||
DROP TABLE IF EXISTS t_ds_dq_rule_execute_sql;
|
||||
DROP TABLE IF EXISTS t_ds_dq_rule_input_entry;
|
||||
DROP TABLE IF EXISTS t_ds_dq_task_statistics_value;
|
||||
DROP TABLE IF EXISTS t_ds_dq_execute_result;
|
||||
DROP TABLE IF EXISTS t_ds_dq_rule;
|
||||
DROP TABLE IF EXISTS t_ds_relation_rule_input_entry;
|
||||
DROP TABLE IF EXISTS t_ds_relation_rule_execute_sql;
|
||||
|
||||
END;
|
||||
d//
|
||||
delimiter ;
|
||||
CALL drop_data_quality_tables;
|
||||
DROP PROCEDURE drop_data_quality_tables;
|
||||
|
@ -204,3 +204,24 @@ d//
|
||||
select rename_tables_and_fields_from_process_to_workflow();
|
||||
DROP FUNCTION IF EXISTS rename_tables_and_fields_from_process_to_workflow();
|
||||
|
||||
|
||||
-- Drop data quality tables
|
||||
delimiter d//
|
||||
CREATE OR REPLACE FUNCTION drop_data_quality_tables() RETURNS void AS $$
|
||||
BEGIN
|
||||
|
||||
DROP TABLE IF EXISTS t_ds_dq_comparison_type;
|
||||
DROP TABLE IF EXISTS t_ds_dq_rule_execute_sql;
|
||||
DROP TABLE IF EXISTS t_ds_dq_rule_input_entry;
|
||||
DROP TABLE IF EXISTS t_ds_dq_task_statistics_value;
|
||||
DROP TABLE IF EXISTS t_ds_dq_execute_result;
|
||||
DROP TABLE IF EXISTS t_ds_dq_rule;
|
||||
DROP TABLE IF EXISTS t_ds_relation_rule_input_entry;
|
||||
DROP TABLE IF EXISTS t_ds_relation_rule_execute_sql;
|
||||
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
d//
|
||||
|
||||
select drop_data_quality_tables();
|
||||
DROP FUNCTION IF EXISTS drop_data_quality_tables();
|
||||
|
@ -1,61 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.mapper;
|
||||
|
||||
import org.apache.dolphinscheduler.dao.BaseDaoTest;
|
||||
import org.apache.dolphinscheduler.dao.entity.DqRule;
|
||||
import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
|
||||
/**
|
||||
* DQC rule mapper UT.
|
||||
*/
|
||||
public class DqRuleInputEntryMapperTest extends BaseDaoTest {
|
||||
|
||||
@Autowired
|
||||
private DqRuleInputEntryMapper dqRuleInputEntryMapper;
|
||||
|
||||
@Autowired
|
||||
private DqRuleMapper dqRuleMapper;
|
||||
|
||||
@Test
|
||||
public void testDqcRulePageList() {
|
||||
|
||||
Page<DqRule> page = new Page<>(1, 10);
|
||||
|
||||
IPage<DqRule> dqRulePage =
|
||||
dqRuleMapper.queryRuleListPaging(
|
||||
page,
|
||||
"",
|
||||
-1,
|
||||
null,
|
||||
null);
|
||||
|
||||
dqRulePage.getRecords().forEach(rule -> {
|
||||
final List<DqRuleInputEntry> ruleInputEntryList = dqRuleInputEntryMapper.getRuleInputEntryList(1);
|
||||
assert ruleInputEntryList != null;
|
||||
});
|
||||
}
|
||||
}
|
@ -1,201 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!--
|
||||
~ Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
~ contributor license agreements. See the NOTICE file distributed with
|
||||
~ this work for additional information regarding copyright ownership.
|
||||
~ The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
~ (the "License"); you may not use this file except in compliance with
|
||||
~ the License. You may obtain a copy of the License at
|
||||
~
|
||||
~ http://www.apache.org/licenses/LICENSE-2.0
|
||||
~
|
||||
~ Unless required by applicable law or agreed to in writing, software
|
||||
~ distributed under the License is distributed on an "AS IS" BASIS,
|
||||
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
~ See the License for the specific language governing permissions and
|
||||
~ limitations under the License.
|
||||
-->
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<parent>
|
||||
<groupId>org.apache.dolphinscheduler</groupId>
|
||||
<artifactId>dolphinscheduler</artifactId>
|
||||
<version>dev-SNAPSHOT</version>
|
||||
</parent>
|
||||
<artifactId>dolphinscheduler-data-quality</artifactId>
|
||||
|
||||
<packaging>jar</packaging>
|
||||
<name>dolphinscheduler-data-quality</name>
|
||||
|
||||
<dependencyManagement>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.apache.dolphinscheduler</groupId>
|
||||
<artifactId>dolphinscheduler-bom</artifactId>
|
||||
<version>${project.version}</version>
|
||||
<type>pom</type>
|
||||
<scope>import</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</dependencyManagement>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.apache.spark</groupId>
|
||||
<artifactId>spark-core_2.12</artifactId>
|
||||
<scope>provided</scope>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.module</groupId>
|
||||
<artifactId>jackson-module-scala_2.11</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.spark</groupId>
|
||||
<artifactId>spark-sql_2.12</artifactId>
|
||||
<scope>provided</scope>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-core</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.spark</groupId>
|
||||
<artifactId>spark-hive_2.12</artifactId>
|
||||
<scope>provided</scope>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>commons-httpclient</groupId>
|
||||
<artifactId>commons-httpclient</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.httpcomponents</groupId>
|
||||
<artifactId>httpclient</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.codehaus.jackson</groupId>
|
||||
<artifactId>jackson-core-asl</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.codehaus.jackson</groupId>
|
||||
<artifactId>jackson-mapper-asl</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.h2database</groupId>
|
||||
<artifactId>h2</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>mysql</groupId>
|
||||
<artifactId>mysql-connector-java</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.postgresql</groupId>
|
||||
<artifactId>postgresql</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.trino</groupId>
|
||||
<artifactId>trino-jdbc</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.clickhouse</groupId>
|
||||
<artifactId>clickhouse-jdbc</artifactId>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-core</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.databend</groupId>
|
||||
<artifactId>databend-jdbc</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.microsoft.sqlserver</groupId>
|
||||
<artifactId>mssql-jdbc</artifactId>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.microsoft.azure</groupId>
|
||||
<artifactId>azure-keyvault</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.facebook.presto</groupId>
|
||||
<artifactId>presto-jdbc</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.google.guava</groupId>
|
||||
<artifactId>guava</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
<scope>provided</scope>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-core</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.module</groupId>
|
||||
<artifactId>jackson-module-scala_2.11</artifactId>
|
||||
<scope>provided</scope>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-core</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.codehaus.janino</groupId>
|
||||
<artifactId>janino</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-collections4</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<artifactId>maven-jar-plugin</artifactId>
|
||||
<configuration>
|
||||
<archive>
|
||||
<manifest>
|
||||
<mainClass>org.apache.dolphinscheduler.data.quality.DataQualityApplication</mainClass>
|
||||
</manifest>
|
||||
</archive>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</project>
|
@ -1,67 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.data.quality;
|
||||
|
||||
/**
|
||||
* Constants
|
||||
*/
|
||||
public final class Constants {
|
||||
|
||||
private Constants() {
|
||||
throw new IllegalStateException("Construct Constants");
|
||||
}
|
||||
|
||||
public static final String DATABASE = "database";
|
||||
|
||||
public static final String TABLE = "table";
|
||||
|
||||
public static final String URL = "url";
|
||||
|
||||
public static final String USER = "user";
|
||||
|
||||
public static final String PASSWORD = "password";
|
||||
|
||||
public static final String DRIVER = "driver";
|
||||
|
||||
public static final String EMPTY = "";
|
||||
|
||||
public static final String SQL = "sql";
|
||||
|
||||
public static final String DOTS = ".";
|
||||
|
||||
public static final String INPUT_TABLE = "input_table";
|
||||
|
||||
public static final String OUTPUT_TABLE = "output_table";
|
||||
|
||||
public static final String TMP_TABLE = "tmp_table";
|
||||
|
||||
public static final String DB_TABLE = "dbtable";
|
||||
|
||||
public static final String JDBC = "jdbc";
|
||||
|
||||
public static final String SAVE_MODE = "save_mode";
|
||||
|
||||
public static final String APPEND = "append";
|
||||
|
||||
public static final String SPARK_APP_NAME = "spark.app.name";
|
||||
|
||||
/**
|
||||
* date format of yyyy-MM-dd HH:mm:ss
|
||||
*/
|
||||
public static final String YYYY_MM_DD_HH_MM_SS = "yyyy-MM-dd HH:mm:ss";
|
||||
}
|
@ -1,80 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.data.quality;
|
||||
|
||||
import static org.apache.dolphinscheduler.data.quality.Constants.SPARK_APP_NAME;
|
||||
import static org.apache.dolphinscheduler.data.quality.enums.ReaderType.HIVE;
|
||||
|
||||
import org.apache.dolphinscheduler.data.quality.config.Config;
|
||||
import org.apache.dolphinscheduler.data.quality.config.DataQualityConfiguration;
|
||||
import org.apache.dolphinscheduler.data.quality.config.EnvConfig;
|
||||
import org.apache.dolphinscheduler.data.quality.context.DataQualityContext;
|
||||
import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment;
|
||||
import org.apache.dolphinscheduler.data.quality.utils.JsonUtils;
|
||||
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
import com.google.common.base.Strings;
|
||||
|
||||
/**
|
||||
* DataQualityApplication is spark application.
|
||||
* It mainly includes three components: reader, transformer and writer.
|
||||
* These three components realize the functions of connecting data, executing intermediate SQL
|
||||
* and writing execution results and error data to the specified storage engine
|
||||
*/
|
||||
@Slf4j
|
||||
public class DataQualityApplication {
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
if (args.length < 1) {
|
||||
log.error("Can not find DataQualityConfiguration");
|
||||
System.exit(-1);
|
||||
}
|
||||
|
||||
String dataQualityParameter = args[0];
|
||||
|
||||
DataQualityConfiguration dataQualityConfiguration =
|
||||
JsonUtils.fromJson(dataQualityParameter, DataQualityConfiguration.class);
|
||||
if (dataQualityConfiguration == null) {
|
||||
log.info("DataQualityConfiguration is null");
|
||||
System.exit(-1);
|
||||
} else {
|
||||
dataQualityConfiguration.validate();
|
||||
}
|
||||
|
||||
EnvConfig envConfig = dataQualityConfiguration.getEnvConfig();
|
||||
Config config = new Config(envConfig.getConfig());
|
||||
config.put("type", envConfig.getType());
|
||||
if (Strings.isNullOrEmpty(config.getString(SPARK_APP_NAME))) {
|
||||
config.put(SPARK_APP_NAME, dataQualityConfiguration.getName());
|
||||
}
|
||||
|
||||
boolean hiveClientSupport = dataQualityConfiguration
|
||||
.getReaderConfigs()
|
||||
.stream()
|
||||
.anyMatch(line -> line.getType().equalsIgnoreCase(HIVE.name()));
|
||||
|
||||
SparkRuntimeEnvironment sparkRuntimeEnvironment = new SparkRuntimeEnvironment(config, hiveClientSupport);
|
||||
|
||||
DataQualityContext dataQualityContext =
|
||||
new DataQualityContext(sparkRuntimeEnvironment, dataQualityConfiguration);
|
||||
|
||||
dataQualityContext.execute();
|
||||
}
|
||||
}
|
@ -1,66 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.data.quality.config;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.google.common.base.Preconditions;
|
||||
import com.google.common.base.Strings;
|
||||
|
||||
/**
|
||||
* BaseConfig
|
||||
*/
|
||||
public class BaseConfig implements IConfig {
|
||||
|
||||
@JsonProperty("type")
|
||||
private String type;
|
||||
|
||||
@JsonProperty("config")
|
||||
private Map<String, Object> config;
|
||||
|
||||
public BaseConfig() {
|
||||
}
|
||||
|
||||
public BaseConfig(String type, Map<String, Object> config) {
|
||||
this.type = type;
|
||||
this.config = config;
|
||||
}
|
||||
|
||||
public String getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public void setType(String type) {
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
public Map<String, Object> getConfig() {
|
||||
return config;
|
||||
}
|
||||
|
||||
public void setConfig(Map<String, Object> config) {
|
||||
this.config = config;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void validate() {
|
||||
Preconditions.checkArgument(!Strings.isNullOrEmpty(type), "type should not be empty");
|
||||
Preconditions.checkArgument(config != null, "config should not be empty");
|
||||
}
|
||||
}
|
@ -1,94 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.data.quality.config;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
* Config
|
||||
*/
|
||||
public class Config {
|
||||
|
||||
private Map<String, Object> configuration = new HashMap<>();
|
||||
|
||||
public Config() {
|
||||
|
||||
}
|
||||
|
||||
public Config(Map<String, Object> configuration) {
|
||||
if (configuration != null) {
|
||||
this.configuration = configuration;
|
||||
}
|
||||
}
|
||||
|
||||
public String getString(String key) {
|
||||
return configuration.get(key) == null ? null : String.valueOf(configuration.get(key));
|
||||
}
|
||||
|
||||
public List<String> getStringList(String key) {
|
||||
return (List<String>) configuration.get(key);
|
||||
}
|
||||
|
||||
public Integer getInt(String key) {
|
||||
return Integer.valueOf(String.valueOf(configuration.get(key)));
|
||||
}
|
||||
|
||||
public Boolean getBoolean(String key) {
|
||||
return Boolean.valueOf(String.valueOf(configuration.get(key)));
|
||||
}
|
||||
|
||||
public Double getDouble(String key) {
|
||||
return Double.valueOf(String.valueOf(configuration.get(key)));
|
||||
}
|
||||
|
||||
public Long getLong(String key) {
|
||||
return Long.valueOf(String.valueOf(configuration.get(key)));
|
||||
}
|
||||
|
||||
public Boolean has(String key) {
|
||||
return configuration.get(key) != null;
|
||||
}
|
||||
|
||||
public Set<Entry<String, Object>> entrySet() {
|
||||
return configuration.entrySet();
|
||||
}
|
||||
|
||||
public boolean isEmpty() {
|
||||
return configuration.size() <= 0;
|
||||
}
|
||||
|
||||
public boolean isNotEmpty() {
|
||||
return configuration.size() > 0;
|
||||
}
|
||||
|
||||
public void put(String key, Object value) {
|
||||
this.configuration.put(key, value);
|
||||
}
|
||||
|
||||
public void merge(Map<String, Object> configuration) {
|
||||
configuration.forEach(this.configuration::putIfAbsent);
|
||||
}
|
||||
|
||||
public Map<String, Object> configurationMap() {
|
||||
return this.configuration;
|
||||
}
|
||||
}
|
@ -1,133 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.data.quality.config;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.google.common.base.Preconditions;
|
||||
import com.google.common.base.Strings;
|
||||
|
||||
/**
|
||||
* DataQualityConfiguration
|
||||
*/
|
||||
public class DataQualityConfiguration implements IConfig {
|
||||
|
||||
@JsonProperty("name")
|
||||
private String name;
|
||||
|
||||
@JsonProperty("env")
|
||||
private EnvConfig envConfig;
|
||||
|
||||
@JsonProperty("readers")
|
||||
private List<ReaderConfig> readerConfigs;
|
||||
|
||||
@JsonProperty("transformers")
|
||||
private List<TransformerConfig> transformerConfigs;
|
||||
|
||||
@JsonProperty("writers")
|
||||
private List<WriterConfig> writerConfigs;
|
||||
|
||||
public DataQualityConfiguration() {
|
||||
}
|
||||
|
||||
public DataQualityConfiguration(String name,
|
||||
EnvConfig envConfig,
|
||||
List<ReaderConfig> readerConfigs,
|
||||
List<WriterConfig> writerConfigs,
|
||||
List<TransformerConfig> transformerConfigs) {
|
||||
this.name = name;
|
||||
this.envConfig = envConfig;
|
||||
this.readerConfigs = readerConfigs;
|
||||
this.writerConfigs = writerConfigs;
|
||||
this.transformerConfigs = transformerConfigs;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public EnvConfig getEnvConfig() {
|
||||
return envConfig;
|
||||
}
|
||||
|
||||
public void setEnvConfig(EnvConfig envConfig) {
|
||||
this.envConfig = envConfig;
|
||||
}
|
||||
|
||||
public List<ReaderConfig> getReaderConfigs() {
|
||||
return readerConfigs;
|
||||
}
|
||||
|
||||
public void setReaderConfigs(List<ReaderConfig> readerConfigs) {
|
||||
this.readerConfigs = readerConfigs;
|
||||
}
|
||||
|
||||
public List<TransformerConfig> getTransformerConfigs() {
|
||||
return transformerConfigs;
|
||||
}
|
||||
|
||||
public void setTransformerConfigs(List<TransformerConfig> transformerConfigs) {
|
||||
this.transformerConfigs = transformerConfigs;
|
||||
}
|
||||
|
||||
public List<WriterConfig> getWriterConfigs() {
|
||||
return writerConfigs;
|
||||
}
|
||||
|
||||
public void setWriterConfigs(List<WriterConfig> writerConfigs) {
|
||||
this.writerConfigs = writerConfigs;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void validate() {
|
||||
Preconditions.checkArgument(!Strings.isNullOrEmpty(name), "name should not be empty");
|
||||
|
||||
Preconditions.checkArgument(envConfig != null, "env config should not be empty");
|
||||
|
||||
Preconditions.checkArgument(readerConfigs != null, "reader config should not be empty");
|
||||
for (ReaderConfig readerConfig : readerConfigs) {
|
||||
readerConfig.validate();
|
||||
}
|
||||
|
||||
Preconditions.checkArgument(transformerConfigs != null, "transform config should not be empty");
|
||||
for (TransformerConfig transformParameter : transformerConfigs) {
|
||||
transformParameter.validate();
|
||||
}
|
||||
|
||||
Preconditions.checkArgument(writerConfigs != null, "writer config should not be empty");
|
||||
for (WriterConfig writerConfig : writerConfigs) {
|
||||
writerConfig.validate();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "DataQualityConfiguration{"
|
||||
+ "name='" + name + '\''
|
||||
+ ", envConfig=" + envConfig
|
||||
+ ", readerConfigs=" + readerConfigs
|
||||
+ ", transformerConfigs=" + transformerConfigs
|
||||
+ ", writerConfigs=" + writerConfigs
|
||||
+ '}';
|
||||
}
|
||||
}
|
@ -1,34 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.data.quality.config;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* EnvConfig
|
||||
*/
|
||||
public class EnvConfig extends BaseConfig {
|
||||
|
||||
public EnvConfig() {
|
||||
}
|
||||
|
||||
public EnvConfig(String type, Map<String, Object> config) {
|
||||
super(type, config);
|
||||
}
|
||||
|
||||
}
|
@ -1,29 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.data.quality.config;
|
||||
|
||||
/**
|
||||
* IConfig
|
||||
*/
|
||||
public interface IConfig {
|
||||
|
||||
/**
|
||||
* check the parameter
|
||||
*/
|
||||
void validate();
|
||||
}
|
@ -1,33 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.data.quality.config;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* ReaderConfig
|
||||
*/
|
||||
public class ReaderConfig extends BaseConfig {
|
||||
|
||||
public ReaderConfig() {
|
||||
}
|
||||
|
||||
public ReaderConfig(String type, Map<String, Object> config) {
|
||||
super(type, config);
|
||||
}
|
||||
}
|
@ -1,33 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.data.quality.config;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* TransformerConfig
|
||||
*/
|
||||
public class TransformerConfig extends BaseConfig {
|
||||
|
||||
public TransformerConfig() {
|
||||
}
|
||||
|
||||
public TransformerConfig(String type, Map<String, Object> config) {
|
||||
super(type, config);
|
||||
}
|
||||
}
|
@ -1,46 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.data.quality.config;
|
||||
|
||||
public class ValidateResult {
|
||||
|
||||
private boolean success;
|
||||
|
||||
private String msg;
|
||||
|
||||
public ValidateResult(boolean success, String msg) {
|
||||
this.success = success;
|
||||
this.msg = msg;
|
||||
}
|
||||
|
||||
public boolean isSuccess() {
|
||||
return success;
|
||||
}
|
||||
|
||||
public void setSuccess(boolean success) {
|
||||
this.success = success;
|
||||
}
|
||||
|
||||
public String getMsg() {
|
||||
return msg;
|
||||
}
|
||||
|
||||
public void setMsg(String msg) {
|
||||
this.msg = msg;
|
||||
}
|
||||
}
|
@ -1,33 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.data.quality.config;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* WriterConfig
|
||||
*/
|
||||
public class WriterConfig extends BaseConfig {
|
||||
|
||||
public WriterConfig() {
|
||||
}
|
||||
|
||||
public WriterConfig(String type, Map<String, Object> config) {
|
||||
super(type, config);
|
||||
}
|
||||
}
|
@ -1,67 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.data.quality.context;
|
||||
|
||||
import org.apache.dolphinscheduler.data.quality.config.DataQualityConfiguration;
|
||||
import org.apache.dolphinscheduler.data.quality.exception.DataQualityException;
|
||||
import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment;
|
||||
import org.apache.dolphinscheduler.data.quality.flow.batch.BatchReader;
|
||||
import org.apache.dolphinscheduler.data.quality.flow.batch.BatchTransformer;
|
||||
import org.apache.dolphinscheduler.data.quality.flow.batch.BatchWriter;
|
||||
import org.apache.dolphinscheduler.data.quality.flow.batch.reader.ReaderFactory;
|
||||
import org.apache.dolphinscheduler.data.quality.flow.batch.transformer.TransformerFactory;
|
||||
import org.apache.dolphinscheduler.data.quality.flow.batch.writer.WriterFactory;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* DataQualityContext
|
||||
*/
|
||||
public class DataQualityContext {
|
||||
|
||||
private SparkRuntimeEnvironment sparkRuntimeEnvironment;
|
||||
|
||||
private DataQualityConfiguration dataQualityConfiguration;
|
||||
|
||||
public DataQualityContext() {
|
||||
}
|
||||
|
||||
public DataQualityContext(SparkRuntimeEnvironment sparkRuntimeEnvironment,
|
||||
DataQualityConfiguration dataQualityConfiguration) {
|
||||
this.sparkRuntimeEnvironment = sparkRuntimeEnvironment;
|
||||
this.dataQualityConfiguration = dataQualityConfiguration;
|
||||
}
|
||||
|
||||
public void execute() throws DataQualityException {
|
||||
List<BatchReader> readers = ReaderFactory
|
||||
.getInstance()
|
||||
.getReaders(this.sparkRuntimeEnvironment, dataQualityConfiguration.getReaderConfigs());
|
||||
List<BatchTransformer> transformers = TransformerFactory
|
||||
.getInstance()
|
||||
.getTransformer(this.sparkRuntimeEnvironment, dataQualityConfiguration.getTransformerConfigs());
|
||||
List<BatchWriter> writers = WriterFactory
|
||||
.getInstance()
|
||||
.getWriters(this.sparkRuntimeEnvironment, dataQualityConfiguration.getWriterConfigs());
|
||||
|
||||
if (sparkRuntimeEnvironment.isBatch()) {
|
||||
sparkRuntimeEnvironment.getBatchExecution().execute(readers, transformers, writers);
|
||||
} else {
|
||||
throw new DataQualityException("stream mode is not supported now");
|
||||
}
|
||||
}
|
||||
}
|
@ -1,41 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.data.quality.enums;
|
||||
|
||||
/**
|
||||
* ReaderType
|
||||
*/
|
||||
public enum ReaderType {
|
||||
|
||||
/**
|
||||
* JDBC
|
||||
* HIVE
|
||||
*/
|
||||
JDBC,
|
||||
HIVE;
|
||||
|
||||
public static ReaderType getType(String name) {
|
||||
for (ReaderType type : ReaderType.values()) {
|
||||
if (type.name().equalsIgnoreCase(name)) {
|
||||
return type;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
@ -1,39 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.data.quality.enums;
|
||||
|
||||
/**
|
||||
* TransformerType
|
||||
*/
|
||||
public enum TransformerType {
|
||||
|
||||
/**
|
||||
* JDBC
|
||||
*/
|
||||
SQL;
|
||||
|
||||
public static TransformerType getType(String name) {
|
||||
for (TransformerType type : TransformerType.values()) {
|
||||
if (type.name().equalsIgnoreCase(name)) {
|
||||
return type;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
@ -1,41 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.data.quality.enums;
|
||||
|
||||
/**
|
||||
* WriterType
|
||||
*/
|
||||
public enum WriterType {
|
||||
|
||||
/**
|
||||
* JDBC
|
||||
*/
|
||||
JDBC,
|
||||
LOCAL_FILE,
|
||||
HDFS_FILE;
|
||||
|
||||
public static WriterType getType(String name) {
|
||||
for (WriterType type : WriterType.values()) {
|
||||
if (type.name().equalsIgnoreCase(name)) {
|
||||
return type;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
@ -1,40 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.data.quality.exception;
|
||||
|
||||
/**
|
||||
* ConfigRuntimeException
|
||||
*/
|
||||
public class ConfigRuntimeException extends RuntimeException {
|
||||
|
||||
public ConfigRuntimeException() {
|
||||
super();
|
||||
}
|
||||
|
||||
public ConfigRuntimeException(String message) {
|
||||
super(message);
|
||||
}
|
||||
|
||||
public ConfigRuntimeException(String message, Throwable cause) {
|
||||
super(message, cause);
|
||||
}
|
||||
|
||||
public ConfigRuntimeException(Throwable cause) {
|
||||
super(cause);
|
||||
}
|
||||
}
|
@ -1,57 +0,0 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.data.quality.exception;
|
||||
|
||||
/**
|
||||
* data quality exception
|
||||
*/
|
||||
public class DataQualityException extends Exception {
|
||||
|
||||
public DataQualityException() {
|
||||
super();
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a new runtime exception with the detail message
|
||||
*
|
||||
* @param message detail message
|
||||
*/
|
||||
public DataQualityException(String message) {
|
||||
super(message);
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a new runtime exception with the detail message and cause
|
||||
*
|
||||
* @param message the detail message
|
||||
* @param cause the cause
|
||||
* @since 1.4
|
||||
*/
|
||||
public DataQualityException(String message, Throwable cause) {
|
||||
super(message, cause);
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a new runtime exception with throwable
|
||||
*
|
||||
* @param cause the cause
|
||||
*/
|
||||
public DataQualityException(Throwable cause) {
|
||||
super(cause);
|
||||
}
|
||||
}
|