workflow lineage (#2421)

* workflow lineage

* workflow lineage unit test

* stash

* add apache license

* Update .env

* code optimize

* add unit test

* add apache license

* add apache license

* optimized code

* add postgresql support and optimized code

* Update pom.xml

* Update en_US.js

* Update zh_CN.js

* remove mock code

* import i18n

* Update pom.xml

* Delete .env

* add env file

Co-authored-by: linhaiqiang <haiqiang.lin@five-star.cn>
Co-authored-by: dailidong <dailidong66@gmail.com>
Co-authored-by: becarchal <yangbin_57@sina.com>
LiemLin 2020-07-30 13:32:29 +08:00 committed by GitHub
parent 3a0d4da27e
commit f27c62a5db
29 changed files with 1288 additions and 12 deletions
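
For orientation, here is a minimal client-side sketch of the two REST endpoints this change adds. The paths, query parameters, and sessionId header are taken from WorkFlowLineageController and its test below; the base URL, port, and session value are placeholders, and Java 11's HttpClient is used purely for illustration.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class LineageEndpointsSketch {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        String base = "http://localhost:12345/dolphinscheduler"; // placeholder base URL
        String sessionId = "<session-id>";                       // placeholder login session

        // Search lineage nodes by workflow name within project 1
        HttpRequest byName = HttpRequest.newBuilder()
                .uri(URI.create(base + "/lineages/1/list-name?searchVal=test"))
                .header("sessionId", sessionId)
                .GET()
                .build();

        // Fetch lineage details plus downstream relations for the given workflow ids
        HttpRequest byIds = HttpRequest.newBuilder()
                .uri(URI.create(base + "/lineages/1/list-ids?ids=1,2"))
                .header("sessionId", sessionId)
                .GET()
                .build();

        System.out.println(client.send(byName, HttpResponse.BodyHandlers.ofString()).body());
        System.out.println(client.send(byIds, HttpResponse.BodyHandlers.ofString()).body());
    }
}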

@ -0,0 +1,81 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.service.WorkFlowLineageService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import io.swagger.annotations.ApiParam;
import org.apache.dolphinscheduler.dao.entity.WorkFlowLineage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_WORKFLOW_LINEAGE_ERROR;
@RestController
@RequestMapping("lineages/{projectId}")
public class WorkFlowLineageController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(WorkFlowLineageController.class);
@Autowired
private WorkFlowLineageService workFlowLineageService;
@GetMapping(value="/list-name")
@ResponseStatus(HttpStatus.OK)
public Result<List<WorkFlowLineage>> queryWorkFlowLineageByName(@ApiIgnore @RequestParam(value = "searchVal", required = false) String searchVal, @ApiParam(name = "projectId", value = "PROJECT_ID", required = true) @PathVariable int projectId) {
try {
searchVal = ParameterUtils.handleEscapes(searchVal);
Map<String, Object> result = workFlowLineageService.queryWorkFlowLineageByName(searchVal,projectId);
return returnDataList(result);
} catch (Exception e){
logger.error(QUERY_WORKFLOW_LINEAGE_ERROR.getMsg(),e);
return error(QUERY_WORKFLOW_LINEAGE_ERROR.getCode(), QUERY_WORKFLOW_LINEAGE_ERROR.getMsg());
}
}
@GetMapping(value="/list-ids")
@ResponseStatus(HttpStatus.OK)
public Result<Map<String, Object>> queryWorkFlowLineageByIds(@ApiIgnore @RequestParam(value = "ids", required = false) String ids,@ApiParam(name = "projectId", value = "PROJECT_ID", required = true) @PathVariable int projectId) {
try {
ids = ParameterUtils.handleEscapes(ids);
Set<Integer> idsSet = new HashSet<>();
if(ids != null) {
String[] idsStr = ids.split(",");
for (String id : idsStr)
{
idsSet.add(Integer.parseInt(id));
}
}
Map<String, Object> result = workFlowLineageService.queryWorkFlowLineageByIds(idsSet, projectId);
return returnDataList(result);
} catch (Exception e){
logger.error(QUERY_WORKFLOW_LINEAGE_ERROR.getMsg(),e);
return error(QUERY_WORKFLOW_LINEAGE_ERROR.getCode(), QUERY_WORKFLOW_LINEAGE_ERROR.getMsg());
}
}
}

@ -168,15 +168,15 @@ public enum Status {
PREVIEW_SCHEDULE_ERROR(10139,"preview schedule error", "预览调度配置错误"),
PARSE_TO_CRON_EXPRESSION_ERROR(10140,"parse cron to cron expression error", "解析调度表达式错误"),
SCHEDULE_START_TIME_END_TIME_SAME(10141,"The start time must not be the same as the end", "开始时间不能和结束时间一样"),
DELETE_TENANT_BY_ID_FAIL(10142,"delete tenant by id fail, for there are {0} process instances in executing using it", "删除租户失败,有[{0}]个运行中的工作流实例正在使用"),
DELETE_TENANT_BY_ID_FAIL_DEFINES(10143,"delete tenant by id fail, for there are {0} process definitions using it", "删除租户失败,有[{0}]个工作流定义正在使用"),
DELETE_TENANT_BY_ID_FAIL_USERS(10144,"delete tenant by id fail, for there are {0} users using it", "删除租户失败,有[{0}]个用户正在使用"),
DELETE_WORKER_GROUP_BY_ID_FAIL(10145,"delete worker group by id fail, for there are {0} process instances in executing using it", "删除Worker分组失败有[{0}]个运行中的工作流实例正在使用"),
QUERY_WORKER_GROUP_FAIL(10146,"query worker group fail ", "查询worker分组失败"),
DELETE_WORKER_GROUP_FAIL(10147,"delete worker group fail ", "删除worker分组失败"),
DELETE_TENANT_BY_ID_FAIL(100142,"delete tenant by id fail, for there are {0} process instances in executing using it", "删除租户失败,有[{0}]个运行中的工作流实例正在使用"),
DELETE_TENANT_BY_ID_FAIL_DEFINES(100143,"delete tenant by id fail, for there are {0} process definitions using it", "删除租户失败,有[{0}]个工作流定义正在使用"),
DELETE_TENANT_BY_ID_FAIL_USERS(100144,"delete tenant by id fail, for there are {0} users using it", "删除租户失败,有[{0}]个用户正在使用"),
DELETE_WORKER_GROUP_BY_ID_FAIL(100145,"delete worker group by id fail, for there are {0} process instances in executing using it", "删除Worker分组失败有[{0}]个运行中的工作流实例正在使用"),
QUERY_WORKER_GROUP_FAIL(100146,"query worker group fail ", "查询worker分组失败"),
DELETE_WORKER_GROUP_FAIL(100147,"delete worker group fail ", "删除worker分组失败"),
QUERY_WORKFLOW_LINEAGE_ERROR(10143,"query workflow lineage error", "查询血缘失败"),
COPY_PROCESS_DEFINITION_ERROR(10148,"copy process definition error", "复制工作流错误"),
USER_DISABLED(10149,"The current user is disabled", "当前用户已停用"),
UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found", "UDF函数不存在"),
UDF_FUNCTION_EXISTS(20002, "UDF function already exists", "UDF函数已存在"),
RESOURCE_NOT_EXIST(20004, "resource not exist", "资源不存在"),

@ -0,0 +1,97 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.mapper.WorkFlowLineageMapper;
import org.apache.dolphinscheduler.dao.entity.WorkFlowLineage;
import org.apache.dolphinscheduler.dao.entity.WorkFlowRelation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.*;
@Service
public class WorkFlowLineageService extends BaseService {
@Autowired
private WorkFlowLineageMapper workFlowLineageMapper;
public Map<String, Object> queryWorkFlowLineageByName(String workFlowName, int projectId) {
Map<String, Object> result = new HashMap<>(5);
List<WorkFlowLineage> workFlowLineageList = workFlowLineageMapper.queryByName(workFlowName, projectId);
result.put(Constants.DATA_LIST, workFlowLineageList);
putMsg(result, Status.SUCCESS);
return result;
}
private List<WorkFlowRelation> getWorkFlowRelationRecursion(Set<Integer> ids, List<WorkFlowRelation> workFlowRelations,Set<Integer> sourceIds) {
for(int id : ids) {
sourceIds.addAll(ids);
List<WorkFlowRelation> workFlowRelationsTmp = workFlowLineageMapper.querySourceTarget(id);
if(workFlowRelationsTmp != null && !workFlowRelationsTmp.isEmpty()) {
Set<Integer> idsTmp = new HashSet<>();
for(WorkFlowRelation workFlowRelation:workFlowRelationsTmp) {
if(!sourceIds.contains(workFlowRelation.getTargetWorkFlowId())){
idsTmp.add(workFlowRelation.getTargetWorkFlowId());
}
}
workFlowRelations.addAll(workFlowRelationsTmp);
getWorkFlowRelationRecursion(idsTmp, workFlowRelations,sourceIds);
}
}
return workFlowRelations;
}
public Map<String, Object> queryWorkFlowLineageByIds(Set<Integer> ids,int projectId) {
Map<String, Object> result = new HashMap<>(5);
List<WorkFlowLineage> workFlowLineageList = workFlowLineageMapper.queryByIds(ids, projectId);
Map<String, Object> workFlowLists = new HashMap<>(5);
Set<Integer> idsV = new HashSet<>();
if(ids == null || ids.isEmpty()){
for(WorkFlowLineage workFlowLineage:workFlowLineageList) {
idsV.add(workFlowLineage.getWorkFlowId());
}
} else {
idsV = ids;
}
List<WorkFlowRelation> workFlowRelations = new ArrayList<>();
Set<Integer> sourceIds = new HashSet<>();
getWorkFlowRelationRecursion(idsV, workFlowRelations, sourceIds);
Set<Integer> idSet = new HashSet<>();
//If the incoming parameter is not empty, you need to add downstream workflow detail attributes
if(ids != null && !ids.isEmpty()) {
for(WorkFlowRelation workFlowRelation : workFlowRelations) {
idSet.add(workFlowRelation.getTargetWorkFlowId());
}
for(int id : ids){
idSet.remove(id);
}
if(!idSet.isEmpty()) {
workFlowLineageList.addAll(workFlowLineageMapper.queryByIds(idSet, projectId));
}
}
workFlowLists.put("workFlowList",workFlowLineageList);
workFlowLists.put("workFlowRelationList",workFlowRelations);
result.put(Constants.DATA_LIST, workFlowLists);
putMsg(result, Status.SUCCESS);
return result;
}
}
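
getWorkFlowRelationRecursion above walks downstream relations until no unvisited target remains, with sourceIds doubling as the visited set that prevents infinite loops on cyclic definitions. A standalone sketch of the same traversal idea, using a plain adjacency map in place of the mapper (names and data are hypothetical):

import java.util.*;

public class LineageWalkSketch {
    public static void main(String[] args) {
        // downstream edges: workflow 1 feeds 2, workflow 2 feeds 3
        Map<Integer, List<Integer>> downstream = new HashMap<>();
        downstream.put(1, Arrays.asList(2));
        downstream.put(2, Arrays.asList(3));

        Set<Integer> visited = new HashSet<>();   // plays the role of sourceIds
        List<int[]> edges = new ArrayList<>();    // plays the role of workFlowRelations
        Deque<Integer> queue = new ArrayDeque<>(Collections.singleton(1));
        while (!queue.isEmpty()) {
            int id = queue.poll();
            if (!visited.add(id)) {
                continue; // already expanded, stops cycles
            }
            for (int target : downstream.getOrDefault(id, Collections.emptyList())) {
                edges.add(new int[]{id, target});
                queue.add(target);
            }
        }
        // visited ends up containing 1, 2 and 3; edges.size() == 2,
        // i.e. both relations are collected from a single starting id
        System.out.println("visited=" + visited + ", edges=" + edges.size());
    }
}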

@ -0,0 +1,69 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
public class WorkFlowLineageControllerTest extends AbstractControllerTest {
private static Logger logger = LoggerFactory.getLogger(WorkFlowLineageControllerTest.class);
@Test
public void testQueryWorkFlowLineageByName() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("searchVal","test");
MvcResult mvcResult = mockMvc.perform(get("/lineages/1/list-name")
.header("sessionId", sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testQueryWorkFlowLineageByIds() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("ids","1");
MvcResult mvcResult = mockMvc.perform(get("/lineages/1/list-ids")
.header("sessionId", sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
}

@ -0,0 +1,88 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.EncryptionUtils;
import org.apache.dolphinscheduler.dao.entity.WorkFlowLineage;
import org.apache.dolphinscheduler.dao.entity.WorkFlowRelation;
import org.apache.dolphinscheduler.dao.mapper.WorkFlowLineageMapper;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import java.util.*;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class WorkFlowLineageServiceTest {
@InjectMocks
private WorkFlowLineageService workFlowLineageService;
@Mock
private WorkFlowLineageMapper workFlowLineageMapper;
@Test
public void testQueryWorkFlowLineageByName() {
String searchVal = "test";
when(workFlowLineageMapper.queryByName(searchVal, 1)).thenReturn(getWorkFlowLineages());
Map<String, Object> result = workFlowLineageService.queryWorkFlowLineageByName(searchVal,1);
List<WorkFlowLineage> workFlowLineageList = (List<WorkFlowLineage>)result.get(Constants.DATA_LIST);
Assert.assertTrue(workFlowLineageList.size()>0);
}
@Test
public void testQueryWorkFlowLineageByIds() {
Set<Integer> ids = new HashSet<>();
ids.add(1);
ids.add(2);
when(workFlowLineageMapper.queryByIds(ids, 1)).thenReturn(getWorkFlowLineages());
when(workFlowLineageMapper.querySourceTarget(1)).thenReturn(getWorkFlowRelation());
Map<String, Object> result = workFlowLineageService.queryWorkFlowLineageByIds(ids,1);
Map<String, Object> workFlowLists = (Map<String, Object>)result.get(Constants.DATA_LIST);
List<WorkFlowLineage> workFlowLineages = (List<WorkFlowLineage>)workFlowLists.get("workFlowList");
List<WorkFlowRelation> workFlowRelations = (List<WorkFlowRelation>)workFlowLists.get("workFlowRelationList");
Assert.assertTrue(workFlowLineages.size()>0);
Assert.assertTrue(workFlowRelations.size()>0);
}
private List<WorkFlowLineage> getWorkFlowLineages() {
List<WorkFlowLineage> workFlowLineages = new ArrayList<>();
WorkFlowLineage workFlowLineage = new WorkFlowLineage();
workFlowLineage.setWorkFlowId(1);
workFlowLineage.setWorkFlowName("testdag");
workFlowLineages.add(workFlowLineage);
return workFlowLineages;
}
private List<WorkFlowRelation> getWorkFlowRelation(){
List<WorkFlowRelation> workFlowRelations = new ArrayList<>();
WorkFlowRelation workFlowRelation = new WorkFlowRelation();
workFlowRelation.setSourceWorkFlowId(1);
workFlowRelation.setTargetWorkFlowId(2);
workFlowRelations.add(workFlowRelation);
return workFlowRelations;
}
}

@ -26,6 +26,8 @@ import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.utils.PropertyUtils;
import org.apache.ibatis.mapping.DatabaseIdProvider;
import org.apache.ibatis.mapping.VendorDatabaseIdProvider;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.type.JdbcType;
@ -39,6 +41,8 @@ import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.io.support.ResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import java.util.Properties;
/**
* data source connection factory
@ -129,6 +133,7 @@ public class SpringConnectionFactory {
ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
sqlSessionFactoryBean.setMapperLocations(resolver.getResources("org/apache/dolphinscheduler/dao/mapper/*Mapper.xml"));
sqlSessionFactoryBean.setTypeEnumsPackage("org.apache.dolphinscheduler.*.enums");
sqlSessionFactoryBean.setDatabaseIdProvider(databaseIdProvider());
return sqlSessionFactoryBean.getObject();
}
@ -142,4 +147,13 @@ public class SpringConnectionFactory {
return new SqlSessionTemplate(sqlSessionFactory());
}
@Bean
public DatabaseIdProvider databaseIdProvider(){
DatabaseIdProvider databaseIdProvider = new VendorDatabaseIdProvider();
Properties properties = new Properties();
properties.setProperty("MySQL", "mysql");
properties.setProperty("PostgreSQL", "pg");
databaseIdProvider.setProperties(properties);
return databaseIdProvider;
}
}
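
The databaseIdProvider bean registered above is what lets a single MyBatis statement id carry both MySQL and PostgreSQL variants (see the queryByIds/querySourceTarget pairs in WorkFlowLineageMapper.xml below). A minimal sketch of the resolution step, assuming an already-configured javax.sql.DataSource; illustrative only, not part of the commit:

import java.util.Properties;
import javax.sql.DataSource;
import org.apache.ibatis.mapping.VendorDatabaseIdProvider;

public class DatabaseIdSketch {
    static String resolveDatabaseId(DataSource dataSource) throws Exception {
        // MyBatis derives the vendor name from DatabaseMetaData.getDatabaseProductName()
        // and maps it through these properties, so statements tagged databaseId="pg" are
        // picked on PostgreSQL and the databaseId="mysql" variants on MySQL.
        VendorDatabaseIdProvider provider = new VendorDatabaseIdProvider();
        Properties properties = new Properties();
        properties.setProperty("MySQL", "mysql");
        properties.setProperty("PostgreSQL", "pg");
        provider.setProperties(properties);
        return provider.getDatabaseId(dataSource); // e.g. "pg" when connected to PostgreSQL
    }
}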

@ -0,0 +1,94 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.entity;
import java.util.Date;
public class WorkFlowLineage {
private int workFlowId;
private String workFlowName;
private String workFlowPublishStatus;
private Date scheduleStartTime;
private Date scheduleEndTime;
private String crontab;
private int schedulePublishStatus;
private String sourceWorkFlowId;
public String getSourceWorkFlowId() {
return sourceWorkFlowId;
}
public void setSourceWorkFlowId(String sourceWorkFlowId) {
this.sourceWorkFlowId = sourceWorkFlowId;
}
public int getWorkFlowId() {
return workFlowId;
}
public void setWorkFlowId(int workFlowId) {
this.workFlowId = workFlowId;
}
public String getWorkFlowName() {
return workFlowName;
}
public void setWorkFlowName(String workFlowName) {
this.workFlowName = workFlowName;
}
public String getWorkFlowPublishStatus() {
return workFlowPublishStatus;
}
public void setWorkFlowPublishStatus(String workFlowPublishStatus) {
this.workFlowPublishStatus = workFlowPublishStatus;
}
public Date getScheduleStartTime() {
return scheduleStartTime;
}
public void setScheduleStartTime(Date scheduleStartTime) {
this.scheduleStartTime = scheduleStartTime;
}
public Date getScheduleEndTime() {
return scheduleEndTime;
}
public void setScheduleEndTime(Date scheduleEndTime) {
this.scheduleEndTime = scheduleEndTime;
}
public String getCrontab() {
return crontab;
}
public void setCrontab(String crontab) {
this.crontab = crontab;
}
public int getSchedulePublishStatus() {
return schedulePublishStatus;
}
public void setSchedulePublishStatus(int schedulePublishStatus) {
this.schedulePublishStatus = schedulePublishStatus;
}
}

@ -0,0 +1,38 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.entity;
public class WorkFlowRelation {
private int sourceWorkFlowId;
private int targetWorkFlowId;
public int getSourceWorkFlowId() {
return sourceWorkFlowId;
}
public void setSourceWorkFlowId(int sourceWorkFlowId) {
this.sourceWorkFlowId = sourceWorkFlowId;
}
public int getTargetWorkFlowId() {
return targetWorkFlowId;
}
public void setTargetWorkFlowId(int targetWorkFlowId) {
this.targetWorkFlowId = targetWorkFlowId;
}
}

@ -0,0 +1,32 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.mapper;
import org.apache.dolphinscheduler.dao.entity.WorkFlowLineage;
import org.apache.dolphinscheduler.dao.entity.WorkFlowRelation;
import org.apache.ibatis.annotations.Param;
import java.util.List;
import java.util.Set;
public interface WorkFlowLineageMapper {
public List<WorkFlowLineage> queryByName(@Param("searchVal") String searchVal, @Param("projectId") int projectId);
public List<WorkFlowLineage> queryByIds(@Param("ids") Set<Integer> ids, @Param("projectId") int projectId);
public List<WorkFlowRelation> querySourceTarget(@Param("id") int id);
}

@ -0,0 +1,103 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
<mapper namespace="org.apache.dolphinscheduler.dao.mapper.WorkFlowLineageMapper">
<select id="queryByName" resultType="org.apache.dolphinscheduler.dao.entity.WorkFlowLineage">
select tepd.id as work_flow_id,tepd.name as work_flow_name
from t_ds_process_definition tepd
left join t_ds_schedules tes on tepd.id = tes.process_definition_id
where tepd.project_id = #{projectId}
<if test="searchVal != null and searchVal != ''">
and tepd.name like concat('%', #{searchVal}, '%')
</if>
</select>
<select id="queryByIds" resultType="org.apache.dolphinscheduler.dao.entity.WorkFlowLineage" databaseId="mysql">
select tepd.id as work_flow_id,tepd.name as work_flow_name,
(case when json_extract(tepd.process_definition_json, '$**.dependItemList') is not null then 1 else 0 end) as is_depend_work_flow,
json_extract(tepd.process_definition_json, '$**.definitionId') as source_work_flow_id,
tepd.release_state as work_flow_publish_status,
tes.start_time as schedule_start_time,
tes.end_time as schedule_end_time,
tes.crontab as crontab,
tes.release_state as schedule_publish_status
from t_ds_process_definition tepd
left join t_ds_schedules tes on tepd.id = tes.process_definition_id
where tepd.project_id = #{projectId}
<if test="ids != null and ids.size()>0">
and tepd.id in
<foreach collection="ids" index="index" item="i" open="(" separator="," close=")">
#{i}
</foreach>
</if>
</select>
<select id="queryByIds" resultType="org.apache.dolphinscheduler.dao.entity.WorkFlowLineage" databaseId="pg">
select a.work_flow_id,
a.work_flow_name,
a.is_depend_work_flow,
array_agg(a.source_id) as source_id,
a.work_flow_publish_status,
a.schedule_start_time,
a.schedule_end_time,
a.crontab,
a.schedule_publish_status
from (
select tepd.id as work_flow_id,tepd.name as work_flow_name,
case when tepd.process_definition_json::json#>'{tasks,1,dependence}' is not null then 1 else 0 end as is_depend_work_flow,
(json_array_elements(tepd.process_definition_json::json#>'{tasks}')#>>'{dependence,dependTaskList,0,dependItemList,0,definitionId}') as source_id,
tepd.release_state as work_flow_publish_status,
tes.start_time as schedule_start_time,
tes.end_time as schedule_end_time,
tes.crontab as crontab,
tes.release_state as schedule_publish_status
from t_ds_process_definition tepd
left join t_ds_schedules tes on tepd.id = tes.process_definition_id
where tepd.project_id = #{projectId}
<if test="ids != null and ids.size()>0">
and tepd.id in
<foreach collection="ids" index="index" item="i" open="(" separator="," close=")">
#{i}
</foreach>
</if>
) a
where (a.is_depend_work_flow = 1 and source_id is not null) or (a.is_depend_work_flow = 0)
group by a.work_flow_id,a.work_flow_name,a.is_depend_work_flow,a.work_flow_publish_status,a.schedule_start_time,
a.schedule_end_time,a.crontab,a.schedule_publish_status
</select>
<select id="querySourceTarget" resultType="org.apache.dolphinscheduler.dao.entity.WorkFlowRelation" databaseId="mysql">
select id as target_work_flow_id,#{id} as source_work_flow_id
from t_ds_process_definition t
where json_extract(t.process_definition_json, '$**.dependItemList') is not null
and find_in_set(#{id}, replace(replace(replace(json_extract(t.process_definition_json, '$**.definitionId'), '[', ''),']', ''), ' ', '')) > 0
</select>
<select id="querySourceTarget" resultType="org.apache.dolphinscheduler.dao.entity.WorkFlowRelation" databaseId="pg">
select a.work_flow_id as target_work_flow_id,
a.source_id as source_work_flow_id
from (
select tepd.id as work_flow_id,
(json_array_elements(tepd.process_definition_json::json#>'{tasks}')#>>'{dependence,dependTaskList,0,dependItemList,0,definitionId}') as source_id
from t_ds_process_definition tepd
left join t_ds_schedules tes on tepd.id = tes.process_definition_id
where tepd.project_id = 1) a
where source_id = #{id}::text;
</select>
</mapper>
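
For readers unfamiliar with the stored JSON, a hypothetical fragment of t_ds_process_definition.process_definition_json that the paths above would match; the field layout is inferred from the queries themselves (MySQL's json_extract('$**.definitionId') and PostgreSQL's '{dependence,dependTaskList,0,dependItemList,0,definitionId}' both resolve to 1 here), so treat it as illustrative only:

public class ProcessDefinitionJsonSketch {
    // A workflow whose single task depends on workflow definition 1
    static final String PROCESS_DEFINITION_JSON =
            "{\"tasks\":[{"
          +   "\"type\":\"DEPENDENT\","
          +   "\"dependence\":{\"dependTaskList\":[{"
          +     "\"dependItemList\":[{\"definitionId\":1}]"
          +   "}]}"
          + "}]}";
}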

@ -0,0 +1,62 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.mapper;
import org.apache.dolphinscheduler.dao.entity.WorkFlowLineage;
import org.apache.dolphinscheduler.dao.entity.WorkFlowRelation;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.annotation.Rollback;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.transaction.annotation.Transactional;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@RunWith(SpringRunner.class)
@SpringBootTest
@Transactional
@Rollback(true)
public class WorkFlowLineageMapperTest {
@Autowired
private WorkFlowLineageMapper workFlowLineageMapper;
@Test
public void testQueryByName() {
List<WorkFlowLineage> workFlowLineages = workFlowLineageMapper.queryByName("test",1);
Assert.assertNotEquals(workFlowLineages.size(), 0);
}
@Test
public void testQueryByIds() {
Set<Integer> ids = new HashSet<>();
ids.add(1);
List<WorkFlowLineage> workFlowLineages = workFlowLineageMapper.queryByIds(ids,1);
Assert.assertNotEquals(workFlowLineages.size(), 0);
}
@Test
public void testQuerySourceTarget() {
List<WorkFlowRelation> workFlowRelations = workFlowLineageMapper.querySourceTarget(1);
Assert.assertNotEquals(workFlowRelations.size(), 0);
}
}

@ -116,7 +116,6 @@ const pages = glob.sync(['*/!(_*).html'], { cwd: viewDir }).map(p => {
minify: minifierConfig
})
})
const baseConfig = {
entry: jsEntry,
output: {

@ -17,7 +17,9 @@
<template>
<div class="main-layout-box" :class="!isProjectsList ? '' : 'no'">
<m-secondary-menu :type="'projects'" v-if="!isProjectsList"></m-secondary-menu>
<router-view></router-view>
<transition name="fade">
<router-view></router-view>
</transition>
</div>
</template>
<script>

@ -0,0 +1,63 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div ref="graph-grid" class="graph-grid"></div>
</template>
<script>
import echarts from 'echarts'
import { mapActions, mapState, mapMutations } from 'vuex'
import graphGridOption from './graphGridOption'
export default {
name: 'graphGrid',
data () {
return {}
},
props: {
id: String,
locations: Array,
connects: Array,
isShowLabel: Boolean
},
methods: {
init () {
},
},
created () {
},
mounted () {
const graphGrid = echarts.init(this.$refs['graph-grid'])
graphGrid.setOption(graphGridOption(this.locations, this.connects, this.sourceWorkFlowId, this.isShowLabel), true)
graphGrid.on('click', (params) => {
// Jump to the definition page
this.$router.push({ path: `/projects/definition/list/${params.data.id}`})
});
},
components: {},
computed: {
...mapState('kinship', ['locations', 'connects', 'sourceWorkFlowId'])
},
}
</script>
<style lang="scss" rel="stylesheet/scss">
.graph-grid {
width: 100%;
height: calc(100vh - 100px);
background: url("./img/dag_bg.png");
}
</style>

@ -0,0 +1,145 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import _ from 'lodash';
import i18n from '@/module/i18n/index.js'
const getCategory = (categoryDic, { workFlowPublishStatus, schedulePublishStatus, id }, sourceWorkFlowId) => {
if (id === sourceWorkFlowId) return categoryDic['active']
switch (true) {
case workFlowPublishStatus === '0':
return categoryDic['0'];
case workFlowPublishStatus === '1' && schedulePublishStatus === '0':
return categoryDic['10'];
case workFlowPublishStatus === '1' && schedulePublishStatus === '1':
default:
return categoryDic['1'];
}
}
export default function (locations, links, sourceWorkFlowId, isShowLabel) {
const categoryDic = {
'active': { color: '#2D8DF0', category: i18n.$t('KinshipStateActive')},
'1': { color: '#00C800', category: i18n.$t('KinshipState1')},
'0': { color: '#999999', category: i18n.$t('KinshipState0')},
'10': { color: '#FF8F05', category: i18n.$t('KinshipState10')},
}
const newData = _.map(locations, (item) => {
const { color, category } = getCategory(categoryDic, item, sourceWorkFlowId)
return {
...item,
emphasis: {
itemStyle: {
color
},
},
category
}
});
const categories = [
{ name: categoryDic.active.category},
{ name: categoryDic['1'].category},
{ name: categoryDic['0'].category},
{ name: categoryDic['10'].category},
]
let option = {
tooltip: {
trigger: 'item',
triggerOn: 'mousemove',
backgroundColor: '#2D303A',
padding: [8, 12],
formatter: (params) => {
if (!params.data.name) return '';
const { name, scheduleStartTime, scheduleEndTime, crontab, workFlowPublishStatus, schedulePublishStatus } = params.data;
const str = `
工作流名字${name}<br/>
调度开始时间${scheduleStartTime}<br/>
调度结束时间${scheduleEndTime}<br/>
crontab表达式${crontab}<br/>
工作流发布状态${workFlowPublishStatus}<br/>
调度发布状态${schedulePublishStatus}<br/>
`
return str;
},
color: '#2D303A',
textStyle: {
rich: {
a: {
fontSize: 12,
color: '#2D303A',
lineHeight: 12,
align: 'left',
padding: [4, 4, 4, 4]
},
}
},
},
color: [categoryDic.active.color, categoryDic['1'].color, categoryDic['0'].color, categoryDic['10'].color],
legend: [{
orient: 'horizontal',
top: 6,
left: 6,
data: categories,
}],
series: [{
type: 'graph',
layout: 'force',
nodeScaleRatio: 1.2,
draggable: true,
animation: false,
data: newData,
roam: true,
symbol: 'roundRect',
symbolSize: 70,
categories,
label: {
show: isShowLabel,
position: 'inside',
formatter: (params) => {
if (!params.data.name) return '';
const str = params.data.name.split('_').map(item => `{a|${item}\n}`).join('')
return str;
},
color: '#222222',
textStyle: {
rich: {
a: {
fontSize: 12,
color: '#222222',
lineHeight: 12,
align: 'left',
padding: [4, 4, 4, 4]
},
}
}
},
edgeSymbol: ['circle', 'arrow'],
edgeSymbolSize: [4, 12],
force: {
repulsion: 1000,
edgeLength: 300
},
links: links,
lineStyle: {
color: '#999999'
}
}]
};
return option
}

Binary file not shown (new image, 5.0 KiB)

@ -0,0 +1,169 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="home-main index-model">
<div class="project-kinship-content">
<div class="search-bar">
<x-select filterable clearable
:placeholder="$t('Process Name')"
@on-change="onChange"
:style="inputFocusStyle"
v-tooltip="tooltipOption(currentItemName)"
size="small">
<x-option
v-for="work in workList"
:key="work.id"
:value="work.id"
:label="work.name"
v-tooltip="tooltipOption(work.name)"
>
</x-option>
</x-select>
<x-button type="primary"
icon="ans-icon-dot-circle"
size="small"
v-tooltip.small.top.start="$t('Reset')"
@click="reset"
></x-button>
<x-button type="ghost"
icon="ans-icon-eye"
size="small"
v-tooltip.small.top="$t('Dag label display control')"
@click="changeLabel"
></x-button>
</div>
<graph-grid v-if="!isLoading && !!locations.length" :isShowLabel="isShowLabel"></graph-grid>
<template v-if="!isLoading && !locations.length">
<m-no-data style="height: 100%;"></m-no-data>
</template>
</div>
<m-spin :is-spin="isLoading" :fullscreen="false"></m-spin>
</div>
</template>
<script>
import _ from 'lodash'
import { mapActions, mapState } from 'vuex'
import mSpin from '@/module/components/spin/spin'
import mNoData from '@/module/components/noData/noData'
import listUrlParamHandle from '@/module/mixin/listUrlParamHandle'
import graphGrid from './_source/graphGrid.vue'
export default {
name: 'projects-kinship-index',
components: { graphGrid, mSpin, mNoData },
data () {
return {
isLoading: true,
isShowLabel: true,
currentItemName: '',
}
},
props: {},
methods: {
...mapActions('kinship', ['getWorkFlowList','getWorkFlowDAG']),
/**
* init
*/
init () {
this.isLoading = true
// Promise Get node needs data
Promise.all([
// get process definition
this.getWorkFlowList(),
this.getWorkFlowDAG(),
]).then((data) => {
this.isLoading = false
}).catch(() => {
this.isLoading = false
})
},
/**
* reset
*/
reset() {
this.isLoading = true;
this.$nextTick(() => {
this.isLoading = false;
})
},
async onChange(item) {
const { value, label } = item || {};
this.isLoading = true;
this.currentItemName = label;
try {
await this.getWorkFlowDAG(value);
} catch (error) {
this.$message.error(error.msg || '')
}
this.isLoading = false;
},
tooltipOption(text) {
return {
text,
maxWidth: '500px',
placement: 'top',
theme: 'dark',
triggerEvent: 'mouseenter',
large: false,
}
},
changeLabel() {
this.isLoading = true;
this.isShowLabel = !this.isShowLabel;
this.$nextTick(() => {
this.isLoading = false;
})
}
},
watch: {
// router
'$route' (a) {
// url no params get instance list
}
},
created () {
this.init()
},
computed: {
...mapState('kinship', ['locations', 'workList']),
inputFocusStyle() {
return `width:280px`
},
},
mounted () {
}
}
</script>
<style lang="scss" rel="stylesheet/scss">
.project-kinship-content {
position: relative;
width: 100%;
height: calc(100vh - 100px);
background: url("./_source/img/dag_bg.png");
.search-bar {
position: absolute;
right: 8px;
top: 10px;
z-index: 2;
.ans-input {
transition: width 300ms ease-in-out;
}
}
}
</style>

@ -57,6 +57,14 @@ const router = new Router({
title: `${i18n.$t('Project Home')}`
}
},
{
path: '/projects/kinship',
name: 'projects-kinship',
component: resolve => require(['../pages/projects/pages/kinship/index'], resolve),
meta: {
title: `${i18n.$t('Kinship')}`
}
},
{
path: '/projects/list',
name: 'projects-list',

@ -17,6 +17,7 @@
import Vue from 'vue'
import Vuex from 'vuex'
import dag from './dag'
import kinship from './kinship'
import projects from './projects'
import resource from './resource'
import security from './security'
@ -28,6 +29,7 @@ export default new Vuex.Store({
modules: {
dag,
projects,
kinship,
resource,
security,
datasource,

@ -0,0 +1,89 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import _ from 'lodash'
import io from '@/module/io'
import localStore from '@/module/util/localStorage'
export default {
/**
* Get workFlow DAG
*/
getWorkFlowList ({ state }, payload) {
const projectId = localStore.getItem('projectId');
return new Promise((resolve, reject) => {
const url = `lineages/${projectId}/list-name`;
io.get(url, {
searchVal: payload,
}, res => {
const workList = [];
if (res.data) {
_.map(res.data, (item) => {
workList.push({
id: `${item.workFlowId}`,
name: item.workFlowName,
})
})
}
state.workList = workList /* JSON.parse(connects) */
resolve(res.data)
}).catch(res => {
reject(res)
})
})
},
/**
* Get workFlow DAG
*/
getWorkFlowDAG ({ state }, payload) {
const projectId = localStore.getItem('projectId');
return new Promise((resolve, reject) => {
const url = `lineages/${projectId}/list-ids`;
io.get(url, {
ids: payload,
}, res => {
let locations = [];
let connects = [];
if (res.data.workFlowList) {
locations = _.uniqBy(res.data.workFlowList, 'workFlowId').map((item) => ({
id: `${item.workFlowId}`,
name: item.workFlowName,
workFlowPublishStatus: item.workFlowPublishStatus,
scheduleStartTime: item.scheduleStartTime,
scheduleEndTime: item.scheduleEndTime,
crontab: item.crontab,
schedulePublishStatus: item.schedulePublishStatus
}))
}
if (res.data.workFlowRelationList) {
connects = _.map(res.data.workFlowRelationList, (item) => ({
source: `${item.sourceWorkFlowId}`, // should be string, or connects will not show by echarts
target: `${item.targetWorkFlowId}`, // should be string, or connects will not show by echarts
}))
}
state.sourceWorkFlowId = payload || '';
// locations
state.locations = locations /* JSON.parse(locations) */
// connects
state.connects = connects /* JSON.parse(connects) */
resolve(res.data)
}).catch(res => {
reject(res)
})
})
},
}

@ -0,0 +1,19 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export default {
}

@ -0,0 +1,30 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import actions from './actions'
import getters from './getters'
import mutations from './mutations'
import state from './state'
export default {
strict: true,
namespaced: true,
state,
getters,
mutations,
actions
}

@ -0,0 +1,17 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export default {}

@ -0,0 +1,23 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export default {
sourceWorkFlowId: '',
workList: [],
locations: [],
connects: []
}

@ -31,8 +31,17 @@ const menu = {
children: []
},
{
name: `${i18n.$t('Process')}`,
name: `${i18n.$t('Kinship')}`,
id: 1,
path: 'projects-kinship',
isOpen: true,
disabled: true,
icon: 'ans-icon-node',
children: []
},
{
name: `${i18n.$t('Process')}`,
id: 2,
path: '',
isOpen: true,
disabled: true,

@ -545,8 +545,8 @@ export default {
'0 means unlimited by count': '0 means unlimited',
'Modify User': 'Modify User',
'Whether directory': 'Whether directory',
Yes: 'Yes',
No: 'No',
'Yes': 'Yes',
'No': 'No',
'Hadoop Custom Params': 'Hadoop Params',
'Sqoop Advanced Parameters': 'Sqoop Params',
'Sqoop Job Name': 'Job Name',
@ -605,6 +605,13 @@ export default {
'Successful branch flow and failed branch flow are required': 'conditions node Successful and failed branch flow are required',
'Unauthorized or deleted resources': 'Unauthorized or deleted resources',
'Please delete all non-existent resources': 'Please delete all non-existent resources',
'Kinship': 'Workflow relationship',
'Reset': 'Reset',
'KinshipStateActive': 'Active',
'KinshipState1': 'Online',
'KinshipState0': 'Workflow is not online',
'KinshipState10': 'Scheduling is not online',
'Dag label display control': 'Dag label display control',
'Enable': 'Enable',
'Timeout Settings': 'Timeout Settings',
'Connect Timeout':'Connect Timeout',

@ -605,6 +605,13 @@ export default {
'Successful branch flow and failed branch flow are required': 'conditions节点成功和失败分支流转必填',
'Unauthorized or deleted resources': '未授权或已删除资源',
'Please delete all non-existent resources': '请删除所有未授权或已删除资源',
'Kinship': '工作流关系',
'Reset': '重置',
'KinshipStateActive': '当前选择',
'KinshipState1': '已上线',
'KinshipState0': '工作流未上线',
'KinshipState10': '调度未上线',
'Dag label display control': 'Dag节点名称显隐',
'Enable': '启用',
'Disable': '停用',
'The Worker group no longer exists, please select the correct Worker group!': '该Worker分组已经不存在请选择正确的Worker分组',

@ -552,6 +552,7 @@
<finalName>apache-dolphinscheduler-incubating-${project.version}</finalName>
<pluginManagement>
<plugins>
@ -733,7 +734,9 @@
<include>**/api/service/UserAlertGroupServiceTest.java</include>
<include>**/api/service/UsersServiceTest.java</include>
<include>**/api/service/WorkerGroupServiceTest.java</include>
<include>**/api/service/WorkFlowLineageServiceTest.java</include>
<include>**/api/controller/ProcessDefinitionControllerTest.java</include>
<include>**/api/controller/WorkFlowLineageControllerTest.java</include>
<include>**/api/utils/exportprocess/DataSourceParamTest.java</include>
<include>**/api/utils/exportprocess/DependentParamTest.java</include>
<include>**/api/utils/CheckUtilsTest.java</include>

@ -0,0 +1,6 @@
.idea
.settings
package-lock.json
.classpath
.project
node_modules