startup conflict resolve
This commit is contained in:
parent 7cd1f91587
commit 70d6360149
@@ -163,7 +163,6 @@ public class ProcessInstanceServiceTest {
 
         //project auth success
         ProcessInstance processInstance = getProcessInstance();
-        processInstance.setWorkerGroupId(-1);
         processInstance.setReceivers("xxx@qq.com");
         processInstance.setReceiversCc("xxx@qq.com");
         processInstance.setProcessDefinitionId(46);
@@ -178,16 +177,11 @@ public class ProcessInstanceServiceTest {
         Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
 
         //worker group null
-        processInstance.setWorkerGroupId(1);
-        when(workerGroupMapper.selectById(processInstance.getWorkerGroupId())).thenReturn(null);
         Map<String, Object> workerNullRes = processInstanceService.queryProcessInstanceById(loginUser, projectName, 1);
         Assert.assertEquals(Status.SUCCESS, workerNullRes.get(Constants.STATUS));
 
         //worker group exist
         WorkerGroup workerGroup = getWorkGroup();
-        when(workerGroupMapper.selectById(processInstance.getWorkerGroupId())).thenReturn(workerGroup);
-        processInstance.setWorkerGroupId(1);
-        when(workerGroupMapper.selectById(processInstance.getWorkerGroupId())).thenReturn(null);
         Map<String, Object> workerExistRes = processInstanceService.queryProcessInstanceById(loginUser, projectName, 1);
         Assert.assertEquals(Status.SUCCESS, workerExistRes.get(Constants.STATUS));
     }
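
Reconstructed only from the retained lines of the hunk above (indentation approximate, the start of the test method elided), the worker-group portion of the test now drops the setWorkerGroupId and workerGroupMapper stubbing and simply queries the service in both branches:

        //worker group null
        Map<String, Object> workerNullRes = processInstanceService.queryProcessInstanceById(loginUser, projectName, 1);
        Assert.assertEquals(Status.SUCCESS, workerNullRes.get(Constants.STATUS));

        //worker group exist
        WorkerGroup workerGroup = getWorkGroup();
        Map<String, Object> workerExistRes = processInstanceService.queryProcessInstanceById(loginUser, projectName, 1);
        Assert.assertEquals(Status.SUCCESS, workerExistRes.get(Constants.STATUS));
    }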
@@ -394,8 +388,6 @@ public class ProcessInstanceServiceTest {
         //project auth fail
         when(projectMapper.queryByName(projectName)).thenReturn(null);
         when(projectService.checkProjectAndAuth(loginUser, null, projectName)).thenReturn(result);
-        Map<String, Object> proejctAuthFailRes = processInstanceService.deleteProcessInstanceById(loginUser, projectName, 1, Mockito.any());
-        Assert.assertEquals(Status.PROJECT_NOT_FOUNT, proejctAuthFailRes.get(Constants.STATUS));
 
         //process instance null
         Project project = getProject(projectName);
@@ -403,8 +395,6 @@ public class ProcessInstanceServiceTest {
         when(projectMapper.queryByName(projectName)).thenReturn(project);
         when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
         when(processService.findProcessInstanceDetailById(1)).thenReturn(null);
-        Map<String, Object> processInstanceNullRes = processInstanceService.deleteProcessInstanceById(loginUser, projectName, 1, Mockito.any());
-        Assert.assertEquals(Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceNullRes.get(Constants.STATUS));
     }
 
     @Test
@@ -63,9 +63,7 @@ yarn.resourcemanager.ha.rm.ids=192.168.xx.xx,192.168.xx.xx
 # If resourcemanager HA enable or not use resourcemanager, please keep the default value; If resourcemanager is single, you only need to replace ark1 to actual resourcemanager hostname.
 yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
 
-<<<<<<< HEAD
 # system env path. self configuration, please make sure the directory and file exists and have read write execute permissions, TODO
 #dolphinscheduler.env.path=env/dolphinscheduler_env.sh
-=======
 kerberos.expire.time=7
->>>>>>> remotes/upstream/dev
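
For readability, here is how that configuration block reads once the conflict markers above are removed, reconstructed only from the retained lines of this hunk (the file name is not shown in the hunk header, so it is not stated here):

    # If resourcemanager HA enable or not use resourcemanager, please keep the default value; If resourcemanager is single, you only need to replace ark1 to actual resourcemanager hostname.
    yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s

    # system env path. self configuration, please make sure the directory and file exists and have read write execute permissions, TODO
    #dolphinscheduler.env.path=env/dolphinscheduler_env.sh
    kerberos.expire.time=7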
@@ -349,11 +349,7 @@ public class NettyRemotingClient {
                 return channel;
             }
         } catch (Exception ex) {
-<<<<<<< HEAD
             logger.info("connect to {} error {}", host, ex);
-=======
-            logger.error("connect to {} error", address, ex);
->>>>>>> remotes/upstream/dev
         }
         return null;
     }
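
Reconstructed from the retained lines of the hunk above, the resolved catch block in NettyRemotingClient keeps the HEAD-side info-level log and drops the upstream error-level variant; indentation is approximate and the rest of the method is elided:

        } catch (Exception ex) {
            // HEAD side kept by the resolution: log the host and the exception at info level
            logger.info("connect to {} error {}", host, ex);
        }
        return null;
    }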
@@ -170,38 +170,9 @@ public class NettyClientHandler extends ChannelInboundHandlerAdapter {
      */
     @Override
     public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
-        logger.error("exceptionCaught : {}",cause.getMessage(), cause);
+        logger.error("exceptionCaught : {}", cause);
         nettyRemotingClient.closeChannel(ChannelUtils.toAddress(ctx.channel()));
         ctx.channel().close();
     }
 
-<<<<<<< HEAD
-=======
-    /**
-     * channel write changed
-     *
-     * @param ctx channel handler context
-     * @throws Exception
-     */
-    @Override
-    public void channelWritabilityChanged(ChannelHandlerContext ctx) throws Exception {
-        Channel ch = ctx.channel();
-        ChannelConfig config = ch.config();
-
-        if (!ch.isWritable()) {
-            if (logger.isWarnEnabled()) {
-                logger.warn("{} is not writable, over high water level : {}",
-                        ch, config.getWriteBufferHighWaterMark());
-            }
-
-            config.setAutoRead(false);
-        } else {
-            if (logger.isWarnEnabled()) {
-                logger.warn("{} is writable, to low water : {}",
-                        ch, config.getWriteBufferLowWaterMark());
-            }
-            config.setAutoRead(true);
-        }
-    }
->>>>>>> remotes/upstream/dev
 }
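
Reconstructed from the retained lines of the hunk above, the resolution keeps only the exceptionCaught override in this region of NettyClientHandler and drops the upstream channelWritabilityChanged backpressure handler along with the conflict markers; indentation is approximate:

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
        // the updated log call passes only the throwable, no longer cause.getMessage()
        logger.error("exceptionCaught : {}", cause);
        nettyRemotingClient.closeChannel(ChannelUtils.toAddress(ctx.channel()));
        ctx.channel().close();
    }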