From ce0e3d98ddd23d031652a7a181d1e753422f7d44 Mon Sep 17 00:00:00 2001 From: lanzhengjian Date: Thu, 21 Nov 2024 14:11:39 +0800 Subject: [PATCH 1/4] support turbo plugin --- CHANGELOG.md | 9 + demo/pom.xml | 6 +- engine/pom.xml | 9 +- .../turbo/engine/bo/FlowInstanceBO.java | 20 + .../turbo/engine/bo/NodeInstance.java | 21 + .../turbo/engine/bo/NodeInstanceBO.java | 33 +- .../turbo/engine/common/EntityPOEnum.java | 52 + .../turbo/engine/common/ErrorEnum.java | 1 + .../engine/common/ExtendRuntimeContext.java | 61 + .../turbo/engine/common/PluginTypeEnum.java | 38 + .../turbo/engine/common/RuntimeContext.java | 27 +- .../turbo/engine/config/PluginConfig.java | 80 + .../engine/config/TurboMybatisConfig.java | 20 + .../didiglobal/turbo/engine/dao/BaseDAO.java | 5 + .../turbo/engine/dao/InstanceDataDAO.java | 28 + .../turbo/engine/dao/NodeInstanceDAO.java | 10 + .../engine/dao/mapper/InstanceDataMapper.java | 4 + .../dao/mapper/NodeInstanceLogMapper.java | 4 +- .../engine/dao/mapper/NodeInstanceMapper.java | 6 +- .../dao/provider/NodeInstanceLogProvider.java | 23 +- .../dao/provider/NodeInstanceProvider.java | 32 +- .../turbo/engine/entity/CommonPO.java | 21 + .../turbo/engine/entity/NodeInstancePO.java | 9 + .../engine/exception/TurboException.java | 2 +- .../engine/executor/ElementExecutor.java | 2 + .../engine/executor/ExecutorFactory.java | 43 +- .../turbo/engine/executor/FlowExecutor.java | 7 +- .../engine/executor/RuntimeExecutor.java | 18 +- .../interceptor/MyBatisInterceptor.java | 144 ++ .../turbo/engine/model/FlowElement.java | 3 +- .../turbo/engine/param/RuntimeTaskParam.java | 12 + .../engine/plugin/CustomOperationHandler.java | 16 + .../CustomOperationHandlerRegistry.java | 32 + .../turbo/engine/plugin/ElementPlugin.java | 13 + .../plugin/ExpressionCalculatorPlugin.java | 7 + .../engine/plugin/IdGeneratorPlugin.java | 7 + .../turbo/engine/plugin/ListenerPlugin.java | 4 + .../turbo/engine/plugin/Plugin.java | 24 + .../plugin/manager/AbstractPluginManager.java | 74 + .../engine/plugin/manager/BasePlugin.java | 19 + .../plugin/manager/DefaultPluginManager.java | 104 ++ .../engine/plugin/manager/PluginManager.java | 19 + .../engine/processor/DefinitionProcessor.java | 21 +- .../engine/processor/RuntimeProcessor.java | 40 +- .../turbo/engine/result/CommonResult.java | 4 +- .../turbo/engine/result/RuntimeResult.java | 86 +- .../engine/util/MapToObjectConverter.java | 33 + .../engine/util/PluginPropertiesUtil.java | 59 + .../engine/util/PluginSqlExecutorUtil.java | 139 ++ .../didiglobal/turbo/engine/util/SPIUtil.java | 34 + .../util/impl/GroovyExpressionCalculator.java | 2 - .../validator/ElementValidatorFactory.java | 43 +- .../resources/turbo.db.create/turbo.mysql.sql | 1 + parallel-plugin/pom.xml | 58 + .../plugin/InclusiveGatewayElementPlugin.java | 63 + .../plugin/ParallelGatewayElementPlugin.java | 64 + .../turbo/plugin/common/Constants.java | 12 + .../plugin/common/ExtendFlowElementType.java | 8 + .../turbo/plugin/common/MergeStrategy.java | 34 + .../plugin/common/ParallelErrorEnum.java | 49 + .../common/ParallelNodeInstanceStatus.java | 12 + .../plugin/common/ParallelRuntimeContext.java | 30 + .../plugin/config/ParallelPluginConfig.java | 26 + .../turbo/plugin/config/ThreadPoolConfig.java | 23 + .../dao/ParallelNodeInstanceHandler.java | 125 ++ .../dao/ParallelNodeInstanceLogHandler.java | 122 ++ .../mapper/ParallelNodeInstanceLogMapper.java | 20 + .../mapper/ParallelNodeInstanceMapper.java | 20 + .../entity/ParallelNodeInstanceLogPO.java | 33 + 
.../plugin/entity/ParallelNodeInstancePO.java | 32 + .../executor/AbstractGatewayExecutor.java | 532 ++++++ .../plugin/executor/AsynTaskExecutor.java | 15 + .../plugin/executor/BranchMergeAnyOne.java | 53 + .../plugin/executor/BranchMergeCustom.java | 25 + .../plugin/executor/BranchMergeJoinAll.java | 112 ++ .../plugin/executor/BranchMergeStrategy.java | 130 ++ .../turbo/plugin/executor/DataMergeAll.java | 41 + .../plugin/executor/DataMergeCustom.java | 22 + .../turbo/plugin/executor/DataMergeNone.java | 30 + .../plugin/executor/DataMergeStrategy.java | 24 + .../executor/InclusiveGatewayExecutor.java | 25 + .../plugin/executor/MergeStrategyFactory.java | 61 + .../executor/ParallelGatewayExecutor.java | 16 + .../turbo/plugin/model/InclusiveGateway.java | 6 + .../turbo/plugin/model/ParallelGateway.java | 6 + .../service/ParallelNodeInstanceService.java | 61 + .../turbo/plugin/util/ExecutorUtil.java | 182 ++ .../validator/InclusiveGatewayValidator.java | 8 + .../validator/ParallelGatewayValidator.java | 8 + ...diglobal.turbo.engine.plugin.ElementPlugin | 2 + .../src/main/resources/plugin.properties | 3 + .../main/resources/sql/parallelGateway.sql | 11 + .../dao/ParallelNodeInstanceHandlerTest.java | 82 + .../ParallelNodeInstanceLogHandlerTest.java | 22 + .../processor/RuntimeProcessorTest.java | 618 +++++++ .../turbo/plugin/runner/BaseTest.java | 14 + .../plugin/runner/TestEngineApplication.java | 18 + .../turbo/plugin/util/EntityBuilder.java | 1483 +++++++++++++++++ .../src/test/resources/application.properties | 6 + .../src/test/resources/logback-spring.xml | 35 + .../src/test/resources/plugin.properties | 12 + pom.xml | 8 +- 102 files changed, 5842 insertions(+), 91 deletions(-) create mode 100644 engine/src/main/java/com/didiglobal/turbo/engine/common/EntityPOEnum.java create mode 100644 engine/src/main/java/com/didiglobal/turbo/engine/common/ExtendRuntimeContext.java create mode 100644 engine/src/main/java/com/didiglobal/turbo/engine/common/PluginTypeEnum.java create mode 100644 engine/src/main/java/com/didiglobal/turbo/engine/config/PluginConfig.java create mode 100644 engine/src/main/java/com/didiglobal/turbo/engine/config/TurboMybatisConfig.java create mode 100644 engine/src/main/java/com/didiglobal/turbo/engine/interceptor/MyBatisInterceptor.java create mode 100644 engine/src/main/java/com/didiglobal/turbo/engine/plugin/CustomOperationHandler.java create mode 100644 engine/src/main/java/com/didiglobal/turbo/engine/plugin/CustomOperationHandlerRegistry.java create mode 100644 engine/src/main/java/com/didiglobal/turbo/engine/plugin/ElementPlugin.java create mode 100644 engine/src/main/java/com/didiglobal/turbo/engine/plugin/ExpressionCalculatorPlugin.java create mode 100644 engine/src/main/java/com/didiglobal/turbo/engine/plugin/IdGeneratorPlugin.java create mode 100644 engine/src/main/java/com/didiglobal/turbo/engine/plugin/ListenerPlugin.java create mode 100644 engine/src/main/java/com/didiglobal/turbo/engine/plugin/Plugin.java create mode 100644 engine/src/main/java/com/didiglobal/turbo/engine/plugin/manager/AbstractPluginManager.java create mode 100644 engine/src/main/java/com/didiglobal/turbo/engine/plugin/manager/BasePlugin.java create mode 100644 engine/src/main/java/com/didiglobal/turbo/engine/plugin/manager/DefaultPluginManager.java create mode 100644 engine/src/main/java/com/didiglobal/turbo/engine/plugin/manager/PluginManager.java create mode 100644 engine/src/main/java/com/didiglobal/turbo/engine/util/MapToObjectConverter.java create mode 100644 
engine/src/main/java/com/didiglobal/turbo/engine/util/PluginPropertiesUtil.java create mode 100644 engine/src/main/java/com/didiglobal/turbo/engine/util/PluginSqlExecutorUtil.java create mode 100644 engine/src/main/java/com/didiglobal/turbo/engine/util/SPIUtil.java create mode 100644 parallel-plugin/pom.xml create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/InclusiveGatewayElementPlugin.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/ParallelGatewayElementPlugin.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/Constants.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/ExtendFlowElementType.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/MergeStrategy.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/ParallelErrorEnum.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/ParallelNodeInstanceStatus.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/ParallelRuntimeContext.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/config/ParallelPluginConfig.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/config/ThreadPoolConfig.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/dao/ParallelNodeInstanceHandler.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/dao/ParallelNodeInstanceLogHandler.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/dao/mapper/ParallelNodeInstanceLogMapper.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/dao/mapper/ParallelNodeInstanceMapper.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/entity/ParallelNodeInstanceLogPO.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/entity/ParallelNodeInstancePO.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/AbstractGatewayExecutor.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/AsynTaskExecutor.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/BranchMergeAnyOne.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/BranchMergeCustom.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/BranchMergeJoinAll.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/BranchMergeStrategy.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/DataMergeAll.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/DataMergeCustom.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/DataMergeNone.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/DataMergeStrategy.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/InclusiveGatewayExecutor.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/MergeStrategyFactory.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/ParallelGatewayExecutor.java create mode 100644 
parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/model/InclusiveGateway.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/model/ParallelGateway.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/service/ParallelNodeInstanceService.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/util/ExecutorUtil.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/validator/InclusiveGatewayValidator.java create mode 100644 parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/validator/ParallelGatewayValidator.java create mode 100644 parallel-plugin/src/main/resources/META-INF/services/com.didiglobal.turbo.engine.plugin.ElementPlugin create mode 100644 parallel-plugin/src/main/resources/plugin.properties create mode 100644 parallel-plugin/src/main/resources/sql/parallelGateway.sql create mode 100644 parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/dao/ParallelNodeInstanceHandlerTest.java create mode 100644 parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/dao/ParallelNodeInstanceLogHandlerTest.java create mode 100644 parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/processor/RuntimeProcessorTest.java create mode 100644 parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/runner/BaseTest.java create mode 100644 parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/runner/TestEngineApplication.java create mode 100644 parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/util/EntityBuilder.java create mode 100644 parallel-plugin/src/test/resources/application.properties create mode 100644 parallel-plugin/src/test/resources/logback-spring.xml create mode 100644 parallel-plugin/src/test/resources/plugin.properties diff --git a/CHANGELOG.md b/CHANGELOG.md index f9e21c5d..19a4a59b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,15 @@ All notable changes to this project will be documented in this file. 
+## [1.2.0] - 2024-11-21 + +### Feature +- Support the plugin extension mechanism +- Support parallel gateway and inclusive gateway through plugins + +### Bugfix +- Fix v1.1.1 release issues + ## [1.1.1] - 2023-06-26 ### Bugfix - Fix v1.1.0 release issues diff --git a/demo/pom.xml b/demo/pom.xml index 9225c30e..6cff96cb 100644 --- a/demo/pom.xml +++ b/demo/pom.xml @@ -6,13 +6,13 @@ com.didiglobal.turbo turbo - 1.1.1 + 1.2.0 4.0.0 demo - 1.1.1 + 1.2.0 jar @@ -20,7 +20,7 @@ 1.8 1.5.10.RELEASE - 1.1.1 + 1.2.0 1.4.200 diff --git a/engine/pom.xml b/engine/pom.xml index cea2d87a..6d42d172 100644 --- a/engine/pom.xml +++ b/engine/pom.xml @@ -5,13 +5,13 @@ 4.0.0 engine - 1.1.1 + 1.2.0 jar turbo com.didiglobal.turbo - 1.1.1 + 1.2.0 2.3.7 @@ -83,6 +83,11 @@ com.google.guava guava + + com.zaxxer + HikariCP + 3.4.5 + \ No newline at end of file diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/bo/FlowInstanceBO.java b/engine/src/main/java/com/didiglobal/turbo/engine/bo/FlowInstanceBO.java index ef4a0ef0..f41b8ca8 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/bo/FlowInstanceBO.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/bo/FlowInstanceBO.java @@ -2,12 +2,16 @@ import com.google.common.base.MoreObjects; +import java.util.HashMap; +import java.util.Map; + public class FlowInstanceBO { private String flowInstanceId; private String flowDeployId; private String flowModuleId; private Integer status; private String parentFlowInstanceId; + private Map properties = new HashMap<>(); public String getFlowInstanceId() { return flowInstanceId; @@ -49,6 +53,22 @@ public void setParentFlowInstanceId(String parentFlowInstanceId) { this.parentFlowInstanceId = parentFlowInstanceId; } + public Map getProperties() { + return properties; + } + + public void setProperties(Map properties) { + this.properties = properties; + } + + public Object get(String key) { + return properties.get(key); + } + + public void put(String key, Object value) { + properties.put(key, value); + } + @Override public String toString() { return MoreObjects.toStringHelper(this) diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/bo/NodeInstance.java b/engine/src/main/java/com/didiglobal/turbo/engine/bo/NodeInstance.java index 95e7fa2f..23cf7c01 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/bo/NodeInstance.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/bo/NodeInstance.java @@ -4,7 +4,9 @@ import com.google.common.base.MoreObjects; import java.util.Date; +import java.util.HashMap; import java.util.List; +import java.util.Map; public class NodeInstance extends ElementInstance { private String nodeInstanceId; @@ -12,6 +14,7 @@ public class NodeInstance extends ElementInstance { private List subNodeResultList; private Date createTime; private Date modifyTime; + private Map properties = new HashMap<>(); public String getNodeInstanceId() { return nodeInstanceId; @@ -53,6 +56,24 @@ public void setFlowElementType(int flowElementType) { this.flowElementType = flowElementType; } + @Override + public Map getProperties() { + return properties; + } + + @Override + public void setProperties(Map properties) { + this.properties = properties; + } + + public Object get(String key) { + return properties.get(key); + } + + public void put(String key, Object value) { + properties.put(key, value); + } + @Override public String toString() { return MoreObjects.toStringHelper(this) diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/bo/NodeInstanceBO.java
b/engine/src/main/java/com/didiglobal/turbo/engine/bo/NodeInstanceBO.java index 227f96b6..984569b1 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/bo/NodeInstanceBO.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/bo/NodeInstanceBO.java @@ -2,7 +2,11 @@ import com.google.common.base.MoreObjects; -public class NodeInstanceBO { +import java.io.Serializable; +import java.util.HashMap; +import java.util.Map; + +public class NodeInstanceBO implements Serializable { //used while updateById private Long id; private String nodeInstanceId; @@ -11,6 +15,8 @@ public class NodeInstanceBO { private String sourceNodeKey; private String instanceDataId; private int status; + private int nodeType; + private Map properties = new HashMap<>(); public Long getId() { return id; @@ -68,6 +74,30 @@ public void setStatus(int status) { this.status = status; } + public int getNodeType() { + return nodeType; + } + + public void setNodeType(int nodeType) { + this.nodeType = nodeType; + } + + public Map getProperties() { + return properties; + } + + public void setProperties(Map properties) { + this.properties = properties; + } + + public Object get(String key) { + return properties.get(key); + } + + public void put(String key, Object value) { + properties.put(key, value); + } + @Override public String toString() { return MoreObjects.toStringHelper(this) @@ -78,6 +108,7 @@ public String toString() { .add("sourceNodeKey", sourceNodeKey) .add("instanceDataId", instanceDataId) .add("status", status) + .add("nodeType", nodeType) .toString(); } } diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/common/EntityPOEnum.java b/engine/src/main/java/com/didiglobal/turbo/engine/common/EntityPOEnum.java new file mode 100644 index 00000000..6c4b7ff6 --- /dev/null +++ b/engine/src/main/java/com/didiglobal/turbo/engine/common/EntityPOEnum.java @@ -0,0 +1,52 @@ +package com.didiglobal.turbo.engine.common; + +import com.didiglobal.turbo.engine.entity.CommonPO; +import com.didiglobal.turbo.engine.entity.FlowDefinitionPO; +import com.didiglobal.turbo.engine.entity.FlowDeploymentPO; +import com.didiglobal.turbo.engine.entity.FlowInstanceMappingPO; +import com.didiglobal.turbo.engine.entity.FlowInstancePO; +import com.didiglobal.turbo.engine.entity.InstanceDataPO; +import com.didiglobal.turbo.engine.entity.NodeInstanceLogPO; +import com.didiglobal.turbo.engine.entity.NodeInstancePO; + +public enum EntityPOEnum { + FLOW_DEFINITION("em_flow_definition", FlowDefinitionPO.class), + FLOW_DEPLOYMENT("em_flow_deployment", FlowDeploymentPO.class), + FLOW_INSTANCE("ei_flow_instance", FlowInstancePO.class), + NODE_INSTANCE("ei_node_instance", NodeInstancePO.class), + INSTANCE_DATA("ei_instance_data", InstanceDataPO.class), + FLOW_INSTANCE_MAPPING("ei_flow_instance_mapping", FlowInstanceMappingPO.class), + NODE_INSTANCE_LOG("ei_node_instance_log", NodeInstanceLogPO.class); + + private String tableName; + private Class<? extends CommonPO> entityClass; + + EntityPOEnum(String tableName, Class<? extends CommonPO> entityClass) { + this.tableName = tableName; + this.entityClass = entityClass; + } + + public String getTableName() { + return tableName; + } + + public void setTableName(String tableName) { + this.tableName = tableName; + } + + public Class<? extends CommonPO> getEntityClass() { + return entityClass; + } + + public void setEntityClass(Class<? extends CommonPO> entityClass) { + this.entityClass = entityClass; + } + + @Override + public String toString() { + return "EntityPOEnum{" + + "tableName='" + tableName + '\'' + + ", entityClass=" + entityClass.getName() + + '}'; + } +} diff --git
a/engine/src/main/java/com/didiglobal/turbo/engine/common/ErrorEnum.java b/engine/src/main/java/com/didiglobal/turbo/engine/common/ErrorEnum.java index 2dcf092b..38649af0 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/common/ErrorEnum.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/common/ErrorEnum.java @@ -59,6 +59,7 @@ public enum ErrorEnum { GROOVY_CALCULATE_FAILED(4018, "Groovy calculate failed"), GET_CALL_ACTIVITY_MODEL_FAILED(4019, "Get CallActivity model failed"), NO_RECEIVE_SUB_FLOW_INSTANCE(4020, "Do not receive subFlowInstanceId"), + UPDATE_INSTANCE_DATA_FAILED(4021, "Update instanceData failed"), //5000~5999 system errors diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/common/ExtendRuntimeContext.java b/engine/src/main/java/com/didiglobal/turbo/engine/common/ExtendRuntimeContext.java new file mode 100644 index 00000000..dd21ae5d --- /dev/null +++ b/engine/src/main/java/com/didiglobal/turbo/engine/common/ExtendRuntimeContext.java @@ -0,0 +1,61 @@ +package com.didiglobal.turbo.engine.common; + +import com.didiglobal.turbo.engine.bo.NodeInstanceBO; +import com.didiglobal.turbo.engine.exception.TurboException; +import com.didiglobal.turbo.engine.model.FlowElement; +import com.didiglobal.turbo.engine.model.InstanceData; + +import java.io.Serializable; +import java.util.Map; + +public class ExtendRuntimeContext implements Serializable { + /** + * Execution data of this branch + */ + private Map<String, InstanceData> branchExecuteDataMap; + /** + * Suspended node instance of this branch + */ + private NodeInstanceBO branchSuspendNodeInstance; + /** + * Current node model of this branch + */ + private FlowElement currentNodeModel; + + /** + * Exception thrown by this branch + */ + private TurboException exception; + + public Map<String, InstanceData> getBranchExecuteDataMap() { + return branchExecuteDataMap; + } + + public void setBranchExecuteDataMap(Map<String, InstanceData> branchExecuteDataMap) { + this.branchExecuteDataMap = branchExecuteDataMap; + } + + public NodeInstanceBO getBranchSuspendNodeInstance() { + return branchSuspendNodeInstance; + } + + public void setBranchSuspendNodeInstance(NodeInstanceBO branchSuspendNodeInstance) { + this.branchSuspendNodeInstance = branchSuspendNodeInstance; + } + + public FlowElement getCurrentNodeModel() { + return currentNodeModel; + } + + public void setCurrentNodeModel(FlowElement currentNodeModel) { + this.currentNodeModel = currentNodeModel; + } + + public TurboException getException() { + return exception; + } + + public void setException(TurboException exception) { + this.exception = exception; + } +} diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/common/PluginTypeEnum.java b/engine/src/main/java/com/didiglobal/turbo/engine/common/PluginTypeEnum.java new file mode 100644 index 00000000..33779bff --- /dev/null +++ b/engine/src/main/java/com/didiglobal/turbo/engine/common/PluginTypeEnum.java @@ -0,0 +1,38 @@ +package com.didiglobal.turbo.engine.common; + +import com.didiglobal.turbo.engine.plugin.ElementPlugin; +import com.didiglobal.turbo.engine.plugin.ExpressionCalculatorPlugin; +import com.didiglobal.turbo.engine.plugin.IdGeneratorPlugin; +import com.didiglobal.turbo.engine.plugin.ListenerPlugin; +import com.didiglobal.turbo.engine.plugin.Plugin; + +public enum PluginTypeEnum { + EXPRESSION_CALCULATOR_PLUGIN("expressionCalculatorPlugin", ExpressionCalculatorPlugin.class), + ELEMENT_PLUGIN("elementPlugin", ElementPlugin.class), + ID_GENERATOR_PLUGIN("idGeneratorPlugin", IdGeneratorPlugin.class), + LISTENER_PLUGIN("listenerPlugin", ListenerPlugin.class); + + private String pluginType; + private Class pluginClass; + + PluginTypeEnum(String pluginType, Class
pluginClass) { + this.pluginType = pluginType; + this.pluginClass = pluginClass; + } + + public String getPluginType() { + return pluginType; + } + + public void setPluginType(String pluginType) { + this.pluginType = pluginType; + } + + public Class getPluginClass() { + return pluginClass; + } + + public void setPluginClass(Class pluginClass) { + this.pluginClass = pluginClass; + } +} diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/common/RuntimeContext.java b/engine/src/main/java/com/didiglobal/turbo/engine/common/RuntimeContext.java index 70c4860c..5dcf9ee8 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/common/RuntimeContext.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/common/RuntimeContext.java @@ -6,11 +6,14 @@ import com.didiglobal.turbo.engine.result.RuntimeResult; import com.google.common.base.MoreObjects; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Stack; -public class RuntimeContext { +public class RuntimeContext implements Serializable { //0.parent info private RuntimeContext parentRuntimeContext; @@ -27,7 +30,7 @@ public class RuntimeContext { private String flowInstanceId; private int flowInstanceStatus; private NodeInstanceBO suspendNodeInstance; //point to the userTaskInstance to commit/rollback - private List nodeInstanceList; //processed nodeInstance list + private List nodeInstanceList = new ArrayList<>(); //processed nodeInstance list private Stack suspendNodeInstanceStack; // suspendNodeInstance Stack: commitNode > ... > currentNode //2.2 current info @@ -45,6 +48,10 @@ public class RuntimeContext { private String callActivityFlowModuleId; // from top to bottom transmit callActivityFlowModuleId private List callActivityRuntimeResultList; // from bottom to top transmit callActivityRuntimeResultList + // 3.other + private Map extendProperties = new HashMap<>(16); + private List extendRuntimeContextList = new ArrayList<>(); + public RuntimeContext getParentRuntimeContext() { return parentRuntimeContext; } @@ -189,6 +196,22 @@ public void setCallActivityRuntimeResultList(List callActivityRun this.callActivityRuntimeResultList = callActivityRuntimeResultList; } + public Map getExtendProperties() { + return extendProperties; + } + + public void setExtendProperties(Map extendProperties) { + this.extendProperties = extendProperties; + } + + public List getExtendRuntimeContextList() { + return extendRuntimeContextList; + } + + public void setExtendRuntimeContextList(List extendRuntimeContextList) { + this.extendRuntimeContextList = extendRuntimeContextList; + } + @Override public String toString() { return MoreObjects.toStringHelper(this) diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/config/PluginConfig.java b/engine/src/main/java/com/didiglobal/turbo/engine/config/PluginConfig.java new file mode 100644 index 00000000..097dc216 --- /dev/null +++ b/engine/src/main/java/com/didiglobal/turbo/engine/config/PluginConfig.java @@ -0,0 +1,80 @@ +package com.didiglobal.turbo.engine.config; + +import com.didiglobal.turbo.engine.plugin.ExpressionCalculatorPlugin; +import com.didiglobal.turbo.engine.plugin.IdGeneratorPlugin; +import com.didiglobal.turbo.engine.plugin.manager.DefaultPluginManager; +import com.didiglobal.turbo.engine.plugin.manager.PluginManager; +import com.didiglobal.turbo.engine.util.ExpressionCalculator; +import com.didiglobal.turbo.engine.util.IdGenerator; +import 
com.didiglobal.turbo.engine.util.StrongUuidGenerator; +import com.didiglobal.turbo.engine.util.impl.GroovyExpressionCalculator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.beans.factory.support.DefaultListableBeanFactory; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +import javax.annotation.Resource; + +import java.util.List; + +@Configuration +public class PluginConfig { + private static final Logger LOGGER = LoggerFactory.getLogger(PluginConfig.class); + @Value("${turbo.plugin.manager.custom-class:#{null}}") + private String customManagerClass; + + @Resource + private DefaultListableBeanFactory beanFactory; + + /** + * Use the custom PluginManager if one is specified; otherwise fall back to the default DefaultPluginManager. + * @return + */ + @Bean + public PluginManager getPluginManager() { + if (null == customManagerClass) { + LOGGER.info("No custom PluginManager specified, using default PluginManager."); + DefaultPluginManager pluginManager = new DefaultPluginManager(beanFactory); + return pluginManager; + } else { + try { + Class clazz = Class.forName(customManagerClass); + return (PluginManager) clazz.getDeclaredConstructor(DefaultListableBeanFactory.class).newInstance(beanFactory); + } catch (Exception e) { + throw new RuntimeException("Failed to instantiate custom PluginManager", e); + } + } + } + + /** + * Prefer the expression calculator provided by a plugin; if several plugins provide one, only the first is used; fall back to the default implementation when none is found. + * @param pluginManager + * @return + */ + @Bean + public ExpressionCalculator getExpressionCalculator(PluginManager pluginManager) { + List<ExpressionCalculatorPlugin> expressionCalculatorPlugins = pluginManager.getPluginsFor(ExpressionCalculatorPlugin.class); + if (!expressionCalculatorPlugins.isEmpty()) { + LOGGER.info("Found expression calculator plugin: {}", expressionCalculatorPlugins.get(0).getName()); + return expressionCalculatorPlugins.get(0).getExpressionCalculator(); + } + return new GroovyExpressionCalculator(); + } + + /** + * Prefer the id generator provided by a plugin; if several plugins provide one, only the first is used; fall back to the default implementation when none is found. + * @param pluginManager + * @return + */ + @Bean + public IdGenerator getIdGenerator(PluginManager pluginManager) { + List<IdGeneratorPlugin> idGeneratorPlugins = pluginManager.getPluginsFor(IdGeneratorPlugin.class); + if (!idGeneratorPlugins.isEmpty()) { + LOGGER.info("Found id generator plugin: {}", idGeneratorPlugins.get(0).getName()); + return idGeneratorPlugins.get(0).getIdGenerator(); + } + return new StrongUuidGenerator(); + } +} diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/config/TurboMybatisConfig.java b/engine/src/main/java/com/didiglobal/turbo/engine/config/TurboMybatisConfig.java new file mode 100644 index 00000000..477ef8b1 --- /dev/null +++ b/engine/src/main/java/com/didiglobal/turbo/engine/config/TurboMybatisConfig.java @@ -0,0 +1,20 @@ +package com.didiglobal.turbo.engine.config; + +import com.didiglobal.turbo.engine.interceptor.MyBatisInterceptor; +import org.apache.ibatis.session.SqlSessionFactory; +import org.springframework.context.annotation.Configuration; + +import javax.annotation.PostConstruct; +import javax.annotation.Resource; + +@Configuration +public class TurboMybatisConfig { + @Resource + private SqlSessionFactory sqlSessionFactory; + + @PostConstruct + public void addCustomInterceptor() { + // register the custom MyBatis interceptor + sqlSessionFactory.getConfiguration().addInterceptor(new MyBatisInterceptor(sqlSessionFactory)); + } +} diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/dao/BaseDAO.java
b/engine/src/main/java/com/didiglobal/turbo/engine/dao/BaseDAO.java index a630b79b..2004ecb8 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/dao/BaseDAO.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/dao/BaseDAO.java @@ -4,10 +4,15 @@ import com.baomidou.mybatisplus.core.mapper.BaseMapper; import com.baomidou.mybatisplus.extension.service.IService; import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; +import com.didiglobal.turbo.engine.plugin.manager.PluginManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import javax.annotation.Resource; + @DS("engine") public class BaseDAO<M extends BaseMapper<T>, T> extends ServiceImpl<M, T> implements IService<T> { protected static final Logger LOGGER = LoggerFactory.getLogger(BaseDAO.class); + @Resource + protected PluginManager pluginManager; } diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/dao/InstanceDataDAO.java b/engine/src/main/java/com/didiglobal/turbo/engine/dao/InstanceDataDAO.java index a6b59363..2888ebb6 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/dao/InstanceDataDAO.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/dao/InstanceDataDAO.java @@ -1,7 +1,9 @@ package com.didiglobal.turbo.engine.dao; +import com.didiglobal.turbo.engine.common.ErrorEnum; import com.didiglobal.turbo.engine.dao.mapper.InstanceDataMapper; import com.didiglobal.turbo.engine.entity.InstanceDataPO; +import com.didiglobal.turbo.engine.exception.TurboException; import org.springframework.stereotype.Repository; @Repository @@ -36,4 +38,30 @@ public int insert(InstanceDataPO instanceDataPO) { } return -1; } + + /** + * update instanceData + * @param instanceDataPO + * @return + */ + public int updateData(InstanceDataPO instanceDataPO) { + try { + return baseMapper.updateData(instanceDataPO); + } catch (Exception e) { + LOGGER.error("update instance data exception.||instanceDataPO={}", instanceDataPO, e); + throw new TurboException(ErrorEnum.UPDATE_INSTANCE_DATA_FAILED); + } + } + + /** + * insert or update instanceData + * @param mergePo + * @return + */ + public int insertOrUpdate(InstanceDataPO mergePo) { + if (mergePo.getId() != null) { + return updateData(mergePo); + } + return insert(mergePo); + } } diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/dao/NodeInstanceDAO.java b/engine/src/main/java/com/didiglobal/turbo/engine/dao/NodeInstanceDAO.java index fc31a072..d4159768 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/dao/NodeInstanceDAO.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/dao/NodeInstanceDAO.java @@ -133,4 +133,14 @@ public void updateStatus(NodeInstancePO nodeInstancePO, int status) { nodeInstancePO.setModifyTime(new Date()); baseMapper.updateStatus(nodeInstancePO); } + + /** + * select nodeInstancePOList by flowInstanceId and nodeKey + * @param flowInstanceId + * @param nodeKey + * @return + */ + public List<NodeInstancePO> selectByFlowInstanceIdAndNodeKey(String flowInstanceId, String nodeKey) { + return baseMapper.selectByFlowInstanceIdAndNodeKey(flowInstanceId, nodeKey); + } } diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/dao/mapper/InstanceDataMapper.java b/engine/src/main/java/com/didiglobal/turbo/engine/dao/mapper/InstanceDataMapper.java index 86c65c39..c7cbe87b 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/dao/mapper/InstanceDataMapper.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/dao/mapper/InstanceDataMapper.java @@ -4,6 +4,7 @@ import com.didiglobal.turbo.engine.entity.InstanceDataPO; import
org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; +import org.apache.ibatis.annotations.Update; public interface InstanceDataMapper extends BaseMapper { @@ -14,4 +15,7 @@ InstanceDataPO select(@Param("flowInstanceId") String flowInstanceId, @Select("SELECT * FROM ei_instance_data WHERE flow_instance_id=#{flowInstanceId} ORDER BY id DESC LIMIT 1") InstanceDataPO selectRecentOne(@Param("flowInstanceId") String flowInstanceId); + + @Update("UPDATE ei_instance_data SET instance_data=#{instanceData} WHERE id=#{id}") + int updateData(InstanceDataPO instanceDataPO); } diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/dao/mapper/NodeInstanceLogMapper.java b/engine/src/main/java/com/didiglobal/turbo/engine/dao/mapper/NodeInstanceLogMapper.java index dbec1c8e..8ed2ed86 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/dao/mapper/NodeInstanceLogMapper.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/dao/mapper/NodeInstanceLogMapper.java @@ -4,6 +4,7 @@ import com.didiglobal.turbo.engine.dao.provider.NodeInstanceLogProvider; import com.didiglobal.turbo.engine.entity.NodeInstanceLogPO; import org.apache.ibatis.annotations.InsertProvider; +import org.apache.ibatis.annotations.Options; import org.apache.ibatis.annotations.Param; import java.util.List; @@ -11,7 +12,8 @@ public interface NodeInstanceLogMapper extends BaseMapper { @InsertProvider(type = NodeInstanceLogProvider.class, method = "batchInsert") + @Options(useGeneratedKeys = true, keyProperty = "list.id") boolean batchInsert(@Param("flowInstanceId") String flowInstanceId, - @Param("nodeInstanceLogList") List nodeInstanceLogList); + @Param("list") List nodeInstanceLogList); } diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/dao/mapper/NodeInstanceMapper.java b/engine/src/main/java/com/didiglobal/turbo/engine/dao/mapper/NodeInstanceMapper.java index 69620f8a..8d453354 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/dao/mapper/NodeInstanceMapper.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/dao/mapper/NodeInstanceMapper.java @@ -4,6 +4,7 @@ import com.didiglobal.turbo.engine.dao.provider.NodeInstanceProvider; import com.didiglobal.turbo.engine.entity.NodeInstancePO; import org.apache.ibatis.annotations.InsertProvider; +import org.apache.ibatis.annotations.Options; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import org.apache.ibatis.annotations.Update; @@ -41,7 +42,10 @@ NodeInstancePO selectBySourceInstanceId(@Param("flowInstanceId") String flowInst @InsertProvider(type = NodeInstanceProvider.class, method = "batchInsert") + @Options(useGeneratedKeys = true, keyProperty = "list.id") boolean batchInsert(@Param("flowInstanceId") String flowInstanceId, - @Param("nodeInstanceList") List nodeInstanceList); + @Param("list") List nodeInstanceList); + @Select("SELECT * FROM ei_node_instance WHERE flow_instance_id=#{flowInstanceId} AND node_key=#{nodeKey}") + List selectByFlowInstanceIdAndNodeKey(@Param("flowInstanceId") String flowInstanceId, @Param("nodeKey") String nodeKey); } diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/dao/provider/NodeInstanceLogProvider.java b/engine/src/main/java/com/didiglobal/turbo/engine/dao/provider/NodeInstanceLogProvider.java index 51e2d3b9..05f0a190 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/dao/provider/NodeInstanceLogProvider.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/dao/provider/NodeInstanceLogProvider.java @@ -6,6 +6,7 
@@ import java.util.List; import java.util.Map; +@SuppressWarnings("unchecked") public class NodeInstanceLogProvider { private static final String TABLE_NAME = "ei_node_instance_log"; private static final String COLUMN_ARRAY = "(node_instance_id, flow_instance_id, instance_data_id, " + @@ -17,19 +18,19 @@ public String batchInsert(Map map) { .append("INSERT INTO ").append(TABLE_NAME).append(COLUMN_ARRAY).append(" VALUES "); MessageFormat mf = new MessageFormat("(" + - "#'{'nodeInstanceLogList[{0}].nodeInstanceId}, " + - "#'{'nodeInstanceLogList[{0}].flowInstanceId}, " + - "#'{'nodeInstanceLogList[{0}].instanceDataId}, " + - "#'{'nodeInstanceLogList[{0}].nodeKey}, " + - "#'{'nodeInstanceLogList[{0}].type}, " + - "#'{'nodeInstanceLogList[{0}].status}, " + - "#'{'nodeInstanceLogList[{0}].createTime}, " + - "#'{'nodeInstanceLogList[{0}].archive}," + - "#'{'nodeInstanceLogList[{0}].tenant}," + - "#'{'nodeInstanceLogList[{0}].caller}" + + "#'{'list[{0}].nodeInstanceId}, " + + "#'{'list[{0}].flowInstanceId}, " + + "#'{'list[{0}].instanceDataId}, " + + "#'{'list[{0}].nodeKey}, " + + "#'{'list[{0}].type}, " + + "#'{'list[{0}].status}, " + + "#'{'list[{0}].createTime}, " + + "#'{'list[{0}].archive}," + + "#'{'list[{0}].tenant}," + + "#'{'list[{0}].caller}" + ")"); - List nodeInstanceList = (List) map.get("nodeInstanceLogList"); + List nodeInstanceList = (List) map.get("list"); for (int i = 0; i < nodeInstanceList.size(); i++) { stringBuilder.append(mf.format(new Object[]{i})); if (i < nodeInstanceList.size() - 1) { diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/dao/provider/NodeInstanceProvider.java b/engine/src/main/java/com/didiglobal/turbo/engine/dao/provider/NodeInstanceProvider.java index 99916241..962f1114 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/dao/provider/NodeInstanceProvider.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/dao/provider/NodeInstanceProvider.java @@ -6,11 +6,12 @@ import java.util.List; import java.util.Map; +@SuppressWarnings("unchecked") public class NodeInstanceProvider { private static final String TABLE_NAME = "ei_node_instance"; private static final String COLUMN_ARRAY = "(flow_instance_id, flow_deploy_id, instance_data_id, " + - "node_instance_id, source_node_instance_id, node_key, source_node_key, status, create_time, modify_time, " + + "node_instance_id, source_node_instance_id, node_type, node_key, source_node_key, status, create_time, modify_time, " + "archive, tenant, caller)"; public String batchInsert(Map parameters) { @@ -19,22 +20,23 @@ public String batchInsert(Map parameters) { .append("INSERT INTO ").append(TABLE_NAME).append(COLUMN_ARRAY).append(" VALUES "); MessageFormat mf = new MessageFormat("(" + - "#'{'nodeInstanceList[{0}].flowInstanceId}, " + - "#'{'nodeInstanceList[{0}].flowDeployId}, " + - "#'{'nodeInstanceList[{0}].instanceDataId}, " + - "#'{'nodeInstanceList[{0}].nodeInstanceId}, " + - "#'{'nodeInstanceList[{0}].sourceNodeInstanceId}, " + - "#'{'nodeInstanceList[{0}].nodeKey}, " + - "#'{'nodeInstanceList[{0}].sourceNodeKey}, " + - "#'{'nodeInstanceList[{0}].status}, " + - "#'{'nodeInstanceList[{0}].createTime}, " + - "#'{'nodeInstanceList[{0}].modifyTime}, " + - "#'{'nodeInstanceList[{0}].archive}," + - "#'{'nodeInstanceList[{0}].tenant}, " + - "#'{'nodeInstanceList[{0}].caller}" + + "#'{'list[{0}].flowInstanceId}, " + + "#'{'list[{0}].flowDeployId}, " + + "#'{'list[{0}].instanceDataId}, " + + "#'{'list[{0}].nodeInstanceId}, " + + "#'{'list[{0}].sourceNodeInstanceId}, " + + 
"#'{'list[{0}].nodeType}, " + + "#'{'list[{0}].nodeKey}, " + + "#'{'list[{0}].sourceNodeKey}, " + + "#'{'list[{0}].status}, " + + "#'{'list[{0}].createTime}, " + + "#'{'list[{0}].modifyTime}, " + + "#'{'list[{0}].archive}," + + "#'{'list[{0}].tenant}, " + + "#'{'list[{0}].caller}" + ")"); - List nodeInstanceList = (List) parameters.get("nodeInstanceList"); + List nodeInstanceList = (List) parameters.get("list"); for (int i = 0; i < nodeInstanceList.size(); i++) { stringBuilder.append(mf.format(new Object[]{i})); if (i < nodeInstanceList.size() - 1) { diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/entity/CommonPO.java b/engine/src/main/java/com/didiglobal/turbo/engine/entity/CommonPO.java index 9cf9a3df..ace4ac68 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/entity/CommonPO.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/entity/CommonPO.java @@ -1,9 +1,12 @@ package com.didiglobal.turbo.engine.entity; import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableField; import com.baomidou.mybatisplus.annotation.TableId; import java.util.Date; +import java.util.HashMap; +import java.util.Map; public class CommonPO { @TableId(type = IdType.AUTO) @@ -12,6 +15,8 @@ public class CommonPO { private String caller; private Date createTime; private Integer archive = 0; + @TableField(exist = false) + private Map properties = new HashMap<>(); public Long getId() { return id; @@ -52,4 +57,20 @@ public Integer getArchive() { public void setArchive(Integer archive) { this.archive = archive; } + + public Map getProperties() { + return properties; + } + + public void setProperties(Map properties) { + this.properties = properties; + } + + public Object get(String key) { + return properties.get(key); + } + + public void put(String key, Object value) { + properties.put(key, value); + } } diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/entity/NodeInstancePO.java b/engine/src/main/java/com/didiglobal/turbo/engine/entity/NodeInstancePO.java index 6bf46820..32574088 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/entity/NodeInstancePO.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/entity/NodeInstancePO.java @@ -15,6 +15,7 @@ public class NodeInstancePO extends CommonPO { private String sourceNodeKey; private Integer status; private Date modifyTime; + private int nodeType; public String getFlowInstanceId() { return flowInstanceId; @@ -87,4 +88,12 @@ public Date getModifyTime() { public void setModifyTime(Date modifyTime) { this.modifyTime = modifyTime; } + + public int getNodeType() { + return nodeType; + } + + public void setNodeType(int nodeType) { + this.nodeType = nodeType; + } } diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/exception/TurboException.java b/engine/src/main/java/com/didiglobal/turbo/engine/exception/TurboException.java index 0022696e..53acb910 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/exception/TurboException.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/exception/TurboException.java @@ -4,7 +4,7 @@ import java.text.MessageFormat; -public class TurboException extends Exception { +public class TurboException extends RuntimeException { private static final String ERROR_MSG_FORMAT = "{0}({1})"; diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/executor/ElementExecutor.java b/engine/src/main/java/com/didiglobal/turbo/engine/executor/ElementExecutor.java index 1efecfde..336d1267 100644 --- 
a/engine/src/main/java/com/didiglobal/turbo/engine/executor/ElementExecutor.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/executor/ElementExecutor.java @@ -83,6 +83,8 @@ protected void preExecute(RuntimeContext runtimeContext) throws ProcessException currentNodeInstance.setSourceNodeInstanceId(sourceNodeInstanceId); currentNodeInstance.setSourceNodeKey(sourceNodeKey); currentNodeInstance.setStatus(NodeInstanceStatus.ACTIVE); + currentNodeInstance.getProperties().putAll(runtimeContext.getExtendProperties()); + currentNodeInstance.setNodeType(runtimeContext.getCurrentNodeModel().getType()); currentNodeInstance.setInstanceDataId(StringUtils.defaultString(runtimeContext.getInstanceDataId(), StringUtils.EMPTY)); runtimeContext.setCurrentNodeInstance(currentNodeInstance); diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/executor/ExecutorFactory.java b/engine/src/main/java/com/didiglobal/turbo/engine/executor/ExecutorFactory.java index 9c939b75..ed887758 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/executor/ExecutorFactory.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/executor/ExecutorFactory.java @@ -6,14 +6,19 @@ import com.didiglobal.turbo.engine.exception.ProcessException; import com.didiglobal.turbo.engine.executor.callactivity.SyncSingleCallActivityExecutor; import com.didiglobal.turbo.engine.model.FlowElement; +import com.didiglobal.turbo.engine.plugin.ElementPlugin; +import com.didiglobal.turbo.engine.plugin.manager.PluginManager; import com.didiglobal.turbo.engine.util.FlowModelUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; +import javax.annotation.PostConstruct; import javax.annotation.Resource; import java.text.MessageFormat; +import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -39,6 +44,26 @@ public class ExecutorFactory { @Resource private SyncSingleCallActivityExecutor syncSingleCallActivityExecutor; + @Resource + private PluginManager pluginManager; + + private final Map<Integer, ElementExecutor> executorMap = new HashMap<>(16); + + /** + * Aggregate the built-in executors and the executors contributed by element plugins. + * A plugin executor can override a built-in one by declaring the same elementType value. + */ + @PostConstruct + public void init() { + executorMap.put(FlowElementType.SEQUENCE_FLOW, sequenceFlowExecutor); + executorMap.put(FlowElementType.START_EVENT, startEventExecutor); + executorMap.put(FlowElementType.END_EVENT, endEventExecutor); + executorMap.put(FlowElementType.USER_TASK, userTaskExecutor); + executorMap.put(FlowElementType.EXCLUSIVE_GATEWAY, exclusiveGatewayExecutor); + List<ElementPlugin> elementPlugins = pluginManager.getPluginsFor(ElementPlugin.class); + elementPlugins.forEach(elementPlugin -> executorMap.put(elementPlugin.getFlowElementType(), elementPlugin.getElementExecutor())); + } + public ElementExecutor getElementExecutor(FlowElement flowElement) throws ProcessException { ElementExecutor elementExecutor = getElementExecutorInternal(flowElement); @@ -54,22 +79,10 @@ public ElementExecutor getElementExecutor(FlowElement flowElement) throws Proces private ElementExecutor getElementExecutorInternal(FlowElement flowElement) { int elementType = flowElement.getType(); - switch (elementType) { - case FlowElementType.START_EVENT: - return startEventExecutor; - case FlowElementType.END_EVENT: - return endEventExecutor; - case FlowElementType.SEQUENCE_FLOW: - return sequenceFlowExecutor; - case FlowElementType.USER_TASK: - return userTaskExecutor; - case FlowElementType.EXCLUSIVE_GATEWAY: - return exclusiveGatewayExecutor; - case
FlowElementType.CALL_ACTIVITY: - return getCallActivityExecutor(flowElement); - default: - return null; + if (elementType == FlowElementType.CALL_ACTIVITY) { + return getCallActivityExecutor(flowElement); } + return executorMap.get(elementType); } private ElementExecutor getCallActivityExecutor(FlowElement flowElement) { diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/executor/FlowExecutor.java b/engine/src/main/java/com/didiglobal/turbo/engine/executor/FlowExecutor.java index d920cf0f..ac9e260b 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/executor/FlowExecutor.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/executor/FlowExecutor.java @@ -28,7 +28,6 @@ import java.util.Date; import java.util.List; import java.util.Map; -import java.util.Set; @Service public class FlowExecutor extends RuntimeExecutor { @@ -122,6 +121,8 @@ private InstanceDataPO buildInstanceDataPO(FlowInstancePO flowInstancePO, Map idGeneratorPlugins = pluginManager.getPluginsFor(IdGeneratorPlugin.class); + if (!idGeneratorPlugins.isEmpty()) { + ID_GENERATOR = idGeneratorPlugins.get(0).getIdGenerator(); + } else { + ID_GENERATOR = new StrongUuidGenerator(); + } + } return ID_GENERATOR.getNextId(); } diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/interceptor/MyBatisInterceptor.java b/engine/src/main/java/com/didiglobal/turbo/engine/interceptor/MyBatisInterceptor.java new file mode 100644 index 00000000..96bd321f --- /dev/null +++ b/engine/src/main/java/com/didiglobal/turbo/engine/interceptor/MyBatisInterceptor.java @@ -0,0 +1,144 @@ +package com.didiglobal.turbo.engine.interceptor; +import com.baomidou.mybatisplus.annotation.TableName; +import com.didiglobal.turbo.engine.common.EntityPOEnum; +import com.didiglobal.turbo.engine.plugin.CustomOperationHandler; +import com.didiglobal.turbo.engine.plugin.CustomOperationHandlerRegistry; +import org.apache.ibatis.executor.Executor; +import org.apache.ibatis.mapping.MappedStatement; +import org.apache.ibatis.mapping.SqlCommandType; +import org.apache.ibatis.plugin.*; +import org.apache.ibatis.session.SqlSessionFactory; +import org.apache.ibatis.session.ResultHandler; +import org.apache.ibatis.session.RowBounds; + +import java.util.List; +import java.util.Map; +import java.util.Properties; + +@Intercepts({ + @Signature(type = Executor.class, method = "update", args = {MappedStatement.class, Object.class}), + @Signature(type = Executor.class, method = "query", args = {MappedStatement.class, Object.class, RowBounds.class, ResultHandler.class}) +}) +public class MyBatisInterceptor implements Interceptor { + + private SqlSessionFactory sqlSessionFactory; + + public MyBatisInterceptor(SqlSessionFactory sqlSessionFactory) { + this.sqlSessionFactory = sqlSessionFactory; + } + + /** + * Intercepts MyBatis invocations so that custom logic can run around query and update operations. + * + * @param invocation context and arguments of the intercepted call + * @return the result of the original invocation + * @throws Throwable any exception the invocation may throw + */ + @Override + public Object intercept(Invocation invocation) throws Throwable { + MappedStatement mappedStatement = (MappedStatement) invocation.getArgs()[0]; + Object parameterObject = invocation.getArgs()[1]; + SqlCommandType commandType = mappedStatement.getSqlCommandType(); + + // proceed with the original statement and capture its result + Object originalResult = invocation.proceed(); + + handleCustomOperation(commandType, mappedStatement, parameterObject, originalResult); + + return originalResult; + } + + /** + * Dispatches custom operations based on the given command type, mapped statement, parameters and result. + * + * @param commandType SQL command type + * @param mappedStatement MyBatis mapped statement + * @param parameterObject input parameter object + * @param
originalResult result of the original query or update + */ + private void handleCustomOperation(SqlCommandType commandType, MappedStatement mappedStatement, Object parameterObject, Object originalResult) { + String tableName = getTableName(parameterObject); + if (tableName == null) { + tableName = getTableName(originalResult); + } + EntityPOEnum entityEnum = getEntityPOEnumByTableName(tableName); + + if (entityEnum != null) { + List<CustomOperationHandler> handlers = CustomOperationHandlerRegistry.getHandlers(entityEnum); + if (handlers != null) { + for (CustomOperationHandler handler : handlers) { + handler.handle(commandType, mappedStatement, parameterObject, originalResult, sqlSessionFactory); + } + } + } + } + + /** + * Resolves the table name for the given object; for a collection or an array, recursively resolves the table name of its first element. + * + * @param object the object to extract the table name from + * @return the table name, or null if the object is null or not annotated + */ + private String getTableName(Object object) { + if (object == null) { + return null; + } + // handle the Map parameter used by batch inserts + if (object instanceof Map) { + Map paramMap = (Map) object; + Object value = paramMap.getOrDefault("list", null); + if (value != null) { + // assume the batch-inserted entities live in the Map under an agreed key such as "list" + if (value instanceof List) { + List list = (List) value; + if (!list.isEmpty()) { + return getTableName(list.get(0)); + } + } else if (value.getClass().isArray()) { + Object[] array = (Object[]) value; + if (array.length > 0) { + return getTableName(array[0]); + } + } else { + return getTableName(value); + } + } + } + // handle a single object + if (object instanceof List) { + List list = (List) object; + if (!list.isEmpty()) { + return getTableName(list.get(0)); + } + } else if (object.getClass().isArray()) { + Object[] array = (Object[]) object; + if (array.length > 0) { + return getTableName(array[0]); + } + } + Class clazz = object.getClass(); + if (clazz.isAnnotationPresent(TableName.class)) { + TableName tableNameAnnotation = clazz.getAnnotation(TableName.class); + return tableNameAnnotation.value(); + } + return null; + } + + private EntityPOEnum getEntityPOEnumByTableName(String tableName) { + for (EntityPOEnum entity : EntityPOEnum.values()) { + if (entity.getTableName().equals(tableName)) { + return entity; + } + } + return null; + } + + @Override + public Object plugin(Object target) { + return Plugin.wrap(target, this); + } + + @Override + public void setProperties(Properties properties) { + } +} diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/model/FlowElement.java b/engine/src/main/java/com/didiglobal/turbo/engine/model/FlowElement.java index 4907f662..7885c657 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/model/FlowElement.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/model/FlowElement.java @@ -2,10 +2,11 @@ import com.google.common.base.MoreObjects; +import java.io.Serializable; import java.util.List; import java.util.Map; -public class FlowElement { +public class FlowElement implements Serializable { private String key; // unique element key within the flow (resourceId) private int type; // stencil type private List outgoing; diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/param/RuntimeTaskParam.java b/engine/src/main/java/com/didiglobal/turbo/engine/param/RuntimeTaskParam.java index 94e333de..af96527a 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/param/RuntimeTaskParam.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/param/RuntimeTaskParam.java @@ -3,11 +3,15 @@ import com.didiglobal.turbo.engine.common.RuntimeContext; import com.google.common.base.MoreObjects; +import java.util.HashMap; +import java.util.Map; + public class RuntimeTaskParam { private String
flowInstanceId; private String taskInstanceId; // For internal transmission runtimeContext private RuntimeContext runtimeContext; + private Map extendProperties = new HashMap<>(16); public String getFlowInstanceId() { return flowInstanceId; @@ -33,6 +37,14 @@ public void setRuntimeContext(RuntimeContext runtimeContext) { this.runtimeContext = runtimeContext; } + public Map getExtendProperties() { + return extendProperties; + } + + public void setExtendProperties(Map extendProperties) { + this.extendProperties = extendProperties; + } + @Override public String toString() { return MoreObjects.toStringHelper(this) diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/plugin/CustomOperationHandler.java b/engine/src/main/java/com/didiglobal/turbo/engine/plugin/CustomOperationHandler.java new file mode 100644 index 00000000..18462f85 --- /dev/null +++ b/engine/src/main/java/com/didiglobal/turbo/engine/plugin/CustomOperationHandler.java @@ -0,0 +1,16 @@ +package com.didiglobal.turbo.engine.plugin; + +import org.apache.ibatis.mapping.MappedStatement; +import org.apache.ibatis.mapping.SqlCommandType; +import org.apache.ibatis.session.SqlSessionFactory; + +public interface CustomOperationHandler { + /** + * Custom extension hook for database table operations. + * @param mappedStatement + * @param parameterObject + * @param originalResult + * @param sqlSessionFactory + */ + void handle(SqlCommandType commandType, MappedStatement mappedStatement, Object parameterObject, Object originalResult, SqlSessionFactory sqlSessionFactory); +} diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/plugin/CustomOperationHandlerRegistry.java b/engine/src/main/java/com/didiglobal/turbo/engine/plugin/CustomOperationHandlerRegistry.java new file mode 100644 index 00000000..3a203945 --- /dev/null +++ b/engine/src/main/java/com/didiglobal/turbo/engine/plugin/CustomOperationHandlerRegistry.java @@ -0,0 +1,32 @@ +package com.didiglobal.turbo.engine.plugin; + +import com.didiglobal.turbo.engine.common.EntityPOEnum; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class CustomOperationHandlerRegistry { + // maps each entity enum to the list of handlers registered for it + private static final Map<EntityPOEnum, List<CustomOperationHandler>> handlerMap = new HashMap<>(); + + /** + * Registers a custom operation handler for the given entity enum. + * + * @param entity the entity enum the handler is associated with + * @param handler the custom operation handler to register + */ + public static void registerHandler(EntityPOEnum entity, CustomOperationHandler handler) { + handlerMap.computeIfAbsent(entity, k -> new ArrayList<>()).add(handler); + } + + /** + * Returns the custom operation handlers associated with the given entity enum. + * + * @param entity the entity enum whose handlers are requested + * @return the handlers associated with the entity enum, or an empty list if none are registered + */ + public static List<CustomOperationHandler> getHandlers(EntityPOEnum entity) { + return handlerMap.getOrDefault(entity, new ArrayList<>()); + } +} diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/plugin/ElementPlugin.java b/engine/src/main/java/com/didiglobal/turbo/engine/plugin/ElementPlugin.java new file mode 100644 index 00000000..ea0b35e1 --- /dev/null +++ b/engine/src/main/java/com/didiglobal/turbo/engine/plugin/ElementPlugin.java @@ -0,0 +1,13 @@ +package com.didiglobal.turbo.engine.plugin; + +import com.didiglobal.turbo.engine.executor.ElementExecutor; +import com.didiglobal.turbo.engine.validator.ElementValidator; + +public interface ElementPlugin extends Plugin { + String ELEMENT_TYPE_PREFIX = "turbo.plugin.element_type."; + ElementExecutor getElementExecutor(); + + ElementValidator getElementValidator(); + + Integer getFlowElementType(); +} diff --git
a/engine/src/main/java/com/didiglobal/turbo/engine/plugin/ExpressionCalculatorPlugin.java b/engine/src/main/java/com/didiglobal/turbo/engine/plugin/ExpressionCalculatorPlugin.java
new file mode 100644
index 00000000..ac3e2469
--- /dev/null
+++ b/engine/src/main/java/com/didiglobal/turbo/engine/plugin/ExpressionCalculatorPlugin.java
@@ -0,0 +1,7 @@
+package com.didiglobal.turbo.engine.plugin;
+
+import com.didiglobal.turbo.engine.util.ExpressionCalculator;
+
+public interface ExpressionCalculatorPlugin extends Plugin {
+    ExpressionCalculator getExpressionCalculator();
+}
diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/plugin/IdGeneratorPlugin.java b/engine/src/main/java/com/didiglobal/turbo/engine/plugin/IdGeneratorPlugin.java
new file mode 100644
index 00000000..2babd9a2
--- /dev/null
+++ b/engine/src/main/java/com/didiglobal/turbo/engine/plugin/IdGeneratorPlugin.java
@@ -0,0 +1,7 @@
+package com.didiglobal.turbo.engine.plugin;
+
+import com.didiglobal.turbo.engine.util.IdGenerator;
+
+public interface IdGeneratorPlugin extends Plugin {
+    IdGenerator getIdGenerator();
+}
diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/plugin/ListenerPlugin.java b/engine/src/main/java/com/didiglobal/turbo/engine/plugin/ListenerPlugin.java
new file mode 100644
index 00000000..61bb6abf
--- /dev/null
+++ b/engine/src/main/java/com/didiglobal/turbo/engine/plugin/ListenerPlugin.java
@@ -0,0 +1,4 @@
+package com.didiglobal.turbo.engine.plugin;
+
+public interface ListenerPlugin extends Plugin {
+}
diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/plugin/Plugin.java b/engine/src/main/java/com/didiglobal/turbo/engine/plugin/Plugin.java
new file mode 100644
index 00000000..8b79427d
--- /dev/null
+++ b/engine/src/main/java/com/didiglobal/turbo/engine/plugin/Plugin.java
@@ -0,0 +1,24 @@
+package com.didiglobal.turbo.engine.plugin;
+
+public interface Plugin {
+    // recommended key format for the turbo plugin switch: turbo.plugin.support.${pluginName}
+    String PLUGIN_SUPPORT_PREFIX = "turbo.plugin.support.";
+
+    // recommended key format for the turbo plugin init SQL file: turbo.plugin.init_sql.${pluginName}
+    String PLUGIN_INIT_SQL_FILE_PREFIX = "turbo.plugin.init_sql.";
+
+    /**
+     * Plugin name; must be a unique identifier.
+     */
+    String getName();
+
+    /**
+     * Plugin switch: whether the plugin is enabled.
+     */
+    Boolean support();
+
+    /**
+     * Plugin initialization.
+     */
+    Boolean init();
+}
diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/plugin/manager/AbstractPluginManager.java b/engine/src/main/java/com/didiglobal/turbo/engine/plugin/manager/AbstractPluginManager.java
new file mode 100644
index 00000000..cc6d90b9
--- /dev/null
+++ b/engine/src/main/java/com/didiglobal/turbo/engine/plugin/manager/AbstractPluginManager.java
@@ -0,0 +1,74 @@
+package com.didiglobal.turbo.engine.plugin.manager;
+
+import com.didiglobal.turbo.engine.common.PluginTypeEnum;
+import com.didiglobal.turbo.engine.plugin.ElementPlugin;
+import com.didiglobal.turbo.engine.plugin.ExpressionCalculatorPlugin;
+import com.didiglobal.turbo.engine.plugin.IdGeneratorPlugin;
+import com.didiglobal.turbo.engine.plugin.ListenerPlugin;
+import com.didiglobal.turbo.engine.plugin.Plugin;
+import com.didiglobal.turbo.engine.util.SPIUtil;
+import org.springframework.beans.factory.support.DefaultListableBeanFactory;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public abstract class AbstractPluginManager implements PluginManager {
+    protected Map<String, List<? extends Plugin>> pluginMap = new HashMap<>();
+    protected DefaultListableBeanFactory beanFactory;
+
+    public AbstractPluginManager() {
+    }
+
+    /**
+     * Loads plugins via SPI. Four plugin categories are currently supported:
+     * ElementPlugin, IdGeneratorPlugin, ExpressionCalculatorPlugin and ListenerPlugin.
+     */
+    protected void loadPlugins() {
+        pluginMap.put(PluginTypeEnum.ELEMENT_PLUGIN.getPluginType(), SPIUtil.loadAllServices(ElementPlugin.class));
+        pluginMap.put(PluginTypeEnum.ID_GENERATOR_PLUGIN.getPluginType(), SPIUtil.loadAllServices(IdGeneratorPlugin.class));
+        pluginMap.put(PluginTypeEnum.EXPRESSION_CALCULATOR_PLUGIN.getPluginType(), SPIUtil.loadAllServices(ExpressionCalculatorPlugin.class));
+        pluginMap.put(PluginTypeEnum.LISTENER_PLUGIN.getPluginType(), SPIUtil.loadAllServices(ListenerPlugin.class));
+    }
+
+    /**
+     * Initializes the plugins; subclasses can override initialize() to customize the initialization logic.
+     */
+    protected void initialize() {
+    }
+
+    @Override
+    public List<Plugin> getPlugins() {
+        List<Plugin> allPlugins = new ArrayList<>();
+        // each map value is a list of plugins, so flatten with addAll;
+        // casting the list itself to Plugin would throw a ClassCastException
+        pluginMap.values().forEach(allPlugins::addAll);
+        return allPlugins;
+    }
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public <T extends Plugin> List<T> getPluginsFor(Class<T> pluginInterface) {
+        switch (pluginInterface.getSimpleName()) {
+            case "ElementPlugin":
+                return (List<T>) pluginMap.get(PluginTypeEnum.ELEMENT_PLUGIN.getPluginType());
+            case "IdGeneratorPlugin":
+                return (List<T>) pluginMap.get(PluginTypeEnum.ID_GENERATOR_PLUGIN.getPluginType());
+            case "ExpressionCalculatorPlugin":
+                return (List<T>) pluginMap.get(PluginTypeEnum.EXPRESSION_CALCULATOR_PLUGIN.getPluginType());
+            case "ListenerPlugin":
+                return (List<T>) pluginMap.get(PluginTypeEnum.LISTENER_PLUGIN.getPluginType());
+            default:
+                return new ArrayList<>();
+        }
+    }
+
+    @Override
+    public Integer countPlugins() {
+        return pluginMap.values().stream().mapToInt(List::size).sum();
+    }
+
+    @Override
+    public Boolean containsPlugin(Class<? extends Plugin> pluginInterface, String pluginName) {
+        List<? extends Plugin> plugins = getPluginsFor(pluginInterface);
+        return plugins.stream().anyMatch(plugin -> plugin.getName().equals(pluginName));
+    }
+}
\ No newline at end of file
diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/plugin/manager/BasePlugin.java b/engine/src/main/java/com/didiglobal/turbo/engine/plugin/manager/BasePlugin.java
new file mode 100644
index 00000000..55df1af4
--- /dev/null
+++ b/engine/src/main/java/com/didiglobal/turbo/engine/plugin/manager/BasePlugin.java
@@ -0,0 +1,19 @@
+package com.didiglobal.turbo.engine.plugin.manager;
+
+import org.springframework.beans.factory.BeanFactory;
+import org.springframework.beans.factory.support.DefaultListableBeanFactory;
+
+/**
+ * Base class for plugins that need Spring support: exposes the bean factory to subclasses.
+ */
+public class BasePlugin {
+    protected DefaultListableBeanFactory beanFactory;
+
+    public BeanFactory getBeanFactory() {
+        return beanFactory;
+    }
+
+    public void setBeanFactory(DefaultListableBeanFactory beanFactory) {
+        this.beanFactory = beanFactory;
+    }
+}
diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/plugin/manager/DefaultPluginManager.java b/engine/src/main/java/com/didiglobal/turbo/engine/plugin/manager/DefaultPluginManager.java
new file mode 100644
index 00000000..a612fcb8
--- /dev/null
+++ b/engine/src/main/java/com/didiglobal/turbo/engine/plugin/manager/DefaultPluginManager.java
@@ -0,0 +1,104 @@
+package com.didiglobal.turbo.engine.plugin.manager;
+
+import com.didiglobal.turbo.engine.common.PluginTypeEnum;
+import com.didiglobal.turbo.engine.plugin.ElementPlugin;
+import com.didiglobal.turbo.engine.plugin.ExpressionCalculatorPlugin;
+import com.didiglobal.turbo.engine.plugin.IdGeneratorPlugin;
+import com.didiglobal.turbo.engine.plugin.ListenerPlugin;
+import com.didiglobal.turbo.engine.plugin.Plugin;
+import com.didiglobal.turbo.engine.util.PluginSqlExecutorUtil;
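// The engine discovers these plugins through the standard java.util.ServiceLoader
// mechanism that SPIUtil wraps, so a third-party plugin only needs an implementation
// class plus a provider-configuration file. A minimal sketch of a custom ListenerPlugin
// follows; the AuditListenerPlugin name and its package are illustrative, not part of
// this patch.
package com.example.turbo.plugin; // hypothetical package

import com.didiglobal.turbo.engine.plugin.ListenerPlugin;
import com.didiglobal.turbo.engine.util.PluginPropertiesUtil;

public class AuditListenerPlugin implements ListenerPlugin {
    private static final String PLUGIN_NAME = "AuditListenerPlugin";

    @Override
    public String getName() {
        // unique identifier; DefaultPluginManager rejects duplicate names in checkSupport()
        return PLUGIN_NAME;
    }

    @Override
    public Boolean support() {
        // honors the recommended switch key turbo.plugin.support.${pluginName}, enabled by default
        return "true".equals(PluginPropertiesUtil.getPropertyValue(PLUGIN_SUPPORT_PREFIX + PLUGIN_NAME, "true"));
    }

    @Override
    public Boolean init() {
        // nothing to set up in this sketch; element plugins typically execute their init SQL here
        return Boolean.TRUE;
    }
}
// ServiceLoader picks the class up from a resource named
// META-INF/services/com.didiglobal.turbo.engine.plugin.ListenerPlugin containing the
// fully qualified class name, mirroring the com.didiglobal.turbo.engine.plugin.ElementPlugin
// service file that parallel-plugin ships.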
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.support.DefaultListableBeanFactory;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+public class DefaultPluginManager extends AbstractPluginManager {
+    private static final Logger LOGGER = LoggerFactory.getLogger(DefaultPluginManager.class);
+
+    public DefaultPluginManager(DefaultListableBeanFactory beanFactory) {
+        this.beanFactory = beanFactory;
+        loadPlugins();
+        initialize();
+    }
+
+    /**
+     * Initializes the plugin manager by initializing each category of plugins.
+     */
+    @Override
+    protected void initialize() {
+        initializePlugins(pluginMap.getOrDefault(PluginTypeEnum.ELEMENT_PLUGIN.getPluginType(), new ArrayList<>()), ElementPlugin.class);
+        initializePlugins(pluginMap.getOrDefault(PluginTypeEnum.ID_GENERATOR_PLUGIN.getPluginType(), new ArrayList<>()), IdGeneratorPlugin.class);
+        initializePlugins(pluginMap.getOrDefault(PluginTypeEnum.EXPRESSION_CALCULATOR_PLUGIN.getPluginType(), new ArrayList<>()), ExpressionCalculatorPlugin.class);
+        initializePlugins(pluginMap.getOrDefault(PluginTypeEnum.LISTENER_PLUGIN.getPluginType(), new ArrayList<>()), ListenerPlugin.class);
+    }
+
+    public DefaultListableBeanFactory getBeanFactory() {
+        return beanFactory;
+    }
+
+    /**
+     * Initializes a list of plugins, checking and configuring each one.
+     *
+     * @param pluginList      the plugins to initialize
+     * @param pluginInterface the plugin interface type, used for logging output
+     */
+    private void initializePlugins(List<? extends Plugin> pluginList, Class<? extends Plugin> pluginInterface) {
+        LOGGER.info("start checking and initializing {} plugins", pluginInterface.getSimpleName());
+        checkSupport(pluginList);
+        pluginList.forEach(plugin -> {
+            try {
+                plugin.init();
+                if (plugin instanceof BasePlugin) {
+                    ((BasePlugin) plugin).setBeanFactory(beanFactory);
+                }
+            } catch (Exception e) {
+                LOGGER.warn("An error occurred while initializing plugin: {}. Error: {}", plugin.getClass().getName(), e.getMessage());
+                throw new RuntimeException(e);
+            }
+        });
+        PluginSqlExecutorUtil.closeDataSource();
+        LOGGER.info("load {} plugin end. count:{}", pluginInterface.getSimpleName(), pluginList.size());
+    }
+
+    /**
+     * Validates that the plugins can be used:
+     * 1. plugin names must not collide;
+     * 2. the plugin must be enabled (disabled plugins are removed from the list);
+     * 3. element plugins must not register duplicate flow element types.
+     *
+     * @param plugins the plugins to validate
+     */
+    private void checkSupport(List<? extends Plugin> plugins) {
+        Set<String> pluginNames = new HashSet<>();
+        Map<Integer, Plugin> pluginTypeMap = new HashMap<>();
+        plugins.removeIf(plugin -> {
+            // check for duplicate plugin names
+            if (!pluginNames.add(plugin.getName())) {
+                throw new RuntimeException("plugin name duplicate: " + plugin.getName());
+            }
+
+            // check whether the plugin is enabled
+            if (!plugin.support()) {
+                LOGGER.info("plugin not support: {}", plugin.getName());
+                return true;
+            }
+
+            // check for duplicate flow element types among element plugins
+            if (plugin instanceof ElementPlugin) {
+                int flowElementType = ((ElementPlugin) plugin).getFlowElementType();
+                if (pluginTypeMap.containsKey(flowElementType)) {
+                    throw new RuntimeException("plugin type duplicate: " + plugin.getName());
+                }
+                pluginTypeMap.put(flowElementType, plugin);
+            }
+            return false;
+        });
+    }
+}
diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/plugin/manager/PluginManager.java b/engine/src/main/java/com/didiglobal/turbo/engine/plugin/manager/PluginManager.java
new file mode 100644
index 00000000..cb5c11bb
--- /dev/null
+++ b/engine/src/main/java/com/didiglobal/turbo/engine/plugin/manager/PluginManager.java
@@ -0,0 +1,19 @@
+package com.didiglobal.turbo.engine.plugin.manager;
+
+import com.didiglobal.turbo.engine.common.EntityPOEnum;
+import com.didiglobal.turbo.engine.dao.BaseDAO;
+import com.didiglobal.turbo.engine.plugin.Plugin;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+public interface PluginManager {
+    List<Plugin> getPlugins();
+
+    <T extends Plugin> List<T> getPluginsFor(Class<T> pluginInterface);
+
+    Integer countPlugins();
+
+    Boolean containsPlugin(Class<? extends Plugin> pluginInterface, String pluginName);
+}
diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/processor/DefinitionProcessor.java b/engine/src/main/java/com/didiglobal/turbo/engine/processor/DefinitionProcessor.java
index 8bc46a74..57b2af6b 100644
--- a/engine/src/main/java/com/didiglobal/turbo/engine/processor/DefinitionProcessor.java
+++ b/engine/src/main/java/com/didiglobal/turbo/engine/processor/DefinitionProcessor.java
@@ -16,6 +16,8 @@
 import com.didiglobal.turbo.engine.param.DeployFlowParam;
 import com.didiglobal.turbo.engine.param.GetFlowModuleParam;
 import com.didiglobal.turbo.engine.param.UpdateFlowParam;
+import com.didiglobal.turbo.engine.plugin.IdGeneratorPlugin;
+import com.didiglobal.turbo.engine.plugin.manager.PluginManager;
 import com.didiglobal.turbo.engine.result.*;
 import com.didiglobal.turbo.engine.util.IdGenerator;
 import com.didiglobal.turbo.engine.util.StrongUuidGenerator;
@@ -27,15 +29,20 @@
 import org.springframework.beans.BeanUtils;
 import org.springframework.stereotype.Component;
 
+import javax.annotation.PostConstruct;
 import javax.annotation.Resource;
 import java.util.Date;
+import java.util.List;
 
 @Component
 public class DefinitionProcessor {
     private static final Logger LOGGER = LoggerFactory.getLogger(DefinitionProcessor.class);
 
-    private static final IdGenerator idGenerator = new StrongUuidGenerator();
+    private static IdGenerator idGenerator;
+
+    @Resource
+    private PluginManager pluginManager;
 
     @Resource
     private ModelValidator modelValidator;
@@ -46,6 +53,16 @@
     @Resource
     private FlowDeploymentDAO flowDeploymentDAO;
 
+    @PostConstruct
+    public void init() {
+        List<IdGeneratorPlugin> idGeneratorPlugins = pluginManager.getPluginsFor(IdGeneratorPlugin.class);
+        if (null == idGeneratorPlugins || idGeneratorPlugins.isEmpty()) {
+            // fall back to the built-in UUID generator when no IdGeneratorPlugin is present
+            idGenerator = new StrongUuidGenerator();
+        } else {
+            idGenerator = idGeneratorPlugins.get(0).getIdGenerator();
+        }
+    }
+
     public CreateFlowResult
create(CreateFlowParam createFlowParam) { CreateFlowResult createFlowResult = new CreateFlowResult(); try { @@ -118,6 +135,8 @@ public DeployFlowResult deploy(DeployFlowParam deployFlowParam) { FlowDeploymentPO flowDeploymentPO = new FlowDeploymentPO(); BeanUtils.copyProperties(flowDefinitionPO, flowDeploymentPO); + // fix primary key duplicated + flowDeploymentPO.setId(null); String flowDeployId = idGenerator.getNextId(); flowDeploymentPO.setFlowDeployId(flowDeployId); flowDeploymentPO.setStatus(FlowDeploymentStatus.DEPLOYED); diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/processor/RuntimeProcessor.java b/engine/src/main/java/com/didiglobal/turbo/engine/processor/RuntimeProcessor.java index 5da6ac51..37c09a6f 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/processor/RuntimeProcessor.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/processor/RuntimeProcessor.java @@ -8,6 +8,7 @@ import com.didiglobal.turbo.engine.bo.NodeInstance; import com.didiglobal.turbo.engine.bo.NodeInstanceBO; import com.didiglobal.turbo.engine.common.ErrorEnum; +import com.didiglobal.turbo.engine.common.ExtendRuntimeContext; import com.didiglobal.turbo.engine.common.FlowElementType; import com.didiglobal.turbo.engine.common.FlowInstanceMappingType; import com.didiglobal.turbo.engine.common.FlowInstanceStatus; @@ -221,16 +222,25 @@ private RuntimeContext buildCommitContext(CommitTaskParam commitTaskParam, FlowI //5. set callActivity msg runtimeContext.setCallActivityFlowModuleId(commitTaskParam.getCallActivityFlowModuleId()); + //6. set extendProperties + runtimeContext.setExtendProperties(commitTaskParam.getExtendProperties()); + return runtimeContext; } private CommitTaskResult buildCommitTaskResult(RuntimeContext runtimeContext) { CommitTaskResult commitTaskResult = new CommitTaskResult(); + if (null != runtimeContext) { + BeanUtils.copyProperties(runtimeContext, commitTaskResult); + } return (CommitTaskResult) fillRuntimeResult(commitTaskResult, runtimeContext); } private CommitTaskResult buildCommitTaskResult(RuntimeContext runtimeContext, TurboException e) { CommitTaskResult commitTaskResult = new CommitTaskResult(); + if (null != runtimeContext) { + BeanUtils.copyProperties(runtimeContext, commitTaskResult); + } return (CommitTaskResult) fillRuntimeResult(commitTaskResult, runtimeContext, e); } @@ -305,16 +315,24 @@ private RuntimeContext buildRollbackContext(RollbackTaskParam rollbackTaskParam, suspendNodeInstance.setNodeInstanceId(realNodeInstanceId); runtimeContext.setSuspendNodeInstance(suspendNodeInstance); + //4. 
set extendProperties + runtimeContext.setExtendProperties(rollbackTaskParam.getExtendProperties()); return runtimeContext; } private RollbackTaskResult buildRollbackTaskResult(RuntimeContext runtimeContext) { RollbackTaskResult rollbackTaskResult = new RollbackTaskResult(); + if (null != runtimeContext) { + BeanUtils.copyProperties(runtimeContext, rollbackTaskResult); + } return (RollbackTaskResult) fillRuntimeResult(rollbackTaskResult, runtimeContext); } private RollbackTaskResult buildRollbackTaskResult(RuntimeContext runtimeContext, TurboException e) { RollbackTaskResult rollbackTaskResult = new RollbackTaskResult(); + if (null != runtimeContext) { + BeanUtils.copyProperties(runtimeContext, rollbackTaskResult); + } return (RollbackTaskResult) fillRuntimeResult(rollbackTaskResult, runtimeContext, e); } @@ -704,8 +722,25 @@ private RuntimeResult fillRuntimeResult(RuntimeResult runtimeResult, RuntimeCont if (runtimeContext != null) { runtimeResult.setFlowInstanceId(runtimeContext.getFlowInstanceId()); runtimeResult.setStatus(runtimeContext.getFlowInstanceStatus()); - runtimeResult.setActiveTaskInstance(buildActiveTaskInstance(runtimeContext.getSuspendNodeInstance(), runtimeContext)); - runtimeResult.setVariables(InstanceDataUtil.getInstanceDataList(runtimeContext.getInstanceDataMap())); + List nodeExecuteResults = Lists.newArrayList(); + + if (null != runtimeContext.getExtendRuntimeContextList() && !runtimeContext.getExtendRuntimeContextList().isEmpty()) { + for (ExtendRuntimeContext extendRuntimeContext : runtimeContext.getExtendRuntimeContextList()) { + RuntimeResult.NodeExecuteResult result = new RuntimeResult.NodeExecuteResult(); + result.setActiveTaskInstance(buildActiveTaskInstance(extendRuntimeContext.getBranchSuspendNodeInstance(), runtimeContext)); + result.setVariables(InstanceDataUtil.getInstanceDataList(extendRuntimeContext.getBranchExecuteDataMap())); + result.setErrCode(extendRuntimeContext.getException().getErrNo()); + result.setErrMsg(extendRuntimeContext.getException().getErrMsg()); + nodeExecuteResults.add(result); + } + } else { + RuntimeResult.NodeExecuteResult result = new RuntimeResult.NodeExecuteResult(); + result.setActiveTaskInstance(buildActiveTaskInstance(runtimeContext.getSuspendNodeInstance(), runtimeContext)); + result.setVariables(InstanceDataUtil.getInstanceDataList(runtimeContext.getInstanceDataMap())); + nodeExecuteResults.add(result); + } + + runtimeResult.setNodeExecuteResults(nodeExecuteResults); } return runtimeResult; } @@ -720,7 +755,6 @@ private NodeInstance buildActiveTaskInstance(NodeInstanceBO nodeInstanceBO, Runt activeNodeInstance.setFlowElementType(flowElement.getType()); activeNodeInstance.setSubNodeResultList(runtimeContext.getCallActivityRuntimeResultList()); - return activeNodeInstance; } diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/result/CommonResult.java b/engine/src/main/java/com/didiglobal/turbo/engine/result/CommonResult.java index 303260b8..9ae7c1c5 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/result/CommonResult.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/result/CommonResult.java @@ -3,7 +3,9 @@ import com.didiglobal.turbo.engine.common.ErrorEnum; import com.google.common.base.MoreObjects; -public class CommonResult { +import java.io.Serializable; + +public class CommonResult implements Serializable { private int errCode; private String errMsg; diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/result/RuntimeResult.java 
b/engine/src/main/java/com/didiglobal/turbo/engine/result/RuntimeResult.java
index 9d33754d..292f5fb1 100644
--- a/engine/src/main/java/com/didiglobal/turbo/engine/result/RuntimeResult.java
+++ b/engine/src/main/java/com/didiglobal/turbo/engine/result/RuntimeResult.java
@@ -5,13 +5,16 @@
 import com.didiglobal.turbo.engine.model.InstanceData;
 import com.google.common.base.MoreObjects;
 
+import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 
 public class RuntimeResult extends CommonResult {
     private String flowInstanceId;
     private int status;
-    private NodeInstance activeTaskInstance;
-    private List<InstanceData> variables;
+    private List<NodeExecuteResult> nodeExecuteResults;
+
+    private Map<String, Object> extendProperties;
 
     public RuntimeResult() {
         super();
@@ -37,31 +40,94 @@ public void setStatus(int status) {
         this.status = status;
     }
 
+    // backward compatibility with the pre-plugin single-result API
     public NodeInstance getActiveTaskInstance() {
-        return activeTaskInstance;
+        if (nodeExecuteResults == null || nodeExecuteResults.isEmpty()) {
+            return null;
+        }
+        return nodeExecuteResults.get(0).activeTaskInstance;
     }
 
+    // backward compatibility: writes through to the first node execute result
    public void setActiveTaskInstance(NodeInstance activeTaskInstance) {
-        this.activeTaskInstance = activeTaskInstance;
+        if (nodeExecuteResults == null) {
+            this.nodeExecuteResults = new ArrayList<>();
+        }
+        if (nodeExecuteResults.isEmpty()) {
+            this.nodeExecuteResults.add(new NodeExecuteResult());
+        }
+        this.nodeExecuteResults.get(0).activeTaskInstance = activeTaskInstance;
     }
 
+    // backward compatibility with the pre-plugin single-result API
     public List<InstanceData> getVariables() {
-        return variables;
+        if (nodeExecuteResults == null || nodeExecuteResults.isEmpty()) {
+            return null;
+        }
+        return nodeExecuteResults.get(0).variables;
     }
 
+    // backward compatibility: writes through to the first node execute result
     public void setVariables(List<InstanceData> variables) {
-        this.variables = variables;
+        if (nodeExecuteResults == null) {
+            this.nodeExecuteResults = new ArrayList<>();
+        }
+        if (nodeExecuteResults.isEmpty()) {
+            this.nodeExecuteResults.add(new NodeExecuteResult());
+        }
+        this.nodeExecuteResults.get(0).variables = variables;
+    }
+
+    public List<NodeExecuteResult> getNodeExecuteResults() {
+        return nodeExecuteResults;
+    }
+
+    public void setNodeExecuteResults(List<NodeExecuteResult> nodeExecuteResults) {
+        this.nodeExecuteResults = nodeExecuteResults;
+    }
+
+    public Map<String, Object> getExtendProperties() {
+        return extendProperties;
+    }
+
+    public void setExtendProperties(Map<String, Object> extendProperties) {
+        this.extendProperties = extendProperties;
     }
 
     @Override
     public String toString() {
         return MoreObjects.toStringHelper(this)
-                .add("errCode", getErrCode())
-                .add("errMsg", getErrMsg())
                 .add("flowInstanceId", flowInstanceId)
                 .add("status", status)
-                .add("activeTaskInstance", activeTaskInstance)
-                .add("variables", variables)
                 .toString();
     }
+
+    public static class NodeExecuteResult extends CommonResult {
+        private NodeInstance activeTaskInstance;
+        private List<InstanceData> variables;
+
+        public NodeInstance getActiveTaskInstance() {
+            return activeTaskInstance;
+        }
+
+        public void setActiveTaskInstance(NodeInstance activeTaskInstance) {
+            this.activeTaskInstance = activeTaskInstance;
+        }
+
+        public List<InstanceData> getVariables() {
+            return variables;
+        }
+
+        public void setVariables(List<InstanceData> variables) {
+            this.variables = variables;
+        }
+
+        @Override
+        public String toString() {
+            return MoreObjects.toStringHelper(this)
+                    .add("activeTaskInstance", activeTaskInstance)
+                    .add("variables", variables)
+                    .toString();
+        }
+    }
 }
diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/util/MapToObjectConverter.java b/engine/src/main/java/com/didiglobal/turbo/engine/util/MapToObjectConverter.java
new file mode 100644
index 00000000..8a427207
--- /dev/null
+++
b/engine/src/main/java/com/didiglobal/turbo/engine/util/MapToObjectConverter.java @@ -0,0 +1,33 @@ +package com.didiglobal.turbo.engine.util; + +import java.lang.reflect.Field; +import java.util.HashMap; +import java.util.Map; + +public class MapToObjectConverter { + + public static T convertMapToObject(Map map, Class clazz) throws IllegalAccessException, InstantiationException { + T obj = clazz.newInstance(); + Field[] fields = clazz.getDeclaredFields(); + for (Field field : fields) { + field.setAccessible(true); + if (map.containsKey(field.getName())) { + Object value = map.get(field.getName()); + if (value!= null && field.getType().isAssignableFrom(value.getClass())) { + field.set(obj, value); + } + } + } + return obj; + } + + public static Map convertObjectToMap(Object obj) throws IllegalAccessException { + Map map = new HashMap<>(); + Field[] fields = obj.getClass().getDeclaredFields(); + for (Field field : fields) { + field.setAccessible(true); + map.put(field.getName(), field.get(obj)); + } + return map; + } +} diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/util/PluginPropertiesUtil.java b/engine/src/main/java/com/didiglobal/turbo/engine/util/PluginPropertiesUtil.java new file mode 100644 index 00000000..3f9cecee --- /dev/null +++ b/engine/src/main/java/com/didiglobal/turbo/engine/util/PluginPropertiesUtil.java @@ -0,0 +1,59 @@ +package com.didiglobal.turbo.engine.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.InputStream; +import java.net.URL; +import java.util.Enumeration; +import java.util.Properties; + +public class PluginPropertiesUtil { + + private static final Logger LOGGER = LoggerFactory.getLogger(PluginPropertiesUtil.class); + + // 用于缓存所有加载并合并后的配置属性 + private static Properties cachedProperties; + + // 标记是否已经完成了配置文件的加载和合并处理 + private static boolean isLoaded = false; + + private static void loadAllPluginProperties() { + if (!isLoaded) { + cachedProperties = new Properties(); + try { + // 通过类加载器获取所有plugin.properties文件的资源URL + Enumeration urls = Thread.currentThread().getContextClassLoader().getResources("plugin.properties"); + while (urls.hasMoreElements()) { + URL url = urls.nextElement(); + Properties properties = new Properties(); + try (InputStream input = url.openStream()) { + properties.load(input); + cachedProperties.putAll(properties); + } + } + } catch (IOException e) { + LOGGER.error("load plugin.properties error", e); + } + isLoaded = true; + } + } + + public static String getPropertyValue(String key, String defaultValue) { + if (!isLoaded) { + loadAllPluginProperties(); + } + if (cachedProperties.get(key) == null) { + return defaultValue; + } + return cachedProperties.getProperty(key); + } + + public static String getPropertyValue(String key) { + if (!isLoaded) { + loadAllPluginProperties(); + } + return cachedProperties.getProperty(key); + } +} \ No newline at end of file diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/util/PluginSqlExecutorUtil.java b/engine/src/main/java/com/didiglobal/turbo/engine/util/PluginSqlExecutorUtil.java new file mode 100644 index 00000000..9e2212e1 --- /dev/null +++ b/engine/src/main/java/com/didiglobal/turbo/engine/util/PluginSqlExecutorUtil.java @@ -0,0 +1,139 @@ +package com.didiglobal.turbo.engine.util; + +import com.zaxxer.hikari.HikariConfig; +import com.zaxxer.hikari.HikariDataSource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.util.StringUtils; + +import java.io.BufferedReader; +import 
java.io.FileReader; +import java.io.IOException; +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.Properties; + +public class PluginSqlExecutorUtil { + + private static final Logger LOGGER = LoggerFactory.getLogger(PluginSqlExecutorUtil.class); + + private static HikariDataSource dataSource; + private static final String JDBC_URL = "turbo.plugin.jdbc.url"; + private static final String USERNAME = "turbo.plugin.jdbc.username"; + private static final String PASSWORD = "turbo.plugin.jdbc.password"; + private static final String DRIVER_CLASS_NAME = "turbo.plugin.jdbc.driver"; + private static final String MAX_POOL_SIZE = "turbo.plugin.jdbc.maximumPoolSize"; + + static { + try { + // 初始化 HikariCP 数据源 + Properties properties = new Properties(); + ClassLoader classLoader = PluginSqlExecutorUtil.class.getClassLoader(); + if (classLoader.getResource("plugin.properties") == null) { + // 如果配置文件不存在,则不初始化数据源 + LOGGER.warn("Configuration file 'plugin.properties' not found. Skipping database initialization."); + dataSource = null; + } else { + properties.load(PluginSqlExecutorUtil.class.getClassLoader().getResourceAsStream("plugin.properties")); + HikariConfig config = new HikariConfig(); + config.setJdbcUrl(properties.getProperty(JDBC_URL)); + config.setUsername(properties.getProperty(USERNAME)); + config.setPassword(properties.getProperty(PASSWORD)); + config.setDriverClassName(properties.getProperty(DRIVER_CLASS_NAME)); + config.setMaximumPoolSize(Integer.parseInt(properties.getProperty(MAX_POOL_SIZE, "10"))); + if (validateConfig(config)) { + dataSource = new HikariDataSource(config); + } + } + } catch (IOException e) { + throw new RuntimeException("Failed to load database configuration", e); + } + } + + private static boolean validateConfig(HikariConfig config) { + if (config == null) { + return false; + } + if (StringUtils.isEmpty(config.getJdbcUrl())) { + LOGGER.error("Plugin JDBC URL is empty"); + return false; + } + if (StringUtils.isEmpty(config.getUsername())) { + LOGGER.error("Plugin JDBC username is empty"); + return false; + } + if (StringUtils.isEmpty(config.getPassword())) { + LOGGER.warn("Plugin JDBC password is empty"); + } + if (StringUtils.isEmpty(config.getDriverClassName())) { + LOGGER.error("Plugin JDBC driver class name is empty"); + return false; + } + return true; + } + + /** + * 执行 SQL 文件 + * + * @param sqlFilePath SQL 文件路径 + * @param useTransaction 是否启用事务 + * @param delimiter SQL 语句的分隔符 + * @throws IOException 读取文件异常 + * @throws SQLException 数据库操作异常 + */ + public static void executeSqlFile(String sqlFilePath, boolean useTransaction, String delimiter) throws IOException, SQLException { + if (delimiter == null || delimiter.isEmpty()) { + delimiter = ";"; // 默认使用分号作为分隔符 + } + + try (Connection connection = dataSource.getConnection(); + BufferedReader reader = new BufferedReader(new FileReader(sqlFilePath))) { + + connection.setAutoCommit(!useTransaction); + + try (Statement statement = connection.createStatement()) { + StringBuilder sqlBuilder = new StringBuilder(); + String line; + while ((line = reader.readLine()) != null) { + line = line.trim(); + if (line.isEmpty() || line.startsWith("--") || line.startsWith("#")) { + continue; // 跳过注释行或空行 + } + sqlBuilder.append(line).append(" "); // 防止语句分割时丢失空格 + + // 检测语句是否完成(以分隔符结尾) + if (line.endsWith(delimiter)) { + String sql = sqlBuilder.toString().replace(delimiter, "").trim(); // 移除分隔符 + try { + statement.execute(sql); + LOGGER.info("Executed SQL: " + sql); + } catch 
(SQLException e) { + throw new SQLException("Error executing SQL: " + sql, e); + } + sqlBuilder.setLength(0); // 清空当前 SQL 构造器 + } + } + // 提交事务 + if (useTransaction) { + connection.commit(); + } + } catch (SQLException e) { + if (useTransaction) { + connection.rollback(); + } + connection.close(); + throw e; + } + } + } + + /** + * 关闭数据源 + */ + public static void closeDataSource() { + if (dataSource != null) { + dataSource.close(); + } + } +} diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/util/SPIUtil.java b/engine/src/main/java/com/didiglobal/turbo/engine/util/SPIUtil.java new file mode 100644 index 00000000..904bfe71 --- /dev/null +++ b/engine/src/main/java/com/didiglobal/turbo/engine/util/SPIUtil.java @@ -0,0 +1,34 @@ +package com.didiglobal.turbo.engine.util; + +import com.didiglobal.turbo.engine.plugin.Plugin; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.ServiceLoader; + +public class SPIUtil { + private static final Logger LOGGER = LoggerFactory.getLogger(SPIUtil.class); + + public static T loadService(Class serviceInterface) { + ServiceLoader loader = ServiceLoader.load(serviceInterface); + Iterator iterator = loader.iterator(); + if (iterator.hasNext()) { + LOGGER.info("load service:{}", serviceInterface.getName()); + return iterator.next(); + } + return null; + } + + public static List loadAllServices(Class serviceInterface) { + List list = new ArrayList<>(); + Iterator iterator = ServiceLoader.load(serviceInterface).iterator(); + while (iterator.hasNext()) { + list.add(iterator.next()); + } + LOGGER.info("load all {} services:{}", serviceInterface.getSimpleName(), list); + return list; + } +} diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/util/impl/GroovyExpressionCalculator.java b/engine/src/main/java/com/didiglobal/turbo/engine/util/impl/GroovyExpressionCalculator.java index bbafe930..8ea93d15 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/util/impl/GroovyExpressionCalculator.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/util/impl/GroovyExpressionCalculator.java @@ -8,12 +8,10 @@ import com.didiglobal.turbo.engine.util.GroovyUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Service; import java.text.MessageFormat; import java.util.Map; -@Service public class GroovyExpressionCalculator implements ExpressionCalculator { private static final Logger LOGGER = LoggerFactory.getLogger(GroovyExpressionCalculator.class); diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/validator/ElementValidatorFactory.java b/engine/src/main/java/com/didiglobal/turbo/engine/validator/ElementValidatorFactory.java index e0c33241..5efde283 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/validator/ElementValidatorFactory.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/validator/ElementValidatorFactory.java @@ -5,13 +5,19 @@ import com.didiglobal.turbo.engine.common.FlowElementType; import com.didiglobal.turbo.engine.exception.ProcessException; import com.didiglobal.turbo.engine.model.FlowElement; +import com.didiglobal.turbo.engine.plugin.ElementPlugin; +import com.didiglobal.turbo.engine.plugin.manager.PluginManager; import com.didiglobal.turbo.engine.util.FlowModelUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; +import javax.annotation.PostConstruct; import 
javax.annotation.Resource;
 import java.text.MessageFormat;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 @Component
 public class ElementValidatorFactory {
@@ -36,6 +42,26 @@ public class ElementValidatorFactory {
     @Resource
     private CallActivityValidator callActivityValidator;
 
+    @Resource
+    private PluginManager pluginManager;
+
+    private final Map<Integer, ElementValidator> validatorMap = new HashMap<>(16);
+
+    /**
+     * Aggregates the native validators with the validators contributed by plugins.
+     * A plugin validator can override a native one by registering the same elementType value.
+     */
+    @PostConstruct
+    public void init() {
+        validatorMap.put(FlowElementType.SEQUENCE_FLOW, sequenceFlowValidator);
+        validatorMap.put(FlowElementType.START_EVENT, startEventValidator);
+        validatorMap.put(FlowElementType.END_EVENT, endEventValidator);
+        validatorMap.put(FlowElementType.USER_TASK, userTaskValidator);
+        validatorMap.put(FlowElementType.EXCLUSIVE_GATEWAY, exclusiveGatewayValidator);
+        // keep call-activity validation, which the removed switch below previously handled
+        validatorMap.put(FlowElementType.CALL_ACTIVITY, callActivityValidator);
+        List<ElementPlugin> elementPlugins = pluginManager.getPluginsFor(ElementPlugin.class);
+        elementPlugins.forEach(elementPlugin -> validatorMap.put(elementPlugin.getFlowElementType(), elementPlugin.getElementValidator()));
+    }
+
     public ElementValidator getElementValidator(FlowElement flowElement) throws ProcessException {
         int elementType = flowElement.getType();
         ElementValidator elementValidator = getElementValidator(elementType);
@@ -50,21 +76,6 @@ public ElementValidator getElementValidator(FlowElement flowElement) throws Proc
     }
 
     private ElementValidator getElementValidator(int elementType) {
-        switch (elementType) {
-            case FlowElementType.START_EVENT:
-                return startEventValidator;
-            case FlowElementType.END_EVENT:
-                return endEventValidator;
-            case FlowElementType.SEQUENCE_FLOW:
-                return sequenceFlowValidator;
-            case FlowElementType.USER_TASK:
-                return userTaskValidator;
-            case FlowElementType.EXCLUSIVE_GATEWAY:
-                return exclusiveGatewayValidator;
-            case FlowElementType.CALL_ACTIVITY:
-                return callActivityValidator;
-            default:
-                return null;
-        }
+        return validatorMap.get(elementType);
     }
 }
diff --git a/engine/src/main/resources/turbo.db.create/turbo.mysql.sql b/engine/src/main/resources/turbo.db.create/turbo.mysql.sql
index ee95f522..663265e2 100644
--- a/engine/src/main/resources/turbo.db.create/turbo.mysql.sql
+++ b/engine/src/main/resources/turbo.db.create/turbo.mysql.sql
@@ -90,6 +90,7 @@ CREATE TABLE IF NOT EXISTS `ei_node_instance`
     `instance_data_id` varchar(128) NOT NULL DEFAULT '' COMMENT 'instance data id',
     `flow_deploy_id` varchar(128) NOT NULL DEFAULT '' COMMENT 'flow model deploy id',
     `node_key` varchar(64) NOT NULL DEFAULT '' COMMENT 'unique node key',
+    `node_type` int NOT NULL COMMENT 'node type',
     `source_node_key` varchar(64) NOT NULL DEFAULT '' COMMENT 'unique key of the previous flow node',
     `tenant_id` varchar(16) NOT NULL DEFAULT '' COMMENT 'tenant id',
     `status` tinyint(4) NOT NULL DEFAULT '0' COMMENT 'status (1. success 2. processing 3. failed 4. revoked)',
diff --git a/parallel-plugin/pom.xml b/parallel-plugin/pom.xml
new file mode 100644
index 00000000..0c9e173b
--- /dev/null
+++ b/parallel-plugin/pom.xml
@@ -0,0 +1,58 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>com.didiglobal.turbo</groupId>
+        <artifactId>turbo</artifactId>
+        <version>1.2.0</version>
+    </parent>
+
+    <artifactId>parallel-plugin</artifactId>
+    <version>1.0.0</version>
+    <packaging>jar</packaging>
+
+    <properties>
+        <maven.compiler.source>8</maven.compiler.source>
+        <maven.compiler.target>8</maven.compiler.target>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <turbo.engine.version>1.2.0</turbo.engine.version>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.didiglobal.turbo</groupId>
+            <artifactId>engine</artifactId>
+            <version>${turbo.engine.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-aop</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.baomidou</groupId>
+            <artifactId>dynamic-datasource-spring-boot-starter</artifactId>
+        </dependency>
+
+        <!-- test -->
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-test</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>mysql</groupId>
+            <artifactId>mysql-connector-java</artifactId>
+            <scope>runtime</scope>
+        </dependency>
+    </dependencies>
+</project>
\ No newline at end of file
diff --git
a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/InclusiveGatewayElementPlugin.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/InclusiveGatewayElementPlugin.java new file mode 100644 index 00000000..d98be3c8 --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/InclusiveGatewayElementPlugin.java @@ -0,0 +1,63 @@ +package com.didiglobal.turbo.plugin; + +import com.didiglobal.turbo.engine.executor.ElementExecutor; +import com.didiglobal.turbo.engine.plugin.ElementPlugin; +import com.didiglobal.turbo.engine.plugin.manager.BasePlugin; +import com.didiglobal.turbo.engine.util.PluginPropertiesUtil; +import com.didiglobal.turbo.engine.util.PluginSqlExecutorUtil; +import com.didiglobal.turbo.engine.validator.ElementValidator; +import com.didiglobal.turbo.plugin.common.ExtendFlowElementType; +import com.didiglobal.turbo.plugin.executor.InclusiveGatewayExecutor; +import com.didiglobal.turbo.plugin.validator.InclusiveGatewayValidator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Objects; + +public class InclusiveGatewayElementPlugin extends BasePlugin implements ElementPlugin { + private static final Logger LOGGER = LoggerFactory.getLogger(InclusiveGatewayElementPlugin.class); + private static final String PLUGIN_NAME = "InclusiveGatewayElementPlugin"; + public static Integer elementType = ExtendFlowElementType.INCLUSIVE_GATEWAY; + public InclusiveGatewayElementPlugin() { + String elementType = PluginPropertiesUtil.getPropertyValue(ELEMENT_TYPE_PREFIX + PLUGIN_NAME); + if (elementType != null) { + InclusiveGatewayElementPlugin.elementType = Integer.valueOf(elementType); + } + } + @Override + public ElementExecutor getElementExecutor() { + return beanFactory.getBean(InclusiveGatewayExecutor.class); + } + + @Override + public ElementValidator getElementValidator() { + return beanFactory.getBean(InclusiveGatewayValidator.class); + } + + @Override + public Integer getFlowElementType() { + return elementType; + } + + @Override + public String getName() { + return PLUGIN_NAME; + } + + @Override + public Boolean support() { + return PluginPropertiesUtil.getPropertyValue(PLUGIN_SUPPORT_PREFIX + PLUGIN_NAME, "true").equals("true"); + } + + @Override + public Boolean init() { + String sqlFilePath = PluginPropertiesUtil.getPropertyValue(PLUGIN_INIT_SQL_FILE_PREFIX + PLUGIN_NAME); + try { + PluginSqlExecutorUtil.executeSqlFile(Objects.requireNonNull(getClass().getClassLoader().getResource(sqlFilePath)).getPath(), true, null); + } catch (Exception e) { + LOGGER.warn("init plugin failed", e); + return false; + } + return true; + } +} diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/ParallelGatewayElementPlugin.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/ParallelGatewayElementPlugin.java new file mode 100644 index 00000000..1bf14e1d --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/ParallelGatewayElementPlugin.java @@ -0,0 +1,64 @@ +package com.didiglobal.turbo.plugin; + +import com.didiglobal.turbo.engine.executor.ElementExecutor; +import com.didiglobal.turbo.engine.plugin.ElementPlugin; +import com.didiglobal.turbo.engine.plugin.manager.BasePlugin; +import com.didiglobal.turbo.engine.util.PluginSqlExecutorUtil; +import com.didiglobal.turbo.engine.validator.ElementValidator; +import com.didiglobal.turbo.plugin.common.ExtendFlowElementType; +import com.didiglobal.turbo.plugin.executor.ParallelGatewayExecutor; +import 
com.didiglobal.turbo.engine.util.PluginPropertiesUtil; +import com.didiglobal.turbo.plugin.validator.ParallelGatewayValidator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Objects; + +public class ParallelGatewayElementPlugin extends BasePlugin implements ElementPlugin { + + private static final Logger LOGGER = LoggerFactory.getLogger(ParallelGatewayElementPlugin.class); + private static final String PLUGIN_NAME = "ParallelGatewayElementPlugin"; + public static Integer elementType = ExtendFlowElementType.PARALLEL_GATEWAY; + public ParallelGatewayElementPlugin() { + String elementType = PluginPropertiesUtil.getPropertyValue(ELEMENT_TYPE_PREFIX + PLUGIN_NAME); + if (elementType != null) { + ParallelGatewayElementPlugin.elementType = Integer.valueOf(elementType); + } + } + @Override + public ElementExecutor getElementExecutor() { + return beanFactory.getBean(ParallelGatewayExecutor.class); + } + + @Override + public ElementValidator getElementValidator() { + return beanFactory.getBean(ParallelGatewayValidator.class); + } + + @Override + public Integer getFlowElementType() { + return elementType; + } + + @Override + public String getName() { + return PLUGIN_NAME; + } + + @Override + public Boolean support() { + return PluginPropertiesUtil.getPropertyValue(PLUGIN_SUPPORT_PREFIX + PLUGIN_NAME, "true").equals("true"); + } + + @Override + public Boolean init() { + String sqlFilePath = PluginPropertiesUtil.getPropertyValue(PLUGIN_INIT_SQL_FILE_PREFIX + PLUGIN_NAME); + try { + PluginSqlExecutorUtil.executeSqlFile(Objects.requireNonNull(getClass().getClassLoader().getResource(sqlFilePath)).getPath(), true, null); + } catch (Exception e) { + LOGGER.warn("init plugin failed", e); + return false; + } + return true; + } +} diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/Constants.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/Constants.java new file mode 100644 index 00000000..ec504fd1 --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/Constants.java @@ -0,0 +1,12 @@ +package com.didiglobal.turbo.plugin.common; + +public class Constants { + + public static final class ELEMENT_PROPERTIES { + public static final String FORK_JOIN_MATCH = "forkJoinMatch"; + public static final String FORK = "fork"; + public static final String JOIN = "join"; + public static final String BRANCH_MERGE = "branchMerge"; + public static final String DATA_MERGE = "dataMerge"; + } +} diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/ExtendFlowElementType.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/ExtendFlowElementType.java new file mode 100644 index 00000000..d2df7c47 --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/ExtendFlowElementType.java @@ -0,0 +1,8 @@ +package com.didiglobal.turbo.plugin.common; + +import com.didiglobal.turbo.engine.common.FlowElementType; + +public class ExtendFlowElementType extends FlowElementType { + public static final int PARALLEL_GATEWAY = 9; + public static final int INCLUSIVE_GATEWAY = 10; +} diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/MergeStrategy.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/MergeStrategy.java new file mode 100644 index 00000000..5c6afa8f --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/MergeStrategy.java @@ -0,0 +1,34 @@ +package com.didiglobal.turbo.plugin.common; + 
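// The MergeStrategy constants defined next are what a fork/join gateway references from
// its element properties (the branchMerge and dataMerge keys in Constants.ELEMENT_PROPERTIES).
// A rough sketch of configuring a join gateway in a flow model follows; it assumes
// FlowElement exposes a Map<String, Object> properties field with a setProperties method,
// which this patch does not show, so treat the wiring as illustrative only.
import com.didiglobal.turbo.engine.model.FlowElement;
import com.didiglobal.turbo.plugin.common.Constants;
import com.didiglobal.turbo.plugin.common.MergeStrategy;

import java.util.HashMap;
import java.util.Map;

public class JoinGatewaySketch {
    public static FlowElement buildJoinGateway() {
        FlowElement joinGateway = new FlowElement();
        Map<String, Object> properties = new HashMap<>();
        // wait until every incoming branch has arrived before passing the join
        properties.put(Constants.ELEMENT_PROPERTIES.BRANCH_MERGE, MergeStrategy.BRANCH_MERGE.JOIN_ALL);
        // merge the instance data of all branches back into the main context
        properties.put(Constants.ELEMENT_PROPERTIES.DATA_MERGE, MergeStrategy.DATA_MERGE.ALL);
        joinGateway.setProperties(properties); // assumed setter
        return joinGateway;
    }
}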
+public class MergeStrategy { + + /** + * 分支汇聚策略 + */ + public static final class BRANCH_MERGE { + /** + * 全部到达后进行汇聚 + */ + public static final String JOIN_ALL = "JoinAll"; + + /** + * 任意先到达的 + */ + public static final String ANY_ONE = "AnyOne"; + + /** + * 自定义脚本 + */ + public static final String CUSTOM = "Custom"; + } + + /** + * 流程汇聚策略 + */ + public static final class DATA_MERGE { + public static final String ALL = "All"; + public static final String NONE = "None"; + public static final String CUSTOM = "Custom"; + } + +} diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/ParallelErrorEnum.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/ParallelErrorEnum.java new file mode 100644 index 00000000..57f94ad4 --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/ParallelErrorEnum.java @@ -0,0 +1,49 @@ +package com.didiglobal.turbo.plugin.common; + +public enum ParallelErrorEnum { + // 兼容ErrorEnum中返回码1000-2000 为成功 + WAITING_SUSPEND(1601, "Join gateway waiting suspend"), + PARALLEL_EXECUTE_TIMEOUT(6002, "Parallel execute timeout, please obtain the latest process execute status through query"), + PARALLEL_EXECUTE_REENTRY(6003, "Parallel execute reentry"), + REQUIRED_ELEMENT_ATTRIBUTES(6004, "required element attributes"), + FORK_AND_JOIN_NOT_MATCH(6005, "Fork and join not match"), + UNSUPPORTED_DATA_MERGE_STRATEGY(6006, "Unsupported data merge strategy"), + UNSUPPORTED_BRANCH_MERGE_STRATEGY(6007, "Unsupported branch merge strategy"), + BRANCH_MERGE_STRATEGY_ERROR(6008, "Branch merge strategy error"), + NOT_FOUND_FORK_INSTANCE(6009, "Not found fork instance"), + NOT_SUPPORT_ROLLBACK(6010, "Parallel and inclusive gateways are not supported for rollback"); + + + ParallelErrorEnum(int errNo, String errMsg) { + this.errNo = errNo; + this.errMsg = errMsg; + } + + private int errNo; + private String errMsg; + + public int getErrNo() { + return errNo; + } + + public void setErrNo(int errNo) { + this.errNo = errNo; + } + + public String getErrMsg() { + return errMsg; + } + + public void setErrMsg(String errMsg) { + this.errMsg = errMsg; + } + + public static com.didiglobal.turbo.engine.common.ErrorEnum getErrorEnum(int errNo) { + for (com.didiglobal.turbo.engine.common.ErrorEnum e : com.didiglobal.turbo.engine.common.ErrorEnum.values()) { + if (e.getErrNo() == errNo) { + return e; + } + } + return null; + } +} diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/ParallelNodeInstanceStatus.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/ParallelNodeInstanceStatus.java new file mode 100644 index 00000000..88c6abed --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/ParallelNodeInstanceStatus.java @@ -0,0 +1,12 @@ +package com.didiglobal.turbo.plugin.common; + +public class ParallelNodeInstanceStatus { + /** + * 并行类网关,等待汇聚状态 + */ + public static final int WAITING = 5; + /** + * 关闭状态 + */ + public static final int CLOSED = 6; +} diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/ParallelRuntimeContext.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/ParallelRuntimeContext.java new file mode 100644 index 00000000..bdfbac77 --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/common/ParallelRuntimeContext.java @@ -0,0 +1,30 @@ +package com.didiglobal.turbo.plugin.common; + +import com.didiglobal.turbo.engine.common.ExtendRuntimeContext; +import 
com.google.common.base.MoreObjects; + +public class ParallelRuntimeContext extends ExtendRuntimeContext { + + /** + * 分支执行ID + */ + private String executeId; + public String getExecuteId() { + return executeId; + } + + public void setExecuteId(String executeId) { + this.executeId = executeId; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("executeId", executeId) + .add("branchExecuteDataMap", getBranchExecuteDataMap()) + .add("branchSuspendNodeInstance", getBranchSuspendNodeInstance()) + .add("currentNodeModel", getCurrentNodeModel()) + .add("exception", getException()) + .toString(); + } +} diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/config/ParallelPluginConfig.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/config/ParallelPluginConfig.java new file mode 100644 index 00000000..3bd778b1 --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/config/ParallelPluginConfig.java @@ -0,0 +1,26 @@ +package com.didiglobal.turbo.plugin.config; + +import com.alibaba.druid.spring.boot.autoconfigure.DruidDataSourceAutoConfigure; +import com.didiglobal.turbo.engine.common.EntityPOEnum; +import com.didiglobal.turbo.engine.plugin.CustomOperationHandlerRegistry; +import com.didiglobal.turbo.plugin.dao.ParallelNodeInstanceHandler; +import com.didiglobal.turbo.plugin.dao.ParallelNodeInstanceLogHandler; +import org.mybatis.spring.annotation.MapperScan; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Configuration; + +import javax.annotation.PostConstruct; + +@Configuration +@ComponentScan("com.didiglobal.turbo.plugin") +@MapperScan("com.didiglobal.turbo.plugin.dao") +@EnableAutoConfiguration(exclude = {DruidDataSourceAutoConfigure.class}) +public class ParallelPluginConfig { + + @PostConstruct + public void init() { + CustomOperationHandlerRegistry.registerHandler(EntityPOEnum.NODE_INSTANCE, new ParallelNodeInstanceHandler()); + CustomOperationHandlerRegistry.registerHandler(EntityPOEnum.NODE_INSTANCE_LOG, new ParallelNodeInstanceLogHandler()); + } +} diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/config/ThreadPoolConfig.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/config/ThreadPoolConfig.java new file mode 100644 index 00000000..766ae4fc --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/config/ThreadPoolConfig.java @@ -0,0 +1,23 @@ +package com.didiglobal.turbo.plugin.config; + +import com.didiglobal.turbo.engine.util.PluginPropertiesUtil; +import com.didiglobal.turbo.plugin.executor.AsynTaskExecutor; +import org.apache.commons.lang3.StringUtils; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Configuration +public class ThreadPoolConfig { + + private static final String TIMEOUT_CONFIG = "turbo.plugin.parallelGateway.threadPool.timeout"; + + @Bean + public AsynTaskExecutor executorService(){ + String timeout = PluginPropertiesUtil.getPropertyValue(TIMEOUT_CONFIG); + AsynTaskExecutor taskExecutor = new AsynTaskExecutor(); + if (!StringUtils.isEmpty(timeout)) { + taskExecutor.setTimeout(Long.parseLong(timeout)); + } + return taskExecutor; + } +} diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/dao/ParallelNodeInstanceHandler.java 
b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/dao/ParallelNodeInstanceHandler.java new file mode 100644 index 00000000..481dd37a --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/dao/ParallelNodeInstanceHandler.java @@ -0,0 +1,125 @@ +package com.didiglobal.turbo.plugin.dao; + +import com.didiglobal.turbo.engine.entity.NodeInstancePO; +import com.didiglobal.turbo.engine.plugin.CustomOperationHandler; +import com.didiglobal.turbo.engine.util.MapToObjectConverter; +import com.didiglobal.turbo.plugin.dao.mapper.ParallelNodeInstanceMapper; +import com.didiglobal.turbo.plugin.entity.ParallelNodeInstancePO; +import org.apache.ibatis.mapping.MappedStatement; +import org.apache.ibatis.mapping.SqlCommandType; +import org.apache.ibatis.session.SqlSession; +import org.apache.ibatis.session.SqlSessionFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +@SuppressWarnings("unchecked") +public class ParallelNodeInstanceHandler implements CustomOperationHandler { + private static final Logger LOGGER = LoggerFactory.getLogger(ParallelNodeInstanceHandler.class); + + @Override + public void handle(SqlCommandType commandType, MappedStatement mappedStatement, Object parameterObject, Object originalResult, SqlSessionFactory sqlSessionFactory) { + SqlSession sqlSession = sqlSessionFactory.openSession(); + try { + ParallelNodeInstanceMapper mapper = sqlSession.getMapper(ParallelNodeInstanceMapper.class); + switch (commandType) { + case INSERT: + handleInsert(parameterObject, mapper); + break; + case UPDATE: + handleUpdate(parameterObject, mapper); + break; + case DELETE: + handleDelete(parameterObject, mapper); + break; + case SELECT: + handleSelect(originalResult, mapper); + break; + default: + LOGGER.warn("Unhandled command type: {}", commandType); + break; + } + } catch (Exception e) { + LOGGER.error("Exception occurred during handling. 
CommandType={} | ParameterObject={} | OriginalResult={}", + commandType, parameterObject, originalResult, e); + } finally { + sqlSession.close(); + } + } + + private void handleInsert(Object parameterObject, ParallelNodeInstanceMapper mapper) { + if (parameterObject instanceof NodeInstancePO) { + ParallelNodeInstancePO parallelNodeInstancePO = convertToParallelLog((NodeInstancePO) parameterObject); + mapper.insert(parallelNodeInstancePO); + } else if (parameterObject instanceof Map) { + List list = (List) ((Map) parameterObject).get("list"); + if (list != null) { + List parallelLogList = list.stream() + .map(this::convertToParallelLogSafe) + .collect(Collectors.toList()); + mapper.insertList(parallelLogList); + } + } + } + + private void handleUpdate(Object parameterObject, ParallelNodeInstanceMapper mapper) { + if (parameterObject instanceof NodeInstancePO) { + ParallelNodeInstancePO parallelNodeInstancePO = convertToParallelLog((NodeInstancePO) parameterObject); + if (null == parallelNodeInstancePO.getExecuteId()) { + return; + } + mapper.updateById(parallelNodeInstancePO); + } + } + + private void handleDelete(Object parameterObject, ParallelNodeInstanceMapper mapper) { + if (parameterObject instanceof NodeInstancePO) { + ParallelNodeInstancePO parallelNodeInstancePO = convertToParallelLog((NodeInstancePO) parameterObject); + if (null != parallelNodeInstancePO.getId()) + mapper.deleteById(parallelNodeInstancePO.getId()); + } + } + + private void handleSelect(Object originalResult, ParallelNodeInstanceMapper mapper) { + if (originalResult instanceof List) { + List nodeInstancePOList = (List) originalResult; + nodeInstancePOList.forEach(nodeInstancePO -> { + try { + ParallelNodeInstancePO parallelNodeInstancePO = mapper.selectById(nodeInstancePO.getId()); + Map properties = MapToObjectConverter.convertObjectToMap(parallelNodeInstancePO); + nodeInstancePO.getProperties().putAll(properties); + } catch (IllegalAccessException e) { + LOGGER.error("Error converting ParallelNodeInstancePO to map. ID={}", nodeInstancePO.getId(), e); + } + }); + } + } + + private ParallelNodeInstancePO convertToParallelLog(NodeInstancePO nodeInstancePO) { + try { + ParallelNodeInstancePO parallelNodeInstancePO = MapToObjectConverter.convertMapToObject(nodeInstancePO.getProperties(), ParallelNodeInstancePO.class); + parallelNodeInstancePO.setId(nodeInstancePO.getId()); + return parallelNodeInstancePO; + } catch (IllegalAccessException | InstantiationException e) { + LOGGER.error("Error converting NodeInstancePO to ParallelNodeInstancePO. 
ID={}", nodeInstancePO.getId(), e); + throw new RuntimeException(e); + } + } + + private ParallelNodeInstancePO convertToParallelLogSafe(Object obj) { + try { + if (obj instanceof NodeInstancePO) { + return convertToParallelLog((NodeInstancePO) obj); + } else { + LOGGER.warn("Unexpected object type: {}", obj.getClass().getName()); + return null; + } + } catch (Exception e) { + LOGGER.error("Error converting object to ParallelNodeInstancePO", e); + throw new RuntimeException(e); + } + } +} diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/dao/ParallelNodeInstanceLogHandler.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/dao/ParallelNodeInstanceLogHandler.java new file mode 100644 index 00000000..597b2988 --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/dao/ParallelNodeInstanceLogHandler.java @@ -0,0 +1,122 @@ +package com.didiglobal.turbo.plugin.dao; + +import com.didiglobal.turbo.engine.entity.NodeInstanceLogPO; +import com.didiglobal.turbo.engine.plugin.CustomOperationHandler; +import com.didiglobal.turbo.engine.util.MapToObjectConverter; +import com.didiglobal.turbo.plugin.dao.mapper.ParallelNodeInstanceLogMapper; +import com.didiglobal.turbo.plugin.entity.ParallelNodeInstanceLogPO; +import org.apache.ibatis.mapping.MappedStatement; +import org.apache.ibatis.mapping.SqlCommandType; +import org.apache.ibatis.session.SqlSession; +import org.apache.ibatis.session.SqlSessionFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +@SuppressWarnings("unchecked") +public class ParallelNodeInstanceLogHandler implements CustomOperationHandler { + private static final Logger LOGGER = LoggerFactory.getLogger(ParallelNodeInstanceLogHandler.class); + + @Override + public void handle(SqlCommandType commandType, MappedStatement mappedStatement, Object parameterObject, Object originalResult, SqlSessionFactory sqlSessionFactory) { + SqlSession sqlSession = sqlSessionFactory.openSession(); + try { + ParallelNodeInstanceLogMapper mapper = sqlSession.getMapper(ParallelNodeInstanceLogMapper.class); + switch (commandType) { + case INSERT: + handleInsert(parameterObject, mapper); + break; + case UPDATE: + handleUpdate(parameterObject, mapper); + break; + case DELETE: + handleDelete(parameterObject, mapper); + break; + case SELECT: + handleSelect(originalResult, mapper); + break; + default: + LOGGER.warn("Unhandled SqlCommandType: {}", commandType); + break; + } + } catch (Exception e) { + LOGGER.error("Exception in handling command.||commandType={}||parameterObject={}||originalResult={}", commandType, parameterObject, originalResult, e); + } finally { + sqlSession.close(); + } + } + + private void handleInsert(Object parameterObject, ParallelNodeInstanceLogMapper mapper) throws IllegalAccessException, InstantiationException { + if (parameterObject instanceof NodeInstanceLogPO) { + ParallelNodeInstanceLogPO parallelNodeInstanceLogPO = convertToParallelLog((NodeInstanceLogPO) parameterObject); + mapper.insert(parallelNodeInstanceLogPO); + } else if (parameterObject instanceof Map) { + List list = (List) ((Map) parameterObject).get("list"); + if (list != null) { + List parallelLogList = list.stream() + .map(this::convertToParallelLogSafe) + .collect(Collectors.toList()); + mapper.insertList(parallelLogList); + } + } + } + + private void handleUpdate(Object parameterObject, ParallelNodeInstanceLogMapper mapper) throws IllegalAccessException, 
+    private void handleInsert(Object parameterObject, ParallelNodeInstanceLogMapper mapper) throws IllegalAccessException, InstantiationException {
+        if (parameterObject instanceof NodeInstanceLogPO) {
+            ParallelNodeInstanceLogPO parallelNodeInstanceLogPO = convertToParallelLog((NodeInstanceLogPO) parameterObject);
+            mapper.insert(parallelNodeInstanceLogPO);
+        } else if (parameterObject instanceof Map) {
+            List<NodeInstanceLogPO> list = (List<NodeInstanceLogPO>) ((Map<String, Object>) parameterObject).get("list");
+            if (list != null) {
+                List<ParallelNodeInstanceLogPO> parallelLogList = list.stream()
+                        .map(this::convertToParallelLogSafe)
+                        .collect(Collectors.toList());
+                mapper.insertList(parallelLogList);
+            }
+        }
+    }
+
+    private void handleUpdate(Object parameterObject, ParallelNodeInstanceLogMapper mapper) throws IllegalAccessException, InstantiationException {
+        if (parameterObject instanceof NodeInstanceLogPO) {
+            ParallelNodeInstanceLogPO parallelNodeInstanceLogPO = convertToParallelLog((NodeInstanceLogPO) parameterObject);
+            if (null == parallelNodeInstanceLogPO.getExecuteId()) {
+                return;
+            }
+            mapper.updateById(parallelNodeInstanceLogPO);
+        }
+    }
+
+    private void handleDelete(Object parameterObject, ParallelNodeInstanceLogMapper mapper) throws IllegalAccessException, InstantiationException {
+        if (parameterObject instanceof NodeInstanceLogPO) {
+            ParallelNodeInstanceLogPO parallelNodeInstanceLogPO = convertToParallelLog((NodeInstanceLogPO) parameterObject);
+            // delete the extension row only when an id is actually present
+            if (null != parallelNodeInstanceLogPO.getId()) {
+                mapper.deleteById(parallelNodeInstanceLogPO.getId());
+            }
+        }
+    }
+
+    private void handleSelect(Object originalResult, ParallelNodeInstanceLogMapper mapper) {
+        if (originalResult instanceof List) {
+            List<NodeInstanceLogPO> nodeInstanceLogList = (List<NodeInstanceLogPO>) originalResult;
+            nodeInstanceLogList.forEach(nodeInstanceLogPO -> {
+                try {
+                    ParallelNodeInstanceLogPO parallelNodeInstanceLogPO = mapper.selectById(nodeInstanceLogPO.getId());
+                    if (parallelNodeInstanceLogPO != null) {
+                        Map<String, Object> properties = MapToObjectConverter.convertObjectToMap(parallelNodeInstanceLogPO);
+                        nodeInstanceLogPO.getProperties().putAll(properties);
+                    }
+                } catch (IllegalAccessException e) {
+                    LOGGER.error("Error converting ParallelNodeInstanceLogPO to map for ID: {}", nodeInstanceLogPO.getId(), e);
+                    throw new RuntimeException(e);
+                }
+            });
+        }
+    }
+
+    private ParallelNodeInstanceLogPO convertToParallelLog(NodeInstanceLogPO nodeInstanceLogPO) throws IllegalAccessException, InstantiationException {
+        ParallelNodeInstanceLogPO parallelNodeInstanceLogPO = MapToObjectConverter.convertMapToObject(nodeInstanceLogPO.getProperties(), ParallelNodeInstanceLogPO.class);
+        parallelNodeInstanceLogPO.setId(nodeInstanceLogPO.getId());
+        return parallelNodeInstanceLogPO;
+    }
+
+    private ParallelNodeInstanceLogPO convertToParallelLogSafe(Object obj) {
+        try {
+            if (obj instanceof NodeInstanceLogPO) {
+                return convertToParallelLog((NodeInstanceLogPO) obj);
+            } else {
+                LOGGER.warn("Unexpected object type: {}", obj.getClass().getName());
+                return null;
+            }
+        } catch (Exception e) {
+            LOGGER.error("Error converting object to ParallelNodeInstanceLogPO", e);
+            throw new RuntimeException(e);
+        }
+    }
+}
\ No newline at end of file
diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/dao/mapper/ParallelNodeInstanceLogMapper.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/dao/mapper/ParallelNodeInstanceLogMapper.java
new file mode 100644
index 00000000..f4cc3339
--- /dev/null
+++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/dao/mapper/ParallelNodeInstanceLogMapper.java
@@ -0,0 +1,20 @@
+package com.didiglobal.turbo.plugin.dao.mapper;
+
+import com.baomidou.mybatisplus.core.mapper.BaseMapper;
+import com.didiglobal.turbo.plugin.entity.ParallelNodeInstanceLogPO;
+import org.apache.ibatis.annotations.Insert;
+import org.apache.ibatis.annotations.Param;
+
+import java.util.List;
+
+public interface ParallelNodeInstanceLogMapper extends BaseMapper<ParallelNodeInstanceLogPO> {
+    // NOTE: the dynamic SQL below assumes ei_node_instance_log_parallel carries exactly the
+    // two mapped columns (id, execute_id); see src/main/resources/sql/parallelGateway.sql.
+    @Insert({
+        "<script>",
+        "INSERT INTO ei_node_instance_log_parallel (id, execute_id) VALUES",
+        "<foreach collection='parallelNodeInstanceLogPOS' item='item' separator=','>",
+        "(#{item.id}, #{item.executeId})",
+        "</foreach>",
+        "</script>"
+    })
+    boolean insertList(@Param("parallelNodeInstanceLogPOS") List<ParallelNodeInstanceLogPO> parallelNodeInstanceLogPOS);
+}
diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/dao/mapper/ParallelNodeInstanceMapper.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/dao/mapper/ParallelNodeInstanceMapper.java
new file mode 100644
index 00000000..8f827ea8
--- /dev/null
+++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/dao/mapper/ParallelNodeInstanceMapper.java
@@ -0,0 +1,20 @@
+package com.didiglobal.turbo.plugin.dao.mapper;
+
+import com.baomidou.mybatisplus.core.mapper.BaseMapper;
+import com.didiglobal.turbo.plugin.entity.ParallelNodeInstancePO;
+import org.apache.ibatis.annotations.Insert;
+import org.apache.ibatis.annotations.Param;
+
+import java.util.List;
+
+public interface ParallelNodeInstanceMapper extends BaseMapper<ParallelNodeInstancePO> {
+    // NOTE: assumes the same two-column layout (id, execute_id) for ei_node_instance_parallel.
+    @Insert({
+        "<script>",
+        "INSERT INTO ei_node_instance_parallel (id, execute_id) VALUES",
+        "<foreach collection='insertParallelNodeInstanceList' item='item' separator=','>",
+        "(#{item.id}, #{item.executeId})",
+        "</foreach>",
+        "</script>"
+    })
+    boolean insertList(@Param("insertParallelNodeInstanceList") List<ParallelNodeInstancePO> insertParallelNodeInstanceList);
+}
diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/entity/ParallelNodeInstanceLogPO.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/entity/ParallelNodeInstanceLogPO.java
new file mode 100644
index 00000000..80e883fc
--- /dev/null
+++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/entity/ParallelNodeInstanceLogPO.java
@@ -0,0 +1,33 @@
+package com.didiglobal.turbo.plugin.entity;
+
+import com.baomidou.mybatisplus.annotation.TableName;
+
+@TableName("ei_node_instance_log_parallel")
+public class ParallelNodeInstanceLogPO {
+    private Long id;
+    private String executeId;
+
+    public Long getId() {
+        return id;
+    }
+
+    public void setId(Long id) {
+        this.id = id;
+    }
+
+    public String getExecuteId() {
+        return executeId;
+    }
+
+    public void setExecuteId(String executeId) {
+        this.executeId = executeId;
+    }
+
+    @Override
+    public String toString() {
+        return "ParallelNodeInstanceLogPO{" +
+                "id=" + id +
+                ", executeId='" + executeId + '\'' +
+                '}';
+    }
+}
diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/entity/ParallelNodeInstancePO.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/entity/ParallelNodeInstancePO.java
new file mode 100644
index 00000000..5da896e0
--- /dev/null
+++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/entity/ParallelNodeInstancePO.java
@@ -0,0 +1,32 @@
+package com.didiglobal.turbo.plugin.entity;
+
+import com.baomidou.mybatisplus.annotation.TableName;
+@TableName("ei_node_instance_parallel")
+public class ParallelNodeInstancePO {
+    private Long id;
+    private String executeId;
+
+    public Long getId() {
+        return id;
+    }
+
+    public void setId(Long id) {
+        this.id = id;
+    }
+
+    public String getExecuteId() {
+        return executeId;
+    }
+
+    public void setExecuteId(String executeId) {
+        this.executeId = executeId;
+    }
+
+    @Override
+    public String toString() {
+        return "ParallelNodeInstancePO{" +
+                "id=" + id +
+                ", executeId='" + executeId + '\'' +
+                '}';
+    }
+}
diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/AbstractGatewayExecutor.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/AbstractGatewayExecutor.java
new file mode 100644
index 00000000..a7da2f0e
--- /dev/null
+++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/AbstractGatewayExecutor.java
@@ -0,0 +1,532 @@
+package com.didiglobal.turbo.plugin.executor;
+
+import com.didiglobal.turbo.engine.common.ErrorEnum;
+import com.didiglobal.turbo.engine.common.ExtendRuntimeContext;
+import com.didiglobal.turbo.engine.common.FlowInstanceStatus;
+import com.didiglobal.turbo.engine.common.ProcessStatus;
+import com.didiglobal.turbo.engine.entity.NodeInstanceLogPO;
+import com.didiglobal.turbo.engine.exception.SuspendException;
+import com.didiglobal.turbo.plugin.InclusiveGatewayElementPlugin;
+import com.didiglobal.turbo.plugin.ParallelGatewayElementPlugin;
+import
com.didiglobal.turbo.plugin.common.Constants;
+import com.didiglobal.turbo.plugin.common.ParallelErrorEnum;
+import com.didiglobal.turbo.plugin.common.ParallelNodeInstanceStatus;
+import com.didiglobal.turbo.plugin.common.MergeStrategy;
+import com.didiglobal.turbo.plugin.common.ParallelRuntimeContext;
+import com.didiglobal.turbo.engine.bo.NodeInstanceBO;
+import com.didiglobal.turbo.engine.common.InstanceDataType;
+import com.didiglobal.turbo.engine.common.NodeInstanceStatus;
+import com.didiglobal.turbo.engine.common.NodeInstanceType;
+import com.didiglobal.turbo.engine.common.RuntimeContext;
+import com.didiglobal.turbo.engine.entity.InstanceDataPO;
+import com.didiglobal.turbo.engine.entity.NodeInstancePO;
+import com.didiglobal.turbo.engine.exception.ProcessException;
+import com.didiglobal.turbo.engine.executor.ElementExecutor;
+import com.didiglobal.turbo.engine.executor.RuntimeExecutor;
+import com.didiglobal.turbo.engine.model.FlowElement;
+import com.didiglobal.turbo.engine.model.InstanceData;
+import com.didiglobal.turbo.engine.util.FlowModelUtil;
+import com.didiglobal.turbo.engine.util.InstanceDataUtil;
+import com.didiglobal.turbo.plugin.service.ParallelNodeInstanceService;
+import com.didiglobal.turbo.plugin.util.ExecutorUtil;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.lang3.SerializationUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.tuple.Pair;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.BeanUtils;
+
+import javax.annotation.Resource;
+
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.concurrent.CompletionService;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorCompletionService;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Collectors;
+
+@SuppressWarnings("unchecked")
+public abstract class AbstractGatewayExecutor extends ElementExecutor {
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(AbstractGatewayExecutor.class);
+
+    @Resource
+    protected AsynTaskExecutor asynTaskExecutor;
+
+    @Resource
+    private MergeStrategyFactory mergeStrategyFactory;
+
+    @Resource
+    protected ParallelNodeInstanceService parallelNodeInstanceService;
+
+    /**
+     * When parallel gateways and inclusive gateways are used as branch nodes,
+     * the processing of exits is different. Here, the actual number of exits needs to be calculated:
+     * a parallel gateway forks every outgoing, while an inclusive gateway forks only the outgoings
+     * whose conditions hold (see the two concrete executors).
+     */
+    protected abstract int calculateOutgoingSize(FlowElement currentNodeModel, Map<String, FlowElement> flowElementMap, Map<String, InstanceData> instanceDataMap);
+
+    protected List<RuntimeExecutor> getExecuteExecutors(RuntimeContext runtimeContext) {
+        Pair<String, String> pair = ExecutorUtil.getForkAndJoinNodeKey(runtimeContext.getCurrentNodeModel());
+        String nodeKey = runtimeContext.getCurrentNodeInstance().getNodeKey();
+        runtimeContext.getExtendProperties().put("parallelRuntimeContextList", new ArrayList<ParallelRuntimeContext>());
+        if (ExecutorUtil.isFork(nodeKey, pair)) {
+            List<FlowElement> nextNodes = calculateNextNodes(runtimeContext.getCurrentNodeModel(),
+                    runtimeContext.getFlowElementMap(), runtimeContext.getInstanceDataMap());
+            List<RuntimeExecutor> runtimeExecutors = new ArrayList<>();
+            for (FlowElement nextNode : nextNodes) {
+                ParallelRuntimeContext context = new ParallelRuntimeContext();
+                context.setCurrentNodeModel(nextNode);
+                List<ParallelRuntimeContext> parallelRuntimeContextList = (List<ParallelRuntimeContext>) runtimeContext.getExtendProperties().getOrDefault("parallelRuntimeContextList", new ArrayList<ParallelRuntimeContext>());
+                parallelRuntimeContextList.add(context);
+                runtimeContext.getExtendProperties().put("parallelRuntimeContextList", parallelRuntimeContextList);
+                ElementExecutor elementExecutor = executorFactory.getElementExecutor(nextNode);
+                runtimeExecutors.add(elementExecutor);
+            }
+            if (runtimeExecutors.size() <= 1) {
+                // a single hit outgoing degenerates to plain sequential execution
+                runtimeContext.setCurrentNodeModel(nextNodes.get(0));
+            }
+            return runtimeExecutors;
+        } else if (ExecutorUtil.isJoin(nodeKey, pair)) {
+            // select only one outgoing and do not evaluate the expression
+            return Lists.newArrayList(super.getExecuteExecutor(runtimeContext));
+        } else {
+            // not match
+            LOGGER.warn("Mismatch between fork and join of node [{}] in flow definition", nodeKey);
+            throw new ProcessException(ParallelErrorEnum.FORK_AND_JOIN_NOT_MATCH.getErrNo(), ParallelErrorEnum.FORK_AND_JOIN_NOT_MATCH.getErrMsg());
+        }
+    }
+
+    @Override
+    protected void doExecute(RuntimeContext runtimeContext) {
+        FlowElement currentNodeModel = runtimeContext.getCurrentNodeModel();
+        Pair<String, String> forkAndJoinNodeKey = ExecutorUtil.getForkAndJoinNodeKey(currentNodeModel);
+        String flowInstanceId = runtimeContext.getFlowInstanceId();
+        NodeInstanceBO currentNodeInstance = runtimeContext.getCurrentNodeInstance();
+
+        // save and clear node instance list before execute
+        saveAndClearNodeInstanceList(runtimeContext);
+
+        if (ExecutorUtil.isFork(currentNodeModel.getKey(), forkAndJoinNodeKey)) {
+            // fork
+            forkNodeHandle(runtimeContext, currentNodeModel);
+            markCurrentNodeCompleted(runtimeContext);
+            List<RuntimeExecutor> executeExecutors = getExecuteExecutors(runtimeContext);
+            doExecuteByAsyn(runtimeContext, executeExecutors);
+        } else if (ExecutorUtil.isJoin(currentNodeModel.getKey(), forkAndJoinNodeKey)) {
+            // join
+            joinNodeHandle(runtimeContext, currentNodeModel, forkAndJoinNodeKey.getLeft(), flowInstanceId, currentNodeInstance);
+        } else {
+            LOGGER.error("Missing required element attributes: forkJoinMatch[fork,join]");
+            throw new ProcessException(ParallelErrorEnum.REQUIRED_ELEMENT_ATTRIBUTES.getErrNo(), ParallelErrorEnum.REQUIRED_ELEMENT_ATTRIBUTES.getErrMsg());
+        }
+    }
+
+    private void doExecuteByAsyn(RuntimeContext runtimeContext, List<RuntimeExecutor> runtimeExecutors) {
+        List<RuntimeContext> contextList = Lists.newArrayList();
+        CompletionService<RuntimeContext> completionService = new ExecutorCompletionService<>(asynTaskExecutor);
+
+        AtomicInteger processStatus = new AtomicInteger(ProcessStatus.SUCCESS);
+        String parentExecuteIdStr = ExecutorUtil.getParentExecuteId((String) runtimeContext.getExtendProperties().getOrDefault("executeId", ""));
+        List<String> executeIds = new ArrayList<>(ExecutorUtil.getExecuteIdSet((String) runtimeContext.getExtendProperties().get("executeId")));
+        runtimeContext.getExtendProperties().put("executeId", null);
+        for (int i = 0; i < runtimeExecutors.size(); i++) {
+            RuntimeExecutor executor = runtimeExecutors.get(i);
+            RuntimeContext rc = cloneRuntimeContext(runtimeContext, parentExecuteIdStr, executeIds, i);
+            contextList.add(rc);
+            completionService.submit(() -> asynExecute(processStatus, executor, rc));
+        }
+
+        // execute result handle
+        asynExecuteResultHandle(runtimeContext, contextList, completionService, executeIds, asynTaskExecutor.getTimeout());
+    }
+
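+    // Result collection happens below on the forking thread. Branch executions that park at
+    // the join (or at a user task) finish with a SuspendException, so for a running fork at
+    // least one such "exception" is the expected outcome rather than an error.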
+    private void asynExecuteResultHandle(RuntimeContext runtimeContext, List<RuntimeContext> contextList, CompletionService<RuntimeContext> completionService, List<String> executeIds, long timeout) {
+        // system exception, execution exception, suspend exception
+        Map<String, ProcessException> em = new HashMap<>();
+        String systemErrorNodeKey = null;
+        String processExceptionNodeKey = null;
+        String suspendExceptionNodeKey = null;
+        List<ParallelRuntimeContext> parallelRuntimeContextList = (List<ParallelRuntimeContext>) runtimeContext.getExtendProperties().getOrDefault("parallelRuntimeContextList", new ArrayList<ParallelRuntimeContext>());
+        parallelRuntimeContextList.clear();
+        for (RuntimeContext context : contextList) {
+            ParallelRuntimeContext prc = new ParallelRuntimeContext();
+            try {
+                Future<RuntimeContext> future;
+                if (timeout > 0) {
+                    future = getResultWithTimeout(completionService, timeout);
+                } else {
+                    future = completionService.take();
+                }
+                parallelRuntimeContextList.add(prc);
+                future.get();
+            } catch (ExecutionException e) {
+                Throwable cause = e.getCause();
+                if (cause instanceof SuspendException) {
+                    SuspendException exception = (SuspendException) cause;
+                    suspendExceptionNodeKey = context.getSuspendNodeInstance().getNodeKey();
+                    em.put(suspendExceptionNodeKey, exception);
+                    prc.setException(exception);
+                } else if (cause instanceof ProcessException) {
+                    ProcessException exception = (ProcessException) cause;
+                    processExceptionNodeKey = context.getSuspendNodeInstance().getNodeKey();
+                    em.put(processExceptionNodeKey, exception);
+                    prc.setException(exception);
+                } else {
+                    LOGGER.error("parallel process exception", e);
+                    systemErrorNodeKey = context.getSuspendNodeInstance().getNodeKey();
+                    ProcessException exception = new ProcessException(ErrorEnum.SYSTEM_ERROR);
+                    em.put(systemErrorNodeKey, exception);
+                    prc.setException(exception);
+                }
+            } catch (Exception e) {
+                LOGGER.error("parallel process exception", e);
+                systemErrorNodeKey = context.getSuspendNodeInstance().getNodeKey();
+                ProcessException exception = new ProcessException(ErrorEnum.SYSTEM_ERROR);
+                em.put(systemErrorNodeKey, exception);
+                prc.setException(exception);
+            } finally {
+                prc.setExecuteId((String) context.getExtendProperties().get("executeId"));
+                prc.setBranchExecuteDataMap(context.getInstanceDataMap());
+                prc.setBranchSuspendNodeInstance(context.getSuspendNodeInstance());
+            }
+        }
+
+        // FIXME: optimize the exception-priority handling below
+        if (null != systemErrorNodeKey) {
+            parallelNodeInstanceService.closeParallelSuspendUserTask(runtimeContext, executeIds);
+            throw em.get(systemErrorNodeKey);
+        }
+        if (null != processExceptionNodeKey) {
+            parallelNodeInstanceService.closeParallelSuspendUserTask(runtimeContext, executeIds);
+            throw em.get(processExceptionNodeKey);
+        }
+        throw em.get(suspendExceptionNodeKey);
+    }
+
+    private Future<RuntimeContext> getResultWithTimeout(CompletionService<RuntimeContext> completionService, long timeout) {
+        try {
+            Future<RuntimeContext> future = completionService.poll(timeout, TimeUnit.MILLISECONDS);
+            if (future == null) {
+                LOGGER.warn("Parallel execute timeout, please obtain the latest process execution status through query||timeout={}", timeout);
+                throw new ProcessException(ParallelErrorEnum.PARALLEL_EXECUTE_TIMEOUT.getErrNo(), ParallelErrorEnum.PARALLEL_EXECUTE_TIMEOUT.getErrMsg());
+            }
+            return future;
+        } catch (InterruptedException e) {
+            // restore the interrupt flag before rethrowing
+            Thread.currentThread().interrupt();
+            throw new RuntimeException(e);
+        }
+    }
+
+    private RuntimeContext asynExecute(AtomicInteger processStatus, RuntimeExecutor executor, RuntimeContext rc) {
+        try {
+            doParallelExecute(rc, executor);
+        } catch (ProcessException e) {
+            if (!ErrorEnum.isSuccess(e.getErrNo())
+                    && processStatus.get() == ProcessStatus.SUCCESS) {
+                processStatus.set(ProcessStatus.FAILED);
+            }
+            throw e;
+        } finally {
+            rc.setProcessStatus(processStatus.get());
+            doPostParallelExecute(rc);
+        }
+        return rc;
+    }
+
+    private void doPostParallelExecute(RuntimeContext runtimeContext) throws ProcessException {
+
+        // 1. update context with processStatus
+        if (runtimeContext.getProcessStatus() == ProcessStatus.SUCCESS) {
+            // SUCCESS: update runtimeContext: update suspendNodeInstance
+            if (runtimeContext.getCurrentNodeInstance() != null) {
+                runtimeContext.setSuspendNodeInstance(runtimeContext.getCurrentNodeInstance());
+            }
+        }
+
+        // 2. save nodeInstanceList to db
+        saveNodeInstanceList(runtimeContext);
+
+        // 3. update flowInstance status when completed
+        if (isCompleted(runtimeContext)) {
+            if (isSubFlowInstance(runtimeContext)) {
+                processInstanceDAO.updateStatus(runtimeContext.getFlowInstanceId(), FlowInstanceStatus.END);
+                runtimeContext.setFlowInstanceStatus(FlowInstanceStatus.END);
+            } else {
+                processInstanceDAO.updateStatus(runtimeContext.getFlowInstanceId(), FlowInstanceStatus.COMPLETED);
+                runtimeContext.setFlowInstanceStatus(FlowInstanceStatus.COMPLETED);
+            }
+            LOGGER.info("postExecute: flowInstance process completed.||flowInstanceId={}", runtimeContext.getFlowInstanceId());
+        }
+    }
+
+    private void doParallelExecute(RuntimeContext runtimeContext, RuntimeExecutor runtimeExecutor) throws ProcessException {
+        while (runtimeExecutor != null) {
+            runtimeExecutor.execute(runtimeContext);
+            runtimeExecutor = super.getExecuteExecutor(runtimeContext);
+        }
+    }
+
+    private RuntimeContext cloneRuntimeContext(RuntimeContext runtimeContext, String parentExecuteIdStr, List<String> executeIds, int i) {
+        RuntimeContext rc = SerializationUtils.clone(runtimeContext);
+        String executeId = ExecutorUtil.genExecuteIdWithParent(parentExecuteIdStr, executeIds.get(i));
+        rc.getExtendProperties().put("executeId", executeId);
+        List<ParallelRuntimeContext> parallelRuntimeContextList = (List<ParallelRuntimeContext>) rc.getExtendProperties().get("parallelRuntimeContextList");
+        ParallelRuntimeContext context = parallelRuntimeContextList.get(i);
+        rc.setCurrentNodeModel(context.getCurrentNodeModel());
+        // each branch clone keeps only its own node model; the shared list is dropped
+        parallelRuntimeContextList.clear();
+        return rc;
+    }
+
+    private void markCurrentNodeCompleted(RuntimeContext runtimeContext) {
+        NodeInstanceBO currentNodeInstance = runtimeContext.getCurrentNodeInstance();
+        currentNodeInstance.setInstanceDataId(runtimeContext.getInstanceDataId());
+        currentNodeInstance.setStatus(NodeInstanceStatus.COMPLETED);
+    }
+
+    @Override
+    protected void preExecute(RuntimeContext runtimeContext) throws ProcessException {
+        super.preExecute(runtimeContext);
+        NodeInstanceBO currentNodeInstance = runtimeContext.getCurrentNodeInstance();
+        String executeId = (String) runtimeContext.getExtendProperties().get("executeId");
+        if (StringUtils.isEmpty(executeId)) {
+            List<ParallelRuntimeContext> parallelRuntimeContextList = (List<ParallelRuntimeContext>) runtimeContext.getExtendProperties().get("parallelRuntimeContextList");
+            if (null != parallelRuntimeContextList && !parallelRuntimeContextList.isEmpty()) {
+                executeId = String.valueOf(parallelRuntimeContextList.get(0).getExecuteId());
+                runtimeContext.getExtendProperties().put("executeId", executeId);
+                currentNodeInstance.getProperties().put("executeId", executeId);
+            }
+        }
+    }
+
+    @Override
+    protected void postExecute(RuntimeContext runtimeContext) {
+        List<ExtendRuntimeContext> parallelRuntimeContextList = (List<ExtendRuntimeContext>) runtimeContext.getExtendProperties().get("parallelRuntimeContextList");
+        runtimeContext.setExtendRuntimeContextList(parallelRuntimeContextList);
+        NodeInstanceBO currentNodeInstance = runtimeContext.getCurrentNodeInstance();
+        currentNodeInstance.setInstanceDataId(runtimeContext.getInstanceDataId());
+        currentNodeInstance.setStatus(NodeInstanceStatus.COMPLETED);
+    }
+
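+    // Gateway pairing note: fork and join nodes are matched via the element's forkJoinMatch
+    // property (resolved by ExecutorUtil.getForkAndJoinNodeKey), and a join node selects its
+    // merge behaviour through the BRANCH_MERGE / DATA_MERGE element properties, defaulting to
+    // MergeStrategy.BRANCH_MERGE.JOIN_ALL and MergeStrategy.DATA_MERGE.ALL in joinNodeHandle below.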
+    @Override
+    protected void preRollback(RuntimeContext runtimeContext) throws ProcessException {
+        int nodeType = runtimeContext.getCurrentNodeModel().getType();
+        // If the flowElementType is a parallel gateway or an inclusive gateway, rollback is disabled.
+        if (nodeType == ParallelGatewayElementPlugin.elementType || nodeType == InclusiveGatewayElementPlugin.elementType) {
+            LOGGER.warn("getRollbackExecutor failed: parallel gateway and inclusive gateway do not support rollback");
+            throw new ProcessException(ParallelErrorEnum.NOT_SUPPORT_ROLLBACK.getErrNo(), ParallelErrorEnum.NOT_SUPPORT_ROLLBACK.getErrMsg());
+        }
+    }
+
+    private void joinNodeHandle(RuntimeContext runtimeContext, FlowElement currentNodeModel, String forkKey, String flowInstanceId,
+                                NodeInstanceBO currentNodeInstance) {
+        // FIXME: add a concurrency lock around join handling
+        // current join node info
+        String currentExecuteId = ExecutorUtil.getCurrentExecuteId((String) runtimeContext.getExtendProperties().get("executeId"));
+        String parentExecuteId = ExecutorUtil.getParentExecuteId((String) runtimeContext.getExtendProperties().get("executeId"));
+        // matched fork node info
+        NodeInstancePO forkNodeInstancePo = findForkNodeInstancePO(currentExecuteId, flowInstanceId, forkKey);
+        if (forkNodeInstancePo == null) {
+            LOGGER.error("Not found matched fork node instance||join_node_key={}", currentNodeModel.getKey());
+            throw new ProcessException(ParallelErrorEnum.NOT_FOUND_FORK_INSTANCE.getErrNo(), ParallelErrorEnum.NOT_FOUND_FORK_INSTANCE.getErrMsg());
+        }
+        Set<String> allExecuteIdSet = ExecutorUtil.getExecuteIdSet((String) forkNodeInstancePo.get("executeId"));
+        NodeInstancePO joinNodeInstancePo = findJoinNodeInstancePO(allExecuteIdSet, currentExecuteId, flowInstanceId, currentNodeInstance.getNodeKey());
+
+        Map<String, Object> properties = currentNodeModel.getProperties();
+        String branchMerge = (String) properties.getOrDefault(Constants.ELEMENT_PROPERTIES.BRANCH_MERGE, MergeStrategy.BRANCH_MERGE.JOIN_ALL);
+        String dataMerge = (String) properties.getOrDefault(Constants.ELEMENT_PROPERTIES.DATA_MERGE, MergeStrategy.DATA_MERGE.ALL);
+        BranchMergeStrategy branchMergeStrategy = mergeStrategyFactory.getBranchMergeStrategy(branchMerge);
+        DataMergeStrategy dataMergeStrategy = mergeStrategyFactory.getDataMergeStrategy(dataMerge);
+
+        if (joinNodeInstancePo == null) {
+            // branch first arrival; log before the extend properties are cleared
+            LOGGER.info("execute join first.||nodeKey={}||nodeInstanceId={}||executeId={}||dataMerge={}",
+                    currentNodeModel.getKey(), currentNodeInstance.getNodeInstanceId(), currentExecuteId, dataMergeStrategy.name());
+            runtimeContext.getExtendProperties().clear();
+            branchMergeStrategy.joinFirst(runtimeContext, forkNodeInstancePo, currentNodeInstance, parentExecuteId, currentExecuteId, allExecuteIdSet, dataMergeStrategy);
+        } else {
+            if (joinNodeInstancePo.getStatus() != ParallelNodeInstanceStatus.WAITING) {
+                LOGGER.warn("reentrant warning, arrival branch already exists||joinNodeKey={}||nodeInstanceId={}", joinNodeInstancePo.getNodeKey(), joinNodeInstancePo.getNodeInstanceId());
+                throw new ProcessException(ParallelErrorEnum.PARALLEL_EXECUTE_REENTRY.getErrNo(), ParallelErrorEnum.PARALLEL_EXECUTE_REENTRY.getErrMsg());
+            }
+            LOGGER.info("execute join other.||nodeKey={}||nodeInstanceId={}||executeId={}||dataMerge={}",
+                    currentNodeModel.getKey(), currentNodeInstance.getNodeInstanceId(), currentExecuteId, dataMergeStrategy.name());
+            runtimeContext.getExtendProperties().clear();
+            branchMergeStrategy.joinMerge(runtimeContext, joinNodeInstancePo, currentNodeInstance, parentExecuteId, currentExecuteId, allExecuteIdSet, dataMergeStrategy);
+        }
+
+        // clear parallel context and reset execute id to the parent scope
+        runtimeContext.getExtendProperties().put("parallelRuntimeContextList", null);
+        runtimeContext.getExtendProperties().put("executeId", parentExecuteId);
+    }
+
+    private NodeInstancePO findForkNodeInstancePO(String executeId, String flowInstanceId, String nodeKey) {
+        List<NodeInstancePO> nodeInstancePOList = nodeInstanceDAO.selectByFlowInstanceIdAndNodeKey(flowInstanceId, nodeKey);
+        Optional<NodeInstancePO> nodeInstancePOOptional = nodeInstancePOList.stream()
+                .filter(po -> po.get("executeId") != null && ((String) po.get("executeId")).contains(executeId)).findFirst();
+        return nodeInstancePOOptional.orElse(null);
+    }
+
+    private NodeInstancePO findJoinNodeInstancePO(Set<String> executeIds, String executeId, String flowInstanceId, String nodeKey) {
+        List<NodeInstancePO> nodeInstancePOList = nodeInstanceDAO.selectByFlowInstanceIdAndNodeKey(flowInstanceId, nodeKey);
+        if (CollectionUtils.isEmpty(nodeInstancePOList)) {
+            return null;
+        }
+
+        Set<String> executeIdSet = Sets.newHashSet(executeIds);
+        executeIdSet.remove(executeId);
+        Optional<NodeInstancePO> instanceOptional = nodeInstancePOList.stream().filter(po -> {
+            Set<String> currentExecuteIds = ExecutorUtil.getExecuteIdSet((String) po.get("executeId"));
+            if (currentExecuteIds.contains(executeId)) {
+                LOGGER.warn("reentrant warning, branch of [{}] has been executed.||instanceNodeId={}", executeId, po.getNodeInstanceId());
+                throw new ProcessException(ParallelErrorEnum.PARALLEL_EXECUTE_REENTRY.getErrNo(), ParallelErrorEnum.PARALLEL_EXECUTE_REENTRY.getErrMsg());
+            }
+            Optional<String> any = executeIdSet.stream().filter(currentExecuteIds::contains).findAny();
+            return any.isPresent();
+        }).findFirst();
+        return instanceOptional.orElse(null);
+    }
+
+    private void forkNodeHandle(RuntimeContext runtimeContext, FlowElement currentNodeModel) {
+        // 1. calculate the effective outgoing size
+        int outgoingSize = calculateOutgoingSize(currentNodeModel, runtimeContext.getFlowElementMap(), runtimeContext.getInstanceDataMap());
+        if (outgoingSize == 0) {
+            throw new ProcessException(ErrorEnum.GATEWAY_NO_OUTGOING);
+        }
+        // 2. generate execute ids, one per branch
+        List<String> executeIdList = getExecuteIdList(outgoingSize);
+
+        NodeInstancePO nodeInstancePO = buildNodeInstancePO(runtimeContext, runtimeContext.getCurrentNodeInstance());
+        String executeIds = ExecutorUtil.genExecuteIds((String) runtimeContext.getExtendProperties().get("executeId"), executeIdList);
+        runtimeContext.getExtendProperties().put("executeId", executeIds);
+        nodeInstancePO.put("executeId", executeIds);
+        nodeInstancePO.setStatus(NodeInstanceStatus.COMPLETED);
+        nodeInstanceDAO.insert(nodeInstancePO);
+        nodeInstanceLogDAO.insert(buildNodeInstanceLogPO(nodeInstancePO));
+        instanceDataDAO.insert(buildInstanceDataPO(runtimeContext, runtimeContext.getCurrentNodeInstance(), runtimeContext.getFlowInstanceId()));
+    }
+
+    private List<String> getExecuteIdList(int outgoingSize) {
+        List<String> executeIdList = Lists.newArrayList();
+        for (int i = 0; i < outgoingSize; i++) {
+            executeIdList.add(genId());
+        }
+        return executeIdList;
+    }
+
+    private void saveAndClearNodeInstanceList(RuntimeContext runtimeContext) {
+        saveNodeInstanceList(runtimeContext);
+        runtimeContext.getNodeInstanceList().clear();
+    }
+
+    protected List<FlowElement> calculateNextNodes(FlowElement currentFlowElement, Map<String, FlowElement> flowElementMap,
+                                                   Map<String, InstanceData> instanceDataMap) throws ProcessException {
+        List<FlowElement> nextFlowElements = calculateOutgoings(currentFlowElement, flowElementMap, instanceDataMap);
+        return nextFlowElements.stream()
+                .map(nextFlowElement -> getUniqueNextNode(nextFlowElement, flowElementMap))
+                .collect(Collectors.toList());
+    }
+
+    protected List<FlowElement> calculateOutgoings(FlowElement currentFlowElement, Map<String, FlowElement> flowElementMap, Map<String, InstanceData> instanceDataMap) {
+        List<FlowElement> flowElements = new ArrayList<>();
+
+        List<String> outgoingList = currentFlowElement.getOutgoing();
+        for (String outgoingKey : outgoingList) {
+            FlowElement outgoingSequenceFlow = FlowModelUtil.getFlowElement(flowElementMap, outgoingKey);
+            if (calculateCondition(outgoingSequenceFlow, instanceDataMap)) {
+                flowElements.add(outgoingSequenceFlow);
+            }
+        }
+
+        if (!flowElements.isEmpty()) {
+            return flowElements;
+        }
+
+        LOGGER.warn("calculateOutgoing failed.||nodeKey={}", currentFlowElement.getKey());
+        throw new ProcessException(ErrorEnum.GET_OUTGOING_FAILED);
+    }
+
+    protected boolean calculateCondition(FlowElement outgoingSequenceFlow, Map<String, InstanceData> instanceDataMap) {
+        // default: every outgoing is taken (parallel fork); the inclusive gateway overrides this
+        return true;
+    }
+
+    private InstanceDataPO buildInstanceDataPO(RuntimeContext runtimeContext, NodeInstanceBO currentNodeInstance, String flowInstanceId) {
+        InstanceDataPO po = new InstanceDataPO();
+        po.setFlowInstanceId(flowInstanceId);
+        po.setType(InstanceDataType.EXECUTE);
+        po.setFlowModuleId(runtimeContext.getFlowModuleId());
+        po.setFlowDeployId(runtimeContext.getFlowDeployId());
+        po.setCaller(runtimeContext.getCaller());
+        po.setInstanceDataId(genId());
+        po.setCreateTime(new Date());
+        po.setTenant(runtimeContext.getTenant());
+        po.setInstanceData(InstanceDataUtil.getInstanceDataListStr(runtimeContext.getInstanceDataMap()));
+        po.setNodeInstanceId(currentNodeInstance.getNodeInstanceId());
+        po.setNodeKey(currentNodeInstance.getNodeKey());
+        return po;
+    }
+
+    private void saveNodeInstanceList(RuntimeContext runtimeContext) {
+        List<NodeInstanceBO> processNodeList = runtimeContext.getNodeInstanceList();
+
+        if (CollectionUtils.isEmpty(processNodeList)) {
+            LOGGER.warn("parallel process saveNodeInstanceList: processNodeList is empty.||flowInstanceId={}||nodeInstanceType={}",
+                    runtimeContext.getFlowInstanceId(), NodeInstanceType.EXECUTE);
+            return;
+        }
+
+        List<NodeInstancePO> nodeInstancePOList = Lists.newArrayList();
+        List<NodeInstanceLogPO> nodeInstanceLogPOList = Lists.newArrayList();
+        processNodeList.forEach(nodeInstanceBO -> {
+            NodeInstancePO nodeInstancePO = buildNodeInstancePO(runtimeContext, nodeInstanceBO);
+            nodeInstancePOList.add(nodeInstancePO);
+            // build nodeInstance log
+            NodeInstanceLogPO nodeInstanceLogPO = buildNodeInstanceLogPO(nodeInstancePO);
+            nodeInstanceLogPOList.add(nodeInstanceLogPO);
+        });
+        nodeInstanceDAO.insertOrUpdateList(nodeInstancePOList);
+        nodeInstanceLogDAO.insertList(nodeInstanceLogPOList);
+    }
+
+    private NodeInstancePO buildNodeInstancePO(RuntimeContext runtimeContext, NodeInstanceBO nodeInstanceBO) {
+        NodeInstancePO nodeInstancePO = new NodeInstancePO();
+        BeanUtils.copyProperties(nodeInstanceBO, nodeInstancePO);
+        nodeInstancePO.setFlowInstanceId(runtimeContext.getFlowInstanceId());
+        nodeInstancePO.setFlowDeployId(runtimeContext.getFlowDeployId());
+        nodeInstancePO.setTenant(runtimeContext.getTenant());
+        nodeInstancePO.setCaller(runtimeContext.getCaller());
+        Date currentTime = new Date();
+        if (null == nodeInstancePO.getCreateTime()) {
+            nodeInstancePO.setCreateTime(currentTime);
+        }
+        if (null != runtimeContext.getExtendProperties()) {
+            List<ParallelRuntimeContext> contextList = (List<ParallelRuntimeContext>) runtimeContext.getExtendProperties().get("parallelRuntimeContextList");
+            if (null != contextList && !contextList.isEmpty()) {
+                nodeInstancePO.put("executeId", contextList.get(0).getExecuteId());
+            }
+        }
+        nodeInstancePO.setModifyTime(currentTime);
+        return nodeInstancePO;
+    }
+
+    private NodeInstanceLogPO buildNodeInstanceLogPO(NodeInstancePO nodeInstancePO) {
+        NodeInstanceLogPO nodeInstanceLogPO = new NodeInstanceLogPO();
+        BeanUtils.copyProperties(nodeInstancePO, nodeInstanceLogPO);
+        nodeInstanceLogPO.setId(null);
+        nodeInstanceLogPO.setType(NodeInstanceType.EXECUTE);
+        return nodeInstanceLogPO;
+    }
+}
diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/AsynTaskExecutor.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/AsynTaskExecutor.java
new file mode 100644
index 00000000..2ebf33c2
--- /dev/null
+++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/AsynTaskExecutor.java
@@ -0,0 +1,15 @@
+package com.didiglobal.turbo.plugin.executor;
+
+import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
+
+public class AsynTaskExecutor extends ThreadPoolTaskExecutor {
+    private long timeout;
+
+    public long getTimeout() {
+        return timeout;
+    }
+
+    public void setTimeout(long timeout) {
+        this.timeout = timeout;
+    }
+}
\ No newline at end of file
diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/BranchMergeAnyOne.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/BranchMergeAnyOne.java
new file mode 100644
index 00000000..6fbc05af
--- /dev/null
+++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/BranchMergeAnyOne.java
@@ -0,0 +1,53 @@
+package com.didiglobal.turbo.plugin.executor;
+
+import com.didiglobal.turbo.engine.bo.NodeInstanceBO;
+import com.didiglobal.turbo.engine.common.NodeInstanceStatus;
+import com.didiglobal.turbo.engine.common.RuntimeContext;
+import com.didiglobal.turbo.engine.entity.InstanceDataPO;
+import com.didiglobal.turbo.engine.entity.NodeInstancePO;
+import com.didiglobal.turbo.engine.exception.ProcessException;
+import com.didiglobal.turbo.plugin.common.ParallelErrorEnum;
+import com.didiglobal.turbo.plugin.service.ParallelNodeInstanceService;
+import com.didiglobal.turbo.plugin.util.ExecutorUtil; +import org.springframework.stereotype.Component; + +import javax.annotation.Resource; + +import java.util.Set; + +@Component +public class BranchMergeAnyOne extends BranchMergeStrategy { + + @Resource + private ParallelNodeInstanceService parallelNodeInstanceService; + + /** + * The first branch to arrive + */ + @Override + void joinFirst(RuntimeContext runtimeContext, NodeInstancePO forkNodeInstancePo, NodeInstanceBO currentNodeInstance, String parentExecuteId, String currentExecuteId, Set executeIds, DataMergeStrategy dataMergeStrategy) { + + InstanceDataPO joinInstanceData = instanceDataDAO.select(runtimeContext.getFlowInstanceId(), runtimeContext.getInstanceDataId()); + InstanceDataPO forkInstanceData = instanceDataDAO.select(runtimeContext.getFlowInstanceId(), forkNodeInstancePo.getInstanceDataId()); + InstanceDataPO mergePo = dataMergeStrategy.merge(runtimeContext, forkInstanceData, joinInstanceData); + instanceDataDAO.insertOrUpdate(mergePo); + currentNodeInstance.setStatus(NodeInstanceStatus.COMPLETED); + currentNodeInstance.put("executeId", ExecutorUtil.genExecuteIdWithParent(parentExecuteId, currentExecuteId)); + NodeInstancePO joinNodeInstancePo = buildNodeInstancePO(runtimeContext, currentNodeInstance); + nodeInstanceDAO.insert(joinNodeInstancePo); + nodeInstanceLogDAO.insert(buildNodeInstanceLogPO(joinNodeInstancePo)); + + // Close other nodes with pending ACTIVE status + parallelNodeInstanceService.closeParallelSuspendUserTask(runtimeContext, executeIds); + } + + /** + * The AnyOne policy only handles the first branch that arrives, + * and all subsequent branches that arrive fail + */ + @Override + void joinMerge(RuntimeContext runtimeContext, NodeInstancePO joinNodeInstance, NodeInstanceBO currentNodeInstance, String parentExecuteId, String currentExecuteId, Set allExecuteIdSet, DataMergeStrategy dataMergeStrategy) { + throw new ProcessException(ParallelErrorEnum.BRANCH_MERGE_STRATEGY_ERROR.getErrNo(), ParallelErrorEnum.BRANCH_MERGE_STRATEGY_ERROR.getErrMsg()); + } + +} \ No newline at end of file diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/BranchMergeCustom.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/BranchMergeCustom.java new file mode 100644 index 00000000..09f85871 --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/BranchMergeCustom.java @@ -0,0 +1,25 @@ +package com.didiglobal.turbo.plugin.executor; + +import com.didiglobal.turbo.engine.bo.NodeInstanceBO; +import com.didiglobal.turbo.engine.common.RuntimeContext; +import com.didiglobal.turbo.engine.entity.NodeInstancePO; +import com.didiglobal.turbo.engine.exception.ProcessException; +import com.didiglobal.turbo.plugin.common.ParallelErrorEnum; +import org.springframework.stereotype.Component; + +import java.util.Set; + +@Component +public class BranchMergeCustom extends BranchMergeStrategy{ + + @Override + void joinFirst(RuntimeContext runtimeContext, NodeInstancePO forkNodeInstancePo, NodeInstanceBO currentNodeInstance, String parentExecuteId, String currentExecuteId, Set executeIds, DataMergeStrategy dataMergeStrategy) { + throw new ProcessException(ParallelErrorEnum.UNSUPPORTED_BRANCH_MERGE_STRATEGY.getErrNo(), ParallelErrorEnum.UNSUPPORTED_BRANCH_MERGE_STRATEGY.getErrMsg()); + } + + @Override + void joinMerge(RuntimeContext runtimeContext, NodeInstancePO joinNodeInstance, NodeInstanceBO currentNodeInstance, String parentExecuteId, String currentExecuteId, Set 
allExecuteIdSet, DataMergeStrategy dataMergeStrategy) { + throw new ProcessException(ParallelErrorEnum.UNSUPPORTED_BRANCH_MERGE_STRATEGY.getErrNo(), ParallelErrorEnum.UNSUPPORTED_BRANCH_MERGE_STRATEGY.getErrMsg()); + } + +} \ No newline at end of file diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/BranchMergeJoinAll.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/BranchMergeJoinAll.java new file mode 100644 index 00000000..2c8f8494 --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/BranchMergeJoinAll.java @@ -0,0 +1,112 @@ +package com.didiglobal.turbo.plugin.executor; + +import com.didiglobal.turbo.engine.bo.NodeInstanceBO; +import com.didiglobal.turbo.engine.common.Constants; +import com.didiglobal.turbo.engine.common.InstanceDataType; +import com.didiglobal.turbo.engine.common.NodeInstanceStatus; +import com.didiglobal.turbo.engine.common.RuntimeContext; +import com.didiglobal.turbo.engine.entity.InstanceDataPO; +import com.didiglobal.turbo.engine.entity.NodeInstancePO; +import com.didiglobal.turbo.engine.exception.SuspendException; +import com.didiglobal.turbo.plugin.common.ParallelErrorEnum; +import com.didiglobal.turbo.plugin.common.ParallelNodeInstanceStatus; +import com.didiglobal.turbo.plugin.util.ExecutorUtil; +import com.google.common.collect.Sets; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Component; + +import java.text.MessageFormat; +import java.util.Date; +import java.util.Set; + +@Component +public class BranchMergeJoinAll extends BranchMergeStrategy { + + private static final Logger LOGGER = LoggerFactory.getLogger(BranchMergeJoinAll.class); + + + @Override + public void joinFirst(RuntimeContext runtimeContext, NodeInstancePO forkNodeInstancePo, NodeInstanceBO currentNodeInstance, + String parentExecuteId, String currentExecuteId, Set executeIds, DataMergeStrategy dataMergeStrategy) { + InstanceDataPO joinInstanceData = buildInstanceDataPO(runtimeContext, currentNodeInstance, runtimeContext.getFlowInstanceId()); + InstanceDataPO forkInstanceData = instanceDataDAO.select(runtimeContext.getFlowInstanceId(), forkNodeInstancePo.getInstanceDataId()); + if (ExecutorUtil.allArrived(executeIds, Sets.newHashSet(currentExecuteId))) { + merge(runtimeContext, forkInstanceData, currentNodeInstance, parentExecuteId, currentExecuteId, dataMergeStrategy, joinInstanceData, NodeInstanceStatus.COMPLETED); + } else { + merge(runtimeContext, forkInstanceData, currentNodeInstance, parentExecuteId, currentExecuteId, dataMergeStrategy, joinInstanceData, ParallelNodeInstanceStatus.WAITING); + + throw new SuspendException(ParallelErrorEnum.WAITING_SUSPEND.getErrNo(), ParallelErrorEnum.WAITING_SUSPEND.getErrMsg()); + } + } + + private void merge(RuntimeContext runtimeContext, InstanceDataPO forkInstanceData, NodeInstanceBO currentNodeInstance, + String parentExecuteId, String currentExecuteId, DataMergeStrategy dataMergeStrategy, InstanceDataPO instanceDataPo, int status) { + InstanceDataPO mergePo = dataMergeStrategy.merge(runtimeContext, forkInstanceData, instanceDataPo); + instanceDataDAO.insertOrUpdate(mergePo); + currentNodeInstance.setStatus(status); + currentNodeInstance.put("executeId", ExecutorUtil.genExecuteIdWithParent(parentExecuteId, currentExecuteId)); + NodeInstancePO joinPo = buildNodeInstancePO(runtimeContext, currentNodeInstance); + nodeInstanceDAO.insert(joinPo); + 
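+        // a WAITING join row persisted above is what findJoinNodeInstancePO later hands to
+        // joinMerge; the EXECUTE log row below keeps the history table in step with it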
nodeInstanceLogDAO.insert(buildNodeInstanceLogPO(joinPo));
+    }
+
+    @Override
+    public void joinMerge(RuntimeContext runtimeContext, NodeInstancePO joinNodeInstancePo, NodeInstanceBO currentNodeInstance,
+                          String parentExecuteId, String currentExecuteId, Set<String> allExecuteIdSet, DataMergeStrategy dataMergeStrategy) {
+        Set<String> arrivedExecuteIds = ExecutorUtil.getExecuteIdSet((String) joinNodeInstancePo.get("executeId"));
+        arrivedExecuteIds.add(currentExecuteId);
+        InstanceDataPO joinInstanceData = instanceDataDAO.select(runtimeContext.getFlowInstanceId(), runtimeContext.getInstanceDataId());
+        String instanceDataId = joinInstanceData.getInstanceDataId();
+        if (ExecutorUtil.allArrived(allExecuteIdSet, arrivedExecuteIds)) {
+            // all branches arrived
+            InstanceDataPO mergePo = dataMergeStrategy.merge(runtimeContext, null, joinInstanceData);
+            if ((mergePo.getId() == null || StringUtils.isBlank(instanceDataId)) && isNotEmpty(mergePo.getInstanceData())) {
+                instanceDataId = genId();
+                joinNodeInstancePo.setInstanceDataId(instanceDataId);
+                fillMergePo(runtimeContext, mergePo, instanceDataId);
+                instanceDataDAO.insert(mergePo);
+            } else {
+                if (StringUtils.isNotBlank(instanceDataId)) {
+                    instanceDataDAO.updateData(mergePo);
+                } else {
+                    LOGGER.warn("There is no data to be merged.");
+                }
+            }
+            buildParallelNodeInstancePo(joinNodeInstancePo, currentNodeInstance, NodeInstanceStatus.COMPLETED);
+            nodeInstanceDAO.updateById(joinNodeInstancePo);
+            nodeInstanceLogDAO.insert(buildCurrentNodeInstanceLogPO(currentNodeInstance, currentExecuteId, joinNodeInstancePo));
+        } else {
+            // not all branches arrived yet
+            InstanceDataPO mergePo = dataMergeStrategy.merge(runtimeContext, null, joinInstanceData);
+            if (StringUtils.isNotBlank(instanceDataId)) {
+                instanceDataDAO.updateData(mergePo);
+            }
+            buildParallelNodeInstancePo(joinNodeInstancePo, currentNodeInstance, ParallelNodeInstanceStatus.WAITING);
+            nodeInstanceDAO.updateById(joinNodeInstancePo);
+            nodeInstanceLogDAO.insert(buildNodeInstanceLogPO(joinNodeInstancePo));
+
+            throw new SuspendException(ParallelErrorEnum.WAITING_SUSPEND.getErrNo(), MessageFormat.format(Constants.NODE_INSTANCE_FORMAT,
+                    runtimeContext.getCurrentNodeModel().getKey(),
+                    runtimeContext.getCurrentNodeModel().getProperties().getOrDefault(Constants.ELEMENT_PROPERTIES.NAME, StringUtils.EMPTY),
+                    currentNodeInstance.getNodeInstanceId()));
+        }
+    }
+
+    private boolean isNotEmpty(String instanceData) {
+        // "{}" (ignoring whitespace) denotes an empty data map, i.e. nothing to merge
+        return StringUtils.isNotBlank(instanceData) && !StringUtils.equals("{}", instanceData.replace(" ", ""));
+    }
+
+    private static void fillMergePo(RuntimeContext runtimeContext, InstanceDataPO mergePo, String instanceDataId) {
+        mergePo.setInstanceDataId(instanceDataId);
+        mergePo.setCreateTime(new Date());
+        mergePo.setNodeInstanceId(runtimeContext.getCurrentNodeInstance().getNodeInstanceId());
+        mergePo.setNodeKey(runtimeContext.getCurrentNodeInstance().getNodeKey());
+        mergePo.setType(InstanceDataType.COMMIT);
+        mergePo.setTenant(runtimeContext.getTenant());
+        mergePo.setFlowModuleId(runtimeContext.getFlowModuleId());
+        mergePo.setFlowDeployId(runtimeContext.getFlowDeployId());
+        mergePo.setCaller(runtimeContext.getCaller());
+    }
+}
\ No newline at end of file
diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/BranchMergeStrategy.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/BranchMergeStrategy.java
new file mode 100644
index 00000000..6a593eef
--- /dev/null
+++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/BranchMergeStrategy.java
@@ -0,0 +1,130 @@
+package com.didiglobal.turbo.plugin.executor;
+
+import com.didiglobal.turbo.engine.bo.NodeInstanceBO;
+import com.didiglobal.turbo.engine.common.InstanceDataType;
+import com.didiglobal.turbo.engine.common.NodeInstanceType;
+import com.didiglobal.turbo.engine.common.RuntimeContext;
+import com.didiglobal.turbo.engine.dao.InstanceDataDAO;
+import com.didiglobal.turbo.engine.dao.NodeInstanceDAO;
+import com.didiglobal.turbo.engine.dao.NodeInstanceLogDAO;
+import com.didiglobal.turbo.engine.entity.InstanceDataPO;
+import com.didiglobal.turbo.engine.entity.NodeInstanceLogPO;
+import com.didiglobal.turbo.engine.entity.NodeInstancePO;
+import com.didiglobal.turbo.engine.plugin.IdGeneratorPlugin;
+import com.didiglobal.turbo.engine.plugin.manager.PluginManager;
+import com.didiglobal.turbo.engine.util.IdGenerator;
+import com.didiglobal.turbo.engine.util.InstanceDataUtil;
+import com.didiglobal.turbo.engine.util.StrongUuidGenerator;
+import com.didiglobal.turbo.plugin.util.ExecutorUtil;
+import org.springframework.beans.BeanUtils;
+
+import javax.annotation.Resource;
+
+import java.util.Date;
+import java.util.List;
+import java.util.Set;
+
+public abstract class BranchMergeStrategy {
+
+    @Resource
+    protected InstanceDataDAO instanceDataDAO;
+
+    @Resource
+    protected NodeInstanceDAO nodeInstanceDAO;
+
+    @Resource
+    protected NodeInstanceLogDAO nodeInstanceLogDAO;
+
+    @Resource
+    protected PluginManager pluginManager;
+
+    protected IdGenerator idGenerator;
+
+    /**
+     * first arrival
+     */
+    abstract void joinFirst(RuntimeContext runtimeContext, NodeInstancePO forkNodeInstancePo, NodeInstanceBO currentNodeInstance,
+                            String parentExecuteId, String currentExecuteId, Set<String> executeIds, DataMergeStrategy dataMergeStrategy);
+
+    /**
+     * non-first arrival
+     */
+    abstract void joinMerge(RuntimeContext runtimeContext, NodeInstancePO joinNodeInstance, NodeInstanceBO currentNodeInstance,
+                            String parentExecuteId, String currentExecuteId, Set<String> allExecuteIdSet, DataMergeStrategy dataMergeStrategy);
+
+    protected String genId() {
+        if (null == idGenerator) {
+            // prefer a registered id-generator plugin, fall back to the built-in UUID generator
+            List<IdGeneratorPlugin> idGeneratorPlugins = pluginManager.getPluginsFor(IdGeneratorPlugin.class);
+            if (!idGeneratorPlugins.isEmpty()) {
+                idGenerator = idGeneratorPlugins.get(0).getIdGenerator();
+            } else {
+                idGenerator = new StrongUuidGenerator();
+            }
+        }
+        return idGenerator.getNextId();
+    }
+
+    protected InstanceDataPO buildInstanceDataPO(RuntimeContext runtimeContext, NodeInstanceBO currentNodeInstance, String flowInstanceId) {
+        InstanceDataPO po = new InstanceDataPO();
+        po.setFlowInstanceId(flowInstanceId);
+        po.setType(InstanceDataType.EXECUTE);
+        po.setFlowModuleId(runtimeContext.getFlowModuleId());
+        po.setFlowDeployId(runtimeContext.getFlowDeployId());
+        po.setCaller(runtimeContext.getCaller());
+        po.setInstanceDataId(genId());
+        po.setCreateTime(new Date());
+        po.setTenant(runtimeContext.getTenant());
+        po.setInstanceData(InstanceDataUtil.getInstanceDataListStr(runtimeContext.getInstanceDataMap()));
+        po.setNodeInstanceId(currentNodeInstance.getNodeInstanceId());
+        po.setNodeKey(currentNodeInstance.getNodeKey());
+        return po;
+    }
+
+    protected NodeInstancePO buildNodeInstancePO(RuntimeContext runtimeContext, NodeInstanceBO nodeInstanceBO) {
+        NodeInstancePO nodeInstancePO = new NodeInstancePO();
+        BeanUtils.copyProperties(nodeInstanceBO, nodeInstancePO);
+        nodeInstancePO.setFlowDeployId(runtimeContext.getFlowDeployId());
+        nodeInstancePO.setFlowInstanceId(runtimeContext.getFlowInstanceId());
+        nodeInstancePO.setCaller(runtimeContext.getCaller());
+        nodeInstancePO.setTenant(runtimeContext.getTenant());
+        Date currentTime = new Date();
+        if (null == nodeInstancePO.getCreateTime()) {
+            nodeInstancePO.setCreateTime(currentTime);
+        }
+        nodeInstancePO.setModifyTime(currentTime);
+        return nodeInstancePO;
+    }
+
+    protected NodeInstanceLogPO buildNodeInstanceLogPO(NodeInstancePO nodeInstancePO) {
+        NodeInstanceLogPO nodeInstanceLogPO = new NodeInstanceLogPO();
+        BeanUtils.copyProperties(nodeInstancePO, nodeInstanceLogPO);
+        nodeInstanceLogPO.setId(null);
+        nodeInstanceLogPO.setType(NodeInstanceType.EXECUTE);
+        return nodeInstanceLogPO;
+    }
+
+    protected void buildParallelNodeInstancePo(NodeInstancePO joinNodeInstancePo, NodeInstanceBO currentNodeInstance, int status) {
+        String sourceNodeInstanceId = joinNodeInstancePo.getSourceNodeInstanceId();
+        String sourceNodeKey = joinNodeInstancePo.getSourceNodeKey();
+        String executeId = (String) joinNodeInstancePo.get("executeId");
+        joinNodeInstancePo.setSourceNodeInstanceId(ExecutorUtil.append(sourceNodeInstanceId, currentNodeInstance.getSourceNodeInstanceId()));
+        joinNodeInstancePo.setSourceNodeKey(ExecutorUtil.append(sourceNodeKey, currentNodeInstance.getSourceNodeKey()));
+        String newExecuteId = ExecutorUtil.append(executeId, ExecutorUtil.getCurrentExecuteId((String) currentNodeInstance.get("executeId")));
+        joinNodeInstancePo.put("executeId", newExecuteId);
+        joinNodeInstancePo.setStatus(status);
+        joinNodeInstancePo.setModifyTime(new Date());
+    }
+
+    protected NodeInstanceLogPO buildCurrentNodeInstanceLogPO(NodeInstanceBO currentNodeInstance, String executeId, NodeInstancePO nodeInstancePO) {
+        NodeInstanceLogPO nodeInstanceLogPO = new NodeInstanceLogPO();
+        BeanUtils.copyProperties(nodeInstancePO, nodeInstanceLogPO);
+        nodeInstanceLogPO.setId(null);
+        nodeInstanceLogPO.setType(NodeInstanceType.EXECUTE);
+        nodeInstanceLogPO.setNodeInstanceId(currentNodeInstance.getNodeInstanceId());
+        // set the current branch's node key and execute id on the log row itself,
+        // not on the join node-instance PO that only serves as the copy source
+        nodeInstanceLogPO.setNodeKey(currentNodeInstance.getNodeKey());
+        nodeInstanceLogPO.put("executeId", executeId);
+        return nodeInstanceLogPO;
+    }
+}
\ No newline at end of file
diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/DataMergeAll.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/DataMergeAll.java
new file mode 100644
index 00000000..6b288917
--- /dev/null
+++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/DataMergeAll.java
@@ -0,0 +1,41 @@
+package com.didiglobal.turbo.plugin.executor;
+
+import com.didiglobal.turbo.plugin.common.MergeStrategy;
+import com.didiglobal.turbo.engine.common.RuntimeContext;
+import com.didiglobal.turbo.engine.entity.InstanceDataPO;
+import com.didiglobal.turbo.engine.model.InstanceData;
+import com.didiglobal.turbo.engine.util.InstanceDataUtil;
+import com.google.common.collect.Maps;
+import org.springframework.stereotype.Component;
+
+import java.util.Map;
+
+@Component
+public class DataMergeAll extends DataMergeStrategy {
+
+    @Override
+    public InstanceDataPO merge(RuntimeContext runtimeContext, InstanceDataPO forkNodeInstanceData, InstanceDataPO joinNodeInstanceData) {
+        Map<String, InstanceData> instanceDataMap = runtimeContext.getInstanceDataMap();
+        if (instanceDataMap == null) {
+            instanceDataMap = Maps.newHashMap();
+        }
+        Map<String, InstanceData> forkInstanceDataMap = InstanceDataUtil.getInstanceDataMap(getForkData(forkNodeInstanceData));
+        Map<String, InstanceData> joinInstanceDataMap = InstanceDataUtil.getInstanceDataMap(joinNodeInstanceData.getInstanceData());
+        // precedence: current context data > already-joined data > fork snapshot
+        forkInstanceDataMap.putAll(joinInstanceDataMap);
+        forkInstanceDataMap.putAll(instanceDataMap);
+
+
runtimeContext.setInstanceDataMap(forkInstanceDataMap); + String dataListStr = InstanceDataUtil.getInstanceDataListStr(forkInstanceDataMap); + joinNodeInstanceData.setInstanceData(dataListStr); + return joinNodeInstanceData; + } + + private String getForkData(InstanceDataPO forkNodeInstanceData) { + return forkNodeInstanceData == null ? null : forkNodeInstanceData.getInstanceData(); + } + + @Override + public String name() { + return MergeStrategy.DATA_MERGE.ALL; + } +} \ No newline at end of file diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/DataMergeCustom.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/DataMergeCustom.java new file mode 100644 index 00000000..46969ca1 --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/DataMergeCustom.java @@ -0,0 +1,22 @@ +package com.didiglobal.turbo.plugin.executor; + +import com.didiglobal.turbo.plugin.common.ParallelErrorEnum; +import com.didiglobal.turbo.plugin.common.MergeStrategy; +import com.didiglobal.turbo.engine.common.RuntimeContext; +import com.didiglobal.turbo.engine.entity.InstanceDataPO; +import com.didiglobal.turbo.engine.exception.ProcessException; +import org.springframework.stereotype.Component; + +@Component +public class DataMergeCustom extends DataMergeStrategy{ + @Override + public InstanceDataPO merge(RuntimeContext runtimeContext, InstanceDataPO forkNodeInstanceData, InstanceDataPO joinNodeInstanceData) { + // TODO Confirm which part of the data to incorporate into the context based on the groovy script + throw new ProcessException(ParallelErrorEnum.UNSUPPORTED_DATA_MERGE_STRATEGY.getErrNo(), ParallelErrorEnum.UNSUPPORTED_DATA_MERGE_STRATEGY.getErrMsg()); + } + + @Override + public String name() { + return MergeStrategy.DATA_MERGE.CUSTOM; + } +} \ No newline at end of file diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/DataMergeNone.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/DataMergeNone.java new file mode 100644 index 00000000..22d9d33f --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/DataMergeNone.java @@ -0,0 +1,30 @@ +package com.didiglobal.turbo.plugin.executor; + + +import com.didiglobal.turbo.plugin.common.MergeStrategy; +import com.didiglobal.turbo.engine.common.RuntimeContext; +import com.didiglobal.turbo.engine.entity.InstanceDataPO; +import com.didiglobal.turbo.engine.model.InstanceData; +import com.didiglobal.turbo.engine.util.InstanceDataUtil; +import org.springframework.stereotype.Component; + +import java.util.Map; + +@Component +public class DataMergeNone extends DataMergeStrategy{ + + @Override + public InstanceDataPO merge(RuntimeContext runtimeContext, InstanceDataPO forkNodeInstanceData, InstanceDataPO joinNodeInstanceData) { + // update runtime context instance data map to be same to fork node data + runtimeContext.getInstanceDataMap().clear(); + Map instanceDataMap = InstanceDataUtil.getInstanceDataMap(forkNodeInstanceData.getInstanceData()); + runtimeContext.getInstanceDataMap().putAll(instanceDataMap); + // merge nothing + return forkNodeInstanceData; + } + + @Override + public String name() { + return MergeStrategy.DATA_MERGE.NONE; + } +} \ No newline at end of file diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/DataMergeStrategy.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/DataMergeStrategy.java new file mode 100644 index 00000000..8fae4d06 --- 
/dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/DataMergeStrategy.java @@ -0,0 +1,24 @@ +package com.didiglobal.turbo.plugin.executor; + +import com.didiglobal.turbo.engine.common.RuntimeContext; +import com.didiglobal.turbo.engine.entity.InstanceDataPO; + +public abstract class DataMergeStrategy { + + /** + * merge data. update instanceDataPO.instanceData + * + * @param runtimeContext runtimeContext + * @param forkNodeInstanceData from db + * @param joinNodeInstanceData from db + * @return InstanceDataPO + */ + public abstract InstanceDataPO merge(RuntimeContext runtimeContext, InstanceDataPO forkNodeInstanceData, InstanceDataPO joinNodeInstanceData); + + /** + * strategy name + * + * @return name + */ + public abstract String name(); +} \ No newline at end of file diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/InclusiveGatewayExecutor.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/InclusiveGatewayExecutor.java new file mode 100644 index 00000000..556b2246 --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/InclusiveGatewayExecutor.java @@ -0,0 +1,25 @@ +package com.didiglobal.turbo.plugin.executor; + +import com.didiglobal.turbo.engine.model.FlowElement; +import com.didiglobal.turbo.engine.model.InstanceData; +import com.didiglobal.turbo.engine.util.FlowModelUtil; +import org.apache.commons.lang3.StringUtils; +import org.springframework.stereotype.Service; + +import java.util.Map; + +@Service +public class InclusiveGatewayExecutor extends AbstractGatewayExecutor { + + @Override + protected boolean calculateCondition(FlowElement outgoingSequenceFlow, Map instanceDataMap) { + // case1 condition is true, hit the outgoing + String condition = FlowModelUtil.getConditionFromSequenceFlow(outgoingSequenceFlow); + return StringUtils.isNotBlank(condition) && processCondition(condition, instanceDataMap); + } + + @Override + protected int calculateOutgoingSize(FlowElement currentNodeModel, Map flowElementMap, Map instanceDataMap) { + return calculateOutgoings(currentNodeModel, flowElementMap, instanceDataMap).size(); + } +} diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/MergeStrategyFactory.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/MergeStrategyFactory.java new file mode 100644 index 00000000..2af56a76 --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/MergeStrategyFactory.java @@ -0,0 +1,61 @@ +package com.didiglobal.turbo.plugin.executor; + +import com.didiglobal.turbo.plugin.common.ParallelErrorEnum; +import com.didiglobal.turbo.plugin.common.MergeStrategy; +import com.didiglobal.turbo.engine.exception.TurboException; +import org.springframework.stereotype.Component; + +import javax.annotation.Resource; + +/** + * MergeStrategyFactory + * + * @author didi + */ +@Component +public class MergeStrategyFactory { + + @Resource + private DataMergeAll dataMergeAll; + + @Resource + private DataMergeNone dataMergeNone; + + @Resource + private DataMergeCustom dataMergeCustom; + + @Resource + private BranchMergeJoinAll branchMergeJoinAll; + + @Resource + private BranchMergeAnyOne branchMergeAnyOne; + + @Resource + private BranchMergeCustom branchMergeCustom; + + public DataMergeStrategy getDataMergeStrategy(String dataMerge) throws TurboException { + switch (dataMerge) { + case MergeStrategy.DATA_MERGE.ALL: + return dataMergeAll; + case MergeStrategy.DATA_MERGE.NONE: + return 
dataMergeNone; + case MergeStrategy.DATA_MERGE.CUSTOM: + return dataMergeCustom; + default: + throw new TurboException(ParallelErrorEnum.UNSUPPORTED_DATA_MERGE_STRATEGY.getErrNo(), ParallelErrorEnum.UNSUPPORTED_DATA_MERGE_STRATEGY.getErrMsg()); + } + } + + public BranchMergeStrategy getBranchMergeStrategy(String branchMerge) throws TurboException { + switch (branchMerge) { + case MergeStrategy.BRANCH_MERGE.JOIN_ALL: + return branchMergeJoinAll; + case MergeStrategy.BRANCH_MERGE.ANY_ONE: + return branchMergeAnyOne; + case MergeStrategy.BRANCH_MERGE.CUSTOM: + return branchMergeCustom; + default: + throw new TurboException(ParallelErrorEnum.UNSUPPORTED_BRANCH_MERGE_STRATEGY.getErrNo(), ParallelErrorEnum.UNSUPPORTED_BRANCH_MERGE_STRATEGY.getErrMsg()); + } + } +} \ No newline at end of file diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/ParallelGatewayExecutor.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/ParallelGatewayExecutor.java new file mode 100644 index 00000000..7bbe588d --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/executor/ParallelGatewayExecutor.java @@ -0,0 +1,16 @@ +package com.didiglobal.turbo.plugin.executor; + +import com.didiglobal.turbo.engine.model.FlowElement; +import com.didiglobal.turbo.engine.model.InstanceData; +import org.springframework.stereotype.Service; + +import java.util.Map; + +@Service +public class ParallelGatewayExecutor extends AbstractGatewayExecutor { + + @Override + protected int calculateOutgoingSize(FlowElement currentNodeModel, Map flowElementMap, Map instanceDataMap) { + return currentNodeModel.getOutgoing().size(); + } +} \ No newline at end of file diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/model/InclusiveGateway.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/model/InclusiveGateway.java new file mode 100644 index 00000000..8002d721 --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/model/InclusiveGateway.java @@ -0,0 +1,6 @@ +package com.didiglobal.turbo.plugin.model; + +import com.didiglobal.turbo.engine.model.GatewayNode; + +public class InclusiveGateway extends GatewayNode { +} diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/model/ParallelGateway.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/model/ParallelGateway.java new file mode 100644 index 00000000..c766886b --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/model/ParallelGateway.java @@ -0,0 +1,6 @@ +package com.didiglobal.turbo.plugin.model; + +import com.didiglobal.turbo.engine.model.GatewayNode; + +public class ParallelGateway extends GatewayNode { +} diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/service/ParallelNodeInstanceService.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/service/ParallelNodeInstanceService.java new file mode 100644 index 00000000..bd5d0434 --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/service/ParallelNodeInstanceService.java @@ -0,0 +1,61 @@ +package com.didiglobal.turbo.plugin.service; + +import com.didiglobal.turbo.engine.common.FlowElementType; +import com.didiglobal.turbo.engine.common.NodeInstanceStatus; +import com.didiglobal.turbo.engine.common.NodeInstanceType; +import com.didiglobal.turbo.engine.common.RuntimeContext; +import com.didiglobal.turbo.engine.dao.NodeInstanceDAO; +import com.didiglobal.turbo.engine.dao.NodeInstanceLogDAO; +import 
com.didiglobal.turbo.engine.entity.NodeInstanceLogPO; +import com.didiglobal.turbo.engine.entity.NodeInstancePO; +import com.didiglobal.turbo.engine.exception.ProcessException; +import com.didiglobal.turbo.plugin.common.ParallelNodeInstanceStatus; +import com.didiglobal.turbo.plugin.util.ExecutorUtil; +import org.springframework.beans.BeanUtils; +import org.springframework.stereotype.Service; + +import javax.annotation.Resource; + +import java.util.Collection; +import java.util.List; + +@Service +public class ParallelNodeInstanceService { + + @Resource + private NodeInstanceDAO nodeInstanceDAO; + @Resource + private NodeInstanceLogDAO nodeInstanceLogDAO; + + public void closeParallelSuspendUserTask(RuntimeContext runtimeContext, Collection executeIds) throws ProcessException { + String flowInstanceId = runtimeContext.getFlowInstanceId(); + List poList = nodeInstanceDAO.selectByFlowInstanceId(flowInstanceId); + poList.forEach(po -> { + if(po.getNodeType() == FlowElementType.USER_TASK + && ExecutorUtil.containsAny(executeIds, (String) po.get("executeId")) + && po.getStatus() == NodeInstanceStatus.ACTIVE){ + nodeInstanceDAO.updateStatus(po, ParallelNodeInstanceStatus.CLOSED); + nodeInstanceLogDAO.insert(buildNodeInstanceLogPO(po)); + } + }); + } + + public void closeAllSuspendUserTask(String flowInstanceId) throws ProcessException { + List poList = nodeInstanceDAO.selectByFlowInstanceId(flowInstanceId); + poList.forEach(po -> { + if(po.getNodeType() == FlowElementType.USER_TASK + && po.getStatus() == NodeInstanceStatus.ACTIVE){ + nodeInstanceDAO.updateStatus(po, ParallelNodeInstanceStatus.CLOSED); + nodeInstanceLogDAO.insert(buildNodeInstanceLogPO(po)); + } + }); + } + + protected NodeInstanceLogPO buildNodeInstanceLogPO(NodeInstancePO nodeInstancePO) { + NodeInstanceLogPO nodeInstanceLogPO = new NodeInstanceLogPO(); + BeanUtils.copyProperties(nodeInstancePO, nodeInstanceLogPO); + nodeInstanceLogPO.setId(null); + nodeInstanceLogPO.setType(NodeInstanceType.EXECUTE); + return nodeInstanceLogPO; + } +} diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/util/ExecutorUtil.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/util/ExecutorUtil.java new file mode 100644 index 00000000..5eed2922 --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/util/ExecutorUtil.java @@ -0,0 +1,182 @@ +package com.didiglobal.turbo.plugin.util; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.didiglobal.turbo.plugin.common.Constants; +import com.didiglobal.turbo.plugin.common.ParallelErrorEnum; +import com.didiglobal.turbo.engine.exception.ProcessException; +import com.didiglobal.turbo.engine.model.FlowElement; +import com.google.common.base.Joiner; +import com.google.common.collect.Sets; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.tuple.Pair; + +import java.util.Collection; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +public class ExecutorUtil { + + private static final String COMMA = ","; + private static final String VERTICAL_LINE = "|"; + + /** + * Generate a new execution id list + * If there is parallel nesting, the execution ID in the current context needs to be passed in. + * eg. 
+ * current execute id : execute_A + * new execute id list: [execute_B, execute_C] + * result: execute_A|execute_B,execute_C + * + * @param parentExecuteId The parallel execution id of the previous level + * @param collection New parallel execution ids + * @return combined execute id, eg: a|b,c + */ + public static String genExecuteIds(String parentExecuteId, Collection collection) { + if (StringUtils.isBlank(parentExecuteId)) { + return Joiner.on(COMMA).join(collection); + } + if (parentExecuteId.endsWith(VERTICAL_LINE)) { + return parentExecuteId + Joiner.on(COMMA).join(collection); + } else { + return parentExecuteId + VERTICAL_LINE + Joiner.on(COMMA).join(collection); + } + } + + /** + * Generate a new execution id + * + * @param parentExecuteId The parallel execution id of the previous level + * @param executeId current execute id + * @return combined execute id, eg: a|b + */ + public static String genExecuteIdWithParent(String parentExecuteId, String executeId) { + if (StringUtils.isBlank(parentExecuteId)) { + return executeId; + } + if (parentExecuteId.endsWith(VERTICAL_LINE)) { + return parentExecuteId + executeId; + } else { + return parentExecuteId + VERTICAL_LINE + executeId; + } + } + + /** + * Get Execution ID List + * Example: + * Execution ID may be "a|b|c,d" + * The result is [c,d] + * + * @param executeIds execute id list str + * @return set + */ + public static Set getExecuteIdSet(String executeIds) { + if (StringUtils.isBlank(executeIds)) { + return Sets.newHashSet(); + } + if (StringUtils.containsNone(executeIds, VERTICAL_LINE)) { + return splitStrToSet(executeIds); + } + return splitStrToSet(executeIds.substring(executeIds.lastIndexOf(VERTICAL_LINE) + 1)); + } + + /** + * Get parent execute id + * + * @param executeIds execute id string, eg: a|b|c,d + * @return parent execute id, eg: a|b + */ + public static String getParentExecuteId(String executeIds) { + if (StringUtils.isBlank(executeIds) || StringUtils.containsNone(executeIds, VERTICAL_LINE)) { + return StringUtils.EMPTY; + } + return executeIds.substring(0, executeIds.lastIndexOf(VERTICAL_LINE)); + } + + /** + * Get current execute id + * + * @param executeId execute id string, eg: a|b|c + * @return current execute id, eg: c + */ + public static String getCurrentExecuteId(String executeId) { + if (StringUtils.isBlank(executeId)) { + return StringUtils.EMPTY; + } + if (StringUtils.containsNone(executeId, VERTICAL_LINE)) { + return executeId; + } + return executeId.substring(executeId.lastIndexOf(VERTICAL_LINE) + 1); + } + + /** + * left is fork + * right is join + * + * @param nodeModel gateway node whose properties carry the fork/join match config + * @return pair of fork node key (left) and join node key (right) + */ + public static Pair getForkAndJoinNodeKey(FlowElement nodeModel) throws ProcessException { + Map properties = nodeModel.getProperties(); + Object forkAndJoin = properties.get(Constants.ELEMENT_PROPERTIES.FORK_JOIN_MATCH); + if (forkAndJoin == null) { + throw new ProcessException(ParallelErrorEnum.UNSUPPORTED_DATA_MERGE_STRATEGY.getErrNo(), ParallelErrorEnum.UNSUPPORTED_DATA_MERGE_STRATEGY.getErrMsg()); + } + JSONObject forkJoinMatch = JSON.parseObject(forkAndJoin.toString()); + String fork = forkJoinMatch.getString(Constants.ELEMENT_PROPERTIES.FORK); + String join = forkJoinMatch.getString(Constants.ELEMENT_PROPERTIES.JOIN); + + return Pair.of(fork, join); + } + + /** + * Checks if any value from the delimited string exists in the executeIds collection. 
+ * + * @param executeIds a collection of strings representing the execute IDs + * @param targetStr a string containing values separated by '|' delimiter + * @return true if any value from the delimited string exists in the executeIds collection, false otherwise + */ + public static boolean containsAny(Collection executeIds, String targetStr) { + if (executeIds == null || executeIds.isEmpty() || targetStr == null || targetStr.isEmpty()) { + return false; + } + Set executeIdSet = new HashSet<>(executeIds); + String[] values = targetStr.split("\\|"); + for (String value : values) { + if (executeIdSet.contains(value)) { + return true; + } + } + return false; + } + + public static boolean isFork(String nodeKey, Pair pair) { + return StringUtils.equalsIgnoreCase(nodeKey, pair.getLeft()); + } + + public static boolean isJoin(String nodeKey, Pair pair) { + return StringUtils.equalsIgnoreCase(nodeKey, pair.getRight()); + } + + public static String append(String sourceNodeInstanceId, String appendStr) { + if (StringUtils.isNotBlank(sourceNodeInstanceId)) { + return sourceNodeInstanceId + COMMA + appendStr; + } + return appendStr; + } + + public static Set splitStrToSet(String executeIds) { + if (StringUtils.isBlank(executeIds)) { + return Sets.newHashSet(); + } + String[] split = StringUtils.split(executeIds, COMMA); + return Sets.newHashSet(split); + } + + public static boolean allArrived(Set executeIds, Set arrivedExecuteIds) { + return executeIds != null && arrivedExecuteIds != null + && executeIds.size() == arrivedExecuteIds.size() + && executeIds.containsAll(arrivedExecuteIds); + } +} diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/validator/InclusiveGatewayValidator.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/validator/InclusiveGatewayValidator.java new file mode 100644 index 00000000..f9c47aed --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/validator/InclusiveGatewayValidator.java @@ -0,0 +1,8 @@ +package com.didiglobal.turbo.plugin.validator; + +import com.didiglobal.turbo.engine.validator.ElementValidator; +import org.springframework.stereotype.Component; + +@Component +public class InclusiveGatewayValidator extends ElementValidator { +} diff --git a/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/validator/ParallelGatewayValidator.java b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/validator/ParallelGatewayValidator.java new file mode 100644 index 00000000..0708bb1f --- /dev/null +++ b/parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/validator/ParallelGatewayValidator.java @@ -0,0 +1,8 @@ +package com.didiglobal.turbo.plugin.validator; + +import com.didiglobal.turbo.engine.validator.ElementValidator; +import org.springframework.stereotype.Component; + +@Component +public class ParallelGatewayValidator extends ElementValidator { +} \ No newline at end of file diff --git a/parallel-plugin/src/main/resources/META-INF/services/com.didiglobal.turbo.engine.plugin.ElementPlugin b/parallel-plugin/src/main/resources/META-INF/services/com.didiglobal.turbo.engine.plugin.ElementPlugin new file mode 100644 index 00000000..f7c191dd --- /dev/null +++ b/parallel-plugin/src/main/resources/META-INF/services/com.didiglobal.turbo.engine.plugin.ElementPlugin @@ -0,0 +1,2 @@ +com.didiglobal.turbo.plugin.ParallelGatewayElementPlugin +com.didiglobal.turbo.plugin.InclusiveGatewayElementPlugin \ No newline at end of file diff --git a/parallel-plugin/src/main/resources/plugin.properties 
b/parallel-plugin/src/main/resources/plugin.properties new file mode 100644 index 00000000..29c1fb7b --- /dev/null +++ b/parallel-plugin/src/main/resources/plugin.properties @@ -0,0 +1,3 @@ +# init sql file path +turbo.plugin.init_sql.ParallelGatewayElementPlugin=sql/parallelGateway.sql +turbo.plugin.init_sql.InclusiveGatewayElementPlugin=sql/parallelGateway.sql \ No newline at end of file diff --git a/parallel-plugin/src/main/resources/sql/parallelGateway.sql b/parallel-plugin/src/main/resources/sql/parallelGateway.sql new file mode 100644 index 00000000..719d1251 --- /dev/null +++ b/parallel-plugin/src/main/resources/sql/parallelGateway.sql @@ -0,0 +1,11 @@ +CREATE TABLE IF NOT EXISTS `ei_node_instance_log_parallel` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT COMMENT 'auto-increment primary key', + `execute_id` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci DEFAULT NULL COMMENT 'execute id', + PRIMARY KEY (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci COMMENT='node instance log parallel table'; + +CREATE TABLE IF NOT EXISTS `ei_node_instance_parallel` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT COMMENT 'auto-increment primary key', + `execute_id` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci DEFAULT NULL COMMENT 'execute id', + PRIMARY KEY (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=15 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci COMMENT='node instance parallel table'; \ No newline at end of file diff --git a/parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/dao/ParallelNodeInstanceHandlerTest.java b/parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/dao/ParallelNodeInstanceHandlerTest.java new file mode 100644 index 00000000..7626e603 --- /dev/null +++ b/parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/dao/ParallelNodeInstanceHandlerTest.java @@ -0,0 +1,82 @@ +package com.didiglobal.turbo.plugin.dao; + +import com.didiglobal.turbo.engine.dao.NodeInstanceDAO; +import com.didiglobal.turbo.engine.entity.NodeInstancePO; +import org.junit.Assert; +import org.junit.Test; +import com.didiglobal.turbo.plugin.runner.BaseTest; +import com.didiglobal.turbo.plugin.util.EntityBuilder; + +import javax.annotation.Resource; + +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; + + +public class ParallelNodeInstanceHandlerTest extends BaseTest { + + @Resource + NodeInstanceDAO nodeInstanceDAO; + + @Test + public void insert(){ + NodeInstancePO nodeInstancePO = EntityBuilder.buildDynamicParallelNodeInstancePO(); + int result = nodeInstanceDAO.insert(nodeInstancePO); + Assert.assertEquals(1, result); + } + + @Test + public void insertOrUpdateList() { + NodeInstancePO nodeInstancePO = EntityBuilder.buildDynamicParallelNodeInstancePO(); + nodeInstanceDAO.insert(nodeInstancePO); + nodeInstancePO.setInstanceDataId(nodeInstancePO.getInstanceDataId() + "_updated"); + List list = new ArrayList<>(); + list.add(nodeInstancePO); + list.add(EntityBuilder.buildDynamicParallelNodeInstancePO()); + list.add(EntityBuilder.buildDynamicParallelNodeInstancePO()); + boolean result = nodeInstanceDAO.insertOrUpdateList(list); + Assert.assertTrue(result); + } + + @Test + public void selectByFlowInstanceIdAndNodeKey(){ + NodeInstancePO nodeInstancePO = EntityBuilder.buildDynamicParallelNodeInstancePO(); + nodeInstancePO.setFlowInstanceId(nodeInstancePO.getFlowInstanceId() + UUID.randomUUID()); + nodeInstanceDAO.insert(nodeInstancePO); + List result = nodeInstanceDAO.selectByFlowInstanceIdAndNodeKey(nodeInstancePO.getFlowInstanceId(), nodeInstancePO.getNodeKey()); + 
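// How the plugin-specific column travels through these DAO tests: NodeInstancePO
// itself has no executeId field; EntityBuilder (below) stashes it via the PO's
// generic extended properties with put("executeId", ...), and it lands in the
// execute_id column of the plugin tables created by sql/parallelGateway.sql
// above, coming back through get("executeId") on reads. This note describes
// behavior inferred from this patch, not an engine API guarantee.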
Assert.assertEquals(1, result.size()); + } + + @Test + public void selectByFlowInstanceId(){ + NodeInstancePO nodeInstancePO = EntityBuilder.buildDynamicParallelNodeInstancePO(); + nodeInstancePO.setFlowInstanceId(nodeInstancePO.getFlowInstanceId() + UUID.randomUUID()); + nodeInstanceDAO.insert(nodeInstancePO); + NodeInstancePO nodeInstancePO1 = EntityBuilder.buildDynamicParallelNodeInstancePO(); + nodeInstancePO1.setFlowInstanceId(nodeInstancePO.getFlowInstanceId()); + nodeInstanceDAO.insert(nodeInstancePO1); + NodeInstancePO nodeInstancePO2 = EntityBuilder.buildDynamicParallelNodeInstancePO(); + nodeInstancePO2.setFlowInstanceId(nodeInstancePO.getFlowInstanceId()); + nodeInstanceDAO.insert(nodeInstancePO2); + List result = nodeInstanceDAO.selectByFlowInstanceId(nodeInstancePO.getFlowInstanceId()); + Assert.assertEquals(3, result.size()); + } + + @Test + public void updateParallelNodeInstancePo(){ + NodeInstancePO parallelNodeInstancePO = EntityBuilder.buildParallelNodeInstancePO(); + nodeInstanceDAO.insert(parallelNodeInstancePO); + parallelNodeInstancePO.put("executeId", "12345678|1234567"); + parallelNodeInstancePO.setStatus(1); + nodeInstanceDAO.updateStatus(parallelNodeInstancePO, 1); + } + + @Test + public void selectOne() { + NodeInstancePO nodeInstancePO = EntityBuilder.buildParallelNodeInstancePO(); + nodeInstanceDAO.insert(nodeInstancePO); + NodeInstancePO nodeInstancePO1 = nodeInstanceDAO.selectByNodeInstanceId(null, nodeInstancePO.getNodeInstanceId()); + Assert.assertNotNull(nodeInstancePO1.getId()); + } +} diff --git a/parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/dao/ParallelNodeInstanceLogHandlerTest.java b/parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/dao/ParallelNodeInstanceLogHandlerTest.java new file mode 100644 index 00000000..df85ecb0 --- /dev/null +++ b/parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/dao/ParallelNodeInstanceLogHandlerTest.java @@ -0,0 +1,22 @@ +package com.didiglobal.turbo.plugin.dao; + +import com.didiglobal.turbo.engine.dao.NodeInstanceLogDAO; +import com.didiglobal.turbo.engine.entity.NodeInstanceLogPO; +import com.didiglobal.turbo.plugin.runner.BaseTest; +import com.didiglobal.turbo.plugin.util.EntityBuilder; +import org.junit.Assert; +import org.junit.Test; + +import javax.annotation.Resource; + +public class ParallelNodeInstanceLogHandlerTest extends BaseTest { + + @Resource + private NodeInstanceLogDAO nodeInstanceLogDAO; + @Test + public void insert() { + NodeInstanceLogPO nodeInstanceLogPO = EntityBuilder.buildParallelNodeInstanceLogPO(); + int insert = nodeInstanceLogDAO.insert(nodeInstanceLogPO); + Assert.assertEquals(1, insert); + } +} diff --git a/parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/processor/RuntimeProcessorTest.java b/parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/processor/RuntimeProcessorTest.java new file mode 100644 index 00000000..339dbe51 --- /dev/null +++ b/parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/processor/RuntimeProcessorTest.java @@ -0,0 +1,618 @@ +package com.didiglobal.turbo.plugin.processor; + +import com.didiglobal.turbo.engine.common.ErrorEnum; +import com.didiglobal.turbo.engine.dao.mapper.FlowDeploymentMapper; +import com.didiglobal.turbo.engine.entity.FlowDeploymentPO; +import com.didiglobal.turbo.engine.model.InstanceData; +import com.didiglobal.turbo.engine.param.CommitTaskParam; +import com.didiglobal.turbo.engine.param.RollbackTaskParam; +import com.didiglobal.turbo.engine.param.StartProcessParam; +import 
com.didiglobal.turbo.engine.processor.RuntimeProcessor; +import com.didiglobal.turbo.engine.result.CommitTaskResult; +import com.didiglobal.turbo.engine.result.RollbackTaskResult; +import com.didiglobal.turbo.engine.result.StartProcessResult; +import com.didiglobal.turbo.engine.result.TerminateResult; +import com.didiglobal.turbo.plugin.common.ParallelErrorEnum; +import com.didiglobal.turbo.plugin.common.ParallelRuntimeContext; +import com.didiglobal.turbo.plugin.runner.BaseTest; +import com.didiglobal.turbo.plugin.util.EntityBuilder; +import org.apache.commons.lang3.StringUtils; +import org.junit.Assert; +import org.junit.Test; + +import javax.annotation.Resource; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class RuntimeProcessorTest extends BaseTest { + + @Resource + private RuntimeProcessor runtimeProcessor; + + @Resource + private FlowDeploymentMapper flowDeploymentMapper; + + private StartProcessResult startParallelProcess(String flag) throws Exception { + // prepare + FlowDeploymentPO flowDeploymentPO = EntityBuilder.buildParallelFlowDeploymentPO(); + flowDeploymentPO.setFlowModuleId(flowDeploymentPO.getFlowModuleId() + "_" + flag); + flowDeploymentPO.setFlowDeployId(flowDeploymentPO.getFlowDeployId() + "_" + flag); + FlowDeploymentPO _flowDeploymentPO = flowDeploymentMapper.selectByDeployId(flowDeploymentPO.getFlowDeployId()); + if (_flowDeploymentPO != null) { + if (!StringUtils.equals(_flowDeploymentPO.getFlowModel(), flowDeploymentPO.getFlowModel())) { + flowDeploymentMapper.deleteById(_flowDeploymentPO.getId()); + flowDeploymentMapper.insert(flowDeploymentPO); + } + } else { + flowDeploymentMapper.insert(flowDeploymentPO); + } + + // start process + StartProcessParam startProcessParam = new StartProcessParam(); + startProcessParam.setFlowDeployId(flowDeploymentPO.getFlowDeployId()); + List variables = new ArrayList<>(); + variables.add(new InstanceData("orderId", "123")); + variables.add(new InstanceData("orderStatus", "1")); + variables.add(new InstanceData("a", 11)); + startProcessParam.setVariables(variables); + // build + return runtimeProcessor.startProcess(startProcessParam); + } + + private StartProcessResult startInclusiveProcess(String flag) throws Exception { + // prepare + FlowDeploymentPO flowDeploymentPO = EntityBuilder.buildInclusiveFlowDeploymentPO(); + flowDeploymentPO.setFlowModuleId(flowDeploymentPO.getFlowModuleId() + "_" + flag); + flowDeploymentPO.setFlowDeployId(flowDeploymentPO.getFlowDeployId() + "_" + flag); + FlowDeploymentPO _flowDeploymentPO = flowDeploymentMapper.selectByDeployId(flowDeploymentPO.getFlowDeployId()); + if (_flowDeploymentPO != null) { + if (!StringUtils.equals(_flowDeploymentPO.getFlowModel(), flowDeploymentPO.getFlowModel())) { + flowDeploymentMapper.deleteById(_flowDeploymentPO.getId()); + flowDeploymentMapper.insert(flowDeploymentPO); + } + } else { + flowDeploymentMapper.insert(flowDeploymentPO); + } + + // start process + StartProcessParam startProcessParam = new StartProcessParam(); + startProcessParam.setFlowDeployId(flowDeploymentPO.getFlowDeployId()); + List variables = new ArrayList<>(); + variables.add(new InstanceData("orderId", "123")); + variables.add(new InstanceData("orderStatus", "1")); + variables.add(new InstanceData("a", 11)); + startProcessParam.setVariables(variables); + // build + return runtimeProcessor.startProcess(startProcessParam); + } + + private StartProcessResult startParallelProcessWithMergeOne(String flag) throws Exception { + // 
prepare + FlowDeploymentPO flowDeploymentPO = EntityBuilder.buildParallelFlowDeploymentPOWithMergeOne(); + flowDeploymentPO.setFlowModuleId(flowDeploymentPO.getFlowModuleId() + "_" + flag); + flowDeploymentPO.setFlowDeployId(flowDeploymentPO.getFlowDeployId() + "_" + flag); + FlowDeploymentPO _flowDeploymentPO = flowDeploymentMapper.selectByDeployId(flowDeploymentPO.getFlowDeployId()); + if (_flowDeploymentPO != null) { + if (!StringUtils.equals(_flowDeploymentPO.getFlowModel(), flowDeploymentPO.getFlowModel())) { + flowDeploymentMapper.deleteById(_flowDeploymentPO.getId()); + flowDeploymentMapper.insert(flowDeploymentPO); + } + } else { + flowDeploymentMapper.insert(flowDeploymentPO); + } + + // start process + StartProcessParam startProcessParam = new StartProcessParam(); + startProcessParam.setFlowDeployId(flowDeploymentPO.getFlowDeployId()); + List variables = new ArrayList<>(); + variables.add(new InstanceData("orderId", "123")); + variables.add(new InstanceData("orderStatus", "1")); + variables.add(new InstanceData("a", 11)); + startProcessParam.setVariables(variables); + // build + return runtimeProcessor.startProcess(startProcessParam); + } + + @Test + public void testStartProcess() throws Exception { + StartProcessResult startProcessResult = startParallelProcess(null); + Assert.assertEquals(startProcessResult.getErrCode(), ErrorEnum.COMMIT_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(startProcessResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getModelKey(), "UserTask_0iv55sh")); + Assert.assertTrue(StringUtils.equals(startProcessResult.getNodeExecuteResults().get(1).getActiveTaskInstance().getModelKey(), "UserTask_0m7qih6")); + } + + /** + * --> UserTask_1ram9jm --> UserTask_32ed01b + * | | + * StartEvent_2s70149 --> ParallelGateway_38ad233 --> UserTask_0iv55sh --> ParallelGateway_1djgrgp --> UserTask_2npcbgp --> UserTask_01tuns9 --> ParallelGateway_3a1nn9f --> ParallelGateway_10lo44j --> EndEvent_2c8j53d + * | | + * | | + * -------------------------------------------------> UserTask_0m7qih6 -------------------------------------------------- + */ + @Test + public void testParallelGateway() throws Exception { + // Start -> UserTask_0iv55sh & UserTask_0m7qih6 + StartProcessResult startProcessResult = startParallelProcess("normal"); + Assert.assertEquals(startProcessResult.getErrCode(), ErrorEnum.COMMIT_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(startProcessResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getModelKey(), "UserTask_0iv55sh")); + Assert.assertTrue(StringUtils.equals(startProcessResult.getNodeExecuteResults().get(1).getActiveTaskInstance().getModelKey(), "UserTask_0m7qih6")); + + // UserTask_0iv55sh -> UserTask_2npcbgp & UserTask_1ram9jm + CommitTaskParam commitTaskParam = new CommitTaskParam(); + commitTaskParam.setFlowInstanceId(startProcessResult.getFlowInstanceId()); + commitTaskParam.setTaskInstanceId(startProcessResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam.setExtendProperties(copyExtendProperties(startProcessResult.getExtendProperties(), 0)); + List variables = new ArrayList<>(); + variables.add(new InstanceData("danxuankuang_ytgyk", 0)); + commitTaskParam.setVariables(variables); + CommitTaskResult commitTaskResult = runtimeProcessor.commit(commitTaskParam); + Assert.assertEquals(commitTaskResult.getErrCode(), ErrorEnum.COMMIT_SUSPEND.getErrNo()); + 
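// A minimal standalone sketch (not from this patch; compile it separately
// against this module, class name and literal values are hypothetical) of the
// execute-id encoding behind the parallel branches asserted in this test,
// using only the ExecutorUtil behavior introduced earlier in this patch:
import com.didiglobal.turbo.plugin.util.ExecutorUtil;
import com.google.common.collect.Lists;

public class ExecuteIdEncodingSketch {
    public static void main(String[] args) {
        // forking under an existing execution nests ids as parent|child1,child2
        String ids = ExecutorUtil.genExecuteIds("execute_A", Lists.newArrayList("execute_B", "execute_C"));
        System.out.println(ids);                                       // execute_A|execute_B,execute_C
        System.out.println(ExecutorUtil.getExecuteIdSet(ids));         // [execute_B, execute_C]
        System.out.println(ExecutorUtil.getParentExecuteId(ids));      // execute_A
        System.out.println(ExecutorUtil.getCurrentExecuteId("a|b|c")); // c
    }
}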
Assert.assertTrue(StringUtils.equals(commitTaskResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getModelKey(), "UserTask_2npcbgp")); + Assert.assertTrue(StringUtils.equals(commitTaskResult.getNodeExecuteResults().get(1).getActiveTaskInstance().getModelKey(), "UserTask_1ram9jm")); + + // UserTask_2npcbgp -> UserTask_01tuns9 + CommitTaskParam commitTaskParam1 = new CommitTaskParam(); + commitTaskParam1.setFlowInstanceId(commitTaskResult.getFlowInstanceId()); + commitTaskParam1.setTaskInstanceId(commitTaskResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam1.setExtendProperties(copyExtendProperties(commitTaskResult.getExtendProperties(), 0)); + List variables1 = new ArrayList<>(); + variables1.add(new InstanceData("danxuankuang_ytgyk", 1)); + commitTaskParam1.setVariables(variables1); + CommitTaskResult commitTaskResult1 = runtimeProcessor.commit(commitTaskParam1); + Assert.assertEquals(commitTaskResult1.getErrCode(), ErrorEnum.COMMIT_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(commitTaskResult1.getActiveTaskInstance().getModelKey(), "UserTask_01tuns9")); + + // UserTask_01tuns9 -> ParallelGateway_3a1nn9f + CommitTaskParam commitTaskParam2 = new CommitTaskParam(); + commitTaskParam2.setFlowInstanceId(commitTaskResult1.getFlowInstanceId()); + commitTaskParam2.setTaskInstanceId(commitTaskResult1.getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam2.setExtendProperties(copyExtendProperties(commitTaskResult1.getExtendProperties(), 0)); + List variables2 = new ArrayList<>(); + variables2.add(new InstanceData("danxuankuang_ytgyk", 2)); + commitTaskParam2.setVariables(variables2); + CommitTaskResult commitTaskResult2 = runtimeProcessor.commit(commitTaskParam2); + Assert.assertEquals(commitTaskResult2.getErrCode(), ParallelErrorEnum.WAITING_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(commitTaskResult2.getActiveTaskInstance().getModelKey(), "ParallelGateway_3a1nn9f")); + + // UserTask_1ram9jm -> UserTask_32ed01b + CommitTaskParam commitTaskParam3 = new CommitTaskParam(); + commitTaskParam3.setFlowInstanceId(commitTaskResult.getFlowInstanceId()); + commitTaskParam3.setTaskInstanceId(commitTaskResult.getNodeExecuteResults().get(1).getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam3.setExtendProperties(copyExtendProperties(commitTaskResult.getExtendProperties(), 1)); + List variables3 = new ArrayList<>(); + variables3.add(new InstanceData("danxuankuang_ytgyk", 3)); + commitTaskParam3.setVariables(variables3); + CommitTaskResult commitTaskResult3 = runtimeProcessor.commit(commitTaskParam3); + Assert.assertEquals(commitTaskResult3.getErrCode(), ErrorEnum.COMMIT_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(commitTaskResult3.getActiveTaskInstance().getModelKey(), "UserTask_32ed01b")); + + // UserTask_32ed01b -> ParallelGateway_10lo44j + CommitTaskParam commitTaskParam4 = new CommitTaskParam(); + commitTaskParam4.setFlowInstanceId(commitTaskResult3.getFlowInstanceId()); + commitTaskParam4.setTaskInstanceId(commitTaskResult3.getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam4.setExtendProperties(copyExtendProperties(commitTaskResult3.getExtendProperties(), 0)); + List variables4 = new ArrayList<>(); + variables4.add(new InstanceData("danxuankuang_ytgyk", 4)); + commitTaskParam4.setVariables(variables4); + CommitTaskResult commitTaskResult4 = runtimeProcessor.commit(commitTaskParam4); + Assert.assertEquals(commitTaskResult4.getErrCode(), 
ParallelErrorEnum.WAITING_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(commitTaskResult4.getActiveTaskInstance().getModelKey(), "ParallelGateway_10lo44j")); + + // UserTask_0m7qih6 -> End + CommitTaskParam commitTaskParam5 = new CommitTaskParam(); + commitTaskParam5.setFlowInstanceId(startProcessResult.getFlowInstanceId()); + commitTaskParam5.setTaskInstanceId(startProcessResult.getNodeExecuteResults().get(1).getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam5.setExtendProperties(copyExtendProperties(startProcessResult.getExtendProperties(), 1)); + List variables5 = new ArrayList<>(); + variables5.add(new InstanceData("danxuankuang_ytgyk", 5)); + commitTaskParam5.setVariables(variables5); + CommitTaskResult commitTaskResult5 = runtimeProcessor.commit(commitTaskParam5); + Assert.assertEquals(commitTaskResult5.getErrCode(), ErrorEnum.SUCCESS.getErrNo()); + } + + /** + * --> UserTask_1ram9jm --> UserTask_32ed01b + * | | + * StartEvent_2s70149 --> ParallelGateway_38ad233 --> UserTask_0iv55sh --> ParallelGateway_1djgrgp --> UserTask_2npcbgp --> UserTask_01tuns9 --> ParallelGateway_3a1nn9f --> ParallelGateway_10lo44j --> EndEvent_2c8j53d + * | | + * | | + * -------------------------------------------------> UserTask_0m7qih6 -------------------------------------------------- + */ + @Test + public void testParallelGatewayRollback() throws Exception { + // Start -> UserTask_0iv55sh & UserTask_0m7qih6 + StartProcessResult startProcessResult = startParallelProcess("rollback"); + Assert.assertEquals(startProcessResult.getErrCode(), ErrorEnum.COMMIT_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(startProcessResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getModelKey(), "UserTask_0iv55sh")); + Assert.assertTrue(StringUtils.equals(startProcessResult.getNodeExecuteResults().get(1).getActiveTaskInstance().getModelKey(), "UserTask_0m7qih6")); + + // UserTask_0iv55sh -> UserTask_2npcbgp & UserTask_1ram9jm + CommitTaskParam commitTaskParam = new CommitTaskParam(); + commitTaskParam.setFlowInstanceId(startProcessResult.getFlowInstanceId()); + commitTaskParam.setTaskInstanceId(startProcessResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam.setExtendProperties(copyExtendProperties(startProcessResult.getExtendProperties(), 0)); + List variables = new ArrayList<>(); + variables.add(new InstanceData("danxuankuang_ytgyk", 0)); + commitTaskParam.setVariables(variables); + CommitTaskResult commitTaskResult = runtimeProcessor.commit(commitTaskParam); + Assert.assertEquals(commitTaskResult.getErrCode(), ErrorEnum.COMMIT_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(commitTaskResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getModelKey(), "UserTask_2npcbgp")); + Assert.assertTrue(StringUtils.equals(commitTaskResult.getNodeExecuteResults().get(1).getActiveTaskInstance().getModelKey(), "UserTask_1ram9jm")); + + // UserTask_2npcbgp -> UserTask_01tuns9 + CommitTaskParam commitTaskParam1 = new CommitTaskParam(); + commitTaskParam1.setFlowInstanceId(commitTaskResult.getFlowInstanceId()); + commitTaskParam1.setTaskInstanceId(commitTaskResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam1.setExtendProperties(copyExtendProperties(commitTaskResult.getExtendProperties(), 0)); + List variables1 = new ArrayList<>(); + variables1.add(new InstanceData("danxuankuang_ytgyk", 1)); + commitTaskParam1.setVariables(variables1); + CommitTaskResult commitTaskResult1 
= runtimeProcessor.commit(commitTaskParam1); + Assert.assertEquals(commitTaskResult1.getErrCode(), ErrorEnum.COMMIT_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(commitTaskResult1.getActiveTaskInstance().getModelKey(), "UserTask_01tuns9")); + + // UserTask_01tuns9 -> UserTask_2npcbgp + RollbackTaskParam rollbackTaskParam = new RollbackTaskParam(); + rollbackTaskParam.setFlowInstanceId(commitTaskResult1.getFlowInstanceId()); + rollbackTaskParam.setTaskInstanceId(commitTaskResult1.getActiveTaskInstance().getNodeInstanceId()); + rollbackTaskParam.setExtendProperties(copyExtendProperties(commitTaskResult1.getExtendProperties(), 0)); + RollbackTaskResult rollbackTaskResult = runtimeProcessor.rollback(rollbackTaskParam); + Assert.assertEquals(rollbackTaskResult.getErrCode(), ErrorEnum.ROLLBACK_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(rollbackTaskResult.getActiveTaskInstance().getModelKey(), "UserTask_2npcbgp")); + + // UserTask_1ram9jm -> ParallelGateway_1djgrgp (Exception) + RollbackTaskParam rollbackTaskParam1 = new RollbackTaskParam(); + rollbackTaskParam1.setFlowInstanceId(commitTaskResult.getFlowInstanceId()); + rollbackTaskParam1.setTaskInstanceId(commitTaskResult.getNodeExecuteResults().get(1).getActiveTaskInstance().getNodeInstanceId()); + rollbackTaskParam1.setExtendProperties(copyExtendProperties(commitTaskResult.getExtendProperties(), 1)); + RollbackTaskResult rollbackTaskResult1 = runtimeProcessor.rollback(rollbackTaskParam1); + Assert.assertEquals(rollbackTaskResult1.getErrCode(), ParallelErrorEnum.NOT_SUPPORT_ROLLBACK.getErrNo()); + } + + /** + * --> UserTask_1ram9jm --> UserTask_32ed01b + * | | + * StartEvent_2s70149 --> ParallelGateway_38ad233 --> UserTask_0iv55sh --> ParallelGateway_1djgrgp --> UserTask_2npcbgp --> UserTask_01tuns9 --> ParallelGateway_3a1nn9f --> ParallelGateway_10lo44j --> EndEvent_2c8j53d + * | | + * | | + * -------------------------------------------------> UserTask_0m7qih6 -------------------------------------------------- + */ + @Test + public void testParallelGatewayTerminate() throws Exception { + // Start -> UserTask_0iv55sh & UserTask_0m7qih6 + StartProcessResult startProcessResult = startParallelProcess("terminate"); + Assert.assertEquals(startProcessResult.getErrCode(), ErrorEnum.COMMIT_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(startProcessResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getModelKey(), "UserTask_0iv55sh")); + Assert.assertTrue(StringUtils.equals(startProcessResult.getNodeExecuteResults().get(1).getActiveTaskInstance().getModelKey(), "UserTask_0m7qih6")); + + // UserTask_0iv55sh -> UserTask_2npcbgp & UserTask_1ram9jm + CommitTaskParam commitTaskParam = new CommitTaskParam(); + commitTaskParam.setFlowInstanceId(startProcessResult.getFlowInstanceId()); + commitTaskParam.setTaskInstanceId(startProcessResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam.setExtendProperties(copyExtendProperties(startProcessResult.getExtendProperties(), 0)); + List variables = new ArrayList<>(); + variables.add(new InstanceData("danxuankuang_ytgyk", 0)); + commitTaskParam.setVariables(variables); + CommitTaskResult commitTaskResult = runtimeProcessor.commit(commitTaskParam); + Assert.assertEquals(commitTaskResult.getErrCode(), ErrorEnum.COMMIT_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(commitTaskResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getModelKey(), "UserTask_2npcbgp")); + 
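// Why commits at the join gateways in these tests answer WAITING_SUSPEND until
// every branch has arrived: a minimal sketch (not from this patch; class and
// method names are hypothetical) of a join-all arrival check, built only from
// the ExecutorUtil helpers shown earlier:
import com.didiglobal.turbo.plugin.util.ExecutorUtil;
import java.util.Set;

class JoinAllArrivalSketch {
    static boolean mayPassJoin(String forkExecuteIds, Set arrivedExecuteIds) {
        // ids spawned by the matching fork, e.g. "parent|b,c" -> {b, c}
        Set expected = ExecutorUtil.getExecuteIdSet(forkExecuteIds);
        // the join stays suspended until both sets match exactly
        return ExecutorUtil.allArrived(expected, arrivedExecuteIds);
    }
}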
Assert.assertTrue(StringUtils.equals(commitTaskResult.getNodeExecuteResults().get(1).getActiveTaskInstance().getModelKey(), "UserTask_1ram9jm")); + + // UserTask_2npcbgp -> UserTask_01tuns9 + CommitTaskParam commitTaskParam1 = new CommitTaskParam(); + commitTaskParam1.setFlowInstanceId(commitTaskResult.getFlowInstanceId()); + commitTaskParam1.setTaskInstanceId(commitTaskResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam1.setExtendProperties(copyExtendProperties(commitTaskResult.getExtendProperties(), 0)); + List variables1 = new ArrayList<>(); + variables1.add(new InstanceData("danxuankuang_ytgyk", 1)); + commitTaskParam1.setVariables(variables1); + CommitTaskResult commitTaskResult1 = runtimeProcessor.commit(commitTaskParam1); + Assert.assertEquals(commitTaskResult1.getErrCode(), ErrorEnum.COMMIT_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(commitTaskResult1.getActiveTaskInstance().getModelKey(), "UserTask_01tuns9")); + + // UserTask_1ram9jm -> UserTask_32ed01b + CommitTaskParam commitTaskParam2 = new CommitTaskParam(); + commitTaskParam2.setFlowInstanceId(commitTaskResult.getFlowInstanceId()); + commitTaskParam2.setTaskInstanceId(commitTaskResult.getNodeExecuteResults().get(1).getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam2.setExtendProperties(copyExtendProperties(commitTaskResult.getExtendProperties(), 1)); + List variables2 = new ArrayList<>(); + variables2.add(new InstanceData("danxuankuang_ytgyk", 2)); + commitTaskParam2.setVariables(variables2); + CommitTaskResult commitTaskResult2 = runtimeProcessor.commit(commitTaskParam2); + Assert.assertEquals(commitTaskResult2.getErrCode(), ErrorEnum.COMMIT_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(commitTaskResult2.getActiveTaskInstance().getModelKey(), "UserTask_32ed01b")); + + // UserTask_32ed01b Terminate + TerminateResult terminateResult = runtimeProcessor.terminateProcess(commitTaskResult2.getFlowInstanceId(), false); + Assert.assertEquals(terminateResult.getErrCode(), ErrorEnum.SUCCESS.getErrNo()); + + // UserTask_01tuns9 Commit (Exception) + CommitTaskParam commitTaskParam3 = new CommitTaskParam(); + commitTaskParam3.setFlowInstanceId(commitTaskResult1.getFlowInstanceId()); + commitTaskParam3.setTaskInstanceId(commitTaskResult1.getActiveTaskInstance().getNodeInstanceId()); + CommitTaskResult commitTaskResult3 = runtimeProcessor.commit(commitTaskParam3); + Assert.assertTrue(commitTaskResult3.getErrCode() == ErrorEnum.COMMIT_REJECTRD.getErrNo()); + } + + /** + * --> UserTask_1ram9jm --> UserTask_32ed01b + * | | + * StartEvent_2s70149 --> ParallelGateway_38ad233 --> UserTask_0iv55sh --> ParallelGateway_1djgrgp --> UserTask_2npcbgp --> UserTask_01tuns9 --> ParallelGateway_3a1nn9f --> ParallelGateway_10lo44j --> EndEvent_2c8j53d + * | | + * | | + * -------------------------------------------------> UserTask_0m7qih6 -------------------------------------------------- + */ + @Test + public void testParallelGatewayMergeOne() throws Exception { + // Start -> UserTask_0iv55sh & UserTask_0m7qih6 + StartProcessResult startProcessResult = startParallelProcessWithMergeOne("normal"); + Assert.assertEquals(startProcessResult.getErrCode(), ErrorEnum.COMMIT_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(startProcessResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getModelKey(), "UserTask_0iv55sh")); + Assert.assertTrue(StringUtils.equals(startProcessResult.getNodeExecuteResults().get(1).getActiveTaskInstance().getModelKey(), 
"UserTask_0m7qih6")); + + // UserTask_0iv55sh -> UserTask_2npcbgp & UserTask_1ram9jm + CommitTaskParam commitTaskParam = new CommitTaskParam(); + commitTaskParam.setFlowInstanceId(startProcessResult.getFlowInstanceId()); + commitTaskParam.setTaskInstanceId(startProcessResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam.setExtendProperties(copyExtendProperties(startProcessResult.getExtendProperties(), 0)); + List variables = new ArrayList<>(); + variables.add(new InstanceData("danxuankuang_ytgyk", 0)); + commitTaskParam.setVariables(variables); + CommitTaskResult commitTaskResult = runtimeProcessor.commit(commitTaskParam); + Assert.assertEquals(commitTaskResult.getErrCode(), ErrorEnum.COMMIT_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(commitTaskResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getModelKey(), "UserTask_2npcbgp")); + Assert.assertTrue(StringUtils.equals(commitTaskResult.getNodeExecuteResults().get(1).getActiveTaskInstance().getModelKey(), "UserTask_1ram9jm")); + + // UserTask_2npcbgp -> UserTask_01tuns9 + CommitTaskParam commitTaskParam1 = new CommitTaskParam(); + commitTaskParam1.setFlowInstanceId(commitTaskResult.getFlowInstanceId()); + commitTaskParam1.setTaskInstanceId(commitTaskResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam1.setExtendProperties(copyExtendProperties(commitTaskResult.getExtendProperties(), 0)); + List variables1 = new ArrayList<>(); + variables1.add(new InstanceData("danxuankuang_ytgyk", 1)); + commitTaskParam1.setVariables(variables1); + CommitTaskResult commitTaskResult1 = runtimeProcessor.commit(commitTaskParam1); + Assert.assertEquals(commitTaskResult1.getErrCode(), ErrorEnum.COMMIT_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(commitTaskResult1.getActiveTaskInstance().getModelKey(), "UserTask_01tuns9")); + + // UserTask_01tuns9 -> ParallelGateway_10lo44j + CommitTaskParam commitTaskParam2 = new CommitTaskParam(); + commitTaskParam2.setFlowInstanceId(commitTaskResult1.getFlowInstanceId()); + commitTaskParam2.setTaskInstanceId(commitTaskResult1.getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam2.setExtendProperties(copyExtendProperties(commitTaskResult1.getExtendProperties(), 0)); + List variables2 = new ArrayList<>(); + variables2.add(new InstanceData("danxuankuang_ytgyk", 2)); + commitTaskParam2.setVariables(variables2); + CommitTaskResult commitTaskResult2 = runtimeProcessor.commit(commitTaskParam2); + Assert.assertEquals(commitTaskResult2.getErrCode(), ParallelErrorEnum.WAITING_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(commitTaskResult2.getActiveTaskInstance().getModelKey(), "ParallelGateway_10lo44j")); + + // UserTask_1ram9jm Commit (Exception) + CommitTaskParam commitTaskParam3 = new CommitTaskParam(); + commitTaskParam3.setFlowInstanceId(commitTaskResult.getFlowInstanceId()); + commitTaskParam3.setTaskInstanceId(commitTaskResult.getNodeExecuteResults().get(1).getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam3.setExtendProperties(copyExtendProperties(commitTaskResult.getExtendProperties(), 1)); + List variables3 = new ArrayList<>(); + variables3.add(new InstanceData("danxuankuang_ytgyk", 3)); + commitTaskParam3.setVariables(variables3); + CommitTaskResult commitTaskResult3 = runtimeProcessor.commit(commitTaskParam3); + Assert.assertEquals(commitTaskResult3.getErrCode(), ErrorEnum.COMMIT_FAILED.getErrNo()); + } + + /** + * -❌-> UserTask_1ram9jm --> 
UserTask_01tuns9 + * | | + * StartEvent_2s70149 --> ParallelGateway_38ad233 --> UserTask_0iv55sh --> InclusiveGateway_1djgrgp -✅-> UserTask_2npcbgp --> UserTask_32ed01b --> InclusiveGateway_3a1nn9f --> ParallelGateway_10lo44j --> EndEvent_2c8j53d + * | | + * | | + * -------------------------------------------------> UserTask_0m7qih6 ---------------------------------------------------- + */ + @Test + public void testInclusiveGateway() throws Exception { + // Start -> UserTask_0iv55sh & UserTask_0m7qih6 + StartProcessResult startProcessResult = startInclusiveProcess("normal"); + Assert.assertEquals(startProcessResult.getErrCode(), ErrorEnum.COMMIT_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(startProcessResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getModelKey(), "UserTask_0iv55sh")); + Assert.assertTrue(StringUtils.equals(startProcessResult.getNodeExecuteResults().get(1).getActiveTaskInstance().getModelKey(), "UserTask_0m7qih6")); + + // UserTask_0iv55sh -> UserTask_2npcbgp + CommitTaskParam commitTaskParam = new CommitTaskParam(); + commitTaskParam.setFlowInstanceId(startProcessResult.getFlowInstanceId()); + commitTaskParam.setTaskInstanceId(startProcessResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam.setExtendProperties(copyExtendProperties(startProcessResult.getExtendProperties(), 0)); + List variables = new ArrayList<>(); + variables.add(new InstanceData("danxuankuang_ytgyk", 0)); + commitTaskParam.setVariables(variables); + CommitTaskResult commitTaskResult = runtimeProcessor.commit(commitTaskParam); + Assert.assertEquals(commitTaskResult.getErrCode(), ErrorEnum.COMMIT_SUSPEND.getErrNo()); + Assert.assertEquals(commitTaskResult.getNodeExecuteResults().size(), 1); + Assert.assertTrue(StringUtils.equals(commitTaskResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getModelKey(), "UserTask_2npcbgp")); + + // UserTask_2npcbgp -> UserTask_01tuns9 + CommitTaskParam commitTaskParam1 = new CommitTaskParam(); + commitTaskParam1.setFlowInstanceId(commitTaskResult.getFlowInstanceId()); + commitTaskParam1.setTaskInstanceId(commitTaskResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam1.setExtendProperties(copyExtendProperties(commitTaskResult.getExtendProperties(), 0)); + List variables1 = new ArrayList<>(); + variables1.add(new InstanceData("danxuankuang_ytgyk", 1)); + commitTaskParam1.setVariables(variables1); + CommitTaskResult commitTaskResult1 = runtimeProcessor.commit(commitTaskParam1); + Assert.assertEquals(commitTaskResult1.getErrCode(), ErrorEnum.COMMIT_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(commitTaskResult1.getActiveTaskInstance().getModelKey(), "UserTask_01tuns9")); + + // UserTask_01tuns9 -> ParallelGateway_10lo44j + CommitTaskParam commitTaskParam2 = new CommitTaskParam(); + commitTaskParam2.setFlowInstanceId(commitTaskResult1.getFlowInstanceId()); + commitTaskParam2.setTaskInstanceId(commitTaskResult1.getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam2.setExtendProperties(copyExtendProperties(commitTaskResult1.getExtendProperties(), 0)); + List variables2 = new ArrayList<>(); + variables2.add(new InstanceData("danxuankuang_ytgyk", 2)); + commitTaskParam2.setVariables(variables2); + CommitTaskResult commitTaskResult2 = runtimeProcessor.commit(commitTaskParam2); + Assert.assertEquals(commitTaskResult2.getErrCode(), ParallelErrorEnum.WAITING_SUSPEND.getErrNo()); + 
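// Why the inclusive flow activates a single branch (see the size() == 1
// assertion earlier in this test) while the parallel tests activate both: a
// condensed sketch (not from this patch) contrasting the two executors'
// outgoing selection shown earlier; the Predicate stand-in for condition
// evaluation is hypothetical.
import java.util.List;
import java.util.function.Predicate;

class GatewayFanOutSketch {
    // ParallelGatewayExecutor: every outgoing sequence flow is taken
    static int parallelOutgoingSize(List<?> outgoings) {
        return outgoings.size();
    }

    // InclusiveGatewayExecutor: only outgoings whose condition is present
    // and evaluates to true against the instance data are taken
    static long inclusiveOutgoingSize(List<String> conditions, Predicate<String> evalTrue) {
        return conditions.stream()
                .filter(c -> c != null && !c.isEmpty())
                .filter(evalTrue)
                .count();
    }
}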
Assert.assertTrue(StringUtils.equals(commitTaskResult2.getActiveTaskInstance().getModelKey(), "ParallelGateway_10lo44j")); + + // UserTask_0m7qih6 -> End + CommitTaskParam commitTaskParam5 = new CommitTaskParam(); + commitTaskParam5.setFlowInstanceId(startProcessResult.getFlowInstanceId()); + commitTaskParam5.setTaskInstanceId(startProcessResult.getNodeExecuteResults().get(1).getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam5.setExtendProperties(copyExtendProperties(startProcessResult.getExtendProperties(), 1)); + List variables5 = new ArrayList<>(); + variables5.add(new InstanceData("danxuankuang_ytgyk", 5)); + commitTaskParam5.setVariables(variables5); + CommitTaskResult commitTaskResult5 = runtimeProcessor.commit(commitTaskParam5); + Assert.assertEquals(commitTaskResult5.getErrCode(), ErrorEnum.SUCCESS.getErrNo()); + } + + /** + * -❌-> UserTask_1ram9jm --> UserTask_01tuns9 + * | | + * StartEvent_2s70149 --> ParallelGateway_38ad233 --> UserTask_0iv55sh --> InclusiveGateway_1djgrgp -✅-> UserTask_2npcbgp --> UserTask_32ed01b --> InclusiveGateway_3a1nn9f --> ParallelGateway_10lo44j --> EndEvent_2c8j53d + * | | + * | | + * -------------------------------------------------> UserTask_0m7qih6 ---------------------------------------------------- + */ + @Test + public void testInclusiveGatewayRollback() throws Exception { + // Start -> UserTask_0iv55sh & UserTask_0m7qih6 + StartProcessResult startProcessResult = startInclusiveProcess("rollback"); + Assert.assertEquals(startProcessResult.getErrCode(), ErrorEnum.COMMIT_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(startProcessResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getModelKey(), "UserTask_0iv55sh")); + Assert.assertTrue(StringUtils.equals(startProcessResult.getNodeExecuteResults().get(1).getActiveTaskInstance().getModelKey(), "UserTask_0m7qih6")); + + // UserTask_0iv55sh -> UserTask_2npcbgp + CommitTaskParam commitTaskParam = new CommitTaskParam(); + commitTaskParam.setFlowInstanceId(startProcessResult.getFlowInstanceId()); + commitTaskParam.setTaskInstanceId(startProcessResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam.setExtendProperties(copyExtendProperties(startProcessResult.getExtendProperties(), 0)); + List variables = new ArrayList<>(); + variables.add(new InstanceData("danxuankuang_ytgyk", 0)); + commitTaskParam.setVariables(variables); + CommitTaskResult commitTaskResult = runtimeProcessor.commit(commitTaskParam); + Assert.assertEquals(commitTaskResult.getErrCode(), ErrorEnum.COMMIT_SUSPEND.getErrNo()); + Assert.assertEquals(commitTaskResult.getNodeExecuteResults().size(), 1); + Assert.assertTrue(StringUtils.equals(commitTaskResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getModelKey(), "UserTask_2npcbgp")); + + // UserTask_2npcbgp -> UserTask_01tuns9 + CommitTaskParam commitTaskParam1 = new CommitTaskParam(); + commitTaskParam1.setFlowInstanceId(commitTaskResult.getFlowInstanceId()); + commitTaskParam1.setTaskInstanceId(commitTaskResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam1.setExtendProperties(copyExtendProperties(commitTaskResult.getExtendProperties(), 0)); + List variables1 = new ArrayList<>(); + variables1.add(new InstanceData("danxuankuang_ytgyk", 1)); + commitTaskParam1.setVariables(variables1); + CommitTaskResult commitTaskResult1 = runtimeProcessor.commit(commitTaskParam1); + Assert.assertEquals(commitTaskResult1.getErrCode(), 
ErrorEnum.COMMIT_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(commitTaskResult1.getActiveTaskInstance().getModelKey(), "UserTask_01tuns9")); + + // UserTask_01tuns9 -> UserTask_2npcbgp + RollbackTaskParam rollbackTaskParam = new RollbackTaskParam(); + rollbackTaskParam.setFlowInstanceId(commitTaskResult1.getFlowInstanceId()); + rollbackTaskParam.setTaskInstanceId(commitTaskResult1.getActiveTaskInstance().getNodeInstanceId()); + rollbackTaskParam.setExtendProperties(copyExtendProperties(commitTaskResult1.getExtendProperties(), 0)); + RollbackTaskResult rollbackTaskResult = runtimeProcessor.rollback(rollbackTaskParam); + Assert.assertEquals(rollbackTaskResult.getErrCode(), ErrorEnum.ROLLBACK_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(rollbackTaskResult.getActiveTaskInstance().getModelKey(), "UserTask_2npcbgp")); + + // UserTask_2npcbgp -> InclusiveGateway_1djgrgp (Exception) + RollbackTaskParam rollbackTaskParam1 = new RollbackTaskParam(); + rollbackTaskParam1.setFlowInstanceId(rollbackTaskResult.getFlowInstanceId()); + rollbackTaskParam1.setTaskInstanceId(rollbackTaskResult.getActiveTaskInstance().getNodeInstanceId()); + rollbackTaskParam1.setExtendProperties(copyExtendProperties(rollbackTaskResult.getExtendProperties(), 0)); + RollbackTaskResult rollbackTaskResult1 = runtimeProcessor.rollback(rollbackTaskParam1); + Assert.assertEquals(rollbackTaskResult1.getErrCode(), ParallelErrorEnum.NOT_SUPPORT_ROLLBACK.getErrNo()); + + // UserTask_2npcbgp -> UserTask_01tuns9 + CommitTaskParam commitTaskParam2 = new CommitTaskParam(); + commitTaskParam2.setFlowInstanceId(rollbackTaskResult.getFlowInstanceId()); + commitTaskParam2.setTaskInstanceId(rollbackTaskResult.getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam2.setExtendProperties(copyExtendProperties(rollbackTaskResult.getExtendProperties(), 0)); + List variables2 = new ArrayList<>(); + variables2.add(new InstanceData("danxuankuang_ytgyk", 2)); + commitTaskParam2.setVariables(variables2); + CommitTaskResult commitTaskResult2 = runtimeProcessor.commit(commitTaskParam2); + Assert.assertEquals(commitTaskResult2.getErrCode(), ErrorEnum.COMMIT_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(commitTaskResult2.getActiveTaskInstance().getModelKey(), "UserTask_01tuns9")); + + // UserTask_01tuns9 -> ParallelGateway_10lo44j + CommitTaskParam commitTaskParam3 = new CommitTaskParam(); + commitTaskParam3.setFlowInstanceId(commitTaskResult2.getFlowInstanceId()); + commitTaskParam3.setTaskInstanceId(commitTaskResult2.getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam3.setExtendProperties(copyExtendProperties(commitTaskResult2.getExtendProperties(), 0)); + List variables3 = new ArrayList<>(); + variables3.add(new InstanceData("danxuankuang_ytgyk", 3)); + commitTaskParam3.setVariables(variables3); + CommitTaskResult commitTaskResult3 = runtimeProcessor.commit(commitTaskParam3); + Assert.assertEquals(commitTaskResult3.getErrCode(), ParallelErrorEnum.WAITING_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(commitTaskResult3.getActiveTaskInstance().getModelKey(), "ParallelGateway_10lo44j")); + + // UserTask_0m7qih6 -> End + CommitTaskParam commitTaskParam5 = new CommitTaskParam(); + commitTaskParam5.setFlowInstanceId(startProcessResult.getFlowInstanceId()); + commitTaskParam5.setTaskInstanceId(startProcessResult.getNodeExecuteResults().get(1).getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam5.setExtendProperties(copyExtendProperties(startProcessResult.getExtendProperties(), 
1)); + List variables5 = new ArrayList<>(); + variables5.add(new InstanceData("danxuankuang_ytgyk", 5)); + commitTaskParam5.setVariables(variables5); + CommitTaskResult commitTaskResult5 = runtimeProcessor.commit(commitTaskParam5); + Assert.assertEquals(commitTaskResult5.getErrCode(), ErrorEnum.SUCCESS.getErrNo()); + } + + /** + * -❌-> UserTask_1ram9jm --> UserTask_01tuns9 + * | | + * StartEvent_2s70149 --> ParallelGateway_38ad233 --> UserTask_0iv55sh --> InclusiveGateway_1djgrgp -✅-> UserTask_2npcbgp --> UserTask_32ed01b --> InclusiveGateway_3a1nn9f --> ParallelGateway_10lo44j --> EndEvent_2c8j53d + * | | + * | | + * -------------------------------------------------> UserTask_0m7qih6 ---------------------------------------------------- + */ + @Test + public void testInclusiveGatewayTerminate() throws Exception { + // Start -> UserTask_0iv55sh & UserTask_0m7qih6 + StartProcessResult startProcessResult = startInclusiveProcess("terminate"); + Assert.assertEquals(startProcessResult.getErrCode(), ErrorEnum.COMMIT_SUSPEND.getErrNo()); + Assert.assertTrue(StringUtils.equals(startProcessResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getModelKey(), "UserTask_0iv55sh")); + Assert.assertTrue(StringUtils.equals(startProcessResult.getNodeExecuteResults().get(1).getActiveTaskInstance().getModelKey(), "UserTask_0m7qih6")); + + // UserTask_0iv55sh -> UserTask_2npcbgp + CommitTaskParam commitTaskParam = new CommitTaskParam(); + commitTaskParam.setFlowInstanceId(startProcessResult.getFlowInstanceId()); + commitTaskParam.setTaskInstanceId(startProcessResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam.setExtendProperties(copyExtendProperties(startProcessResult.getExtendProperties(), 0)); + List variables = new ArrayList<>(); + variables.add(new InstanceData("danxuankuang_ytgyk", 0)); + commitTaskParam.setVariables(variables); + CommitTaskResult commitTaskResult = runtimeProcessor.commit(commitTaskParam); + Assert.assertEquals(commitTaskResult.getErrCode(), ErrorEnum.COMMIT_SUSPEND.getErrNo()); + Assert.assertEquals(commitTaskResult.getNodeExecuteResults().size(), 1); + Assert.assertTrue(StringUtils.equals(commitTaskResult.getNodeExecuteResults().get(0).getActiveTaskInstance().getModelKey(), "UserTask_2npcbgp")); + + // UserTask_2npcbgp Terminate + TerminateResult terminateResult = runtimeProcessor.terminateProcess(commitTaskResult.getFlowInstanceId(), false); + Assert.assertEquals(terminateResult.getErrCode(), ErrorEnum.SUCCESS.getErrNo()); + + // UserTask_0m7qih6 Commit (Exception) + CommitTaskParam commitTaskParam5 = new CommitTaskParam(); + commitTaskParam5.setFlowInstanceId(startProcessResult.getFlowInstanceId()); + commitTaskParam5.setTaskInstanceId(startProcessResult.getNodeExecuteResults().get(1).getActiveTaskInstance().getNodeInstanceId()); + commitTaskParam5.setExtendProperties(copyExtendProperties(startProcessResult.getExtendProperties(), 1)); + List variables5 = new ArrayList<>(); + variables5.add(new InstanceData("danxuankuang_ytgyk", 5)); + commitTaskParam5.setVariables(variables5); + CommitTaskResult commitTaskResult5 = runtimeProcessor.commit(commitTaskParam5); + Assert.assertEquals(commitTaskResult5.getErrCode(), ErrorEnum.COMMIT_REJECTRD.getErrNo()); + } + + private Map copyExtendProperties(Map extendProperties, int i) { + Map copyExtendProperties = new HashMap<>(); + List parallelRuntimeContextList = (List) extendProperties.get("parallelRuntimeContextList"); + List copyParallelRuntimeContextList = new ArrayList<>(); + 
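// Shape of extendProperties, as exercised throughout these tests: the engine
// returns one ParallelRuntimeContext per active branch under the key
// "parallelRuntimeContextList"; to act on branch i the caller passes back a
// single-element list plus that branch's "executeId", which the two puts
// below reproduce.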
copyParallelRuntimeContextList.add(parallelRuntimeContextList.get(i)); + copyExtendProperties.put("parallelRuntimeContextList", copyParallelRuntimeContextList); + copyExtendProperties.put("executeId", parallelRuntimeContextList.get(i).getExecuteId()); + return copyExtendProperties; + } +} diff --git a/parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/runner/BaseTest.java b/parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/runner/BaseTest.java new file mode 100644 index 00000000..b6512493 --- /dev/null +++ b/parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/runner/BaseTest.java @@ -0,0 +1,14 @@ +package com.didiglobal.turbo.plugin.runner; + +import org.junit.runner.RunWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +@RunWith(SpringRunner.class) +@SpringBootTest(classes = TestEngineApplication.class) +public class BaseTest { + + protected static final Logger LOGGER = LoggerFactory.getLogger(BaseTest.class); +} diff --git a/parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/runner/TestEngineApplication.java b/parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/runner/TestEngineApplication.java new file mode 100644 index 00000000..ab35ec5f --- /dev/null +++ b/parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/runner/TestEngineApplication.java @@ -0,0 +1,18 @@ +package com.didiglobal.turbo.plugin.runner; + +import com.alibaba.druid.spring.boot.autoconfigure.DruidDataSourceAutoConfigure; +import org.mybatis.spring.annotation.MapperScan; +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.context.annotation.ComponentScan; + +@SpringBootApplication +@EnableAutoConfiguration(exclude = {DruidDataSourceAutoConfigure.class}) +@ComponentScan(basePackages = {"com.didiglobal.turbo.plugin", "com.didiglobal.turbo.engine"}) +@MapperScan(basePackages = {"com.didiglobal.turbo.engine.dao", "com.didiglobal.turbo.plugin.dao"}) +public class TestEngineApplication { + public static void main(String[] args) { + SpringApplication.run(TestEngineApplication.class, args); + } +} diff --git a/parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/util/EntityBuilder.java b/parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/util/EntityBuilder.java new file mode 100644 index 00000000..9a12b82a --- /dev/null +++ b/parallel-plugin/src/test/java/com/didiglobal/turbo/plugin/util/EntityBuilder.java @@ -0,0 +1,1483 @@ +package com.didiglobal.turbo.plugin.util; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.didiglobal.turbo.engine.common.FlowDeploymentStatus; +import com.didiglobal.turbo.engine.common.FlowElementType; +import com.didiglobal.turbo.engine.common.NodeInstanceStatus; +import com.didiglobal.turbo.engine.common.NodeInstanceType; +import com.didiglobal.turbo.engine.entity.FlowDeploymentPO; +import com.didiglobal.turbo.engine.entity.NodeInstanceLogPO; +import com.didiglobal.turbo.engine.entity.NodeInstancePO; +import com.didiglobal.turbo.engine.model.EndEvent; +import com.didiglobal.turbo.engine.model.FlowElement; +import com.didiglobal.turbo.engine.model.FlowModel; +import com.didiglobal.turbo.engine.model.SequenceFlow; +import com.didiglobal.turbo.engine.model.StartEvent; +import 
com.didiglobal.turbo.engine.model.UserTask; +import com.didiglobal.turbo.plugin.common.Constants; +import com.didiglobal.turbo.plugin.common.ExtendFlowElementType; +import com.didiglobal.turbo.plugin.common.MergeStrategy; +import com.didiglobal.turbo.plugin.model.InclusiveGateway; +import com.didiglobal.turbo.plugin.model.ParallelGateway; +import com.google.common.collect.Lists; + +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; + +public class EntityBuilder { + private static long suffix = System.currentTimeMillis(); + private static String flowName = "testFlowName_" + suffix; + private static String flowKey = "testFlowKey_" + suffix; + private static String flowDeployId = "testFlowDeployId_" + suffix; + private static String flowInstanceId = "testFlowInstanceId_" + suffix; + private static String nodeInstanceId = "testNodeInstanceId_" + suffix; + private static String instanceDataId = "testInstanceDataId_" + suffix; + private static String sourceNodeInstanceId = "testSourceNodeInstanceId_" + suffix; + private static String nodeKey = "testNodeKey"; + private static String sourceNodeKey = "testSourceNodeKey"; + private static String operator = "testOperator"; + private static String remark = "testRemark"; + public static NodeInstancePO buildParallelNodeInstancePO() { + NodeInstancePO nodeInstancePO = new NodeInstancePO(); + nodeInstancePO.setFlowDeployId(flowDeployId); + nodeInstancePO.setFlowInstanceId(flowInstanceId); + nodeInstancePO.setNodeInstanceId(nodeInstanceId); + nodeInstancePO.setInstanceDataId(instanceDataId); + nodeInstancePO.setNodeKey(nodeKey); + nodeInstancePO.setSourceNodeInstanceId(sourceNodeInstanceId); + nodeInstancePO.setSourceNodeKey(sourceNodeKey); + nodeInstancePO.setStatus(NodeInstanceStatus.ACTIVE); + nodeInstancePO.setCreateTime(new Date()); + nodeInstancePO.setModifyTime(new Date()); + nodeInstancePO.setCaller("caller"); + nodeInstancePO.setTenant("tenant"); + nodeInstancePO.setNodeType(ExtendFlowElementType.PARALLEL_GATEWAY); + Map nodeInstanceProperties = new HashMap<>(); + nodeInstanceProperties.put("executeId", "123"); + nodeInstancePO.setProperties(nodeInstanceProperties); + return nodeInstancePO; + } + + public static NodeInstancePO buildDynamicParallelNodeInstancePO() { + NodeInstancePO nodeInstancePO = buildParallelNodeInstancePO(); + nodeInstancePO.setNodeInstanceId("testNodeInstanceId_" + UUID.randomUUID().toString()); + nodeInstancePO.setSourceNodeInstanceId("testSourceNodeInstanceId_" + UUID.randomUUID().toString()); + nodeInstancePO.put("executeId", UUID.randomUUID().toString()); + return nodeInstancePO; + } + + public static NodeInstanceLogPO buildParallelNodeInstanceLogPO() { + NodeInstanceLogPO nodeInstanceLogPO = new NodeInstanceLogPO(); + nodeInstanceLogPO.setFlowInstanceId(flowInstanceId); + nodeInstanceLogPO.setNodeInstanceId(nodeInstanceId); + nodeInstanceLogPO.setInstanceDataId(instanceDataId); + nodeInstanceLogPO.setNodeKey(nodeKey); + nodeInstanceLogPO.setType(NodeInstanceType.EXECUTE); + nodeInstanceLogPO.setStatus(NodeInstanceStatus.ACTIVE); + nodeInstanceLogPO.setCreateTime(new Date()); + nodeInstanceLogPO.setCaller("caller"); + nodeInstanceLogPO.setTenant("tenant"); + nodeInstanceLogPO.put("executeId", "585858"); + return nodeInstanceLogPO; + } + + // For runtime unit tests [RuntimeProcessorTest] to use, don't change it + public static FlowDeploymentPO buildParallelFlowDeploymentPO() { + FlowDeploymentPO flowDeploymentPO = new 
FlowDeploymentPO(); + flowDeploymentPO.setFlowName(flowName); + flowDeploymentPO.setFlowKey(flowKey); + flowDeploymentPO.setFlowModuleId("flowModuleId_parallel"); + flowDeploymentPO.setFlowDeployId("flowDeployId_parallel"); + flowDeploymentPO.setFlowModel(JSON.toJSONString(buildParallelFlowModel())); + flowDeploymentPO.setStatus(FlowDeploymentStatus.DEPLOYED); + flowDeploymentPO.setCreateTime(new Date()); + flowDeploymentPO.setModifyTime(new Date()); + flowDeploymentPO.setOperator(operator); + flowDeploymentPO.setRemark(remark); + return flowDeploymentPO; + } + + public static FlowDeploymentPO buildInclusiveFlowDeploymentPO() { + FlowDeploymentPO flowDeploymentPO = new FlowDeploymentPO(); + flowDeploymentPO.setFlowName(flowName); + flowDeploymentPO.setFlowKey(flowKey); + flowDeploymentPO.setFlowModuleId("flowModuleId_inclusive"); + flowDeploymentPO.setFlowDeployId("flowDeployId_inclusive"); + flowDeploymentPO.setFlowModel(JSON.toJSONString(buildInclusiveFlowModel())); + flowDeploymentPO.setStatus(FlowDeploymentStatus.DEPLOYED); + flowDeploymentPO.setCreateTime(new Date()); + flowDeploymentPO.setModifyTime(new Date()); + flowDeploymentPO.setOperator(operator); + flowDeploymentPO.setRemark(remark); + return flowDeploymentPO; + } + + public static FlowDeploymentPO buildParallelFlowDeploymentPOWithMergeOne() { + FlowDeploymentPO flowDeploymentPO = new FlowDeploymentPO(); + flowDeploymentPO.setFlowName(flowName); + flowDeploymentPO.setFlowKey(flowKey); + flowDeploymentPO.setFlowModuleId("flowModuleId_mergeOne"); + flowDeploymentPO.setFlowDeployId("flowDeployId_mergeOne"); + flowDeploymentPO.setFlowModel(JSON.toJSONString(buildParallelFlowModelWithMergeOne())); + flowDeploymentPO.setStatus(FlowDeploymentStatus.DEPLOYED); + flowDeploymentPO.setCreateTime(new Date()); + flowDeploymentPO.setModifyTime(new Date()); + flowDeploymentPO.setOperator(operator); + flowDeploymentPO.setRemark(remark); + return flowDeploymentPO; + } + + /** + * --> UserTask_1ram9jm --> UserTask_32ed01b + * | | + * StartEvent_2s70149 --> ParallelGateway_38ad233 --> UserTask_0iv55sh --> ParallelGateway_1djgrgp --> UserTask_2npcbgp --> UserTask_01tuns9 --> ParallelGateway_3a1nn9f --> ParallelGateway_10lo44j --> EndEvent_2c8j53d + * | | + * | | + * -------------------------------------------------> UserTask_0m7qih6 -------------------------------------------------- + */ + public static FlowModel buildParallelFlowModel() { + List flowElementList = Lists.newArrayList(); + { + StartEvent startEvent = new StartEvent(); + startEvent.setKey("StartEvent_2s70149"); + startEvent.setType(FlowElementType.START_EVENT); + List outgoings = new ArrayList<>(); + outgoings.add("SequenceFlow_2gugjee"); + startEvent.setOutgoing(outgoings); + flowElementList.add(startEvent); + } + { + EndEvent endEvent = new EndEvent(); + endEvent.setKey("EndEvent_2c8j53d"); + endEvent.setType(FlowElementType.END_EVENT); + List incomings = new ArrayList<>(); + incomings.add("SequenceFlow_3uhg8uj"); + endEvent.setIncoming(incomings); + flowElementList.add(endEvent); + } + { + ParallelGateway parallelGateway = new ParallelGateway(); + parallelGateway.setKey("ParallelGateway_38ad233"); + parallelGateway.setType(ExtendFlowElementType.PARALLEL_GATEWAY); + + List egIncomings = new ArrayList<>(); + egIncomings.add("SequenceFlow_2gugjee"); + parallelGateway.setIncoming(egIncomings); + + List egOutgoings = new ArrayList<>(); + egOutgoings.add("SequenceFlow_12rbl6u"); + egOutgoings.add("SequenceFlow_3ih7eta"); + parallelGateway.setOutgoing(egOutgoings); + + Map properties =
new HashMap<>(); + Map forkJoinMatch = new HashMap<>(); + forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK, "ParallelGateway_38ad233"); + forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.JOIN, "ParallelGateway_10lo44j"); + properties.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK_JOIN_MATCH, JSONArray.toJSON(forkJoinMatch)); + parallelGateway.setProperties(properties); + + flowElementList.add(parallelGateway); + } + { + ParallelGateway parallelGateway = new ParallelGateway(); + parallelGateway.setKey("ParallelGateway_1djgrgp"); + parallelGateway.setType(ExtendFlowElementType.PARALLEL_GATEWAY); + + List egIncomings = new ArrayList<>(); + egIncomings.add("SequenceFlow_0h92s81"); + parallelGateway.setIncoming(egIncomings); + + List egOutgoings = new ArrayList<>(); + egOutgoings.add("SequenceFlow_36g4hqc"); + egOutgoings.add("SequenceFlow_191a52e"); + parallelGateway.setOutgoing(egOutgoings); + + Map properties = new HashMap<>(); + Map forkJoinMatch = new HashMap<>(); + forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK, "ParallelGateway_1djgrgp"); + forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.JOIN, "ParallelGateway_3a1nn9f"); + properties.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK_JOIN_MATCH, JSONArray.toJSON(forkJoinMatch)); + parallelGateway.setProperties(properties); + + flowElementList.add(parallelGateway); + } + { + ParallelGateway parallelGateway = new ParallelGateway(); + parallelGateway.setKey("ParallelGateway_3a1nn9f"); + parallelGateway.setType(ExtendFlowElementType.PARALLEL_GATEWAY); + + List egIncomings = new ArrayList<>(); + egIncomings.add("SequenceFlow_1h65e8t"); + egIncomings.add("SequenceFlow_25kdv36"); + parallelGateway.setIncoming(egIncomings); + + List egOutgoings = new ArrayList<>(); + egOutgoings.add("SequenceFlow_3jkd63g"); + parallelGateway.setOutgoing(egOutgoings); + + Map properties = new HashMap<>(); + Map forkJoinMatch = new HashMap<>(); + forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK, "ParallelGateway_1djgrgp"); + forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.JOIN, "ParallelGateway_3a1nn9f"); + properties.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK_JOIN_MATCH, JSONArray.toJSON(forkJoinMatch)); + parallelGateway.setProperties(properties); + + flowElementList.add(parallelGateway); + } + { + ParallelGateway parallelGateway = new ParallelGateway(); + parallelGateway.setKey("ParallelGateway_10lo44j"); + parallelGateway.setType(ExtendFlowElementType.PARALLEL_GATEWAY); + + List egIncomings = new ArrayList<>(); + egIncomings.add("SequenceFlow_3jkd63g"); + egIncomings.add("SequenceFlow_3bgdrp0"); + parallelGateway.setIncoming(egIncomings); + + List egOutgoings = new ArrayList<>(); + egOutgoings.add("SequenceFlow_3uhg8uj"); + parallelGateway.setOutgoing(egOutgoings); + + Map properties = new HashMap<>(); + Map forkJoinMatch = new HashMap<>(); + forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK, "ParallelGateway_38ad233"); + forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.JOIN, "ParallelGateway_10lo44j"); + properties.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK_JOIN_MATCH, JSONArray.toJSON(forkJoinMatch)); + parallelGateway.setProperties(properties); + + 
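+ // The FORK_JOIN_MATCH property pairs each fork gateway with its join (here
+ // ParallelGateway_38ad233 with ParallelGateway_10lo44j), which is presumably how the
+ // executor tells nested fork/join pairs apart when branches complete.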
flowElementList.add(parallelGateway); + } + { + UserTask userTask = new UserTask(); + userTask.setKey("UserTask_0iv55sh"); + userTask.setType(FlowElementType.USER_TASK); + + List utIncomings = new ArrayList<>(); + utIncomings.add("SequenceFlow_12rbl6u"); + userTask.setIncoming(utIncomings); + + List utOutgoings = new ArrayList<>(); + utOutgoings.add("SequenceFlow_0h92s81"); + userTask.setOutgoing(utOutgoings); + + flowElementList.add(userTask); + } + { + UserTask userTask = new UserTask(); + userTask.setKey("UserTask_0m7qih6"); + userTask.setType(FlowElementType.USER_TASK); + + List utIncomings = new ArrayList<>(); + utIncomings.add("SequenceFlow_3ih7eta"); + userTask.setIncoming(utIncomings); + + List utOutgoings = new ArrayList<>(); + utOutgoings.add("SequenceFlow_3bgdrp0"); + userTask.setOutgoing(utOutgoings); + + flowElementList.add(userTask); + } + { + UserTask userTask = new UserTask(); + userTask.setKey("UserTask_2npcbgp"); + userTask.setType(FlowElementType.USER_TASK); + + List utIncomings = new ArrayList<>(); + utIncomings.add("SequenceFlow_36g4hqc"); + userTask.setIncoming(utIncomings); + + List utOutgoings = new ArrayList<>(); + utOutgoings.add("SequenceFlow_27lme4l"); + userTask.setOutgoing(utOutgoings); + + flowElementList.add(userTask); + } + { + UserTask userTask = new UserTask(); + userTask.setKey("UserTask_01tuns9"); + userTask.setType(FlowElementType.USER_TASK); + + List utIncomings = new ArrayList<>(); + utIncomings.add("SequenceFlow_27lme4l"); + userTask.setIncoming(utIncomings); + + List utOutgoings = new ArrayList<>(); + utOutgoings.add("SequenceFlow_1h65e8t"); + userTask.setOutgoing(utOutgoings); + + flowElementList.add(userTask); + } + { + UserTask userTask = new UserTask(); + userTask.setKey("UserTask_1ram9jm"); + userTask.setType(FlowElementType.USER_TASK); + + List utIncomings = new ArrayList<>(); + utIncomings.add("SequenceFlow_191a52e"); + userTask.setIncoming(utIncomings); + + List utOutgoings = new ArrayList<>(); + utOutgoings.add("SequenceFlow_3a7oj2r"); + userTask.setOutgoing(utOutgoings); + + flowElementList.add(userTask); + } + { + UserTask userTask = new UserTask(); + userTask.setKey("UserTask_32ed01b"); + userTask.setType(FlowElementType.USER_TASK); + + List utIncomings = new ArrayList<>(); + utIncomings.add("SequenceFlow_3a7oj2r"); + userTask.setIncoming(utIncomings); + + List utOutgoings = new ArrayList<>(); + utOutgoings.add("SequenceFlow_25kdv36"); + userTask.setOutgoing(utOutgoings); + + flowElementList.add(userTask); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_2gugjee"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("StartEvent_2s70149"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("ParallelGateway_38ad233"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_12rbl6u"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("ParallelGateway_38ad233"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("UserTask_0iv55sh"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map 
properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_3ih7eta"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("ParallelGateway_38ad233"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("UserTask_0m7qih6"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_0h92s81"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("UserTask_0iv55sh"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("ParallelGateway_1djgrgp"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_36g4hqc"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("ParallelGateway_1djgrgp"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("UserTask_2npcbgp"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_191a52e"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("ParallelGateway_1djgrgp"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("UserTask_1ram9jm"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_27lme4l"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("UserTask_2npcbgp"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("UserTask_01tuns9"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_3a7oj2r"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + 
sfIncomings.add("UserTask_1ram9jm"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("UserTask_32ed01b"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_3jkd63g"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("ParallelGateway_3a1nn9f"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("ParallelGateway_10lo44j"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_3bgdrp0"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("UserTask_0m7qih6"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("ParallelGateway_10lo44j"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_3uhg8uj"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("ParallelGateway_10lo44j"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("EndEvent_2c8j53d"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_1h65e8t"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("UserTask_01tuns9"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("ParallelGateway_3a1nn9f"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_25kdv36"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("UserTask_32ed01b"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("ParallelGateway_3a1nn9f"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + 
flowElementList.add(sequenceFlow1); + } + FlowModel flowModel = new FlowModel(); + flowModel.setFlowElementList(flowElementList); + return flowModel; + } + + /** + * --> UserTask_1ram9jm --> UserTask_01tuns9 + * | | + * StartEvent_2s70149 --> ParallelGateway_38ad233 --> UserTask_0iv55sh --> InclusiveGateway_1djgrgp --> UserTask_2npcbgp --> UserTask_32ed01b --> InclusiveGateway_3a1nn9f --> ParallelGateway_10lo44j --> EndEvent_2c8j53d + * | | + * | | + * -------------------------------------------------> UserTask_0m7qih6 ---------------------------------------------------- + */ + public static FlowModel buildInclusiveFlowModel() { + List flowElementList = Lists.newArrayList(); + { + StartEvent startEvent = new StartEvent(); + startEvent.setKey("StartEvent_2s70149"); + startEvent.setType(FlowElementType.START_EVENT); + List outgoings = new ArrayList<>(); + outgoings.add("SequenceFlow_2gugjee"); + startEvent.setOutgoing(outgoings); + flowElementList.add(startEvent); + } + { + EndEvent endEvent = new EndEvent(); + endEvent.setKey("EndEvent_2c8j53d"); + endEvent.setType(FlowElementType.END_EVENT); + List incomings = new ArrayList<>(); + incomings.add("SequenceFlow_3uhg8uj"); + endEvent.setIncoming(incomings); + flowElementList.add(endEvent); + } + { + ParallelGateway parallelGateway = new ParallelGateway(); + parallelGateway.setKey("ParallelGateway_38ad233"); + parallelGateway.setType(ExtendFlowElementType.PARALLEL_GATEWAY); + + List egIncomings = new ArrayList<>(); + egIncomings.add("SequenceFlow_2gugjee"); + parallelGateway.setIncoming(egIncomings); + + List egOutgoings = new ArrayList<>(); + egOutgoings.add("SequenceFlow_12rbl6u"); + egOutgoings.add("SequenceFlow_3ih7eta"); + parallelGateway.setOutgoing(egOutgoings); + + Map properties = new HashMap<>(); + Map forkJoinMatch = new HashMap<>(); + forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK, "ParallelGateway_38ad233"); + forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.JOIN, "ParallelGateway_10lo44j"); + properties.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK_JOIN_MATCH, JSONArray.toJSON(forkJoinMatch)); + parallelGateway.setProperties(properties); + + flowElementList.add(parallelGateway); + } + { + InclusiveGateway inclusiveGateway = new InclusiveGateway(); + inclusiveGateway.setKey("InclusiveGateway_1djgrgp"); + inclusiveGateway.setType(ExtendFlowElementType.INCLUSIVE_GATEWAY); + + List egIncomings = new ArrayList<>(); + egIncomings.add("SequenceFlow_0h92s81"); + inclusiveGateway.setIncoming(egIncomings); + + List egOutgoings = new ArrayList<>(); + egOutgoings.add("SequenceFlow_36g4hqc"); + egOutgoings.add("SequenceFlow_191a52e"); + inclusiveGateway.setOutgoing(egOutgoings); + + Map properties = new HashMap<>(); + Map forkJoinMatch = new HashMap<>(); + forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK, "InclusiveGateway_1djgrgp"); + forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.JOIN, "InclusiveGateway_3a1nn9f"); + properties.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK_JOIN_MATCH, JSONArray.toJSON(forkJoinMatch)); + inclusiveGateway.setProperties(properties); + + flowElementList.add(inclusiveGateway); + } + { + InclusiveGateway inclusiveGateway = new InclusiveGateway(); + inclusiveGateway.setKey("InclusiveGateway_3a1nn9f"); + inclusiveGateway.setType(ExtendFlowElementType.INCLUSIVE_GATEWAY); + + List egIncomings = new ArrayList<>(); + 
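+ // InclusiveGateway_3a1nn9f is the join matching InclusiveGateway_1djgrgp; unlike a
+ // parallel join, it should only have to wait for the branches whose flow conditions fired.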
egIncomings.add("SequenceFlow_1h65e8t"); + egIncomings.add("SequenceFlow_25kdv36"); + inclusiveGateway.setIncoming(egIncomings); + + List egOutgoings = new ArrayList<>(); + egOutgoings.add("SequenceFlow_3jkd63g"); + inclusiveGateway.setOutgoing(egOutgoings); + + Map properties = new HashMap<>(); + Map forkJoinMatch = new HashMap<>(); + forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK, "InclusiveGateway_1djgrgp"); + forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.JOIN, "InclusiveGateway_3a1nn9f"); + properties.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK_JOIN_MATCH, JSONArray.toJSON(forkJoinMatch)); + inclusiveGateway.setProperties(properties); + + flowElementList.add(inclusiveGateway); + } + { + ParallelGateway parallelGateway = new ParallelGateway(); + parallelGateway.setKey("ParallelGateway_10lo44j"); + parallelGateway.setType(ExtendFlowElementType.PARALLEL_GATEWAY); + + List egIncomings = new ArrayList<>(); + egIncomings.add("SequenceFlow_3jkd63g"); + egIncomings.add("SequenceFlow_3bgdrp0"); + parallelGateway.setIncoming(egIncomings); + + List egOutgoings = new ArrayList<>(); + egOutgoings.add("SequenceFlow_3uhg8uj"); + parallelGateway.setOutgoing(egOutgoings); + + Map properties = new HashMap<>(); + Map forkJoinMatch = new HashMap<>(); + forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK, "ParallelGateway_38ad233"); + forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.JOIN, "ParallelGateway_10lo44j"); + properties.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK_JOIN_MATCH, JSONArray.toJSON(forkJoinMatch)); + parallelGateway.setProperties(properties); + + flowElementList.add(parallelGateway); + } + { + UserTask userTask = new UserTask(); + userTask.setKey("UserTask_0iv55sh"); + userTask.setType(FlowElementType.USER_TASK); + + List utIncomings = new ArrayList<>(); + utIncomings.add("SequenceFlow_12rbl6u"); + userTask.setIncoming(utIncomings); + + List utOutgoings = new ArrayList<>(); + utOutgoings.add("SequenceFlow_0h92s81"); + userTask.setOutgoing(utOutgoings); + + flowElementList.add(userTask); + } + { + UserTask userTask = new UserTask(); + userTask.setKey("UserTask_0m7qih6"); + userTask.setType(FlowElementType.USER_TASK); + + List utIncomings = new ArrayList<>(); + utIncomings.add("SequenceFlow_3ih7eta"); + userTask.setIncoming(utIncomings); + + List utOutgoings = new ArrayList<>(); + utOutgoings.add("SequenceFlow_3bgdrp0"); + userTask.setOutgoing(utOutgoings); + + flowElementList.add(userTask); + } + { + UserTask userTask = new UserTask(); + userTask.setKey("UserTask_2npcbgp"); + userTask.setType(FlowElementType.USER_TASK); + + List utIncomings = new ArrayList<>(); + utIncomings.add("SequenceFlow_36g4hqc"); + userTask.setIncoming(utIncomings); + + List utOutgoings = new ArrayList<>(); + utOutgoings.add("SequenceFlow_27lme4l"); + userTask.setOutgoing(utOutgoings); + + flowElementList.add(userTask); + } + { + UserTask userTask = new UserTask(); + userTask.setKey("UserTask_01tuns9"); + userTask.setType(FlowElementType.USER_TASK); + + List utIncomings = new ArrayList<>(); + utIncomings.add("SequenceFlow_27lme4l"); + userTask.setIncoming(utIncomings); + + List utOutgoings = new ArrayList<>(); + utOutgoings.add("SequenceFlow_1h65e8t"); + userTask.setOutgoing(utOutgoings); + + flowElementList.add(userTask); + } + { + UserTask userTask = new UserTask(); + userTask.setKey("UserTask_1ram9jm"); + 
userTask.setType(FlowElementType.USER_TASK); + + List utIncomings = new ArrayList<>(); + utIncomings.add("SequenceFlow_191a52e"); + userTask.setIncoming(utIncomings); + + List utOutgoings = new ArrayList<>(); + utOutgoings.add("SequenceFlow_3a7oj2r"); + userTask.setOutgoing(utOutgoings); + + flowElementList.add(userTask); + } + { + UserTask userTask = new UserTask(); + userTask.setKey("UserTask_32ed01b"); + userTask.setType(FlowElementType.USER_TASK); + + List utIncomings = new ArrayList<>(); + utIncomings.add("SequenceFlow_3a7oj2r"); + userTask.setIncoming(utIncomings); + + List utOutgoings = new ArrayList<>(); + utOutgoings.add("SequenceFlow_25kdv36"); + userTask.setOutgoing(utOutgoings); + + flowElementList.add(userTask); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_2gugjee"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("StartEvent_2s70149"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("ParallelGateway_38ad233"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_12rbl6u"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("ParallelGateway_38ad233"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("UserTask_0iv55sh"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_3ih7eta"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("ParallelGateway_38ad233"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("UserTask_0m7qih6"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_0h92s81"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("UserTask_0iv55sh"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("InclusiveGateway_1djgrgp"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_36g4hqc"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("InclusiveGateway_1djgrgp"); + sequenceFlow1.setIncoming(sfIncomings); + List 
sfOutgoings = new ArrayList<>(); + sfOutgoings.add("UserTask_2npcbgp"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", "${(a>=10)}"); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_191a52e"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("InclusiveGateway_1djgrgp"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("UserTask_1ram9jm"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", "${(a<10)}"); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_27lme4l"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("UserTask_2npcbgp"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("UserTask_01tuns9"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_3a7oj2r"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("UserTask_1ram9jm"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("UserTask_32ed01b"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_3jkd63g"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("InclusiveGateway_3a1nn9f"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("ParallelGateway_10lo44j"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_3bgdrp0"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("UserTask_0m7qih6"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("ParallelGateway_10lo44j"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + 
sequenceFlow1.setKey("SequenceFlow_3uhg8uj"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("ParallelGateway_10lo44j"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("EndEvent_2c8j53d"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_1h65e8t"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("UserTask_01tuns9"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("InclusiveGateway_3a1nn9f"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_25kdv36"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("UserTask_32ed01b"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("InclusiveGateway_3a1nn9f"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + FlowModel flowModel = new FlowModel(); + flowModel.setFlowElementList(flowElementList); + return flowModel; + } + + /** + * --> UserTask_1ram9jm --> UserTask_32ed01b + * | | + * StartEvent_2s70149 --> ParallelGateway_38ad233 --> UserTask_0iv55sh --> ParallelGateway_1djgrgp --> UserTask_2npcbgp --> UserTask_01tuns9 --> ParallelGateway_3a1nn9f --> ParallelGateway_10lo44j --> EndEvent_2c8j53d + * | | + * | | + * -------------------------------------------------> UserTask_0m7qih6 -------------------------------------------------- + */ + public static FlowModel buildParallelFlowModelWithMergeOne() { + List flowElementList = Lists.newArrayList(); + { + StartEvent startEvent = new StartEvent(); + startEvent.setKey("StartEvent_2s70149"); + startEvent.setType(FlowElementType.START_EVENT); + List outgoings = new ArrayList<>(); + outgoings.add("SequenceFlow_2gugjee"); + startEvent.setOutgoing(outgoings); + flowElementList.add(startEvent); + } + { + EndEvent endEvent = new EndEvent(); + endEvent.setKey("EndEvent_2c8j53d"); + endEvent.setType(FlowElementType.END_EVENT); + List incomings = new ArrayList<>(); + incomings.add("SequenceFlow_3uhg8uj"); + endEvent.setIncoming(incomings); + flowElementList.add(endEvent); + } + { + ParallelGateway parallelGateway = new ParallelGateway(); + parallelGateway.setKey("ParallelGateway_38ad233"); + parallelGateway.setType(ExtendFlowElementType.PARALLEL_GATEWAY); + + List egIncomings = new ArrayList<>(); + egIncomings.add("SequenceFlow_2gugjee"); + parallelGateway.setIncoming(egIncomings); + + List egOutgoings = new ArrayList<>(); + egOutgoings.add("SequenceFlow_12rbl6u"); + egOutgoings.add("SequenceFlow_3ih7eta"); + parallelGateway.setOutgoing(egOutgoings); + + Map 
properties = new HashMap<>(); + Map forkJoinMatch = new HashMap<>(); + forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK, "ParallelGateway_38ad233"); + forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.JOIN, "ParallelGateway_10lo44j"); + properties.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK_JOIN_MATCH, JSONArray.toJSON(forkJoinMatch)); + parallelGateway.setProperties(properties); + + flowElementList.add(parallelGateway); + } + { + ParallelGateway parallelGateway = new ParallelGateway(); + parallelGateway.setKey("ParallelGateway_1djgrgp"); + parallelGateway.setType(ExtendFlowElementType.PARALLEL_GATEWAY); + + List egIncomings = new ArrayList<>(); + egIncomings.add("SequenceFlow_0h92s81"); + parallelGateway.setIncoming(egIncomings); + + List egOutgoings = new ArrayList<>(); + egOutgoings.add("SequenceFlow_36g4hqc"); + egOutgoings.add("SequenceFlow_191a52e"); + parallelGateway.setOutgoing(egOutgoings); + + Map properties = new HashMap<>(); + Map forkJoinMatch = new HashMap<>(); + forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK, "ParallelGateway_1djgrgp"); + forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.JOIN, "ParallelGateway_3a1nn9f"); + properties.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK_JOIN_MATCH, JSONArray.toJSON(forkJoinMatch)); + parallelGateway.setProperties(properties); + + flowElementList.add(parallelGateway); + } + { + ParallelGateway parallelGateway = new ParallelGateway(); + parallelGateway.setKey("ParallelGateway_3a1nn9f"); + parallelGateway.setType(ExtendFlowElementType.PARALLEL_GATEWAY); + + List egIncomings = new ArrayList<>(); + egIncomings.add("SequenceFlow_1h65e8t"); + egIncomings.add("SequenceFlow_25kdv36"); + parallelGateway.setIncoming(egIncomings); + + List egOutgoings = new ArrayList<>(); + egOutgoings.add("SequenceFlow_3jkd63g"); + parallelGateway.setOutgoing(egOutgoings); + + Map properties = new HashMap<>(); + Map forkJoinMatch = new HashMap<>(); + forkJoinMatch.put(Constants.ELEMENT_PROPERTIES.FORK, "ParallelGateway_1djgrgp"); + forkJoinMatch.put(Constants.ELEMENT_PROPERTIES.JOIN, "ParallelGateway_3a1nn9f"); + properties.put(Constants.ELEMENT_PROPERTIES.FORK_JOIN_MATCH, JSONArray.toJSON(forkJoinMatch)); + properties.put(Constants.ELEMENT_PROPERTIES.BRANCH_MERGE, MergeStrategy.BRANCH_MERGE.ANY_ONE); + properties.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.DATA_MERGE, MergeStrategy.DATA_MERGE.NONE); + parallelGateway.setProperties(properties); + + flowElementList.add(parallelGateway); + } + { + ParallelGateway parallelGateway = new ParallelGateway(); + parallelGateway.setKey("ParallelGateway_10lo44j"); + parallelGateway.setType(ExtendFlowElementType.PARALLEL_GATEWAY); + + List egIncomings = new ArrayList<>(); + egIncomings.add("SequenceFlow_3jkd63g"); + egIncomings.add("SequenceFlow_3bgdrp0"); + parallelGateway.setIncoming(egIncomings); + + List egOutgoings = new ArrayList<>(); + egOutgoings.add("SequenceFlow_3uhg8uj"); + parallelGateway.setOutgoing(egOutgoings); + + Map properties = new HashMap<>(); + Map forkJoinMatch = new HashMap<>(); + forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK, "ParallelGateway_38ad233"); + forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.JOIN, "ParallelGateway_10lo44j"); + 
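+ // ParallelGateway_3a1nn9f above overrides the default merge behaviour with
+ // BRANCH_MERGE.ANY_ONE / DATA_MERGE.NONE: the join is expected to fire as soon as the
+ // first branch arrives and to keep none of the branch-local data (see the plugin's
+ // BranchMergeAnyOne / DataMergeNone strategies).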
properties.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK_JOIN_MATCH, JSONArray.toJSON(forkJoinMatch)); + parallelGateway.setProperties(properties); + + flowElementList.add(parallelGateway); + } + { + UserTask userTask = new UserTask(); + userTask.setKey("UserTask_0iv55sh"); + userTask.setType(FlowElementType.USER_TASK); + + List utIncomings = new ArrayList<>(); + utIncomings.add("SequenceFlow_12rbl6u"); + userTask.setIncoming(utIncomings); + + List utOutgoings = new ArrayList<>(); + utOutgoings.add("SequenceFlow_0h92s81"); + userTask.setOutgoing(utOutgoings); + + flowElementList.add(userTask); + } + { + UserTask userTask = new UserTask(); + userTask.setKey("UserTask_0m7qih6"); + userTask.setType(FlowElementType.USER_TASK); + + List utIncomings = new ArrayList<>(); + utIncomings.add("SequenceFlow_3ih7eta"); + userTask.setIncoming(utIncomings); + + List utOutgoings = new ArrayList<>(); + utOutgoings.add("SequenceFlow_3bgdrp0"); + userTask.setOutgoing(utOutgoings); + + flowElementList.add(userTask); + } + { + UserTask userTask = new UserTask(); + userTask.setKey("UserTask_2npcbgp"); + userTask.setType(FlowElementType.USER_TASK); + + List utIncomings = new ArrayList<>(); + utIncomings.add("SequenceFlow_36g4hqc"); + userTask.setIncoming(utIncomings); + + List utOutgoings = new ArrayList<>(); + utOutgoings.add("SequenceFlow_27lme4l"); + userTask.setOutgoing(utOutgoings); + + flowElementList.add(userTask); + } + { + UserTask userTask = new UserTask(); + userTask.setKey("UserTask_01tuns9"); + userTask.setType(FlowElementType.USER_TASK); + + List utIncomings = new ArrayList<>(); + utIncomings.add("SequenceFlow_27lme4l"); + userTask.setIncoming(utIncomings); + + List utOutgoings = new ArrayList<>(); + utOutgoings.add("SequenceFlow_1h65e8t"); + userTask.setOutgoing(utOutgoings); + + flowElementList.add(userTask); + } + { + UserTask userTask = new UserTask(); + userTask.setKey("UserTask_1ram9jm"); + userTask.setType(FlowElementType.USER_TASK); + + List utIncomings = new ArrayList<>(); + utIncomings.add("SequenceFlow_191a52e"); + userTask.setIncoming(utIncomings); + + List utOutgoings = new ArrayList<>(); + utOutgoings.add("SequenceFlow_3a7oj2r"); + userTask.setOutgoing(utOutgoings); + + flowElementList.add(userTask); + } + { + UserTask userTask = new UserTask(); + userTask.setKey("UserTask_32ed01b"); + userTask.setType(FlowElementType.USER_TASK); + + List utIncomings = new ArrayList<>(); + utIncomings.add("SequenceFlow_3a7oj2r"); + userTask.setIncoming(utIncomings); + + List utOutgoings = new ArrayList<>(); + utOutgoings.add("SequenceFlow_25kdv36"); + userTask.setOutgoing(utOutgoings); + + flowElementList.add(userTask); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_2gugjee"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("StartEvent_2s70149"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("ParallelGateway_38ad233"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_12rbl6u"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + 
sfIncomings.add("ParallelGateway_38ad233"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("UserTask_0iv55sh"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_3ih7eta"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("ParallelGateway_38ad233"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("UserTask_0m7qih6"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_0h92s81"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("UserTask_0iv55sh"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("ParallelGateway_1djgrgp"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_36g4hqc"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("ParallelGateway_1djgrgp"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("UserTask_2npcbgp"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_191a52e"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("ParallelGateway_1djgrgp"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("UserTask_1ram9jm"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_27lme4l"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("UserTask_2npcbgp"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("UserTask_01tuns9"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + 
flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_3a7oj2r"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("UserTask_1ram9jm"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("UserTask_32ed01b"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_3jkd63g"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("ParallelGateway_3a1nn9f"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("ParallelGateway_10lo44j"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_3bgdrp0"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("UserTask_0m7qih6"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("ParallelGateway_10lo44j"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_3uhg8uj"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("ParallelGateway_10lo44j"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("EndEvent_2c8j53d"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_1h65e8t"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("UserTask_01tuns9"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("ParallelGateway_3a1nn9f"); + sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + { + SequenceFlow sequenceFlow1 = new SequenceFlow(); + sequenceFlow1.setKey("SequenceFlow_25kdv36"); + sequenceFlow1.setType(FlowElementType.SEQUENCE_FLOW); + List sfIncomings = new ArrayList<>(); + sfIncomings.add("UserTask_32ed01b"); + sequenceFlow1.setIncoming(sfIncomings); + List sfOutgoings = new ArrayList<>(); + sfOutgoings.add("ParallelGateway_3a1nn9f"); + 
sequenceFlow1.setOutgoing(sfOutgoings); + + Map properties = new HashMap<>(); + properties.put("defaultConditions", "false"); + properties.put("conditionsequenceflow", ""); + sequenceFlow1.setProperties(properties); + + flowElementList.add(sequenceFlow1); + } + FlowModel flowModel = new FlowModel(); + flowModel.setFlowElementList(flowElementList); + return flowModel; + } +} diff --git a/parallel-plugin/src/test/resources/application.properties b/parallel-plugin/src/test/resources/application.properties new file mode 100644 index 00000000..f5b1bf95 --- /dev/null +++ b/parallel-plugin/src/test/resources/application.properties @@ -0,0 +1,6 @@ +spring.datasource.dynamic.primary=engine +spring.datasource.dynamic.datasource.engine.type=com.alibaba.druid.pool.DruidDataSource +spring.datasource.dynamic.datasource.engine.username=username +spring.datasource.dynamic.datasource.engine.password=password +spring.datasource.dynamic.datasource.engine.driver-class-name=com.mysql.jdbc.Driver +spring.datasource.dynamic.datasource.engine.url=jdbc:mysql://127.0.0.1:3306/t_engine?useUnicode=true&characterEncoding=UTF-8&zeroDateTimeBehavior=convertToNull&transformedBitIsBoolean=true&autoReconnect=true diff --git a/parallel-plugin/src/test/resources/logback-spring.xml b/parallel-plugin/src/test/resources/logback-spring.xml new file mode 100644 index 00000000..3a8621b2 --- /dev/null +++ b/parallel-plugin/src/test/resources/logback-spring.xml @@ -0,0 +1,35 @@ +<?xml version="1.0" encoding="UTF-8"?> +<configuration> +    <property name="LOG_HOME" value="./log"/> +    <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender"> +        <encoder> +            <pattern>[%level][%d{yyyy-MM-dd'T'HH:mm:ss.SSSZ}][%logger:%L][%thread]||traceid=%X{X-B3-TraceId:-}||%msg%n</pattern> +        </encoder> +    </appender> +    <appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> +        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> +            <fileNamePattern>${LOG_HOME}/turbo.log.%d{yyyyMMddHH}</fileNamePattern> +            <maxHistory>240</maxHistory> +        </rollingPolicy> +        <encoder> +            <pattern>[%level][%d{yyyy-MM-dd'T'HH:mm:ss.SSSZ}][%logger:%L][%thread]||traceid=%X{X-B3-TraceId:-}||%msg%n</pattern> +            <charset>UTF-8</charset> +        </encoder> +    </appender> +    <root level="INFO"> +        <appender-ref ref="CONSOLE"/> +        <appender-ref ref="FILE"/> +    </root> +</configuration> \ No newline at end of file diff --git a/parallel-plugin/src/test/resources/plugin.properties b/parallel-plugin/src/test/resources/plugin.properties new file mode 100644 index 00000000..3c3385bb --- /dev/null +++ b/parallel-plugin/src/test/resources/plugin.properties @@ -0,0 +1,12 @@ +# JDBC config +turbo.plugin.jdbc.url=jdbc:mysql://127.0.0.1:3306/t_engine?useUnicode=true&characterEncoding=UTF-8&zeroDateTimeBehavior=convertToNull&transformedBitIsBoolean=true&autoReconnect=true +turbo.plugin.jdbc.username=username +turbo.plugin.jdbc.password=password +turbo.plugin.jdbc.driver=com.mysql.jdbc.Driver +turbo.plugin.jdbc.maximumPoolSize=10 + +# plugin switch +#turbo.plugin.support.InclusiveGatewayElementPlugin=false + +# parallel gateway thread pool config +#turbo.plugin.parallelGateway.threadPool.timeout=5000 \ No newline at end of file diff --git a/pom.xml b/pom.xml index 90bfb993..ce6c4c8a 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ <groupId>com.didiglobal.turbo</groupId> <artifactId>turbo</artifactId> <packaging>pom</packaging> - <version>1.1.1</version> + <version>1.2.0</version> <name>turbo</name> <description>Turbo is a light-weight flow engine framework, support BPMN2.0</description> <url>https://www.github.com/didi/turbo</url> @@ -50,11 +50,17 @@ <name>Didi</name> <url>http://www.didiglobal.com</url> </developer> + <developer> + <name>lanzhengjian</name> + <organization>Didi</organization> + <url>http://www.didiglobal.com</url> + </developer> </developers> <modules> <module>engine</module> <module>demo</module> + <module>parallel-plugin</module> </modules> From e0d8c83fe187d9a59878a50f84f7aabe9aad33fc Mon Sep 17 00:00:00 2001 From: lanzhengjian Date: Thu, 21 Nov 2024 18:42:47 +0800 Subject: [PATCH 2/4] update demo project --- .../demo/runner/CallActivityDemoRunner.java | 12 +- .../resources/script/turbo-mysql-h2-ddl.sql | 195 ++++++++++-------- .../turbo/engine/entity/NodeInstancePO.java | 2 + .../validator/ElementValidatorFactory.java | 1 + .../resources/turbo.db.create/turbo.mysql.sql | 2 +- 5 files changed, 118 insertions(+), 94 deletions(-) diff --git
a/demo/src/main/java/com/didiglobal/turbo/demo/runner/CallActivityDemoRunner.java b/demo/src/main/java/com/didiglobal/turbo/demo/runner/CallActivityDemoRunner.java index fe589cf7..d8b051c7 100644 --- a/demo/src/main/java/com/didiglobal/turbo/demo/runner/CallActivityDemoRunner.java +++ b/demo/src/main/java/com/didiglobal/turbo/demo/runner/CallActivityDemoRunner.java @@ -108,16 +108,16 @@ public void updateFlow() { } public void deployFlow() { - // Deploy main flow - DeployFlowParam deployMainFlowParam = new DeployFlowParam(tenant, caller); - deployMainFlowParam.setFlowModuleId(createMainFlowResult.getFlowModuleId()); - deployMainFlowResult = processEngine.deployFlow(deployMainFlowParam); - LOGGER.info("deployMainFlow.||deployMainFlowResult={}", deployMainFlowResult); // Deploy sub flow DeployFlowParam deploySubFlowParam = new DeployFlowParam(tenant, caller); deploySubFlowParam.setFlowModuleId(createSubFlowResult.getFlowModuleId()); deploySubFlowResult = processEngine.deployFlow(deploySubFlowParam); LOGGER.info("deploySubFlow.||deploySubFlowResult={}", deploySubFlowResult); + // Deploy main flow + DeployFlowParam deployMainFlowParam = new DeployFlowParam(tenant, caller); + deployMainFlowParam.setFlowModuleId(createMainFlowResult.getFlowModuleId()); + deployMainFlowResult = processEngine.deployFlow(deployMainFlowParam); + LOGGER.info("deployMainFlow.||deployMainFlowResult={}", deployMainFlowResult); } public void startProcessToEnd(boolean auth) { @@ -134,7 +134,7 @@ public void startProcessToEnd(boolean auth) { // Now it is stuck in the second user node of the parent process, 'Generate Work Order', driving the completion of the parent process commitTaskResult = commitMainFlowUserTask2(commitTaskResult); - assert commitTaskResult.getStatus() == ErrorEnum.SUCCESS.getErrNo(); + assert commitTaskResult.getErrCode() == ErrorEnum.SUCCESS.getErrNo(); } private StartProcessResult startProcessToCallActivity() { diff --git a/demo/src/test/resources/script/turbo-mysql-h2-ddl.sql b/demo/src/test/resources/script/turbo-mysql-h2-ddl.sql index f918529f..2670dd54 100644 --- a/demo/src/test/resources/script/turbo-mysql-h2-ddl.sql +++ b/demo/src/test/resources/script/turbo-mysql-h2-ddl.sql @@ -22,101 +22,122 @@ CREATE TABLE IF NOT EXISTS `em_flow_definition` ( DROP TABLE IF EXISTS `em_flow_deployment`; CREATE TABLE IF NOT EXISTS `em_flow_deployment` ( - `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT '自增主键', - `flow_deploy_id` varchar(128) NOT NULL DEFAULT '' COMMENT '流程模型部署id', - `flow_module_id` varchar(128) NOT NULL DEFAULT '' COMMENT '流程模型id', - `flow_name` varchar(64) NOT NULL DEFAULT '' COMMENT '流程名称', - `flow_key` varchar(32) NOT NULL DEFAULT '' COMMENT '流程业务标识', - `tenant_id` varchar(16) NOT NULL DEFAULT '' COMMENT '业务方标识', - `flow_model` mediumtext COMMENT '表单定义', - `status` tinyint(4) NOT NULL DEFAULT '0' COMMENT '状态(1.已部署 3.已下线)', - `create_time` datetime NOT NULL DEFAULT '1970-01-01 00:00:00' COMMENT '流程创建时间', - `modify_time` datetime NOT NULL DEFAULT '1970-01-01 00:00:00' COMMENT '流程修改时间', - `operator` varchar(32) NOT NULL DEFAULT '' COMMENT '操作人', - `remark` varchar(512) NOT NULL DEFAULT '' COMMENT '备注', - `archive` tinyint(4) NOT NULL DEFAULT '0' COMMENT '归档状态(0未删除,1删除)', - `tenant` varchar(100) NOT NULL DEFAULT '' COMMENT '租户', - `caller` varchar(100) NOT NULL DEFAULT '' COMMENT '调用方', - PRIMARY KEY (`id`), - UNIQUE KEY `uniq_flow_deploy_id` (`flow_deploy_id`), - KEY `idx_flow_module_id` (`flow_module_id`) - ) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='流程部署表'; + 
`id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT '自增主键', + `flow_deploy_id` varchar(128) NOT NULL DEFAULT '' COMMENT '流程模型部署id', + `flow_module_id` varchar(128) NOT NULL DEFAULT '' COMMENT '流程模型id', + `flow_name` varchar(64) NOT NULL DEFAULT '' COMMENT '流程名称', + `flow_key` varchar(32) NOT NULL DEFAULT '' COMMENT '流程业务标识', + `tenant_id` varchar(16) NOT NULL DEFAULT '' COMMENT '业务方标识', + `flow_model` mediumtext COMMENT '表单定义', + `status` tinyint(4) NOT NULL DEFAULT '0' COMMENT '状态(1.已部署 3.已下线)', + `create_time` datetime NOT NULL DEFAULT '1970-01-01 00:00:00' COMMENT '流程创建时间', + `modify_time` datetime NOT NULL DEFAULT '1970-01-01 00:00:00' COMMENT '流程修改时间', + `operator` varchar(32) NOT NULL DEFAULT '' COMMENT '操作人', + `remark` varchar(512) NOT NULL DEFAULT '' COMMENT '备注', + `archive` tinyint(4) NOT NULL DEFAULT '0' COMMENT '归档状态(0未删除,1删除)', + `tenant` varchar(100) NOT NULL DEFAULT '' COMMENT '租户', + `caller` varchar(100) NOT NULL DEFAULT '' COMMENT '调用方', + PRIMARY KEY (`id`), + UNIQUE KEY `uniq_flow_deploy_id` (`flow_deploy_id`), + KEY `idx_flow_module_id` (`flow_module_id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='流程部署表'; DROP TABLE IF EXISTS `ei_flow_instance`; -CREATE TABLE IF NOT EXISTS `ei_flow_instance` ( - `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT '自增主键', - `flow_instance_id` varchar(128) NOT NULL DEFAULT '' COMMENT '流程执行实例id', - `flow_deploy_id` varchar(128) NOT NULL DEFAULT '' COMMENT '流程模型部署id', - `flow_module_id` varchar(128) NOT NULL DEFAULT '' COMMENT '流程模型id', - `tenant_id` varchar(16) NOT NULL DEFAULT '' COMMENT '业务方标识', - `status` tinyint(4) NOT NULL DEFAULT '0' COMMENT '状态(1.执行完成 2.执行中 3.执行终止(强制终止))', - `create_time` datetime NOT NULL DEFAULT '1970-01-01 00:00:00' COMMENT '流程创建时间', - `modify_time` datetime NOT NULL DEFAULT '1970-01-01 00:00:00' COMMENT '流程修改时间', - `archive` tinyint(4) NOT NULL DEFAULT '0' COMMENT '归档状态(0未删除,1删除)', - `tenant` varchar(100) NOT NULL DEFAULT '' COMMENT '租户', - `caller` varchar(100) NOT NULL DEFAULT '' COMMENT '调用方', +CREATE TABLE IF NOT EXISTS `ei_flow_instance`( + `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT '自增主键', + `flow_instance_id` varchar(128) NOT NULL DEFAULT '' COMMENT '流程执行实例id', + `parent_flow_instance_id` varchar(128) NOT NULL DEFAULT '' COMMENT '父流程执行实例id', + `flow_deploy_id` varchar(128) NOT NULL DEFAULT '' COMMENT '流程模型部署id', + `flow_module_id` varchar(128) NOT NULL DEFAULT '' COMMENT '流程模型id', + `tenant_id` varchar(16) NOT NULL DEFAULT '' COMMENT '业务方标识', + `status` tinyint(4) NOT NULL DEFAULT '0' COMMENT '状态(1.执行完成 2.执行中 3.执行终止(强制终止))', + `create_time` datetime NOT NULL DEFAULT '1970-01-01 00:00:00' COMMENT '流程创建时间', + `modify_time` datetime NOT NULL DEFAULT '1970-01-01 00:00:00' COMMENT '流程修改时间', + `archive` tinyint(4) NOT NULL DEFAULT '0' COMMENT '归档状态(0未删除,1删除)', + `tenant` varchar(100) NOT NULL DEFAULT '' COMMENT '租户', + `caller` varchar(100) NOT NULL DEFAULT '' COMMENT '调用方', + PRIMARY KEY (`id`), + UNIQUE KEY `uniq_flow_instance_id` (`flow_instance_id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='流程执行实例表'; + +DROP TABLE IF EXISTS `ei_flow_instance_mapping`; +CREATE TABLE `ei_flow_instance_mapping` +( + `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT '自增主键', + `flow_instance_id` varchar(128) NOT NULL DEFAULT '' COMMENT '流程执行实例id', + `node_instance_id` varchar(128) NOT NULL DEFAULT '' COMMENT '节点执行实例id', + `node_key` varchar(64) NOT NULL DEFAULT '' COMMENT '节点唯一标识', + `sub_flow_instance_id` varchar(128) NOT NULL DEFAULT '' COMMENT 
'子流程执行实例id', + `type` tinyint(4) NOT NULL DEFAULT '0' COMMENT '状态(1.执行 2.回滚)', + `create_time` datetime NOT NULL DEFAULT '1970-01-01 00:00:00' COMMENT '流程创建时间', + `modify_time` datetime NOT NULL DEFAULT '1970-01-01 00:00:00' COMMENT '流程修改时间', + `archive` tinyint(4) NOT NULL DEFAULT '0' COMMENT '归档状态(0未删除,1删除)', + `tenant` varchar(100) NOT NULL DEFAULT 'didi' COMMENT '租户', + `caller` varchar(100) NOT NULL DEFAULT 'optimus-prime' COMMENT '调用方', PRIMARY KEY (`id`), - UNIQUE KEY `uniq_flow_instance_id` (`flow_instance_id`) - ) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='流程执行实例表'; + KEY `idx_fii` (`flow_instance_id`) +) ENGINE = InnoDB + DEFAULT CHARSET = utf8 + ROW_FORMAT = COMPACT COMMENT ='父子流程实例映射表'; DROP TABLE IF EXISTS `ei_node_instance`; -CREATE TABLE IF NOT EXISTS `ei_node_instance` ( - `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT '自增主键', - `node_instance_id` varchar(128) NOT NULL DEFAULT '' COMMENT '节点执行实例id', - `flow_instance_id` varchar(128) NOT NULL DEFAULT '' COMMENT '流程执行实例id', - `source_node_instance_id` varchar(128) NOT NULL DEFAULT '' COMMENT '上一个节点执行实例id', - `instance_data_id` varchar(128) NOT NULL DEFAULT '' COMMENT '实例数据id', - `flow_deploy_id` varchar(128) NOT NULL DEFAULT '' COMMENT '流程模型部署id', - `node_key` varchar(64) NOT NULL DEFAULT '' COMMENT '节点唯一标识', - `source_node_key` varchar(64) NOT NULL DEFAULT '' COMMENT '上一个流程节点唯一标识', - `tenant_id` varchar(16) NOT NULL DEFAULT '' COMMENT '业务方标识', - `status` tinyint(4) NOT NULL DEFAULT '0' COMMENT '状态(1.处理成功 2.处理中 3.处理失败 4.处理已撤销)', - `create_time` datetime NOT NULL DEFAULT '1970-01-01 00:00:00' COMMENT '流程创建时间', - `modify_time` datetime NOT NULL DEFAULT '1970-01-01 00:00:00' COMMENT '流程修改时间', - `archive` tinyint(4) NOT NULL DEFAULT '0' COMMENT '归档状态(0未删除,1删除)', - `tenant` varchar(100) NOT NULL DEFAULT '' COMMENT '租户', - `caller` varchar(100) NOT NULL DEFAULT '' COMMENT '调用方', - PRIMARY KEY (`id`), - UNIQUE KEY `uniq_node_instance_id` (`node_instance_id`), - KEY `idx_fiid_sniid_nk` (`flow_instance_id`,`source_node_instance_id`,`node_key`) - ) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='节点执行实例表'; +CREATE TABLE IF NOT EXISTS `ei_node_instance` +( + `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT '自增主键', + `node_instance_id` varchar(128) NOT NULL DEFAULT '' COMMENT '节点执行实例id', + `flow_instance_id` varchar(128) NOT NULL DEFAULT '' COMMENT '流程执行实例id', + `source_node_instance_id` varchar(128) NOT NULL DEFAULT '' COMMENT '上一个节点执行实例id', + `instance_data_id` varchar(128) NOT NULL DEFAULT '' COMMENT '实例数据id', + `flow_deploy_id` varchar(128) NOT NULL DEFAULT '' COMMENT '流程模型部署id', + `node_key` varchar(64) NOT NULL DEFAULT '' COMMENT '节点唯一标识', + `node_type` int NOT NULL DEFAULT 0 COMMENT '流程类型', + `source_node_key` varchar(64) NOT NULL DEFAULT '' COMMENT '上一个流程节点唯一标识', + `tenant_id` varchar(16) NOT NULL DEFAULT '' COMMENT '业务方标识', + `status` tinyint(4) NOT NULL DEFAULT '0' COMMENT '状态(1.处理成功 2.处理中 3.处理失败 4.处理已撤销)', + `create_time` datetime NOT NULL DEFAULT '1970-01-01 00:00:00' COMMENT '流程创建时间', + `modify_time` datetime NOT NULL DEFAULT '1970-01-01 00:00:00' COMMENT '流程修改时间', + `archive` tinyint(4) NOT NULL DEFAULT '0' COMMENT '归档状态(0未删除,1删除)', + `tenant` varchar(100) NOT NULL DEFAULT '' COMMENT '租户', + `caller` varchar(100) NOT NULL DEFAULT '' COMMENT '调用方', + PRIMARY KEY (`id`), + UNIQUE KEY `uniq_node_instance_id` (`node_instance_id`), + KEY `idx_fiid_sniid_nk` (`flow_instance_id`,`source_node_instance_id`,`node_key`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT 
COMMENT='节点执行实例表'; DROP TABLE IF EXISTS `ei_node_instance_log`; CREATE TABLE IF NOT EXISTS `ei_node_instance_log` ( - `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT '自增主键', - `node_instance_id` varchar(128) NOT NULL DEFAULT '' COMMENT '节点执行实例id', - `flow_instance_id` varchar(128) NOT NULL DEFAULT '' COMMENT '流程执行实例id', - `instance_data_id` varchar(128) NOT NULL DEFAULT '' COMMENT '实例数据id', - `node_key` varchar(64) NOT NULL DEFAULT '' COMMENT '节点唯一标识', - `tenant_id` varchar(16) NOT NULL DEFAULT '' COMMENT '业务方标识', - `type` tinyint(4) NOT NULL DEFAULT '0' COMMENT '操作类型(1.系统执行 2.任务提交 3.任务撤销)', - `status` tinyint(4) NOT NULL DEFAULT '0' COMMENT '状态(1.处理成功 2.处理中 3.处理失败 4.处理已撤销)', - `create_time` datetime NOT NULL DEFAULT '1970-01-01 00:00:00' COMMENT '流程创建时间', - `archive` tinyint(4) NOT NULL DEFAULT '0' COMMENT '归档状态(0未删除,1删除)', - `tenant` varchar(100) NOT NULL DEFAULT '' COMMENT '租户', - `caller` varchar(100) NOT NULL DEFAULT '' COMMENT '调用方', - PRIMARY KEY (`id`) - ) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='节点执行记录表'; + `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT '自增主键', + `node_instance_id` varchar(128) NOT NULL DEFAULT '' COMMENT '节点执行实例id', + `flow_instance_id` varchar(128) NOT NULL DEFAULT '' COMMENT '流程执行实例id', + `instance_data_id` varchar(128) NOT NULL DEFAULT '' COMMENT '实例数据id', + `node_key` varchar(64) NOT NULL DEFAULT '' COMMENT '节点唯一标识', + `tenant_id` varchar(16) NOT NULL DEFAULT '' COMMENT '业务方标识', + `type` tinyint(4) NOT NULL DEFAULT '0' COMMENT '操作类型(1.系统执行 2.任务提交 3.任务撤销)', + `status` tinyint(4) NOT NULL DEFAULT '0' COMMENT '状态(1.处理成功 2.处理中 3.处理失败 4.处理已撤销)', + `create_time` datetime NOT NULL DEFAULT '1970-01-01 00:00:00' COMMENT '流程创建时间', + `archive` tinyint(4) NOT NULL DEFAULT '0' COMMENT '归档状态(0未删除,1删除)', + `tenant` varchar(100) NOT NULL DEFAULT '' COMMENT '租户', + `caller` varchar(100) NOT NULL DEFAULT '' COMMENT '调用方', + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='节点执行记录表'; DROP TABLE IF EXISTS `ei_instance_data`; CREATE TABLE IF NOT EXISTS `ei_instance_data` ( - `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT '自增主键', - `node_instance_id` varchar(128) NOT NULL DEFAULT '' COMMENT '节点执行实例id', - `flow_instance_id` varchar(128) NOT NULL DEFAULT '' COMMENT '流程执行实例id', - `instance_data_id` varchar(128) NOT NULL DEFAULT '' COMMENT '实例数据id', - `flow_deploy_id` varchar(128) NOT NULL DEFAULT '' COMMENT '流程模型部署id', - `flow_module_id` varchar(128) NOT NULL DEFAULT '' COMMENT '流程模型id', - `node_key` varchar(64) NOT NULL DEFAULT '' COMMENT '节点唯一标识', - `tenant_id` varchar(16) NOT NULL DEFAULT '' COMMENT '业务方标识', - `instance_data` longtext COMMENT '数据列表json', - `type` tinyint(4) NOT NULL DEFAULT '0' COMMENT '操作类型(1.实例初始化 2.系统执行 3.系统主动获取 4.上游更新 5.任务提交 6.任务撤回)', - `create_time` datetime NOT NULL DEFAULT '1970-01-01 00:00:00' COMMENT '流程创建时间', - `archive` tinyint(4) NOT NULL DEFAULT '0' COMMENT '归档状态(0未删除,1删除)', - `tenant` varchar(100) NOT NULL DEFAULT '' COMMENT '租户', - `caller` varchar(100) NOT NULL DEFAULT '' COMMENT '调用方', - PRIMARY KEY (`id`), - UNIQUE KEY `uniq_instance_data_id` (`instance_data_id`), - KEY `idx_flow_instance_id` (`flow_instance_id`) - ) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='节点执行记录表'; - --- 在ei_instance_data表中,如果需要存储表情符号, MySQL的建表语句需要切换utf8mb4 \ No newline at end of file + `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT '自增主键', + `node_instance_id` varchar(128) NOT NULL DEFAULT '' COMMENT '节点执行实例id', + `flow_instance_id` varchar(128) NOT NULL 
DEFAULT '' COMMENT '流程执行实例id', + `instance_data_id` varchar(128) NOT NULL DEFAULT '' COMMENT '实例数据id', + `flow_deploy_id` varchar(128) NOT NULL DEFAULT '' COMMENT '流程模型部署id', + `flow_module_id` varchar(128) NOT NULL DEFAULT '' COMMENT '流程模型id', + `node_key` varchar(64) NOT NULL DEFAULT '' COMMENT '节点唯一标识', + `tenant_id` varchar(16) NOT NULL DEFAULT '' COMMENT '业务方标识', + `instance_data` longtext COMMENT '数据列表json', + `type` tinyint(4) NOT NULL DEFAULT '0' COMMENT '操作类型(1.实例初始化 2.系统执行 3.系统主动获取 4.上游更新 5.任务提交 6.任务撤回)', + `create_time` datetime NOT NULL DEFAULT '1970-01-01 00:00:00' COMMENT '流程创建时间', + `archive` tinyint(4) NOT NULL DEFAULT '0' COMMENT '归档状态(0未删除,1删除)', + `tenant` varchar(100) NOT NULL DEFAULT '' COMMENT '租户', + `caller` varchar(100) NOT NULL DEFAULT '' COMMENT '调用方', + PRIMARY KEY (`id`), + UNIQUE KEY `uniq_instance_data_id` (`instance_data_id`), + KEY `idx_flow_instance_id` (`flow_instance_id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci ROW_FORMAT=COMPACT COMMENT='实例数据表'; diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/entity/NodeInstancePO.java b/engine/src/main/java/com/didiglobal/turbo/engine/entity/NodeInstancePO.java index 32574088..a9ee4c3e 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/entity/NodeInstancePO.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/entity/NodeInstancePO.java @@ -1,5 +1,6 @@ package com.didiglobal.turbo.engine.entity; +import com.baomidou.mybatisplus.annotation.TableField; import com.baomidou.mybatisplus.annotation.TableName; import java.util.Date; @@ -15,6 +16,7 @@ public class NodeInstancePO extends CommonPO { private String sourceNodeKey; private Integer status; private Date modifyTime; + @TableField(exist = false) private int nodeType; public String getFlowInstanceId() { diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/validator/ElementValidatorFactory.java b/engine/src/main/java/com/didiglobal/turbo/engine/validator/ElementValidatorFactory.java index 5efde283..9ddb08a7 100644 --- a/engine/src/main/java/com/didiglobal/turbo/engine/validator/ElementValidatorFactory.java +++ b/engine/src/main/java/com/didiglobal/turbo/engine/validator/ElementValidatorFactory.java @@ -58,6 +58,7 @@ public void init() { validatorMap.put(FlowElementType.END_EVENT, endEventValidator); validatorMap.put(FlowElementType.USER_TASK, userTaskValidator); validatorMap.put(FlowElementType.EXCLUSIVE_GATEWAY, exclusiveGatewayValidator); + validatorMap.put(FlowElementType.CALL_ACTIVITY, callActivityValidator); List elementPlugins = pluginManager.getPluginsFor(ElementPlugin.class); elementPlugins.forEach(elementPlugin -> validatorMap.put(elementPlugin.getFlowElementType(), elementPlugin.getElementValidator())); } diff --git a/engine/src/main/resources/turbo.db.create/turbo.mysql.sql b/engine/src/main/resources/turbo.db.create/turbo.mysql.sql index 663265e2..b15f8f63 100644 --- a/engine/src/main/resources/turbo.db.create/turbo.mysql.sql +++ b/engine/src/main/resources/turbo.db.create/turbo.mysql.sql @@ -90,7 +90,7 @@ CREATE TABLE IF NOT EXISTS `ei_node_instance` `instance_data_id` varchar(128) NOT NULL DEFAULT '' COMMENT '实例数据id', `flow_deploy_id` varchar(128) NOT NULL DEFAULT '' COMMENT '流程模型部署id', `node_key` varchar(64) NOT NULL DEFAULT '' COMMENT '节点唯一标识', - `node_type` int NOT NULL COMMENT '流程类型', + `node_type` int NOT NULL DEFAULT 0 COMMENT '流程类型', `source_node_key` varchar(64) NOT NULL DEFAULT '' COMMENT '上一个流程节点唯一标识', `tenant_id` varchar(16) NOT NULL DEFAULT '' COMMENT '业务方标识', `status` 
tinyint(4) NOT NULL DEFAULT '0' COMMENT '状态(1.处理成功 2.处理中 3.处理失败 4.处理已撤销)',

From 337286261031ef2c08bb5e6bbedbe986a22301f8 Mon Sep 17 00:00:00 2001
From: lanzhengjian
Date: Thu, 21 Nov 2024 18:44:42 +0800
Subject: [PATCH 3/4] remove TableField annotation on nodeType

---
 .../java/com/didiglobal/turbo/engine/entity/NodeInstancePO.java | 1 -
 1 file changed, 1 deletion(-)

diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/entity/NodeInstancePO.java b/engine/src/main/java/com/didiglobal/turbo/engine/entity/NodeInstancePO.java
index a9ee4c3e..e4e76b96 100644
--- a/engine/src/main/java/com/didiglobal/turbo/engine/entity/NodeInstancePO.java
+++ b/engine/src/main/java/com/didiglobal/turbo/engine/entity/NodeInstancePO.java
@@ -16,7 +16,6 @@ public class NodeInstancePO extends CommonPO {
     private String sourceNodeKey;
     private Integer status;
     private Date modifyTime;
-    @TableField(exist = false)
     private int nodeType;

     public String getFlowInstanceId() {

From 5de54e1ca67385905ca13670d53e78fb54be82a6 Mon Sep 17 00:00:00 2001
From: lanzhengjian
Date: Sun, 24 Nov 2024 19:04:14 +0800
Subject: [PATCH 4/4] add document

---
 CHANGELOG.md                                  |   4 +
 docs/Parallel&InclusiveGateway.md             | 108 ++++++++++
 docs/PluginDevelopGuide.md                    | 201 ++++++++++++++++++
 engine/pom.xml                                |   5 -
 .../engine/util/PluginSqlExecutorUtil.java    |  38 ++--
 5 files changed, 334 insertions(+), 22 deletions(-)
 create mode 100644 docs/Parallel&InclusiveGateway.md
 create mode 100644 docs/PluginDevelopGuide.md

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 19a4a59b..8137c0ab 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,10 @@ All notable changes to this project will be documented in this file.

 ## [1.2.0] - 2024-11-21

 Support plugin extension function
+- Add `node_type` field to the `ei_node_instance` table in the database to save the node type.
+- Add `NodeExecuteResult` inner class to the `RuntimeResult` class, and move the `activeTaskInstance` and `variables` fields to the inner class.
+- Add `properties` variable to the `CommonPO` entity class to store extended data.
+- Add `ExtendRuntimeContext` class to store extended branch context information.

 Support parallel gateway and inclusive gateway through plugins
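Of the changelog entries above, the `NodeExecuteResult` move is the one most likely to touch caller code. A rough migration sketch follows; the accessor names (`getNodeExecuteResults()`, `getActiveTaskInstance()`, `getVariables()`) and the `List<InstanceData>` element type are assumptions derived from the field names in the changelog, so check the 1.2.0 sources for the exact API.

```java
// Sketch only: accessor names and the variables type are assumed from the changelog.
void printActiveTasks(RuntimeResult result) {
    for (RuntimeResult.NodeExecuteResult nodeResult : result.getNodeExecuteResults()) {
        // Before 1.2.0 these lived directly on RuntimeResult:
        //   result.getActiveTaskInstance(); result.getVariables();
        NodeInstance activeTask = nodeResult.getActiveTaskInstance();
        List<InstanceData> variables = nodeResult.getVariables();
        System.out.println(activeTask + " -> " + variables);
    }
}
```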
diff --git a/docs/Parallel&InclusiveGateway.md b/docs/Parallel&InclusiveGateway.md
new file mode 100644
index 00000000..d01cef64
--- /dev/null
+++ b/docs/Parallel&InclusiveGateway.md
@@ -0,0 +1,108 @@
+## Parallel Gateway & Inclusive Gateway
+### 1. Overview
+This plugin equips Turbo with multi-branch parallel processing via a "parallel gateway" and an "inclusive gateway", letting developers handle branching flows flexibly inside a workflow.
+
+**🌟🌟🌟 Note**: neither the parallel gateway nor the inclusive gateway supports rolling a node back across the gateway.
+### 2. Features
+#### 2.1 Parallel gateway
+* Creates multiple parallel tasks at a flow node.
+* The flow continues downstream only after all branch tasks have completed.
+* Typical scenarios: kicking off several independent tasks at once, such as approvals or data processing.
+#### 2.2 Inclusive gateway
+* Selectively activates a subset of the branch tasks.
+* Branches that did execute are merged on completion; branches that were not taken do not block the main flow.
+* Typical scenarios: conditionally executing certain tasks, such as an approval chain that only runs under specific conditions.
+### 3. Plugin requirements
+* Turbo 1.2.0+
+### 4. Plugin configuration (plugin.properties)
+* Database connection
+```properties
+# JDBC config
+turbo.plugin.jdbc.url=jdbc:mysql://127.0.0.1:3306/t_engine?useUnicode=true&characterEncoding=UTF-8&zeroDateTimeBehavior=convertToNull&transformedBitIsBoolean=true&autoReconnect=true
+turbo.plugin.jdbc.username=username
+turbo.plugin.jdbc.password=password
+turbo.plugin.jdbc.driver=com.mysql.jdbc.Driver
+turbo.plugin.jdbc.maximumPoolSize=10
+```
+* Parallel and inclusive gateway node settings
+```properties
+# Custom NodeType values for the parallel and inclusive gateways. Defaults: parallel gateway 9, inclusive gateway 10.
+# Unless you intend to override one of Turbo's built-in element executors, do not use values 1-8.
+turbo.plugin.element_type.ParallelGatewayElementPlugin=9
+turbo.plugin.element_type.InclusiveGatewayElementPlugin=10
+# Switches for the two gateways. Default is true (enabled).
+turbo.plugin.support.ParallelGatewayElementPlugin=true
+turbo.plugin.support.InclusiveGatewayElementPlugin=true
+```
+### 5. Using the plugin
+#### 5.1 Branch merge strategies
+Both the parallel gateway and the inclusive gateway let you specify a branch merge strategy. The supported strategies are:
+* JoinAll (default): execution continues downstream once every branch task has reached the join node.
+* AnyOne: execution continues downstream as soon as any one branch task reaches the join node.
+* Custom: extend `com.didiglobal.turbo.plugin.executor.BranchMergeCustom`, override the `joinFirst` and `joinMerge` methods, and annotate the class with `@Primary`.
+#### 5.2 Data merge strategies
+Both gateways also let you specify how branch data is merged. The supported strategies are:
+* All (default): merge the data of all branches into a single Map and pass it to the downstream node as parameters. Note that on key collisions, data from a later-arriving branch overwrites that of an earlier one.
+* None: no merging; the data captured when the branches were forked is passed to the downstream node.
+* Custom: extend `com.didiglobal.turbo.plugin.executor.DataMergeCustom`, override the `merge` method, and annotate the class with `@Primary` (see the sketch below).
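To make the Custom data-merge option concrete, here is a minimal sketch. The exact override signature depends on how `DataMergeCustom` declares `merge` in your plugin version; the signature below (a list of per-branch variable maps in, one merged map out) and the class name are assumptions for illustration only.

```java
import org.springframework.context.annotation.Primary;
import org.springframework.stereotype.Component;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

@Component
@Primary // Marks this bean as the preferred DataMergeCustom implementation
public class NullSafeDataMergeCustom extends DataMergeCustom {

    // Assumed signature: merge the variable maps collected from each finished branch.
    @Override
    public Map<String, Object> merge(List<Map<String, Object>> branchVariables) {
        Map<String, Object> merged = new HashMap<>();
        for (Map<String, Object> vars : branchVariables) {
            // Later branches win on key collisions (as in the All strategy),
            // but null values never erase data that arrived earlier.
            vars.forEach((key, value) -> {
                if (value != null) {
                    merged.put(key, value);
                }
            });
        }
        return merged;
    }
}
```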
+#### 5.3 Parallel gateway node example
+```java
+{
+    ParallelGateway parallelGateway = new ParallelGateway();
+    // Set the node key, the node's unique identifier
+    parallelGateway.setKey("ParallelGateway_38ad233");
+    // Set the node type, 9 by default
+    parallelGateway.setType(ExtendFlowElementType.PARALLEL_GATEWAY);
+
+    List<String> egIncomings = new ArrayList<>();
+    egIncomings.add("SequenceFlow_2gugjee");
+    parallelGateway.setIncoming(egIncomings);
+
+    // Set multiple outgoing branches
+    List<String> egOutgoings = new ArrayList<>();
+    egOutgoings.add("SequenceFlow_12rbl6u");
+    egOutgoings.add("SequenceFlow_3ih7eta");
+    parallelGateway.setOutgoing(egOutgoings);
+
+    Map<String, Object> properties = new HashMap<>();
+    Map<String, String> forkJoinMatch = new HashMap<>();
+    // Record the fork node of the branch pair
+    forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK, "ParallelGateway_38ad233");
+    // Record the join node of the branch pair
+    forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.JOIN, "ParallelGateway_10lo44j");
+    properties.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK_JOIN_MATCH, JSONArray.toJSON(forkJoinMatch));
+    parallelGateway.setProperties(properties);
+}
+```
+#### 5.4 Inclusive gateway node example
+```java
+{
+    InclusiveGateway inclusiveGateway = new InclusiveGateway();
+    // Set the node key, the node's unique identifier
+    inclusiveGateway.setKey("InclusiveGateway_3a1nn9f");
+    // Set the node type, 10 by default
+    inclusiveGateway.setType(ExtendFlowElementType.INCLUSIVE_GATEWAY);
+
+    // Multiple incoming branches
+    List<String> egIncomings = new ArrayList<>();
+    egIncomings.add("SequenceFlow_1h65e8t");
+    egIncomings.add("SequenceFlow_25kdv36");
+    inclusiveGateway.setIncoming(egIncomings);
+
+    List<String> egOutgoings = new ArrayList<>();
+    egOutgoings.add("SequenceFlow_3jkd63g");
+    inclusiveGateway.setOutgoing(egOutgoings);
+
+    Map<String, Object> properties = new HashMap<>();
+    Map<String, String> forkJoinMatch = new HashMap<>();
+    // Record the fork node of the branch pair
+    forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK, "InclusiveGateway_1djgrgp");
+    // Record the join node of the branch pair
+    forkJoinMatch.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.JOIN, "InclusiveGateway_3a1nn9f");
+    properties.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.FORK_JOIN_MATCH, JSONArray.toJSON(forkJoinMatch));
+    // Set the branch merge strategy (configured on the join node)
+    properties.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.BRANCH_MERGE, MergeStrategy.BRANCH_MERGE.ANY_ONE);
+    // Set the branch data merge strategy (configured on the join node)
+    properties.put(com.didiglobal.turbo.plugin.common.Constants.ELEMENT_PROPERTIES.DATA_MERGE, MergeStrategy.DATA_MERGE.NONE);
+    inclusiveGateway.setProperties(properties);
+}
+```
\ No newline at end of file
diff --git a/docs/PluginDevelopGuide.md b/docs/PluginDevelopGuide.md
new file mode 100644
index 00000000..9b786530
--- /dev/null
+++ b/docs/PluginDevelopGuide.md
@@ -0,0 +1,201 @@
+## Turbo Plugin Development Guide
+
+### 1. **Overview**
+Turbo supports plugin-based extension: developers can implement custom logic in plugins without touching the core framework code. Plugins are loaded through the SPI mechanism and can be discovered and loaded dynamically at runtime.
+
+Scenarios currently supported by plugins include:
+- Adding processing for new element (node) types
+- Using a custom ID generator
+- Using a custom expression calculator
+---
+### 2. **Plugin structure and requirements**
+#### Plugin directory layout
+```
+src/
+└── main/
+    └── resources/
+        ├── plugin.properties
+        └── META-INF/
+            └── services/
+                ├── com.didiglobal.turbo.engine.plugin.ElementPlugin
+                ├── com.didiglobal.turbo.engine.plugin.ExpressionCalculatorPlugin
+                └── com.didiglobal.turbo.engine.plugin.IdGeneratorPlugin
+```
+#### Interfaces to implement, by plugin type
+**Top-level plugin interface**:
+```java
+public interface Plugin {
+    // Recommended key format for plugin switches: turbo.plugin.support.${pluginName}
+    String PLUGIN_SUPPORT_PREFIX = "turbo.plugin.support.";
+    // Recommended key format for plugin init-SQL files: turbo.plugin.init_sql.${pluginName}
+    String PLUGIN_INIT_SQL_FILE_PREFIX = "turbo.plugin.init_sql.";
+    /**
+     * Plugin name, unique identifier
+     */
+    String getName();
+    /**
+     * Plugin switch
+     */
+    Boolean support();
+    /**
+     * Plugin initialization
+     */
+    Boolean init();
+}
+```
+- **ElementPlugin**: implement this interface to add processing for a new element type.
+```java
+public interface ElementPlugin extends Plugin {
+    String ELEMENT_TYPE_PREFIX = "turbo.plugin.element_type.";
+    ElementExecutor getElementExecutor();
+    ElementValidator getElementValidator();
+    Integer getFlowElementType();
+}
+```
+- **ExpressionCalculatorPlugin**: implement this interface to use a custom expression calculator (a sketch follows at the end of this section).
+```java
+public interface ExpressionCalculatorPlugin extends Plugin {
+    ExpressionCalculator getExpressionCalculator();
+}
+```
+- **IdGeneratorPlugin**: implement this interface to use a custom ID generator.
+```java
+public interface IdGeneratorPlugin extends Plugin {
+    IdGenerator getIdGenerator();
+}
+```
+---
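To tie the pieces together for one of the smaller plugin types, the sketch below wires a custom expression calculator through `ExpressionCalculatorPlugin`. It assumes `ExpressionCalculator` is the engine's single-method interface with a `Boolean calculate(String expression, Map<String, Object> dataMap)` signature, as implemented by the built-in `GroovyExpressionCalculator`, and that it lives in the engine's util package; the class name and the placeholder evaluator are illustrative. Remember to also register the class in `META-INF/services/com.didiglobal.turbo.engine.plugin.ExpressionCalculatorPlugin`.

```java
import com.didiglobal.turbo.engine.plugin.ExpressionCalculatorPlugin;
import com.didiglobal.turbo.engine.util.ExpressionCalculator;

import java.util.Map;

public class MyExpressionCalculatorPlugin implements ExpressionCalculatorPlugin {

    @Override
    public String getName() {
        return "MyExpressionCalculatorPlugin";
    }

    @Override
    public Boolean support() {
        // A real plugin would honor turbo.plugin.support.MyExpressionCalculatorPlugin here.
        return true;
    }

    @Override
    public Boolean init() {
        return true;
    }

    @Override
    public ExpressionCalculator getExpressionCalculator() {
        return new ExpressionCalculator() {
            // Assumed signature; replace the body with a real evaluator (SpEL, Aviator, ...).
            @Override
            public Boolean calculate(String expression, Map<String, Object> dataMap) {
                return Boolean.TRUE;
            }
        };
    }
}
```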
+### 3. **Development workflow**
+- Create a project (with Maven/Gradle, or add a module to an existing project).
+- Implement a plugin interface or extend a plugin base class.
+- Write the configuration files declaring initialization and other plugin metadata.
+- Test the plugin.
+
+#### Step 1: Create the plugin project
+Build the plugin project with Maven:
+```shell
+mvn archetype:generate -DgroupId=com.example.plugin -DartifactId=MyPlugin
+```
+
+#### Step 2: Implement the plugin
+Example:
+```java
+public class MyPlugin implements IdGeneratorPlugin {
+    @Override
+    public String getName() {
+        return "MyPluginName";
+    }
+    @Override
+    public Boolean support() {
+        return true;
+    }
+    @Override
+    public Boolean init() {
+        System.out.println("MyPlugin initialized");
+        return true;
+    }
+    @Override
+    public IdGenerator getIdGenerator() {
+        return new MyDefinedIdGenerator();
+    }
+}
+```
+
+#### Step 3: Add configuration files and specify the plugin class to load
+Define the plugin's required settings, such as the path of its init script, in `src/main/resources/plugin.properties`:
+```
+turbo.plugin.init_sql.ParallelGatewayElementPlugin=sql/parallelGateway.sql
+```
+Under `src/main/resources/META-INF/services/`, create a file named after the fully qualified plugin interface and point it at the implementation class.
+
+For example, create `src/main/resources/META-INF/services/com.didiglobal.turbo.engine.plugin.ElementPlugin` containing:
+```
+com.didiglobal.turbo.plugin.ParallelGatewayElementPlugin
+```
+#### Step 4: Test the plugin
+- Initialization tests:
+  - Check that the plugin's initialization logic executes correctly.
+  - Verify that the plugin loads its configuration (e.g. plugin.properties or other config files) correctly.
+- Functional tests:
+  - Invoke the plugin's main methods and verify the output matches expectations.
+  - If the plugin depends on external interfaces or services, check that it connects and fetches data properly.
+---
+### 4. **Plugin loading mechanism**
+#### Plugin discovery and loading
+The host application automatically discovers and loads plugins through SPI during initialization. Make sure that:
+- A file named after the fully qualified plugin interface exists under `src/main/resources/META-INF/services/`.
+- That file contains the fully qualified name of the plugin implementation class.
+#### Plugin startup sequence
+1. Load all plugins via `ServiceLoader`.
+2. Call each plugin's `getName` method and check for plugin name conflicts.
+3. Call each plugin's `support` method to decide whether the plugin should be used.
+   1. For element plugins, the `getFlowElementType` method is also called to check for element-type conflicts.
+4. Call each plugin's `init` method to perform initialization (see the sketch below).
+---
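The startup sequence above is plain JDK SPI plus some bookkeeping. Here is a condensed sketch of that loop, written against nothing but the `Plugin` interface from section 2; Turbo's actual `DefaultPluginManager` does more (per-type grouping, configuration lookup), so treat this as the shape, not the implementation.

```java
import com.didiglobal.turbo.engine.plugin.Plugin;

import java.util.HashMap;
import java.util.Map;
import java.util.ServiceLoader;

public final class PluginLoaderSketch {

    public static Map<String, Plugin> loadPlugins() {
        Map<String, Plugin> active = new HashMap<>();
        // 1. Discover every registered Plugin implementation via SPI.
        for (Plugin plugin : ServiceLoader.load(Plugin.class)) {
            // 2. Reject duplicate plugin names.
            if (active.containsKey(plugin.getName())) {
                throw new IllegalStateException("Duplicate plugin name: " + plugin.getName());
            }
            // 3. Skip plugins whose switch is off.
            if (!Boolean.TRUE.equals(plugin.support())) {
                continue;
            }
            // 4. Initialize; keep the plugin only if init succeeds.
            if (Boolean.TRUE.equals(plugin.init())) {
                active.put(plugin.getName(), plugin);
            }
        }
        return active;
    }
}
```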
+### 5. **Plugin DAO extension**
+To keep its DAO layer lean, Turbo provides no direct extension points there. For plugins that need to extend the existing DAO layer anyway, Turbo offers an extension path based on a MyBatis interceptor.
+#### Step 1: Implement the `CustomOperationHandler` interface
+Example:
+```java
+public class ParallelNodeInstanceHandler implements CustomOperationHandler {
+    private static final Logger LOGGER = LoggerFactory.getLogger(ParallelNodeInstanceHandler.class);
+
+    @Override
+    public void handle(SqlCommandType commandType, MappedStatement mappedStatement, Object parameterObject, Object originalResult, SqlSessionFactory sqlSessionFactory) {
+        SqlSession sqlSession = sqlSessionFactory.openSession();
+        try {
+            ParallelNodeInstanceMapper mapper = sqlSession.getMapper(ParallelNodeInstanceMapper.class);
+            switch (commandType) {
+                case INSERT:
+                    handleInsert(parameterObject, mapper);
+                    break;
+                case UPDATE:
+                    handleUpdate(parameterObject, mapper);
+                    break;
+                case DELETE:
+                    handleDelete(parameterObject, mapper);
+                    break;
+                case SELECT:
+                    handleSelect(originalResult, mapper);
+                    break;
+                default:
+                    LOGGER.warn("Unhandled command type: {}", commandType);
+                    break;
+            }
+        } catch (Exception e) {
+            LOGGER.error("Exception occurred during handling. CommandType={} | ParameterObject={} | OriginalResult={}",
+                commandType, parameterObject, originalResult, e);
+        } finally {
+            sqlSession.close();
+        }
+    }
+}
+```
+The private `handleInsert`/`handleUpdate`/`handleDelete`/`handleSelect` helpers are omitted here for brevity; see the full handler in the parallel plugin linked in section 6, and the illustrative sketch after step 2.
+#### Step 2: Register the custom operation handler and bind the PO type it handles
+Example:
+```java
+@Configuration
+@ComponentScan("com.didiglobal.turbo.plugin")
+@MapperScan("com.didiglobal.turbo.plugin.dao")
+@EnableAutoConfiguration(exclude = {DruidDataSourceAutoConfigure.class})
+public class ParallelPluginConfig {
+
+    @PostConstruct
+    public void init() {
+        CustomOperationHandlerRegistry.registerHandler(EntityPOEnum.NODE_INSTANCE, new ParallelNodeInstanceHandler());
+        CustomOperationHandlerRegistry.registerHandler(EntityPOEnum.NODE_INSTANCE_LOG, new ParallelNodeInstanceLogHandler());
+    }
+}
+```
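To give a feel for what one of the omitted helpers from step 1 might do, here is a hypothetical `handleInsert`. It is not the parallel plugin's actual code: the property key `"executeId"`, the `ParallelNodeInstancePO` setters, the `getProperties()` accessor (backed by the `properties` field added to `CommonPO` in 1.2.0), and the MyBatis-Plus style `insert` on the mapper are all assumptions chosen to illustrate the pattern of persisting plugin-owned columns alongside the engine's own write.

```java
// Hypothetical sketch; see the real handler in the parallel plugin for the authoritative version.
private void handleInsert(Object parameterObject, ParallelNodeInstanceMapper mapper) {
    // Only engine writes of node instances are of interest to this handler.
    if (!(parameterObject instanceof NodeInstancePO)) {
        return;
    }
    NodeInstancePO po = (NodeInstancePO) parameterObject;
    // Extended data travels on CommonPO#properties (accessor name assumed).
    Object executeId = po.getProperties().get("executeId"); // "executeId" is an assumed key
    if (executeId == null) {
        return;
    }
    // Persist the plugin-owned columns in the plugin's own table.
    ParallelNodeInstancePO extendPo = new ParallelNodeInstancePO(); // setters assumed
    extendPo.setNodeInstanceId(po.getNodeInstanceId());
    extendPo.setExecuteId(String.valueOf(executeId));
    mapper.insert(extendPo);
}
```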
+---
+### 6. **Sample plugin**
+[Parallel gateway plugin](../parallel-plugin/src/main/java/com/didiglobal/turbo/plugin/ParallelGatewayElementPlugin.java)
+
+---
+### 7. **Plugin-related configuration**
+Below are the configuration conventions we hope plugin developers will follow.
+
+| Key | Purpose | Example | Description |
+|-----|---------|---------|-------------|
+| turbo.plugin.support.${pluginName} | Plugin switch | turbo.plugin.support.ParallelGatewayElementPlugin=false | Controls the return value of the `support` method; returns true by default |
+| turbo.plugin.init_sql.${pluginName} | Database init-script path | turbo.plugin.init_sql.ParallelGatewayElementPlugin=sql/parallelGateway.sql | Points at the initialization script; the script should be idempotent |
+| turbo.plugin.element_type.${pluginName} | Element node type | turbo.plugin.element_type.ParallelGatewayElementPlugin=9 | Lets plugin users choose the element node type themselves, so multiple plugins do not clash by using the same type |
\ No newline at end of file
diff --git a/engine/pom.xml b/engine/pom.xml
index 6d42d172..8ff73a84 100644
--- a/engine/pom.xml
+++ b/engine/pom.xml
@@ -83,11 +83,6 @@
         <dependency>
             <groupId>com.google.guava</groupId>
             <artifactId>guava</artifactId>
         </dependency>
-        <dependency>
-            <groupId>com.zaxxer</groupId>
-            <artifactId>HikariCP</artifactId>
-            <version>3.4.5</version>
-        </dependency>
diff --git a/engine/src/main/java/com/didiglobal/turbo/engine/util/PluginSqlExecutorUtil.java b/engine/src/main/java/com/didiglobal/turbo/engine/util/PluginSqlExecutorUtil.java
index 9e2212e1..4d3176c9 100644
--- a/engine/src/main/java/com/didiglobal/turbo/engine/util/PluginSqlExecutorUtil.java
+++ b/engine/src/main/java/com/didiglobal/turbo/engine/util/PluginSqlExecutorUtil.java
@@ -1,7 +1,6 @@
 package com.didiglobal.turbo.engine.util;

-import com.zaxxer.hikari.HikariConfig;
-import com.zaxxer.hikari.HikariDataSource;
+import com.alibaba.druid.pool.DruidDataSource;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.util.StringUtils;
@@ -18,7 +17,7 @@ public class PluginSqlExecutorUtil {

     private static final Logger LOGGER = LoggerFactory.getLogger(PluginSqlExecutorUtil.class);

-    private static HikariDataSource dataSource;
+    private static DruidDataSource dataSource;

     private static final String JDBC_URL = "turbo.plugin.jdbc.url";
     private static final String USERNAME = "turbo.plugin.jdbc.username";
     private static final String PASSWORD = "turbo.plugin.jdbc.password";
@@ -36,14 +35,19 @@ public class PluginSqlExecutorUtil {
                 dataSource = null;
             } else {
                 properties.load(PluginSqlExecutorUtil.class.getClassLoader().getResourceAsStream("plugin.properties"));
-                HikariConfig config = new HikariConfig();
-                config.setJdbcUrl(properties.getProperty(JDBC_URL));
-                config.setUsername(properties.getProperty(USERNAME));
-                config.setPassword(properties.getProperty(PASSWORD));
-                config.setDriverClassName(properties.getProperty(DRIVER_CLASS_NAME));
-                config.setMaximumPoolSize(Integer.parseInt(properties.getProperty(MAX_POOL_SIZE, "10")));
-                if (validateConfig(config)) {
-                    dataSource = new HikariDataSource(config);
+                dataSource = new DruidDataSource();
+                dataSource.setUrl(properties.getProperty(JDBC_URL));
+                dataSource.setUsername(properties.getProperty(USERNAME));
+                dataSource.setPassword(properties.getProperty(PASSWORD));
+                dataSource.setDriverClassName(properties.getProperty(DRIVER_CLASS_NAME));
+                // Configure the Druid pool characteristics
+                dataSource.setInitialSize(1);
+                dataSource.setMinIdle(1);
+                dataSource.setMaxActive(Integer.parseInt(properties.getProperty(MAX_POOL_SIZE, "10")));
+                dataSource.setMaxWait(60000);
+                // Validate the configuration
+                if (!validateConfig(dataSource)) {
+                    dataSource = null;
                 }
             }
         } catch (IOException e) {
@@ -51,22 +55,22 @@ public class PluginSqlExecutorUtil {
         }
     }

-    private static boolean validateConfig(HikariConfig config) {
-        if (config == null) {
+    private static boolean validateConfig(DruidDataSource dataSource) {
+        if (dataSource == null) {
             return false;
         }
-        if (StringUtils.isEmpty(config.getJdbcUrl())) {
+        if (StringUtils.isEmpty(dataSource.getUrl())) {
             LOGGER.error("Plugin JDBC URL is empty");
             return false;
         }
-        if (StringUtils.isEmpty(config.getUsername())) {
+        if (StringUtils.isEmpty(dataSource.getUsername())) {
             LOGGER.error("Plugin JDBC username is empty");
             return false;
         }
-        if (StringUtils.isEmpty(config.getPassword())) {
+        if (StringUtils.isEmpty(dataSource.getPassword())) {
             LOGGER.warn("Plugin JDBC password is empty");
         }
-        if (StringUtils.isEmpty(config.getDriverClassName())) {
+        if (StringUtils.isEmpty(dataSource.getDriverClassName())) {
             LOGGER.error("Plugin JDBC driver class name is empty");
             return false;
         }