diff --git a/src/main/java/io/orkes/conductor/client/TaskClient.java b/src/main/java/io/orkes/conductor/client/TaskClient.java
index caea457e..825e3fd5 100644
--- a/src/main/java/io/orkes/conductor/client/TaskClient.java
+++ b/src/main/java/io/orkes/conductor/client/TaskClient.java
@@ -13,5 +13,28 @@
 package io.orkes.conductor.client;
 
+import com.netflix.conductor.common.metadata.tasks.TaskResult;
+import com.netflix.conductor.common.run.Workflow;
 
-public class TaskClient extends com.netflix.conductor.client.http.TaskClient {}
+public abstract class TaskClient extends com.netflix.conductor.client.http.TaskClient {
+
+    /**
+     * Update the task status and output based on the given workflow id and task reference name
+     * @param workflowId Workflow Id
+     * @param taskReferenceName Reference name of the task to be updated
+     * @param status Status of the task
+     * @param output Output for the task
+     */
+    public abstract void updateTask(String workflowId, String taskReferenceName, TaskResult.Status status, Object output);
+
+    /**
+     * Update the task status and output based on the given workflow id and task reference name, and return the updated workflow
+     * @param workflowId Workflow Id
+     * @param taskReferenceName Reference name of the task to be updated
+     * @param status Status of the task
+     * @param output Output for the task
+     * @return Status of the workflow after updating the task
+     */
+    public abstract Workflow updateTaskSync(String workflowId, String taskReferenceName, TaskResult.Status status, Object output);
+
+}
diff --git a/src/main/java/io/orkes/conductor/client/http/OrkesTaskClient.java b/src/main/java/io/orkes/conductor/client/http/OrkesTaskClient.java
index ba311923..68eeb169 100644
--- a/src/main/java/io/orkes/conductor/client/http/OrkesTaskClient.java
+++ b/src/main/java/io/orkes/conductor/client/http/OrkesTaskClient.java
@@ -12,22 +12,27 @@
  */
 package io.orkes.conductor.client.http;
 
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
 
+import com.netflix.conductor.common.config.ObjectMapperProvider;
 import com.netflix.conductor.common.metadata.tasks.PollData;
 import com.netflix.conductor.common.metadata.tasks.Task;
 import com.netflix.conductor.common.metadata.tasks.TaskExecLog;
 import com.netflix.conductor.common.metadata.tasks.TaskResult;
 import com.netflix.conductor.common.run.SearchResult;
 import com.netflix.conductor.common.run.TaskSummary;
+import com.netflix.conductor.common.run.Workflow;
 
 import io.orkes.conductor.client.ApiClient;
 import io.orkes.conductor.client.TaskClient;
 import io.orkes.conductor.client.grpc.GrpcTaskClient;
 import io.orkes.conductor.client.http.api.TaskResourceApi;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
+
 public class OrkesTaskClient extends TaskClient implements AutoCloseable{
 
     protected ApiClient apiClient;
@@ -36,6 +41,8 @@ public class OrkesTaskClient extends TaskClient implements AutoCloseable{
 
     private GrpcTaskClient grpcTaskClient;
 
+    private ObjectMapper objectMapper = new ObjectMapperProvider().getObjectMapper();
+
     public OrkesTaskClient(ApiClient apiClient) {
         this.apiClient = apiClient;
         this.taskResourceApi = new TaskResourceApi(apiClient);
@@ -93,6 +100,42 @@ public void updateTask(TaskResult taskResult) {
         }
     }
+
+    /**
+     * Update the task status and output based on the given workflow id and task reference name
+     * @param workflowId Workflow Id
+     * @param taskReferenceName Reference name of the task to be updated
+     * @param status Status of the task
+     * @param output Output for the task
+     */
+    public void updateTask(String workflowId, String taskReferenceName, TaskResult.Status status, Object output) {
+        Map<String, Object> outputMap = new HashMap<>();
+        try {
+            outputMap = objectMapper.convertValue(output, Map.class);
+        } catch (Exception e) {
+            outputMap.put("result", output);
+        }
+        taskResourceApi.updateTaskByRefName(outputMap, workflowId, taskReferenceName, status.toString());
+    }
+
+    /**
+     * Update the task status and output based on the given workflow id and task reference name, and return the updated workflow
+     * @param workflowId Workflow Id
+     * @param taskReferenceName Reference name of the task to be updated
+     * @param status Status of the task
+     * @param output Output for the task
+     * @return Status of the workflow after updating the task
+     */
+    public Workflow updateTaskSync(String workflowId, String taskReferenceName, TaskResult.Status status, Object output) {
+        Map<String, Object> outputMap = new HashMap<>();
+        try {
+            outputMap = objectMapper.convertValue(output, Map.class);
+        } catch (Exception e) {
+            outputMap.put("result", output);
+        }
+        return taskResourceApi.updateTaskSync(outputMap, workflowId, taskReferenceName, status.toString());
+    }
+
     @Override
     public Optional evaluateAndUploadLargePayload(
             Map taskOutputData, String taskType) {
diff --git a/src/main/java/io/orkes/conductor/client/http/api/TaskResourceApi.java b/src/main/java/io/orkes/conductor/client/http/api/TaskResourceApi.java
index f24c9aa1..d03ffdbf 100644
--- a/src/main/java/io/orkes/conductor/client/http/api/TaskResourceApi.java
+++ b/src/main/java/io/orkes/conductor/client/http/api/TaskResourceApi.java
@@ -25,6 +25,7 @@
 import com.netflix.conductor.common.metadata.tasks.Task;
 import com.netflix.conductor.common.metadata.tasks.TaskExecLog;
 import com.netflix.conductor.common.metadata.tasks.TaskResult;
+import com.netflix.conductor.common.run.Workflow;
 
 import io.orkes.conductor.client.ApiClient;
 import io.orkes.conductor.client.http.*;
@@ -1824,7 +1825,7 @@ private ApiResponse> sizeWithHttpInfo(List taskType
      * @return Call to execute
      * @throws ApiException If fail to serialize the request body object
      */
-    public com.squareup.okhttp.Call updateTaskCall(
+    private com.squareup.okhttp.Call updateTaskCall(
            TaskResult taskResult,
            final ProgressResponseBody.ProgressListener progressListener,
            final ProgressRequestBody.ProgressRequestListener progressRequestListener)
@@ -1928,33 +1929,22 @@ private ApiResponse updateTaskWithHttpInfo(TaskResult taskResult) throws
         return apiClient.execute(call, localVarReturnType);
     }
 
-    /**
-     * Build call for updateTask1
-     *
-     * @param body (required)
-     * @param workflowId (required)
-     * @param taskRefName (required)
-     * @param status (required)
-     * @param workerId (optional)
-     * @param progressListener Progress listener
-     * @param progressRequestListener Progress request listener
-     * @return Call to execute
-     * @throws ApiException If fail to serialize the request body object
-     */
-    public com.squareup.okhttp.Call updateTask1Call(
+    private com.squareup.okhttp.Call updateTaskByRefNameCall(
            Map body,
            String workflowId,
            String taskRefName,
            String status,
            String workerId,
-            final ProgressResponseBody.ProgressListener progressListener,
-            final ProgressRequestBody.ProgressRequestListener progressRequestListener)
+            boolean sync)
            throws ApiException {
         Object localVarPostBody = body;
-
+        String path = "/tasks/{workflowId}/{taskRefName}/{status}";
+        if(sync) {
+            path += "/sync";
+        }
         // create path and map variables
         String localVarPath =
-                "/tasks/{workflowId}/{taskRefName}/{status}"
+                path
                        .replaceAll(
                                "\\{"
+ "workflowId" + "\\}", apiClient.escapeString(workflowId.toString())) @@ -1985,28 +1975,7 @@ public com.squareup.okhttp.Call updateTask1Call( final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes); localVarHeaderParams.put("Content-Type", localVarContentType); - if (progressListener != null) { - apiClient - .getHttpClient() - .networkInterceptors() - .add( - new com.squareup.okhttp.Interceptor() { - @Override - public com.squareup.okhttp.Response intercept( - com.squareup.okhttp.Interceptor.Chain chain) - throws IOException { - com.squareup.okhttp.Response originalResponse = - chain.proceed(chain.request()); - return originalResponse - .newBuilder() - .body( - new ProgressResponseBody( - originalResponse.body(), - progressListener)) - .build(); - } - }); - } + String[] localVarAuthNames = new String[] {"api_key"}; return apiClient.buildCall( @@ -2018,7 +1987,7 @@ public com.squareup.okhttp.Response intercept( localVarHeaderParams, localVarFormParams, localVarAuthNames, - progressRequestListener); + null); } private com.squareup.okhttp.Call updateTask1ValidateBeforeCall( @@ -2026,8 +1995,7 @@ private com.squareup.okhttp.Call updateTask1ValidateBeforeCall( String workflowId, String taskRefName, String status, - final ProgressResponseBody.ProgressListener progressListener, - final ProgressRequestBody.ProgressRequestListener progressRequestListener) + boolean sync) throws ApiException { // verify the required parameter 'body' is set if (body == null) { @@ -2051,14 +2019,13 @@ private com.squareup.okhttp.Call updateTask1ValidateBeforeCall( } com.squareup.okhttp.Call call = - updateTask1Call( + updateTaskByRefNameCall( body, workflowId, taskRefName, status, - null, - progressListener, - progressRequestListener); + getIdentity(), + sync); return call; } @@ -2073,10 +2040,42 @@ private com.squareup.okhttp.Call updateTask1ValidateBeforeCall( * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the * response body */ + @Deprecated public String updateTask1( Map body, String workflowId, String taskRefName, String status) throws ApiException { - ApiResponse resp = updateTask1WithHttpInfo(body, workflowId, taskRefName, status); + Type localVarReturnType = new TypeToken() {}.getType(); + ApiResponse resp = updateTask1WithHttpInfo(body, workflowId, taskRefName, status, false, localVarReturnType); + return resp.getData(); + } + + /** + * + * @param output Task Output + * @param workflowId Workflow Id + * @param taskRefName Reference name of the task to be updated + * @param status Status + * @return Task Id + * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response + */ + public String updateTaskByRefName(Map output, String workflowId, String taskRefName, String status) throws ApiException { + Type localVarReturnType = new TypeToken() {}.getType(); + ApiResponse resp = updateTask1WithHttpInfo(output, workflowId, taskRefName, status, false, localVarReturnType); + return resp.getData(); + } + + /** + * + * @param output Task Output + * @param workflowId Workflow Id + * @param taskRefName Reference name of the task to be updated + * @param status Status + * @return Status of the workflow after updating the task + * @throws ApiException If fail to call the API, e.g. 
server error or cannot deserialize the response + */ + public Workflow updateTaskSync(Map output, String workflowId, String taskRefName, String status) throws ApiException { + Type localVarReturnType = new TypeToken() {}.getType(); + ApiResponse resp = updateTask1WithHttpInfo(output, workflowId, taskRefName, status, true, localVarReturnType); return resp.getData(); } @@ -2091,12 +2090,18 @@ public String updateTask1( * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the * response body */ - private ApiResponse updateTask1WithHttpInfo( - Map body, String workflowId, String taskRefName, String status) + private ApiResponse updateTask1WithHttpInfo(Map body, String workflowId, String taskRefName, String status, boolean sync, Type returnType) throws ApiException { com.squareup.okhttp.Call call = - updateTask1ValidateBeforeCall(body, workflowId, taskRefName, status, null, null); - Type localVarReturnType = new TypeToken() {}.getType(); + updateTask1ValidateBeforeCall(body, workflowId, taskRefName, status, sync); + return apiClient.execute(call, returnType); + } + + private ApiResponse updateTaskSyncWithHttpInfo(Map body, String workflowId, String taskRefName, String status, boolean sync) + throws ApiException { + com.squareup.okhttp.Call call = + updateTask1ValidateBeforeCall(body, workflowId, taskRefName, status, sync); + Type localVarReturnType = new TypeToken() {}.getType(); return apiClient.execute(call, localVarReturnType); } diff --git a/src/test/java/io/orkes/conductor/client/api/TaskUpdateTests.java b/src/test/java/io/orkes/conductor/client/api/TaskUpdateTests.java new file mode 100644 index 00000000..86c92c98 --- /dev/null +++ b/src/test/java/io/orkes/conductor/client/api/TaskUpdateTests.java @@ -0,0 +1,150 @@ +package io.orkes.conductor.client.api; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.netflix.conductor.common.config.ObjectMapperProvider; +import com.netflix.conductor.common.metadata.tasks.Task; +import com.netflix.conductor.common.metadata.tasks.TaskResult; +import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; +import com.netflix.conductor.common.metadata.workflow.WorkflowDef; +import com.netflix.conductor.common.run.Workflow; +import io.orkes.conductor.client.MetadataClient; +import io.orkes.conductor.client.OrkesClients; +import io.orkes.conductor.client.TaskClient; +import io.orkes.conductor.client.WorkflowClient; +import io.orkes.conductor.client.http.ApiException; +import io.orkes.conductor.client.http.OrkesTaskClient; +import io.orkes.conductor.client.util.ApiUtil; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.testcontainers.shaded.com.google.common.util.concurrent.Uninterruptibles; + +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.math.BigDecimal; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +import static org.junit.jupiter.api.Assertions.*; + +public class TaskUpdateTests { + + private static TaskClient taskClient; + + private static WorkflowClient workflowClient; + + private static MetadataClient metadataClient; + + private static String workflowName = ""; + + private static List tasks = null; + + @BeforeAll + public static void setup() throws IOException { + OrkesClients orkesClients = ApiUtil.getOrkesClient(); + taskClient = orkesClients.getTaskClient(); + metadataClient = 
orkesClients.getMetadataClient(); + workflowClient = orkesClients.getWorkflowClient(); + InputStream is = TaskUpdateTests.class.getResourceAsStream("/sdk_test.json"); + ObjectMapper om = new ObjectMapperProvider().getObjectMapper(); + WorkflowDef workflowDef = om.readValue(new InputStreamReader(is), WorkflowDef.class); + metadataClient.registerWorkflowDef(workflowDef, true); + workflowName = workflowDef.getName(); + tasks = workflowDef.collectTasks().stream().map(task -> task.getTaskReferenceName()).collect(Collectors.toList()); + } + @Test + public void testUpdateByRefName() { + StartWorkflowRequest request = new StartWorkflowRequest(); + request.setName(workflowName); + request.setVersion(1); + request.setInput(new HashMap<>()); + String workflowId = workflowClient.startWorkflow(request); + System.out.println(workflowId); + Workflow workflow = workflowClient.getWorkflow(workflowId, true); + assertNotNull(workflow); + + int maxLoop = 10; + int count = 0; + while (!workflow.getStatus().isTerminal() && count < maxLoop) { + workflow.getTasks().stream().filter(t -> !t.getStatus().isTerminal()).forEach(running -> { + String referenceName = running.getReferenceTaskName(); + System.out.println("Updating " + referenceName); + taskClient.updateTask(workflowId, referenceName, TaskResult.Status.COMPLETED, Map.of("k", "value")); + }); + count++; + Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); + workflow = workflowClient.getWorkflow(workflowId, true); + } + assertTrue(count < maxLoop); + workflow = workflowClient.getWorkflow(workflowId, true); + assertEquals(Workflow.WorkflowStatus.COMPLETED, workflow.getStatus()); + } + + @Test + public void testUpdateByRefNameSync() { + StartWorkflowRequest request = new StartWorkflowRequest(); + request.setName(workflowName); + request.setVersion(1); + request.setInput(new HashMap<>()); + String workflowId = workflowClient.startWorkflow(request); + System.out.println(workflowId); + Workflow workflow = workflowClient.getWorkflow(workflowId, true); + assertNotNull(workflow); + + int maxLoop = 10; + int count = 0; + while (!workflow.getStatus().isTerminal() && count < maxLoop) { + List runningTasks = workflow.getTasks().stream() + .filter(task -> !task.getStatus().isTerminal() && task.getTaskType().equals("there_is_no_worker")) + .map(t -> t.getReferenceTaskName()) + .collect(Collectors.toList()); + System.out.println("Running tasks: " + runningTasks); + if(runningTasks.isEmpty()) { + Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); + count++; + continue; + } + for (String referenceName : runningTasks) { + System.out.println("Updating " + referenceName); + try { + workflow = taskClient.updateTaskSync(workflowId, referenceName, TaskResult.Status.COMPLETED, new TaskOutput()); + System.out.println("Workflow: " + workflow); + } catch (ApiException apiException) { + //404 == task was updated already and there are no pending tasks + if(apiException.getStatusCode() != 404) { + fail(apiException); + } + } + } + count++; + } + assertTrue(count < maxLoop); + workflow = workflowClient.getWorkflow(workflowId, true); + assertEquals(Workflow.WorkflowStatus.COMPLETED, workflow.getStatus()); + } + + private static class TaskOutput { + private String name = "hello"; + + private BigDecimal value = BigDecimal.TEN; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public BigDecimal getValue() { + return value; + } + + public void setValue(BigDecimal value) { + this.value = value; + } + } +} diff 
--git a/src/test/java/io/orkes/conductor/client/e2e/AbstractMultiUserTests.java b/src/test/java/io/orkes/conductor/client/e2e/AbstractMultiUserTests.java deleted file mode 100644 index 89d5da81..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/AbstractMultiUserTests.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright 2022 Orkes, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import java.util.Map; -import java.util.UUID; - -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; - -import com.netflix.conductor.common.config.ObjectMapperProvider; - -import io.orkes.conductor.client.*; -import io.orkes.conductor.client.http.*; -import io.orkes.conductor.client.model.*; -import io.orkes.conductor.client.util.ApiUtil; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.squareup.okhttp.OkHttpClient; -import com.squareup.okhttp.Request; -import com.squareup.okhttp.Response; -import lombok.SneakyThrows; -import lombok.extern.slf4j.Slf4j; - -import static io.orkes.conductor.client.util.ApiUtil.*; - -@Slf4j -public abstract class AbstractMultiUserTests { - - protected static AuthorizationClient authorizationClient; - protected static String applicationId; - - protected static MetadataClient metadataClient; - - protected static ApiClient apiUser1Client; - - protected static ApiClient apiUser2Client; - - protected static OkHttpClient client = new OkHttpClient(); - - protected static ObjectMapper objectMapper = new ObjectMapperProvider().getObjectMapper(); - - protected static String user1, user2; - - protected static String user1AppId, user2AppId; - - @BeforeAll - public static void setup() { - authorizationClient = ApiUtil.getOrkesClient().getAuthorizationClient(); - apiUser1Client = ApiUtil.getUser1Client(); - apiUser2Client = ApiUtil.getUser2Client(); - apiUser1Client.setReadTimeout(10_000); - apiUser2Client.setReadTimeout(10_000); - - CreateOrUpdateApplicationRequest request = new CreateOrUpdateApplicationRequest(); - request.setName("test-" + UUID.randomUUID().toString()); - ConductorApplication app = authorizationClient.createApplication(request); - applicationId = app.getId(); - - user1 = getUserId(apiUser1Client.getToken()); - user2 = getUserId(apiUser2Client.getToken()); - log.info("user1 {}", user1); - log.info("user2 {}", user2); - - user1AppId = getEnv(USER1_APP_ID); - user2AppId = getEnv(USER2_APP_ID); - - log.info("user1AppId {}", user1AppId); - log.info("user2AppId {}", user2AppId); - - } - @AfterAll - public static void cleanup() { - if(applicationId != null) { - authorizationClient.deleteApplication(applicationId); - } - } - - @SneakyThrows - protected static String getUserId(String token) { - Request request = new Request.Builder().url(ApiUtil.getBasePath() + "/token/userInfo") - .addHeader("X-Authorization", token) - .addHeader("accept", "application/json") - .build(); - Response response = client.newCall(request).execute(); - byte[] data = response.body().bytes(); - Map userInfo = objectMapper.readValue(data, Map.class); - return userInfo.get("id").toString(); - } -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/BackoffTests.java b/src/test/java/io/orkes/conductor/client/e2e/BackoffTests.java deleted file mode 100644 index f1d385ba..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/BackoffTests.java +++ /dev/null @@ -1,217 +0,0 @@ -/* - * Copyright 2023 Orkes, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import java.util.*; -import java.util.concurrent.TimeUnit; - -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; - -import com.netflix.conductor.client.http.MetadataClient; -import com.netflix.conductor.client.http.TaskClient; -import com.netflix.conductor.client.http.WorkflowClient; -import com.netflix.conductor.client.worker.Worker; -import com.netflix.conductor.common.config.ObjectMapperProvider; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.sdk.workflow.def.ConductorWorkflow; -import com.netflix.conductor.sdk.workflow.def.tasks.SimpleTask; - -import io.orkes.conductor.client.AuthorizationClient; -import io.orkes.conductor.client.OrkesClients; -import io.orkes.conductor.client.automator.TaskRunnerConfigurer; -import io.orkes.conductor.client.http.OrkesTaskClient; -import io.orkes.conductor.sdk.examples.ApiUtil; - -import com.fasterxml.jackson.databind.ObjectMapper; -import lombok.SneakyThrows; -import lombok.extern.slf4j.Slf4j; - -import static org.junit.jupiter.api.Assertions.*; -import static org.testcontainers.shaded.org.awaitility.Awaitility.await; - -@Slf4j -public class BackoffTests { - - private static OrkesClients orkesClients; - - private static AuthorizationClient authClient; - - private static WorkflowClient workflowClient; - - private static TaskClient taskClient; - - private static MetadataClient metadataClient; - - private static ObjectMapper objectMapper = new ObjectMapperProvider().getObjectMapper(); - - private static final String WORKFLOW_NAME = "retry_logic_test"; - - - - private static TaskRunnerConfigurer configurer; - - @SneakyThrows - @BeforeAll - public static void beforeAll() { - orkesClients = ApiUtil.getOrkesClient(); - authClient = orkesClients.getAuthorizationClient(); - workflowClient = orkesClients.getWorkflowClient(); - taskClient = orkesClients.getTaskClient(); - metadataClient = orkesClients.getMetadataClient(); - - ConductorWorkflow workflow = new ConductorWorkflow(null); - workflow.setName(WORKFLOW_NAME); - workflow.setVersion(1); - - - List taskDefs = new ArrayList<>(); - int i = 0; - for (TaskDef.RetryLogic value : TaskDef.RetryLogic.values()) { - TaskDef taskDef = new TaskDef(); - taskDef.setName("retry_" + i++); - taskDef.setRetryLogic(value); - taskDef.setBackoffScaleFactor(2); - taskDef.setRetryDelaySeconds(2); - taskDef.setRetryCount(3); - taskDefs.add(taskDef); - - workflow.add(new SimpleTask(taskDef.getName(), taskDef.getName())); - } - - metadataClient.registerTaskDefs(taskDefs); - metadataClient.updateWorkflowDefs(Arrays.asList(workflow.toWorkflowDef())); - startWorkers(taskDefs); - - } - - @AfterAll - public static void cleanup() { - if(configurer != null) { - configurer.shutdown(); - } - } - - @Test - public void testRetryLogic() { - StartWorkflowRequest request = new StartWorkflowRequest(); - request.setName(WORKFLOW_NAME); - 
request.setVersion(1); - request.setInput(Map.of()); - String id = workflowClient.startWorkflow(request); - log.info("Started Retry logic workflow {} ", id); - - await().pollInterval(3, TimeUnit.SECONDS).atMost(1, TimeUnit.MINUTES).untilAsserted(()->{ - Workflow workflow = workflowClient.getWorkflow(id, true); - assertNotNull(workflow); - log.info("Workflow status {}", workflow.getStatus()); - assertEquals(Workflow.WorkflowStatus.COMPLETED, workflow.getStatus()); - }); - - Workflow workflow = workflowClient.getWorkflow(id, true); - assertNotNull(workflow); - assertEquals(9, workflow.getTasks().size()); - List tasks = workflow.getTasks(); - assertTaskRetryLogic(tasks); - } - - private void assertTaskRetryLogic(List runs) { - for (int i = 1; i < runs.size(); i++) { - Task task = runs.get(i); - TaskDef.RetryLogic retryLogic = task.getTaskDefinition().get().getRetryLogic(); - long delay = task.getTaskDefinition().get().getRetryDelaySeconds() * 1000; - long backoffRate = task.getTaskDefinition().get().getBackoffScaleFactor(); - switch (retryLogic) { - case FIXED: - long diff = task.getStartTime() - task.getScheduledTime(); - long expectedDelay = delay; - //+- 300 millis - assertTrue(diff < (expectedDelay + 300) && diff >= expectedDelay, "delay " + diff + " not within the range of expected " + expectedDelay + ", taskId = " + task.getReferenceTaskName() + ":" + task.getRetryCount()); - break; - case LINEAR_BACKOFF: - diff = task.getStartTime() - task.getScheduledTime(); - expectedDelay = task.getRetryCount() * delay * backoffRate; - //+- 300 millis - assertTrue(diff < (expectedDelay + 300) && diff >= expectedDelay, "delay " + diff + " not within the range of expected " + expectedDelay + ", taskId = " + task.getReferenceTaskName() + ":" + task.getRetryCount()); - break; - case EXPONENTIAL_BACKOFF: - diff = task.getStartTime() - task.getScheduledTime(); - if(task.getRetryCount() == 0) { - expectedDelay = 0; - } else { - expectedDelay = (long) (Math.pow(2, task.getRetryCount() - 1) * (delay)); - } - //+- 300 millis - assertTrue(diff < (expectedDelay + 300) && diff >= expectedDelay, "delay " + diff + " not within the range of expected " + expectedDelay + ", taskId = " + task.getReferenceTaskName() + ":" + task.getRetryCount()); - break; - default: - break; - } - } - } - - - - - private static void startWorkers(List tasks) { - List workers = new ArrayList<>(); - for (TaskDef task : tasks) { - workers.add(new TestWorker(task.getName())); - } - - configurer = new TaskRunnerConfigurer - .Builder((OrkesTaskClient)taskClient, workers) - .withThreadCount(1) - .withTaskPollTimeout(10) - .build(); - configurer.init(); - } - - - - private static class TestWorker implements Worker { - - private String name; - - public TestWorker(String name) { - this.name = name; - } - @Override - public String getTaskDefName() { - return name; - } - - @Override - public TaskResult execute(Task task) { - TaskResult result = new TaskResult(task); - result.getOutputData().put("number", 42); - if(task.getRetryCount() < 2) { - result.setStatus(TaskResult.Status.FAILED); - } else { - result.setStatus(TaskResult.Status.COMPLETED); - } - - return result; - } - - @Override - public int getPollingInterval() { - return 1; - } - } -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/DoWhileWithDomainTests.java b/src/test/java/io/orkes/conductor/client/e2e/DoWhileWithDomainTests.java deleted file mode 100644 index 931b1ae9..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/DoWhileWithDomainTests.java +++ /dev/null @@ -1,151 
+0,0 @@ -/* - * Copyright 2022 Orkes, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import java.util.Arrays; -import java.util.concurrent.TimeUnit; - -import org.apache.commons.lang3.RandomStringUtils; -import org.junit.jupiter.api.Test; - -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.Workflow; - -import io.orkes.conductor.client.ApiClient; -import io.orkes.conductor.client.MetadataClient; -import io.orkes.conductor.client.TaskClient; -import io.orkes.conductor.client.WorkflowClient; -import io.orkes.conductor.client.http.OrkesMetadataClient; -import io.orkes.conductor.client.http.OrkesTaskClient; -import io.orkes.conductor.client.http.OrkesWorkflowClient; -import io.orkes.conductor.client.model.WorkflowStatus; -import io.orkes.conductor.client.util.ApiUtil; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.testcontainers.shaded.org.awaitility.Awaitility.await; - -public class DoWhileWithDomainTests { - - @Test - public void testSubWorkflow0version() { - ApiClient apiClient = ApiUtil.getApiClientWithCredentials(); - WorkflowClient workflowClient = new OrkesWorkflowClient(apiClient); - MetadataClient metadataClient = new OrkesMetadataClient(apiClient); - TaskClient taskClient = new OrkesTaskClient(apiClient); - - String parentWorkflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - - // Register workflow - registerInlineWorkflowDef(parentWorkflowName, metadataClient); - - // Trigger workflow - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(parentWorkflowName); - startWorkflowRequest.setVersion(1); - - String workflowId = workflowClient.startWorkflow(startWorkflowRequest); - - // User1 should be able to complete task/workflow - String taskId = workflowClient.getWorkflow(workflowId, true).getTasks().get(0).getTaskId(); - TaskResult taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskResult.setTaskId(taskId); - taskClient.updateTask(taskResult); - - // Workflow will be still running state - await().atMost(1, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, true); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.RUNNING.name()); - assertEquals(workflow1.getTasks().get(0).getStatus(), Task.Status.COMPLETED); - assertEquals(workflow1.getTasks().get(1).getStatus(), Task.Status.IN_PROGRESS); - }); - - Workflow workflow = workflowClient.getWorkflow(workflowId, true); - String subWorkflowId = workflow.getTasks().get(1).getSubWorkflowId(); - taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(subWorkflowId); - 
taskResult.setStatus(TaskResult.Status.COMPLETED); - taskResult.setTaskId(workflow.getTasks().get(1).getTaskId()); - taskClient.updateTask(taskResult); - - // Wait for workflow to get completed - await().atMost(3, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, true); - assertEquals(workflow1.getTasks().get(0).getStatus(), Task.Status.COMPLETED); - assertEquals(workflow1.getTasks().get(1).getStatus(), Task.Status.COMPLETED); - }); - - // Cleanup - metadataClient.unregisterWorkflowDef(parentWorkflowName, 1); - } - - private void registerInlineWorkflowDef(String workflowName, MetadataClient metadataClient1) { - TaskDef taskDef = new TaskDef("dt1"); - taskDef.setOwnerEmail("test@orkes.io"); - - TaskDef taskDef2 = new TaskDef("dt2"); - taskDef2.setOwnerEmail("test@orkes.io"); - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setTaskReferenceName("dt2"); - workflowTask.setName("dt2"); - workflowTask.setTaskDefinition(taskDef2); - workflowTask.setWorkflowTaskType(TaskType.SIMPLE); - - WorkflowTask inline = new WorkflowTask(); - inline.setTaskReferenceName("dt1"); - inline.setName("dt1"); - inline.setTaskDefinition(taskDef); - inline.setWorkflowTaskType(TaskType.SIMPLE); - - WorkflowTask inlineSubworkflow = new WorkflowTask(); - inlineSubworkflow.setTaskReferenceName("dynamicFork"); - inlineSubworkflow.setName("dynamicFork"); - inlineSubworkflow.setTaskDefinition(taskDef); - inlineSubworkflow.setWorkflowTaskType(TaskType.SUB_WORKFLOW); - - WorkflowDef inlineWorkflowDef = new WorkflowDef(); - inlineWorkflowDef.setName("inline_test_sub_workflow"); - inlineWorkflowDef.setVersion(1); - inlineWorkflowDef.setTimeoutSeconds(600); - inlineWorkflowDef.setTimeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF); - inlineWorkflowDef.setTasks(Arrays.asList(inline)); - SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); - subWorkflowParams.setName("inline_test_sub_workflow"); - subWorkflowParams.setVersion(1); - subWorkflowParams.setWorkflowDef(inlineWorkflowDef); - inlineSubworkflow.setSubWorkflowParam(subWorkflowParams); - - - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName(workflowName); - workflowDef.setOwnerEmail("test@orkes.io"); - workflowDef.setTimeoutSeconds(600); - workflowDef.setTimeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF); - workflowDef.setInputParameters(Arrays.asList("value", "inlineValue")); - workflowDef.setDescription("Workflow to test inline sub_workflow definition"); - workflowDef.setTasks(Arrays.asList( workflowTask, inlineSubworkflow)); - try { - metadataClient1.registerWorkflowDef(workflowDef); - metadataClient1.registerTaskDefs(Arrays.asList(taskDef, taskDef2)); - } catch (Exception e){} - } -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/DynamicForkOptionalTests.java b/src/test/java/io/orkes/conductor/client/e2e/DynamicForkOptionalTests.java deleted file mode 100644 index e3a604cb..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/DynamicForkOptionalTests.java +++ /dev/null @@ -1,301 +0,0 @@ -/* - * Copyright 2022 Orkes, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -import org.junit.jupiter.api.Test; - -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.Workflow; - -import io.orkes.conductor.client.*; -import io.orkes.conductor.client.http.OrkesMetadataClient; -import io.orkes.conductor.client.http.OrkesTaskClient; -import io.orkes.conductor.client.http.OrkesWorkflowClient; -import io.orkes.conductor.client.model.*; -import io.orkes.conductor.client.util.ApiUtil; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.testcontainers.shaded.org.awaitility.Awaitility.await; - -public class DynamicForkOptionalTests { - - @Test - public void testTaskDynamicForkOptional() { - - ApiClient adminClient = ApiUtil.getApiClientWithCredentials(); - WorkflowClient workflowAdminClient = new OrkesWorkflowClient(adminClient); - MetadataClient metadataAdminClient =new OrkesMetadataClient(adminClient); - TaskClient taskClient = new OrkesTaskClient(adminClient); - String workflowName1 = "DynamicFanInOutTest"; - - // Register workflow - registerWorkflowDef(workflowName1, metadataAdminClient); - - // Trigger workflow - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName1); - startWorkflowRequest.setVersion(1); - - String workflowId = workflowAdminClient.startWorkflow(startWorkflowRequest); - Workflow workflow = workflowAdminClient.getWorkflow(workflowId, true); - - workflow = workflowAdminClient.getWorkflow(workflowId, true); - TaskResult taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setTaskId(workflow.getTasks().get(0).getTaskId()); - taskResult.setStatus(TaskResult.Status.COMPLETED); - - WorkflowTask workflowTask2 = new WorkflowTask(); - workflowTask2.setName("integration_task_2"); - workflowTask2.setTaskReferenceName("xdt1"); - - WorkflowTask workflowTask3 = new WorkflowTask(); - workflowTask3.setName("integration_task_3"); - workflowTask3.setTaskReferenceName("xdt2"); - workflowTask3.setOptional(true); - - Map output = new HashMap<>(); - Map> input = new HashMap<>(); - input.put("xdt1", Map.of("k1", "v1")); - input.put("xdt2", Map.of("k2", "v2")); - output.put("dynamicTasks", Arrays.asList(workflowTask2, workflowTask3)); - output.put("dynamicTasksInput", input); - taskResult.setOutputData(output); - taskClient.updateTask(taskResult); - - await().atMost(1, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowAdminClient.getWorkflow(workflowId, true); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.RUNNING.name()); - 
assertTrue(workflow1.getTasks().size() == 5); - assertEquals(workflow1.getTasks().get(2).getStatus().name(), Task.Status.SCHEDULED.name()); - assertEquals(workflow1.getTasks().get(3).getStatus().name(), Task.Status.SCHEDULED.name()); - assertEquals(workflow1.getTasks().get(4).getStatus().name(), Task.Status.IN_PROGRESS.name()); - }); - - workflow = workflowAdminClient.getWorkflow(workflowId, true); - taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setTaskId(workflow.getTasks().get(3).getTaskId()); - taskResult.setStatus(TaskResult.Status.FAILED); - taskClient.updateTask(taskResult); - - //Since the tasks are marked as optional. The workflow should be in running state. - await().atMost(2, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowAdminClient.getWorkflow(workflowId, true); - assertTrue(workflow1.getTasks().size() == 6); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.RUNNING.name()); - assertEquals(workflow1.getTasks().get(2).getStatus().name(), Task.Status.SCHEDULED.name()); - assertEquals(workflow1.getTasks().get(3).getStatus().name(), Task.Status.FAILED.name()); - assertEquals(workflow1.getTasks().get(4).getStatus().name(), Task.Status.IN_PROGRESS.name()); - assertEquals(workflow1.getTasks().get(5).getStatus().name(), Task.Status.SCHEDULED.name()); - - }); - - workflow = workflowAdminClient.getWorkflow(workflowId, true); - taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setTaskId(workflow.getTasks().get(2).getTaskId()); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskClient.updateTask(taskResult); - - workflow = workflowAdminClient.getWorkflow(workflowId, true); - taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setTaskId(workflow.getTasks().get(5).getTaskId()); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskClient.updateTask(taskResult); - - // Workflow should be completed - await().atMost(100, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowAdminClient.getWorkflow(workflowId, true); - assertTrue(workflow1.getTasks().size() == 6); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.COMPLETED.name()); - assertEquals(workflow1.getTasks().get(2).getStatus().name(), Task.Status.COMPLETED.name()); - assertEquals(workflow1.getTasks().get(3).getStatus().name(), Task.Status.FAILED.name()); - assertEquals(workflow1.getTasks().get(4).getStatus().name(), Task.Status.COMPLETED.name()); - assertEquals(workflow1.getTasks().get(4).getStatus().name(), Task.Status.COMPLETED.name()); - }); - - metadataAdminClient.unregisterWorkflowDef(workflowName1, 1); - } - - @Test - public void testTaskDynamicForkRetryCount() { - - ApiClient adminClient = ApiUtil.getApiClientWithCredentials(); - WorkflowClient workflowAdminClient = new OrkesWorkflowClient(adminClient); - MetadataClient metadataAdminClient =new OrkesMetadataClient(adminClient); - TaskClient taskClient = new OrkesTaskClient(adminClient); - String workflowName1 = "DynamicFanInOutTest"; - - // Register workflow - registerWorkflowDef(workflowName1, metadataAdminClient); - - // Trigger workflow - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName1); - startWorkflowRequest.setVersion(1); - - String workflowId = workflowAdminClient.startWorkflow(startWorkflowRequest); - Workflow workflow = workflowAdminClient.getWorkflow(workflowId, true); - - workflow 
= workflowAdminClient.getWorkflow(workflowId, true); - TaskResult taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setTaskId(workflow.getTasks().get(0).getTaskId()); - taskResult.setStatus(TaskResult.Status.COMPLETED); - - WorkflowTask workflowTask2 = new WorkflowTask(); - workflowTask2.setName("integration_task_2"); - workflowTask2.setTaskReferenceName("xdt1"); - workflowTask2.setOptional(true); - workflowTask2.setSink("kitchen_sink"); - - WorkflowTask workflowTask3 = new WorkflowTask(); - workflowTask3.setName("integration_task_3"); - workflowTask3.setTaskReferenceName("xdt2"); - workflowTask3.setRetryCount(2); - - Map output = new HashMap<>(); - Map> input = new HashMap<>(); - input.put("xdt1", Map.of("k1", "v1")); - input.put("xdt2", Map.of("k2", "v2")); - output.put("dynamicTasks", Arrays.asList(workflowTask2, workflowTask3)); - output.put("dynamicTasksInput", input); - taskResult.setOutputData(output); - taskClient.updateTask(taskResult); - - await().atMost(1, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowAdminClient.getWorkflow(workflowId, true); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.RUNNING.name()); - assertTrue(workflow1.getTasks().size() == 5); - assertEquals(workflow1.getTasks().get(2).getStatus().name(), Task.Status.SCHEDULED.name()); - assertEquals(workflow1.getTasks().get(2).getWorkflowTask().getSink(), "kitchen_sink"); - assertEquals(workflow1.getTasks().get(3).getStatus().name(), Task.Status.SCHEDULED.name()); - assertEquals(workflow1.getTasks().get(4).getStatus().name(), Task.Status.IN_PROGRESS.name()); - }); - - workflow = workflowAdminClient.getWorkflow(workflowId, true); - taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setTaskId(workflow.getTasks().get(3).getTaskId()); - taskResult.setStatus(TaskResult.Status.FAILED); - taskClient.updateTask(taskResult); - - //Since the retry count is 2 task will be retried. 
- await().atMost(3, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowAdminClient.getWorkflow(workflowId, true); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.RUNNING.name()); - assertTrue(workflow1.getTasks().size() == 6); - assertEquals(workflow1.getTasks().get(2).getStatus().name(), Task.Status.SCHEDULED.name()); - assertEquals(workflow1.getTasks().get(3).getStatus().name(), Task.Status.FAILED.name()); - assertEquals(workflow1.getTasks().get(4).getStatus().name(), Task.Status.IN_PROGRESS.name()); - assertEquals(workflow1.getTasks().get(5).getStatus().name(), Task.Status.SCHEDULED.name()); - }); - - workflow = workflowAdminClient.getWorkflow(workflowId, true); - taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setTaskId(workflow.getTasks().get(2).getTaskId()); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskClient.updateTask(taskResult); - - workflow = workflowAdminClient.getWorkflow(workflowId, true); - taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setTaskId(workflow.getTasks().get(5).getTaskId()); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskClient.updateTask(taskResult); - - // Workflow should be completed - await().atMost(1, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowAdminClient.getWorkflow(workflowId, true); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.COMPLETED.name()); - assertTrue(workflow1.getTasks().size() >= 6); - assertEquals(workflow1.getTasks().get(2).getStatus().name(), Task.Status.COMPLETED.name()); - assertEquals(workflow1.getTasks().get(3).getStatus().name(), Task.Status.FAILED.name()); - assertEquals(workflow1.getTasks().get(4).getStatus().name(), Task.Status.COMPLETED.name()); - assertEquals(workflow1.getTasks().get(5).getStatus().name(), Task.Status.COMPLETED.name()); - }); - - metadataAdminClient.unregisterWorkflowDef(workflowName1, 1); - - } - - private void registerWorkflowDef(String workflowName, MetadataClient metadataClient1) { - TaskDef taskDef = new TaskDef("dt1"); - taskDef.setOwnerEmail("test@orkes.io"); - - TaskDef taskDef4 = new TaskDef("integration_task_2"); - taskDef4.setOwnerEmail("test@orkes.io"); - - TaskDef taskDef3 = new TaskDef("integration_task_3"); - taskDef3.setOwnerEmail("test@orkes.io"); - - TaskDef taskDef2 = new TaskDef("dt2"); - taskDef2.setOwnerEmail("test@orkes.io"); - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setTaskReferenceName("dt2"); - workflowTask.setName("dt2"); - workflowTask.setTaskDefinition(taskDef2); - workflowTask.setWorkflowTaskType(TaskType.SIMPLE); - - WorkflowTask inline = new WorkflowTask(); - inline.setTaskReferenceName("dt1"); - inline.setName("dt1"); - inline.setTaskDefinition(taskDef); - inline.setWorkflowTaskType(TaskType.SIMPLE); - - WorkflowTask join = new WorkflowTask(); - join.setTaskReferenceName("join_dynamic"); - join.setName("join_dynamic"); - join.setWorkflowTaskType(TaskType.JOIN); - - WorkflowTask dynamicFork = new WorkflowTask(); - dynamicFork.setTaskReferenceName("dynamicFork"); - dynamicFork.setName("dynamicFork"); - dynamicFork.setTaskDefinition(taskDef); - dynamicFork.setWorkflowTaskType(TaskType.FORK_JOIN_DYNAMIC); - dynamicFork.setInputParameters(Map.of("dynamicTasks", "${dt1.output.dynamicTasks}", - "dynamicTasksInput", "${dt1.output.dynamicTasksInput}")); - dynamicFork.setDynamicForkTasksParam("dynamicTasks"); - 
dynamicFork.setDynamicForkTasksInputParamName("dynamicTasksInput"); - - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName(workflowName); - workflowDef.setOwnerEmail("test@orkes.io"); - workflowDef.setTimeoutSeconds(600); - workflowDef.setTimeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF); - workflowDef.setInputParameters(Arrays.asList("value", "inlineValue")); - workflowDef.setDescription("Workflow to test retry"); - workflowDef.setTasks(Arrays.asList( inline, dynamicFork, join)); - try { - metadataClient1.registerWorkflowDef(workflowDef); - metadataClient1.registerTaskDefs(Arrays.asList(taskDef, taskDef2, taskDef3, taskDef4)); - }catch (Exception e){} - } - -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/GraaljsTests.java b/src/test/java/io/orkes/conductor/client/e2e/GraaljsTests.java deleted file mode 100644 index 04e70b28..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/GraaljsTests.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright 2022 Orkes, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; - -import org.apache.commons.lang3.RandomStringUtils; -import org.junit.After; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; - -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.run.Workflow; - -import io.orkes.conductor.client.ApiClient; -import io.orkes.conductor.client.http.OrkesMetadataClient; -import io.orkes.conductor.client.http.OrkesTaskClient; -import io.orkes.conductor.client.http.OrkesWorkflowClient; -import io.orkes.conductor.client.model.WorkflowStatus; -import io.orkes.conductor.client.util.ApiUtil; - -import static io.orkes.conductor.client.util.RegistrationUtil.registerWorkflowDef; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.testcontainers.shaded.org.awaitility.Awaitility.await; - -public class GraaljsTests { - static ApiClient apiClient; - static io.orkes.conductor.client.WorkflowClient workflowClient; - static io.orkes.conductor.client.TaskClient taskClient; - static io.orkes.conductor.client.MetadataClient metadataClient; - - List workflowNames = new ArrayList<>(); - List taskNames = new ArrayList<>(); - - @BeforeAll - public static void init() { - apiClient = ApiUtil.getApiClientWithCredentials(); - workflowClient = new OrkesWorkflowClient(apiClient); - metadataClient =new OrkesMetadataClient(apiClient); - taskClient = new OrkesTaskClient(apiClient); - } - - @Test - public void testInfiniteExecution() throws ExecutionException, InterruptedException, TimeoutException { - String workflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String taskName1 = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String taskName2 = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - // Register workflow - registerWorkflowDef(workflowName, taskName1, taskName2, metadataClient); - WorkflowDef workflowDef = metadataClient.getWorkflowDef(workflowName, 1); - workflowDef.getTasks().get(0).setInputParameters(Map.of("evaluatorType", "graaljs", "expression", "function e() { while(true){} }; e();")); - metadataClient.registerWorkflowDef(workflowDef, true); - workflowNames.add(workflowName); - taskNames.add(taskName1); - taskNames.add(taskName2); - - // Trigger two workflows - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName); - startWorkflowRequest.setVersion(1); - - String workflowId = workflowClient.startWorkflow(startWorkflowRequest); - - // Wait for workflow to get failed since inline task will failed - await().atMost(1, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow = workflowClient.getWorkflow(workflowId, true); - assertEquals(workflow.getStatus().name(), WorkflowStatus.StatusEnum.FAILED.name()); - }); - } - - @After - public void cleanUp() { - for (String workflowName : workflowNames) { - try { - 
metadataClient.unregisterWorkflowDef(workflowName, 1); - } catch (Exception e) {} - } - for (String taskName : taskNames) { - try { - metadataClient.unregisterTaskDef(taskName); - } catch (Exception e) {} - } - } - - -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/GroupPermissionTests.java b/src/test/java/io/orkes/conductor/client/e2e/GroupPermissionTests.java deleted file mode 100644 index 330cc775..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/GroupPermissionTests.java +++ /dev/null @@ -1,184 +0,0 @@ -/* - * Copyright 2022 Orkes, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -import org.apache.commons.lang3.RandomStringUtils; -import org.junit.Assert; -import org.junit.jupiter.api.Test; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.Workflow; - -import io.orkes.conductor.client.*; -import io.orkes.conductor.client.http.*; -import io.orkes.conductor.client.model.*; - -import lombok.extern.slf4j.Slf4j; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.testcontainers.shaded.org.awaitility.Awaitility.await; - -@Slf4j -public class GroupPermissionTests extends AbstractMultiUserTests { - - private static MetadataClient metadataClient; - - @Test - public void testGroupRelatedPermissions() throws Exception { - WorkflowClient user1WorkflowClient = new OrkesWorkflowClient(apiUser1Client); - MetadataClient user1MetadataClient = new OrkesMetadataClient(apiUser1Client); - TaskClient user1TaskClient = new OrkesTaskClient(apiUser1Client); - - // Create user2 client and check access should not be there workflow1 - WorkflowClient user2WorkflowClient = new OrkesWorkflowClient(apiUser2Client); - MetadataClient user2MetadataClient = new OrkesMetadataClient(apiUser2Client); - TaskClient user2TaskClient = new OrkesTaskClient(apiUser2Client); - - String taskName1 = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String workflowName1 = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String tagKey = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String tagValue = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - - TagObject tagObject = new TagObject().type(TagObject.TypeEnum.METADATA).key(tagKey).value(tagValue); - - // Register workflow - WorkflowDef workflowDef = generateWorkflowDef(workflowName1, taskName1); - user1MetadataClient.registerWorkflowDef(workflowDef); - user1MetadataClient.registerTaskDefs(Arrays.asList(new TaskDef(taskName1))); - - // Tag workflow and task - user1MetadataClient.addWorkflowTag(tagObject, workflowName1); - user1MetadataClient.addTaskTag(tagObject, taskName1); - - // Trigger workflow - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName1); - startWorkflowRequest.setVersion(1); - - String workflowId = user1WorkflowClient.startWorkflow(startWorkflowRequest); - String finalWorkflowId = workflowId; - // User 2 should not have access to workflow or task. 
- Assert.assertThrows(ApiException.class, () -> user2TaskClient.pollTask(taskName1, "integration_test", null)); - Assert.assertThrows(ApiException.class, () -> user2WorkflowClient.getWorkflow(finalWorkflowId, false)); - - // User1 should be able to complete task/workflow - TaskResult taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskResult.setTaskId(user1WorkflowClient.getWorkflow(workflowId, true).getTasks().get(0).getTaskId()); - user1TaskClient.updateTask(taskResult); - - // Wait for workflow to get completed - await().atMost(1, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = user1WorkflowClient.getWorkflow(finalWorkflowId, false); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.COMPLETED.name()); - }); - - //Trigger workflow again. And give permissions so that user2 can execute workflow/task - workflowId = user1WorkflowClient.startWorkflow(startWorkflowRequest); - - user1MetadataClient.addWorkflowTag(tagObject, workflowName1); - - String groupName = "worker-test-group"; - // Create/Update group and add these two users in the group - Group group = authorizationClient.upsertGroup(getUpsertGroupRequest(), groupName); - authorizationClient.addUserToGroup(groupName, "conductoruser1@gmail.com"); - authorizationClient.addUserToGroup(groupName, "conductoruser2@gmail.com"); - - // Give permissions to tag in the group - AuthorizationRequest authorizationRequest = new AuthorizationRequest(); - authorizationRequest.setSubject(new SubjectRef().id(groupName).type(SubjectRef.TypeEnum.GROUP)); - authorizationRequest.setAccess(List.of(AuthorizationRequest.AccessEnum.READ, AuthorizationRequest.AccessEnum.EXECUTE, - AuthorizationRequest.AccessEnum.UPDATE, - AuthorizationRequest.AccessEnum.DELETE)); - authorizationRequest.setTarget(new TargetRef().id(tagKey + ":" + tagValue ).type(TargetRef.TypeEnum.TAG)); - authorizationClient.grantPermissions(authorizationRequest); - - //Grant permission to execute the task in user2 application. 
- authorizationRequest.setSubject(new SubjectRef().id("app:" + user2AppId).type(SubjectRef.TypeEnum.USER)); - authorizationClient.grantPermissions(authorizationRequest); - - String finalWorkflowId1 = workflowId; - await().atMost(5, TimeUnit.SECONDS).pollInterval(1, TimeUnit.SECONDS).untilAsserted(() -> { - try { - String id = user2WorkflowClient.getWorkflow(finalWorkflowId1, true).getTasks().get(0).getTaskId(); - TaskResult taskResult1 = new TaskResult(); - taskResult1.setWorkflowInstanceId(id); - taskResult1.setStatus(TaskResult.Status.COMPLETED); - taskResult1.setTaskId(user2WorkflowClient.getWorkflow(id, true).getTasks().get(0).getTaskId()); - user2TaskClient.updateTask(taskResult1); - }catch(Exception e){} - }); - - - - int retryAttemptsLimit = 5; - for (int retry = 0; retry < retryAttemptsLimit; retry += 1) { - try{ - // Wait for workflow to get completed - await().atMost(5, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = user2WorkflowClient.getWorkflow(finalWorkflowId, false); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.COMPLETED.name()); - }); - break; - } catch (Exception e) { - Thread.sleep((retry + 5) * 1000); - } - } - - - user1MetadataClient.unregisterWorkflowDef(workflowName1, 1); - user1MetadataClient.unregisterTaskDef(taskName1); - authorizationClient.deleteGroup(groupName); - authorizationClient.removePermissions(authorizationRequest); - authorizationRequest.setSubject(new SubjectRef().id(groupName).type(SubjectRef.TypeEnum.GROUP)); - authorizationClient.removePermissions(authorizationRequest); - } - - UpsertGroupRequest getUpsertGroupRequest() { - return new UpsertGroupRequest() - .description("Group used for SDK testing") - .roles(List.of(UpsertGroupRequest.RolesEnum.USER)); - } - - private WorkflowDef generateWorkflowDef(String workflowName, String taskName) { - TaskDef taskDef = new TaskDef(taskName); - taskDef.setTimeoutSeconds(10); - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setTaskReferenceName(taskName); - workflowTask.setName(taskName); - workflowTask.setTaskDefinition(taskDef); - workflowTask.setWorkflowTaskType(TaskType.SIMPLE); - workflowTask.setInputParameters(Map.of("value", "${workflow.input.value}", "order", "123")); - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName(workflowName); - workflowDef.setInputParameters(Arrays.asList("value", "inlineValue")); - workflowDef.setDescription("Workflow to monitor order state"); - workflowDef.setTasks(Arrays.asList(workflowTask)); - return workflowDef; - } - - - - -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/JavaSDKTests.java b/src/test/java/io/orkes/conductor/client/e2e/JavaSDKTests.java deleted file mode 100644 index e5ff12cd..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/JavaSDKTests.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright 2022 Orkes, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import java.math.BigDecimal; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; - -import org.junit.After; -import org.junit.jupiter.api.Test; - -import com.netflix.conductor.client.http.MetadataClient; -import com.netflix.conductor.client.http.TaskClient; -import com.netflix.conductor.client.http.WorkflowClient; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.sdk.workflow.def.ConductorWorkflow; -import com.netflix.conductor.sdk.workflow.def.tasks.SimpleTask; -import com.netflix.conductor.sdk.workflow.def.tasks.Switch; -import com.netflix.conductor.sdk.workflow.executor.WorkflowExecutor; -import com.netflix.conductor.sdk.workflow.task.WorkerTask; - -import io.orkes.conductor.client.ApiClient; -import io.orkes.conductor.client.OrkesClients; -import io.orkes.conductor.client.util.ApiUtil; - -import static org.junit.jupiter.api.Assertions.*; - -public class JavaSDKTests { - - private WorkflowExecutor executor; - - @Test - public void hello() { - ConductorWorkflow> workflow = new ConductorWorkflow<>(executor); - workflow.setName("sdk_integration_test"); - workflow.setVersion(1); - workflow.setVariables(new HashMap<>()); - - } - @Test - public void testSDK() throws ExecutionException, InterruptedException, TimeoutException { - ApiClient apiClient = ApiUtil.getApiClientWithCredentials(); - TaskClient taskClient = new OrkesClients(apiClient).getTaskClient(); - WorkflowClient workflowClient = new OrkesClients(apiClient).getWorkflowClient(); - MetadataClient metadataClient = new OrkesClients(apiClient).getMetadataClient(); - executor = new WorkflowExecutor(taskClient, workflowClient, metadataClient, 1000); - executor.initWorkers("io.orkes.conductor.client.e2e"); - - ConductorWorkflow> workflow = new ConductorWorkflow<>(executor); - workflow.setName("sdk_integration_test"); - workflow.setVersion(1); - workflow.setVariables(new HashMap<>()); - workflow.add(new SimpleTask("task1", "task1").input("name", "orkes")); - - Switch decision = new Switch("decide_ref", "${workflow.input.caseValue}"); - decision.switchCase("caseA", new SimpleTask("task1", "task1"), new SimpleTask("task1", "task11")); - decision.switchCase("caseB", new SimpleTask("task2", "task2")); - decision.defaultCase(new SimpleTask("task1", "default_task")); - - - CompletableFuture future = workflow.executeDynamic(new HashMap<>()); - assertNotNull(future); - Workflow run = future.get(20, TimeUnit.SECONDS); - assertNotNull(run); - assertEquals(Workflow.WorkflowStatus.COMPLETED, run.getStatus()); - assertEquals(1, run.getTasks().size()); - assertEquals("Hello, orkes", run.getTasks().get(0).getOutputData().get("greetings")); - } - - @After - public void cleanup() { - if(executor != null) { - executor.shutdown(); - } - } - - @WorkerTask("sum_numbers") - public BigDecimal sum(BigDecimal num1, BigDecimal num2) { - return num1.add(num2); - } - -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/SDKWorkers.java 
b/src/test/java/io/orkes/conductor/client/e2e/SDKWorkers.java deleted file mode 100644 index b61756b2..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/SDKWorkers.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2022 Orkes, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import com.netflix.conductor.sdk.workflow.task.InputParam; -import com.netflix.conductor.sdk.workflow.task.OutputParam; -import com.netflix.conductor.sdk.workflow.task.WorkerTask; - -public class SDKWorkers { - - @WorkerTask("task1") - public @OutputParam("greetings") String task1(@InputParam("name") String name) { - return "Hello, " + name; - } -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/SecretsPermissionTests.java b/src/test/java/io/orkes/conductor/client/e2e/SecretsPermissionTests.java deleted file mode 100644 index 048a1f15..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/SecretsPermissionTests.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright 2022 Orkes, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import java.util.Arrays; -import java.util.List; -import java.util.UUID; - -import org.apache.commons.lang3.RandomStringUtils; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -import io.orkes.conductor.client.*; -import io.orkes.conductor.client.http.*; -import io.orkes.conductor.client.model.*; - -import lombok.extern.slf4j.Slf4j; - -import static org.junit.Assert.*; - - -@Slf4j -public class SecretsPermissionTests extends AbstractMultiUserTests { - - @Test - public void testSecretsForUser2() { - SecretClient user1SecretClient = new OrkesSecretClient(apiUser1Client); - SecretClient user2SecretClient = new OrkesSecretClient(apiUser2Client); - - String secretKey = "secret_key_" + UUID.randomUUID(); - String secretValue = "secret_value"; - - user1SecretClient.putSecret(secretValue, secretKey); - - String tagKey = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String tagValue = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - TagObject tagObject = new TagObject().type(TagObject.TypeEnum.METADATA).key(tagKey).value(tagValue); - - // Tag secret - user1SecretClient.putTagForSecret(Arrays.asList(tagObject), secretKey); - - - // Give permissions to tag in the group - AuthorizationRequest authorizationRequest = new AuthorizationRequest(); - authorizationRequest.setSubject(new SubjectRef().id("app:" + user2AppId).type(SubjectRef.TypeEnum.USER)); - authorizationRequest.setAccess(List.of(AuthorizationRequest.AccessEnum.READ)); - authorizationRequest.setTarget(new TargetRef().id(tagKey + ":" + tagValue).type(TargetRef.TypeEnum.TAG)); - authorizationClient.grantPermissions(authorizationRequest); - - // Secret is accessible for user2 - Assertions.assertNotNull(user2SecretClient.getSecret(secretKey)); - - authorizationClient.removePermissions(authorizationRequest); - authorizationRequest.setSubject(new SubjectRef().id(user2AppId).type(SubjectRef.TypeEnum.USER)); - authorizationClient.removePermissions(authorizationRequest); - - user1SecretClient.deleteSecret(secretKey); - } - - @Test - public void testGrantTaskExecutePermissions() { - - } - - UpsertGroupRequest getUpsertGroupRequest() { - return new UpsertGroupRequest() - .description("Group used for SDK testing") - .roles(List.of(UpsertGroupRequest.RolesEnum.USER)); - } - -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/SubWorkflowInlineTests.java b/src/test/java/io/orkes/conductor/client/e2e/SubWorkflowInlineTests.java deleted file mode 100644 index 40c869db..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/SubWorkflowInlineTests.java +++ /dev/null @@ -1,151 +0,0 @@ -/* - * Copyright 2022 Orkes, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import java.util.Arrays; -import java.util.concurrent.TimeUnit; - -import org.apache.commons.lang3.RandomStringUtils; -import org.junit.jupiter.api.Test; - -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.Workflow; - -import io.orkes.conductor.client.ApiClient; -import io.orkes.conductor.client.MetadataClient; -import io.orkes.conductor.client.TaskClient; -import io.orkes.conductor.client.WorkflowClient; -import io.orkes.conductor.client.http.OrkesMetadataClient; -import io.orkes.conductor.client.http.OrkesTaskClient; -import io.orkes.conductor.client.http.OrkesWorkflowClient; -import io.orkes.conductor.client.model.WorkflowStatus; -import io.orkes.conductor.client.util.ApiUtil; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.testcontainers.shaded.org.awaitility.Awaitility.await; - -public class SubWorkflowInlineTests { - - @Test - public void testSubWorkflow0version() { - ApiClient apiClient = ApiUtil.getApiClientWithCredentials(); - WorkflowClient workflowClient = new OrkesWorkflowClient(apiClient); - MetadataClient metadataClient = new OrkesMetadataClient(apiClient); - TaskClient taskClient = new OrkesTaskClient(apiClient); - - String parentWorkflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - - // Register workflow - registerInlineWorkflowDef(parentWorkflowName, metadataClient); - - // Trigger workflow - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(parentWorkflowName); - startWorkflowRequest.setVersion(1); - - String workflowId = workflowClient.startWorkflow(startWorkflowRequest); - - // User1 should be able to complete task/workflow - String taskId = workflowClient.getWorkflow(workflowId, true).getTasks().get(0).getTaskId(); - TaskResult taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskResult.setTaskId(taskId); - taskClient.updateTask(taskResult); - - // Workflow will be still running state - await().atMost(1, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, true); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.RUNNING.name()); - assertEquals(workflow1.getTasks().get(0).getStatus(), Task.Status.COMPLETED); - assertEquals(workflow1.getTasks().get(1).getStatus(), Task.Status.IN_PROGRESS); - }); - - Workflow workflow = workflowClient.getWorkflow(workflowId, true); - String subWorkflowId = workflow.getTasks().get(1).getSubWorkflowId(); - taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(subWorkflowId); - 
taskResult.setStatus(TaskResult.Status.COMPLETED); - taskResult.setTaskId(workflow.getTasks().get(1).getTaskId()); - taskClient.updateTask(taskResult); - - // Wait for workflow to get completed - await().atMost(3, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, true); - assertEquals(workflow1.getTasks().get(0).getStatus(), Task.Status.COMPLETED); - assertEquals(workflow1.getTasks().get(1).getStatus(), Task.Status.COMPLETED); - }); - - // Cleanup - metadataClient.unregisterWorkflowDef(parentWorkflowName, 1); - } - - private void registerInlineWorkflowDef(String workflowName, MetadataClient metadataClient1) { - TaskDef taskDef = new TaskDef("dt1"); - taskDef.setOwnerEmail("test@orkes.io"); - - TaskDef taskDef2 = new TaskDef("dt2"); - taskDef2.setOwnerEmail("test@orkes.io"); - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setTaskReferenceName("dt2"); - workflowTask.setName("dt2"); - workflowTask.setTaskDefinition(taskDef2); - workflowTask.setWorkflowTaskType(TaskType.SIMPLE); - - WorkflowTask inline = new WorkflowTask(); - inline.setTaskReferenceName("dt1"); - inline.setName("dt1"); - inline.setTaskDefinition(taskDef); - inline.setWorkflowTaskType(TaskType.SIMPLE); - - WorkflowTask inlineSubworkflow = new WorkflowTask(); - inlineSubworkflow.setTaskReferenceName("dynamicFork"); - inlineSubworkflow.setName("dynamicFork"); - inlineSubworkflow.setTaskDefinition(taskDef); - inlineSubworkflow.setWorkflowTaskType(TaskType.SUB_WORKFLOW); - - WorkflowDef inlineWorkflowDef = new WorkflowDef(); - inlineWorkflowDef.setName("inline_test_sub_workflow"); - inlineWorkflowDef.setVersion(1); - inlineWorkflowDef.setTasks(Arrays.asList(inline)); - inlineWorkflowDef.setTimeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF); - inlineWorkflowDef.setTimeoutSeconds(600); - SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); - subWorkflowParams.setName("inline_test_sub_workflow"); - subWorkflowParams.setVersion(1); - subWorkflowParams.setWorkflowDef(inlineWorkflowDef); - inlineSubworkflow.setSubWorkflowParam(subWorkflowParams); - - - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName(workflowName); - workflowDef.setTimeoutSeconds(600); - workflowDef.setTimeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF); - workflowDef.setOwnerEmail("test@orkes.io"); - workflowDef.setInputParameters(Arrays.asList("value", "inlineValue")); - workflowDef.setDescription("Workflow to test inline sub_workflow definition"); - workflowDef.setTasks(Arrays.asList( workflowTask, inlineSubworkflow)); - try { - metadataClient1.registerWorkflowDef(workflowDef); - metadataClient1.registerTaskDefs(Arrays.asList(taskDef, taskDef2)); - } catch (Exception e){} - } -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/SubWorkflowPermissionTests.java b/src/test/java/io/orkes/conductor/client/e2e/SubWorkflowPermissionTests.java deleted file mode 100644 index c34680a8..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/SubWorkflowPermissionTests.java +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Copyright 2022 Orkes, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import java.util.List; -import java.util.concurrent.TimeUnit; - -import org.apache.commons.lang3.RandomStringUtils; -import org.junit.Assert; -import org.junit.jupiter.api.Test; - -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.run.Workflow; - -import io.orkes.conductor.client.*; -import io.orkes.conductor.client.http.*; -import io.orkes.conductor.client.model.*; -import io.orkes.conductor.client.util.ApiUtil; -import io.orkes.conductor.client.util.RegistrationUtil; - -import static io.orkes.conductor.client.util.ApiUtil.USER2_APP_ID; -import static io.orkes.conductor.client.util.ApiUtil.getEnv; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.testcontainers.shaded.org.awaitility.Awaitility.await; - -public class SubWorkflowPermissionTests { - - @Test - public void testSubWorkflowPermissionsForUser2() { - ApiClient apiUser1Client = ApiUtil.getUser1Client(); - WorkflowClient user1WorkflowClient = new OrkesWorkflowClient(apiUser1Client); - MetadataClient user1MetadataClient = new OrkesMetadataClient(apiUser1Client); - TaskClient user1TaskClient = new OrkesTaskClient(apiUser1Client); - - // Create user2 client and check access should not be there workflow1 - ApiClient apiUser2Client = ApiUtil.getUser2Client(); - WorkflowClient user2WorkflowClient = new OrkesWorkflowClient(apiUser2Client); - MetadataClient user2MetadataClient = new OrkesMetadataClient(apiUser2Client); - TaskClient user2TaskClient = new OrkesTaskClient(apiUser2Client); - - String taskName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String parentWorkflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String subWorkflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - - String tagKey = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String tagValue = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - - TagObject tagObject = new TagObject().type(TagObject.TypeEnum.METADATA).key(tagKey).value(tagValue); - - // Register workflow - RegistrationUtil.registerWorkflowWithSubWorkflowDef(parentWorkflowName, subWorkflowName, taskName, user1MetadataClient); - - // Tag workflow and task - user1MetadataClient.addWorkflowTag(tagObject, parentWorkflowName); - user1MetadataClient.addWorkflowTag(tagObject, subWorkflowName); - user1MetadataClient.addTaskTag(tagObject, taskName); - user1MetadataClient.addTaskTag(tagObject, subWorkflowName); - - // Trigger workflow - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(parentWorkflowName); - startWorkflowRequest.setVersion(1); - - String workflowId = user1WorkflowClient.startWorkflow(startWorkflowRequest); - String finalWorkflowId = workflowId; - // User 2 should not have access to workflow or task. 
- Assert.assertThrows(ApiException.class, () -> user2TaskClient.pollTask(taskName, "integration_test", null)); - Assert.assertThrows(ApiException.class, () -> user2WorkflowClient.getWorkflow(finalWorkflowId, false)); - - // User1 should be able to complete task/workflow - String subWorkflowId = user1WorkflowClient.getWorkflow(workflowId, true).getTasks().get(0).getSubWorkflowId(); - TaskResult taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(subWorkflowId); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskResult.setTaskId(user1WorkflowClient.getWorkflow(subWorkflowId, true).getTasks().get(0).getTaskId()); - user1TaskClient.updateTask(taskResult); - - // Wait for workflow to get completed - await().atMost(41, TimeUnit.SECONDS).pollInterval(1, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = user1WorkflowClient.getWorkflow(finalWorkflowId, false); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.COMPLETED.name()); - }); - - //Trigger workflow again. And give permissions so that user2 can execute workflow/task - workflowId = user1WorkflowClient.startWorkflow(startWorkflowRequest); - - user1MetadataClient.addWorkflowTag(tagObject, parentWorkflowName); - - ApiClient adminClient = ApiUtil.getApiClientWithCredentials(); - AuthorizationClient authorizationClient = new OrkesAuthorizationClient(adminClient); - - String groupName = "worker-test-group"; - try { - authorizationClient.deleteGroup(groupName); - } catch (Exception e) { - // Group does not exist. - } - // Create group and add these two users in the group - Group group = authorizationClient.upsertGroup(getUpsertGroupRequest(), groupName); - authorizationClient.addUserToGroup(groupName, "conductoruser1@gmail.com"); - authorizationClient.addUserToGroup(groupName, "conductoruser1@gmail.com"); - - // Give permissions to tag in the group - AuthorizationRequest authorizationRequest = new AuthorizationRequest(); - authorizationRequest.setSubject(new SubjectRef().id(groupName).type(SubjectRef.TypeEnum.GROUP)); - authorizationRequest.setAccess(List.of(AuthorizationRequest.AccessEnum.READ, AuthorizationRequest.AccessEnum.EXECUTE, - AuthorizationRequest.AccessEnum.UPDATE, - AuthorizationRequest.AccessEnum.DELETE)); - authorizationRequest.setTarget(new TargetRef().id(tagKey + ":" + tagValue).type(TargetRef.TypeEnum.TAG)); - authorizationClient.grantPermissions(authorizationRequest); - - // Grant permission to execute the task in user2 application. - authorizationRequest.setSubject(new SubjectRef().id(getEnv(USER2_APP_ID)).type(SubjectRef.TypeEnum.USER)); - authorizationClient.grantPermissions(authorizationRequest); - // User 2 should be able to query workflow information. - String finalWorkflowId1 = workflowId; - await().atMost(5, TimeUnit.SECONDS).untilAsserted(() -> { - try { - String id = user2WorkflowClient.getWorkflow(finalWorkflowId1, true).getTasks().get(0).getSubWorkflowId(); - - TaskResult taskResult1 = new TaskResult(); - taskResult1.setWorkflowInstanceId(id); - taskResult1.setStatus(TaskResult.Status.COMPLETED); - taskResult1.setTaskId(user2WorkflowClient.getWorkflow(id, true).getTasks().get(0).getTaskId()); - user2TaskClient.updateTask(taskResult1); - }catch(Exception e) { - // Server might take time to affect permission changes. 
- } - }); - - // Wait for workflow to get completed - await().atMost(1, TimeUnit.SECONDS).untilAsserted(() -> { - try { - Workflow workflow1 = user2WorkflowClient.getWorkflow(finalWorkflowId, false); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.COMPLETED.name()); - }catch(Exception e) { - - } - }); - - // Cleanup - try { - user1MetadataClient.unregisterWorkflowDef(parentWorkflowName, 1); - user1MetadataClient.unregisterWorkflowDef(subWorkflowName, 1); - user1MetadataClient.unregisterTaskDef(taskName); - authorizationClient.deleteGroup(groupName); - authorizationClient.removePermissions(authorizationRequest); - authorizationRequest.setSubject(new SubjectRef().id(groupName).type(SubjectRef.TypeEnum.GROUP)); - authorizationClient.removePermissions(authorizationRequest); - } catch (Exception e){} - } - - UpsertGroupRequest getUpsertGroupRequest() { - return new UpsertGroupRequest() - .description("Group used for SDK testing") - .roles(List.of(UpsertGroupRequest.RolesEnum.USER)); - } -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/SubWorkflowTests.java b/src/test/java/io/orkes/conductor/client/e2e/SubWorkflowTests.java deleted file mode 100644 index 2ff7ef61..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/SubWorkflowTests.java +++ /dev/null @@ -1,287 +0,0 @@ -/* - * Copyright 2023 Orkes, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import java.io.InputStream; -import java.io.InputStreamReader; -import java.util.*; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.testcontainers.shaded.com.google.common.util.concurrent.Uninterruptibles; - -import com.netflix.conductor.client.http.MetadataClient; -import com.netflix.conductor.client.http.TaskClient; -import com.netflix.conductor.client.http.WorkflowClient; -import com.netflix.conductor.client.worker.Worker; -import com.netflix.conductor.common.config.ObjectMapperProvider; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.sdk.workflow.executor.WorkflowExecutor; - -import io.orkes.conductor.client.AuthorizationClient; -import io.orkes.conductor.client.OrkesClients; -import io.orkes.conductor.client.automator.TaskRunnerConfigurer; -import io.orkes.conductor.client.http.OrkesTaskClient; -import io.orkes.conductor.client.model.WorkflowStatus; -import io.orkes.conductor.sdk.examples.ApiUtil; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import lombok.SneakyThrows; -import lombok.extern.slf4j.Slf4j; - -import static org.junit.jupiter.api.Assertions.*; -import static org.testcontainers.shaded.org.awaitility.Awaitility.await; - -@Slf4j -public class SubWorkflowTests { - - private static OrkesClients orkesClients; - - private static AuthorizationClient authClient; - - private static WorkflowClient workflowClient; - - private static TaskClient taskClient; - - private static MetadataClient metadataClient; - - private static WorkflowExecutor workflowExecutor; - - private static ObjectMapper objectMapper = new ObjectMapperProvider().getObjectMapper(); - - private static TypeReference> WORKFLOW_DEF_LIST = new TypeReference>(){}; - - private static final String WORKFLOW_NAME = "sub_workflow_test"; - - private static Map taskToDomainMap = new HashMap<>(); - - private static TaskRunnerConfigurer configurer; - - private static TaskRunnerConfigurer configurerNoDomain; - - @SneakyThrows - @BeforeAll - public static void beforeAll() { - orkesClients = ApiUtil.getOrkesClient(); - authClient = orkesClients.getAuthorizationClient(); - workflowClient = orkesClients.getWorkflowClient(); - taskClient = orkesClients.getTaskClient(); - metadataClient = orkesClients.getMetadataClient(); - workflowExecutor = new WorkflowExecutor(taskClient, workflowClient, metadataClient, 1); - InputStream resource = SubWorkflowTests.class.getResourceAsStream("/metadata/workflows.json"); - List workflowDefs = objectMapper.readValue(new InputStreamReader(resource), WORKFLOW_DEF_LIST); - 
metadataClient.updateWorkflowDefs(workflowDefs); - Set tasks = new HashSet<>(); - List internalTasks = List.of("HTTP", "BUSINESS_RULE", "AWS_LAMBDA", "JDBC", "WAIT_FOR_EVENT", "PUBLISH_BUSINESS_STATE", - "WAIT", "WAIT_FOR_WEBHOOK", "DECISION", "SWITCH", "DYNAMIC", "JOIN", "DO_WHILE", "FORK_JOIN_DYNAMIC", "FORK_JOIN", "JSON_JQ_TRANSFORM", "FORK"); - for (WorkflowDef workflowDef : workflowDefs) { - List allTasks = workflowDef.collectTasks(); - tasks.addAll(allTasks.stream() - .filter(tt -> !tt.getType().equals("SIMPLE") && !internalTasks.contains(tt.getType())) - .map(t -> t.getType()).collect(Collectors.toSet())); - - tasks.addAll(allTasks.stream() - .filter(tt -> tt.getType().equals("SIMPLE") && !internalTasks.contains(tt.getType())) - .map(t -> t.getName()).collect(Collectors.toSet())); - - } - startWorkers(tasks); - log.info("Updated workflow definitions: {}", workflowDefs.stream().map(def -> def.getName()).collect(Collectors.toList())); - } - - @AfterAll - public static void cleanup() { - if(configurer != null) { - configurer.shutdown(); - configurerNoDomain.shutdown(); - } - } - - @Test - public void testSubWorkflowWithDomain() { - StartWorkflowRequest request = new StartWorkflowRequest(); - request.setName(WORKFLOW_NAME); - request.setTaskToDomain(taskToDomainMap); - String workflowId = workflowClient.startWorkflow(request); - log.info("Started {}", workflowId); - assertSubworkflowWithDomain(workflowId); - - int restartCount = 2; - for (int i = 0; i < restartCount; i++) { - workflowClient.restart(workflowId, true); - assertSubworkflowWithDomain(workflowId); - Uninterruptibles.sleepUninterruptibly(100, TimeUnit.MILLISECONDS); - } - } - - private void assertSubworkflowWithDomain(String workflowId) { - await() - .atMost(120, TimeUnit.SECONDS) - .pollInterval(5, TimeUnit.SECONDS) - .untilAsserted(() -> { - - Workflow workflow = workflowClient.getWorkflow(workflowId, true); - assertEquals(WorkflowStatus.StatusEnum.COMPLETED.name(), workflow.getStatus().name()); - Map workflowTaskToDomain = workflow.getTaskToDomain(); - assertNotNull(workflowTaskToDomain); - assertTrue(!workflowTaskToDomain.isEmpty()); - for (Map.Entry taskToDomain : workflowTaskToDomain.entrySet()) { - String taskName = taskToDomain.getKey(); - String domain = taskToDomain.getValue(); - assertEquals(domain, taskToDomainMap.get(taskName)); - } - workflow.getTasks().stream().filter(t -> t.getTaskType().equals("SUB_WORKFLOW")).forEach(subWorkflowTask -> { - String subWorkflowId = subWorkflowTask.getSubWorkflowId(); - Workflow subWorkflow = workflowClient.getWorkflow(subWorkflowId, true); - Map subWorkflowDomainMap = subWorkflow.getTaskToDomain(); - assertNotNull(subWorkflowDomainMap); - assertTrue(!subWorkflowDomainMap.isEmpty()); - - for (Map.Entry taskToDomain : subWorkflowDomainMap.entrySet()) { - String taskName = taskToDomain.getKey(); - String domain = taskToDomain.getValue(); - assertEquals(domain, taskToDomainMap.get(taskName)); - } - - - SubWorkflowParams subWorkflowParams = subWorkflowTask.getWorkflowTask().getSubWorkflowParam(); - if(subWorkflowParams.getWorkflowDefinition() == null) { - Integer version = subWorkflowParams.getVersion(); - log.info("version is {} for {} / {}", version, workflowId, subWorkflowTask.getReferenceTaskName()); - if(version == null) { - assertEquals(3, subWorkflow.getWorkflowVersion()); - } else { - assertEquals(version, subWorkflow.getWorkflowVersion()); - } - } else { - log.info("Sub workflow has inline definition {} - {}", subWorkflowParams.getWorkflowDefinition().getClass().getName(), 
subWorkflowParams.getWorkflowDefinition()); - } - }); - }); - - } - - @Test - public void testSubworkflowExecutionWithOutDomains() { - StartWorkflowRequest request = new StartWorkflowRequest(); - request.setName(WORKFLOW_NAME); - String workflowId = workflowClient.startWorkflow(request); - log.info("Started {}", workflowId); - assertSubworkflowExecutionWithOutDomains(workflowId); - - int restartCount = 2; - for (int i = 0; i < restartCount; i++) { - workflowClient.restart(workflowId, true); - assertSubworkflowExecutionWithOutDomains(workflowId); - Uninterruptibles.sleepUninterruptibly(100, TimeUnit.MILLISECONDS); - } - } - - - - private void assertSubworkflowExecutionWithOutDomains(String workflowId) { - await() - .atMost(120, TimeUnit.SECONDS) - .pollInterval(5, TimeUnit.SECONDS) - .untilAsserted(() -> { - Workflow workflow = workflowClient.getWorkflow(workflowId, true); - assertEquals(workflow.getStatus().name(), WorkflowStatus.StatusEnum.COMPLETED.name()); - - Map workflowTaskToDomain = workflow.getTaskToDomain(); - assertEquals(0, workflowTaskToDomain.size()); - - workflow.getTasks().stream().filter(t -> t.getTaskType().equals("SUB_WORKFLOW")).forEach(subWorkflowTask -> { - String subWorkflowId = subWorkflowTask.getSubWorkflowId(); - Workflow subWorkflow = workflowClient.getWorkflow(subWorkflowId, true); - Map subWorkflowDomainMap = subWorkflow.getTaskToDomain(); - assertEquals(0, subWorkflowDomainMap.size()); - - SubWorkflowParams subWorkflowParams = subWorkflowTask.getWorkflowTask().getSubWorkflowParam(); - if(subWorkflowParams.getWorkflowDefinition() == null) { - Integer version = subWorkflowParams.getVersion(); - log.info("version is {} for {} / {}", version, workflowId, subWorkflowTask.getReferenceTaskName()); - if(version == null) { - assertEquals(3, subWorkflow.getWorkflowVersion()); - } else { - assertEquals(version, subWorkflow.getWorkflowVersion()); - } - } else { - log.info("Sub workflow has inline definition {} - {}", subWorkflowParams.getWorkflowDefinition().getClass().getName(), subWorkflowParams.getWorkflowDefinition()); - } - }); - - - }); - } - - private static void startWorkers(Set tasks) { - log.info("Starting workers for {} with domainMap", tasks, taskToDomainMap); - List workers = new ArrayList<>(); - for (String task : tasks) { - workers.add(new TestWorker(task)); - taskToDomainMap.put(task, UUID.randomUUID().toString()); - } - configurer = new TaskRunnerConfigurer - .Builder((OrkesTaskClient)taskClient, workers) - .withTaskToDomain(taskToDomainMap) - .withThreadCount(1) - .withTaskPollTimeout(10) - .build(); - configurer.init(); - - configurerNoDomain = new TaskRunnerConfigurer - .Builder((OrkesTaskClient)taskClient, workers) - .withThreadCount(1) - .withTaskPollTimeout(10) - .build(); - configurerNoDomain.init(); - } - - - - private static class TestWorker implements Worker { - - private String name; - - public TestWorker(String name) { - this.name = name; - } - @Override - public String getTaskDefName() { - return name; - } - - @Override - public TaskResult execute(Task task) { - TaskResult result = new TaskResult(task); - result.getOutputData().put("number", 42); - result.setStatus(TaskResult.Status.COMPLETED); - return result; - } - - @Override - public int getPollingInterval() { - return 1; - } - } -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/SubWorkflowTimeoutRetryTests.java b/src/test/java/io/orkes/conductor/client/e2e/SubWorkflowTimeoutRetryTests.java deleted file mode 100644 index 55288a77..00000000 --- 
a/src/test/java/io/orkes/conductor/client/e2e/SubWorkflowTimeoutRetryTests.java +++ /dev/null @@ -1,191 +0,0 @@ -/* - * Copyright 2023 Orkes, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import java.io.InputStream; -import java.io.InputStreamReader; -import java.util.*; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; - -import com.netflix.conductor.client.http.MetadataClient; -import com.netflix.conductor.client.http.TaskClient; -import com.netflix.conductor.client.http.WorkflowClient; -import com.netflix.conductor.common.config.ObjectMapperProvider; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.Workflow; - -import io.orkes.conductor.client.AuthorizationClient; -import io.orkes.conductor.client.OrkesClients; -import io.orkes.conductor.client.automator.TaskRunnerConfigurer; -import io.orkes.conductor.sdk.examples.ApiUtil; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.util.concurrent.Uninterruptibles; -import lombok.SneakyThrows; -import lombok.extern.slf4j.Slf4j; - -import static org.junit.jupiter.api.Assertions.*; -import static org.testcontainers.shaded.org.awaitility.Awaitility.await; - -@Slf4j -public class SubWorkflowTimeoutRetryTests { - - private static OrkesClients orkesClients; - - private static AuthorizationClient authClient; - - private static WorkflowClient workflowClient; - - private static TaskClient taskClient; - - private static MetadataClient metadataClient; - - private static ObjectMapper objectMapper = new ObjectMapperProvider().getObjectMapper(); - - private static TypeReference> WORKFLOW_DEF_LIST = new TypeReference>(){}; - - private static final String WORKFLOW_NAME = "integration_test_wf_with_sub_wf"; - - private static Map taskToDomainMap = new HashMap<>(); - - private static TaskRunnerConfigurer configurer; - - private static TaskRunnerConfigurer configurerNoDomain; - - @SneakyThrows - @BeforeAll - public static void beforeAll() { - orkesClients = ApiUtil.getOrkesClient(); - authClient = orkesClients.getAuthorizationClient(); - workflowClient = orkesClients.getWorkflowClient(); - taskClient = orkesClients.getTaskClient(); - metadataClient = orkesClients.getMetadataClient(); - InputStream resource = SubWorkflowTimeoutRetryTests.class.getResourceAsStream("/metadata/sub_workflow_tests.json"); - List workflowDefs = objectMapper.readValue(new InputStreamReader(resource), WORKFLOW_DEF_LIST); - metadataClient.updateWorkflowDefs(workflowDefs); - Set tasks = new HashSet<>(); - for (WorkflowDef workflowDef : workflowDefs) { - List allTasks = workflowDef.collectTasks(); - tasks.addAll(allTasks.stream() - .filter(tt -> !tt.getType().equals("SIMPLE")) - .map(t -> t.getType()).collect(Collectors.toSet())); - - tasks.addAll(allTasks.stream() - .filter(tt -> 
tt.getType().equals("SIMPLE")) - .map(t -> t.getName()).collect(Collectors.toSet())); - - } - log.info("Updated workflow definitions: {}", workflowDefs.stream().map(def -> def.getName()).collect(Collectors.toList())); - } - - @AfterAll - public static void cleanup() { - if(configurer != null) { - configurer.shutdown(); - configurerNoDomain.shutdown(); - } - } - - @Test - public void test() { - - String correlationId = "wf_with_subwf_test_1"; - Map input = Map.of("param1", "p1 value", "subwf", "sub_workflow"); - - StartWorkflowRequest request = new StartWorkflowRequest(); - request.setName(WORKFLOW_NAME); - request.setVersion(1); - request.setCorrelationId(correlationId); - request.setInput(input); - String workflowInstanceId = workflowClient.startWorkflow(request); - - log.info("Started {} ", workflowInstanceId); - pollAndCompleteTask(workflowInstanceId, "integration_task_1", Map.of()); - Workflow workflow = workflowClient.getWorkflow(workflowInstanceId, true); - await().atMost(3, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowInstanceId, true); - assertNotNull(workflow1); - assertEquals(2, workflow1.getTasks().size()); - assertEquals(Task.Status.COMPLETED, workflow1.getTasks().get(0).getStatus()); - assertEquals(TaskType.SUB_WORKFLOW.name(), workflow1.getTasks().get(1).getTaskType()); - assertEquals(Task.Status.IN_PROGRESS, workflow1.getTasks().get(1).getStatus()); - }); - workflow = workflowClient.getWorkflow(workflowInstanceId, true); - String subWorkflowId = workflow.getTasks().get(1).getSubWorkflowId(); - log.info("Sub workflow Id {} ", subWorkflowId); - - assertNotNull(subWorkflowId); - Workflow subWorkflow = workflowClient.getWorkflow(subWorkflowId, true); - assertEquals(Workflow.WorkflowStatus.RUNNING, subWorkflow.getStatus()); - - //Wait for 7 seconds which is > 5 sec timeout for the workflow - Uninterruptibles.sleepUninterruptibly(7, TimeUnit.SECONDS); - workflowClient.runDecider(workflowInstanceId); - - workflow = workflowClient.getWorkflow(workflowInstanceId, true); - assertNotNull(workflow); - assertEquals(2, workflow.getTasks().size()); - assertEquals(Workflow.WorkflowStatus.TIMED_OUT, workflow.getStatus()); - assertEquals(Task.Status.COMPLETED, workflow.getTasks().get(0).getStatus()); - assertEquals(Task.Status.CANCELED, workflow.getTasks().get(1).getStatus()); - - //Verify that the sub-workflow is terminated - subWorkflow = workflowClient.getWorkflow(subWorkflowId, true); - assertEquals(Workflow.WorkflowStatus.TERMINATED, subWorkflow.getStatus()); - - //Retry sub-workflow - workflowClient.retryLastFailedTask(subWorkflowId); - - //Sub workflow should be in the running state now - subWorkflow = workflowClient.getWorkflow(subWorkflowId, true); - assertEquals(Workflow.WorkflowStatus.RUNNING, subWorkflow.getStatus()); - assertEquals(Task.Status.CANCELED, subWorkflow.getTasks().get(0).getStatus()); - assertEquals(Task.Status.SCHEDULED, subWorkflow.getTasks().get(1).getStatus()); - - await().atMost(3, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowInstanceId, true); - assertFalse(workflow1.getTasks().get(1).isSubworkflowChanged()); - }); - } - - private Task pollAndCompleteTask(String workflowInstanceId, String taskName, Map output) { - Workflow workflow = workflowClient.getWorkflow(workflowInstanceId, true); - if(workflow == null) { - return null; - } - Optional optional = workflow.getTasks().stream().filter(task -> task.getTaskDefName().equals(taskName)).findFirst(); - 
if(optional.isEmpty()) { - return null; - } - Task task = optional.get(); - task.setStatus(Task.Status.COMPLETED); - task.getOutputData().putAll(output); - taskClient.updateTask(new TaskResult(task)); - - return task; - } - - - -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/SubWorkflowVersionTests.java b/src/test/java/io/orkes/conductor/client/e2e/SubWorkflowVersionTests.java deleted file mode 100644 index 0590dd8d..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/SubWorkflowVersionTests.java +++ /dev/null @@ -1,341 +0,0 @@ -/* - * Copyright 2022 Orkes, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -import org.apache.commons.lang3.RandomStringUtils; -import org.junit.jupiter.api.Test; - -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.Workflow; - -import io.orkes.conductor.client.*; -import io.orkes.conductor.client.http.*; -import io.orkes.conductor.client.model.*; -import io.orkes.conductor.client.util.ApiUtil; -import io.orkes.conductor.client.util.RegistrationUtil; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.testcontainers.shaded.org.awaitility.Awaitility.await; - -public class SubWorkflowVersionTests { - - @Test - public void testSubWorkflow0version() { - ApiClient apiClient = ApiUtil.getApiClientWithCredentials(); - WorkflowClient workflowClient = new OrkesWorkflowClient(apiClient); - MetadataClient metadataClient = new OrkesMetadataClient(apiClient); - TaskClient taskClient = new OrkesTaskClient(apiClient); - - String taskName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String parentWorkflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String subWorkflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - - // Register workflow - RegistrationUtil.registerWorkflowWithSubWorkflowDef(parentWorkflowName, subWorkflowName, taskName, metadataClient); - WorkflowDef workflowDef = metadataClient.getWorkflowDef(parentWorkflowName, 1); - //Set sub workflow version to 0 - workflowDef.getTasks().get(0).getSubWorkflowParam().setVersion(0); - metadataClient.registerWorkflowDef(workflowDef, true); - - // Trigger workflow - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(parentWorkflowName); - startWorkflowRequest.setVersion(1); - - String workflowId = workflowClient.startWorkflow(startWorkflowRequest); - - // User1 should be able to complete task/workflow - String subWorkflowId = workflowClient.getWorkflow(workflowId, true).getTasks().get(0).getSubWorkflowId(); - TaskResult taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(subWorkflowId); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskResult.setTaskId(workflowClient.getWorkflow(subWorkflowId, true).getTasks().get(0).getTaskId()); - taskClient.updateTask(taskResult); - - // Wait for workflow to get completed - await().atMost(42, TimeUnit.SECONDS).pollInterval(1, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, false); - assertEquals(workflow1.getStatus().name(), 
WorkflowStatus.StatusEnum.COMPLETED.name()); - }); - - // Cleanup - metadataClient.unregisterWorkflowDef(parentWorkflowName, 1); - metadataClient.unregisterWorkflowDef(subWorkflowName, 1); - metadataClient.unregisterTaskDef(taskName); - } - - @Test - public void testSubWorkflowNullVersion() { - ApiClient apiClient = ApiUtil.getApiClientWithCredentials(); - WorkflowClient workflowClient = new OrkesWorkflowClient(apiClient); - MetadataClient metadataClient = new OrkesMetadataClient(apiClient); - TaskClient taskClient = new OrkesTaskClient(apiClient); - - String taskName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String parentWorkflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String subWorkflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - - // Register workflow - RegistrationUtil.registerWorkflowWithSubWorkflowDef(parentWorkflowName, subWorkflowName, taskName, metadataClient); - WorkflowDef workflowDef = metadataClient.getWorkflowDef(parentWorkflowName, 1); - //Set sub workflow version to null - workflowDef.getTasks().get(0).getSubWorkflowParam().setVersion(null); - metadataClient.registerWorkflowDef(workflowDef, true); - - // Trigger workflow - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(parentWorkflowName); - startWorkflowRequest.setVersion(1); - - String workflowId = workflowClient.startWorkflow(startWorkflowRequest); - - // User1 should be able to complete task/workflow - String subWorkflowId = workflowClient.getWorkflow(workflowId, true).getTasks().get(0).getSubWorkflowId(); - TaskResult taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(subWorkflowId); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskResult.setTaskId(workflowClient.getWorkflow(subWorkflowId, true).getTasks().get(0).getTaskId()); - taskClient.updateTask(taskResult); - - // Wait for workflow to get completed - await().atMost(42, TimeUnit.SECONDS).pollInterval(1, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, false); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.COMPLETED.name()); - }); - - // Cleanup - metadataClient.unregisterWorkflowDef(parentWorkflowName, 1); - metadataClient.unregisterWorkflowDef(subWorkflowName, 1); - metadataClient.unregisterTaskDef(taskName); - } - - @Test - public void testSubWorkflowEmptyVersion() { - ApiClient apiClient = ApiUtil.getApiClientWithCredentials(); - WorkflowClient workflowClient = new OrkesWorkflowClient(apiClient); - MetadataClient metadataClient = new OrkesMetadataClient(apiClient); - TaskClient taskClient = new OrkesTaskClient(apiClient); - - String taskName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String parentWorkflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String subWorkflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - - // Register workflow - RegistrationUtil.registerWorkflowWithSubWorkflowDef(parentWorkflowName, subWorkflowName, taskName, metadataClient); - WorkflowDef workflowDef = metadataClient.getWorkflowDef(parentWorkflowName, 1); - WorkflowDef subWorkflowDef = metadataClient.getWorkflowDef(subWorkflowName, null); - subWorkflowDef.setVersion(1); - metadataClient.registerWorkflowDef(subWorkflowDef); - subWorkflowDef.setVersion(2); - metadataClient.registerWorkflowDef(subWorkflowDef); - //Set sub workflow version to empty in parent workflow definition - 
com.netflix.conductor.common.metadata.workflow.SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); - subWorkflowParams.setName(subWorkflowName); - workflowDef.getTasks().get(0).setSubWorkflowParam(subWorkflowParams); - metadataClient.registerWorkflowDef(workflowDef, true); - - // Trigger workflow - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(parentWorkflowName); - startWorkflowRequest.setVersion(1); - - String workflowId = workflowClient.startWorkflow(startWorkflowRequest); - - // User1 should be able to complete task/workflow - String subWorkflowId = workflowClient.getWorkflow(workflowId, true).getTasks().get(0).getSubWorkflowId(); - TaskResult taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(subWorkflowId); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskResult.setTaskId(workflowClient.getWorkflow(subWorkflowId, true).getTasks().get(0).getTaskId()); - taskClient.updateTask(taskResult); - - // Wait for workflow to get completed - //Check sub-workflow is executed with the latest version. - await().atMost(1, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, true); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.COMPLETED.name()); - assertEquals(workflow1.getTasks().get(0).getWorkflowTask().getSubWorkflowParam().getVersion(), 2); - }); - - - // Cleanup - metadataClient.unregisterWorkflowDef(parentWorkflowName, 1); - metadataClient.unregisterWorkflowDef(subWorkflowName, 1); - metadataClient.unregisterTaskDef(taskName); - } - - @Test - public void testDynamicSubWorkflow() { - ApiClient adminClient = ApiUtil.getApiClientWithCredentials(); - WorkflowClient workflowAdminClient = new OrkesWorkflowClient(adminClient); - MetadataClient metadataAdminClient =new OrkesMetadataClient(adminClient); - TaskClient taskClient = new OrkesTaskClient(adminClient); - String workflowName1 = "DynamicFanInOutTest_Version"; - String subWorkflowName = "test_subworkflow"; - - - // Register workflow - registerWorkflowDef(workflowName1, metadataAdminClient); - registerSubWorkflow(subWorkflowName, "test_task", metadataAdminClient); - - // Trigger workflow - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName1); - startWorkflowRequest.setVersion(1); - - String workflowId = workflowAdminClient.startWorkflow(startWorkflowRequest); - Workflow workflow = workflowAdminClient.getWorkflow(workflowId, true); - TaskResult taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setTaskId(workflow.getTasks().get(0).getTaskId()); - taskResult.setStatus(TaskResult.Status.COMPLETED); - - WorkflowTask workflowTask2 = new WorkflowTask(); - workflowTask2.setName("integration_task_2"); - workflowTask2.setTaskReferenceName("xdt1"); - workflowTask2.setType(TaskType.SUB_WORKFLOW.name()); - SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); - subWorkflowParams.setName(subWorkflowName); - workflowTask2.setSubWorkflowParam(subWorkflowParams); - - Map output = new HashMap<>(); - Map> input = new HashMap<>(); - input.put("xdt1", Map.of("k1", "v1")); - output.put("dynamicTasks", Arrays.asList(workflowTask2)); - output.put("dynamicTasksInput", input); - taskResult.setOutputData(output); - taskClient.updateTask(taskResult); - - await().atMost(1, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowAdminClient.getWorkflow(workflowId, true); - 
assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.RUNNING.name()); - assertTrue(workflow1.getTasks().size() == 4); - assertEquals(workflow1.getTasks().get(0).getStatus().name(), Task.Status.COMPLETED.name()); - assertEquals(workflow1.getTasks().get(1).getStatus().name(), Task.Status.COMPLETED.name()); - assertEquals(workflow1.getTasks().get(2).getStatus().name(), Task.Status.IN_PROGRESS.name()); - assertEquals(workflow1.getTasks().get(3).getStatus().name(), Task.Status.IN_PROGRESS.name()); - }); - - workflow = workflowAdminClient.getWorkflow(workflowId, true); - taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setTaskId(workflow.getTasks().get(2).getTaskId()); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskClient.updateTask(taskResult); - - // Workflow should be completed - await().atMost(10, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowAdminClient.getWorkflow(workflowId, true); - assertTrue(workflow1.getTasks().size() == 4); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.COMPLETED.name()); - assertEquals(workflow1.getTasks().get(0).getStatus().name(), Task.Status.COMPLETED.name()); - assertEquals(workflow1.getTasks().get(1).getStatus().name(), Task.Status.COMPLETED.name()); - assertEquals(workflow1.getTasks().get(2).getStatus().name(), Task.Status.COMPLETED.name()); - assertEquals(workflow1.getTasks().get(2).getInputData().get("subWorkflowVersion"), 1l); - assertEquals(workflow1.getTasks().get(3).getStatus().name(), Task.Status.COMPLETED.name()); - }); - - metadataAdminClient.unregisterWorkflowDef(workflowName1, 1); - - } - - private void registerWorkflowDef(String workflowName, MetadataClient metadataClient1) { - TaskDef taskDef = new TaskDef("dt1"); - taskDef.setOwnerEmail("test@orkes.io"); - - TaskDef taskDef4 = new TaskDef("integration_task_2"); - taskDef4.setOwnerEmail("test@orkes.io"); - - TaskDef taskDef3 = new TaskDef("integration_task_3"); - taskDef3.setOwnerEmail("test@orkes.io"); - - TaskDef taskDef2 = new TaskDef("dt2"); - taskDef2.setOwnerEmail("test@orkes.io"); - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setTaskReferenceName("dt2"); - workflowTask.setName("dt2"); - workflowTask.setTaskDefinition(taskDef2); - workflowTask.setWorkflowTaskType(TaskType.SIMPLE); - - WorkflowTask inline = new WorkflowTask(); - inline.setTaskReferenceName("dt1"); - inline.setName("dt1"); - inline.setTaskDefinition(taskDef); - inline.setWorkflowTaskType(TaskType.SIMPLE); - - WorkflowTask join = new WorkflowTask(); - join.setTaskReferenceName("join_dynamic"); - join.setName("join_dynamic"); - join.setWorkflowTaskType(TaskType.JOIN); - - WorkflowTask dynamicFork = new WorkflowTask(); - dynamicFork.setTaskReferenceName("dynamicFork"); - dynamicFork.setName("dynamicFork"); - dynamicFork.setTaskDefinition(taskDef); - dynamicFork.setWorkflowTaskType(TaskType.FORK_JOIN_DYNAMIC); - dynamicFork.setInputParameters(Map.of("dynamicTasks", "${dt1.output.dynamicTasks}", - "dynamicTasksInput", "${dt1.output.dynamicTasksInput}")); - dynamicFork.setDynamicForkTasksParam("dynamicTasks"); - dynamicFork.setDynamicForkTasksInputParamName("dynamicTasksInput"); - - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName(workflowName); - workflowDef.setOwnerEmail("test@orkes.io"); - workflowDef.setInputParameters(Arrays.asList("value", "inlineValue")); - workflowDef.setDescription("Workflow to test retry"); - workflowDef.setTasks(Arrays.asList( inline, dynamicFork, 
join)); - try { - metadataClient1.registerWorkflowDef(workflowDef); - metadataClient1.registerTaskDefs(Arrays.asList(taskDef, taskDef2, taskDef3, taskDef4)); - }catch (Exception e){} - } - - public static void registerSubWorkflow(String subWorkflowName, String taskName, MetadataClient metadataClient) { - TaskDef taskDef = new TaskDef(taskName); - taskDef.setOwnerEmail("test@orkes.io"); - taskDef.setRetryCount(0); - - WorkflowTask inline = new WorkflowTask(); - inline.setTaskReferenceName(taskName); - inline.setName(taskName); - inline.setTaskDefinition(taskDef); - inline.setWorkflowTaskType(TaskType.SIMPLE); - inline.setInputParameters(Map.of("evaluatorType", "graaljs", "expression", "true;")); - - - WorkflowDef subworkflowDef = new WorkflowDef(); - subworkflowDef.setName(subWorkflowName); - subworkflowDef.setOwnerEmail("test@orkes.io"); - subworkflowDef.setInputParameters(Arrays.asList("value", "inlineValue")); - subworkflowDef.setDescription("Sub Workflow to test retry"); - subworkflowDef.setTasks(Arrays.asList(inline)); - subworkflowDef.setTimeoutSeconds(600); - subworkflowDef.setTimeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF); - - metadataClient.registerWorkflowDef(subworkflowDef); - } -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/SyncWorkflowExecutionTest.java b/src/test/java/io/orkes/conductor/client/e2e/SyncWorkflowExecutionTest.java deleted file mode 100644 index b12e96f4..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/SyncWorkflowExecutionTest.java +++ /dev/null @@ -1,287 +0,0 @@ -/* - * Copyright 2022 Orkes, Inc. - *
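// A minimal sketch of the dynamic-fork pattern exercised by testDynamicSubWorkflow above,
// assuming taskClient, workflowId and taskId are already available; names are illustrative.
WorkflowTask forkedSubWorkflow = new WorkflowTask();
forkedSubWorkflow.setName("integration_task_2");
forkedSubWorkflow.setTaskReferenceName("xdt1");
forkedSubWorkflow.setType(TaskType.SUB_WORKFLOW.name());
SubWorkflowParams forkedParams = new SubWorkflowParams();
forkedParams.setName("test_subworkflow");
forkedSubWorkflow.setSubWorkflowParam(forkedParams);

TaskResult result = new TaskResult();
result.setWorkflowInstanceId(workflowId);
result.setTaskId(taskId);
result.setStatus(TaskResult.Status.COMPLETED);
// The FORK_JOIN_DYNAMIC task reads these two keys from the upstream task's output.
result.setOutputData(Map.of(
        "dynamicTasks", List.of(forkedSubWorkflow),
        "dynamicTasksInput", Map.of("xdt1", Map.of("k1", "v1"))));
taskClient.updateTask(result);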
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.run.Workflow; -import io.orkes.conductor.client.ApiClient; -import io.orkes.conductor.client.WorkflowClient; -import io.orkes.conductor.client.http.OrkesWorkflowClient; -import io.orkes.conductor.client.util.ApiUtil; -import io.orkes.conductor.common.model.WorkflowRun; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; - -import static org.junit.jupiter.api.Assertions.*; - -public class SyncWorkflowExecutionTest { - - static ApiClient apiClient; - static WorkflowClient workflowClient; - - static int threshold = 2000; - - @BeforeAll - public static void init() { - apiClient = ApiUtil.getApiClientWithCredentials(); - apiClient.setExecutorThreadCount(10); - workflowClient = new OrkesWorkflowClient(apiClient); - } - - @Test - @DisplayName("Check sync workflow is execute within 11 seconds") - public void testSyncWorkflowExecution() throws ExecutionException, InterruptedException, TimeoutException { - - String workflowName = "load_test_perf_sync_workflow"; - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName); - startWorkflowRequest.setVersion(1); - - CompletableFuture completableFuture = workflowClient.executeWorkflow(startWorkflowRequest, null); - try { - long start = System.currentTimeMillis(); - WorkflowRun workflowRun = completableFuture.get(11, TimeUnit.SECONDS); - long end = System.currentTimeMillis(); - System.out.println("WorkflowId " + workflowRun.getWorkflowId()); - long timeTaken = end-start; - assertTrue(timeTaken < threshold, "Time taken was " + timeTaken); - } catch (Exception e) { - throw new RuntimeException("Workflow " + workflowName + " did not complete in 5 seconds"); - } - } - - @Test - @DisplayName("Check sync workflow end with simple task.") - public void testSyncWorkflowExecution2() throws ExecutionException, InterruptedException, TimeoutException { - - String workflowName = "sync_workflow_end_with_simple_task"; - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName); - startWorkflowRequest.setVersion(1); - - CompletableFuture completableFuture = workflowClient.executeWorkflow(startWorkflowRequest, "simple_task_rka0w_ref"); - long start = System.currentTimeMillis(); - WorkflowRun workflowRun = completableFuture.get(11, TimeUnit.SECONDS); - long end = System.currentTimeMillis(); - long timeTaken = end-start; - System.out.println("WorkflowId " + workflowRun.getWorkflowId()); - assertTrue(timeTaken < threshold, "Time taken was " + timeTaken); - assertEquals(Workflow.WorkflowStatus.RUNNING, workflowRun.getStatus()); - workflowClient.terminateWorkflow(workflowRun.getWorkflowId(), "Terminated"); - } - - @Test - @DisplayName("Check sync workflow end with set variable task.") - public void testSyncWorkflowExecution3() throws 
ExecutionException, InterruptedException, TimeoutException { - - String workflowName = "sync_workflow_end_with_set_variable_task"; - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName); - startWorkflowRequest.setVersion(1); - - CompletableFuture completableFuture = workflowClient.executeWorkflow(startWorkflowRequest, "set_variable_task_1fi09_ref"); - long start = System.currentTimeMillis(); - WorkflowRun workflowRun = completableFuture.get(11, TimeUnit.SECONDS); - long end = System.currentTimeMillis(); - System.out.println("WorkflowId " + workflowRun.getWorkflowId()); - long timeTaken = end - start; - assertTrue(timeTaken < threshold, "Time taken was " + timeTaken); - } - - @Test - @DisplayName("Check sync workflow end with jq task.") - public void testSyncWorkflowExecution4() throws ExecutionException, InterruptedException, TimeoutException { - - String workflowName = "sync_workflow_end_with_jq_task"; - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName); - startWorkflowRequest.setVersion(1); - - CompletableFuture completableFuture = workflowClient.executeWorkflow(startWorkflowRequest, "json_transform_task_jjowa_ref"); - long start = System.currentTimeMillis(); - WorkflowRun workflowRun = completableFuture.get(11, TimeUnit.SECONDS); - long end = System.currentTimeMillis(); - long timeTaken = end - start; - System.out.println("WorkflowId " + workflowRun.getWorkflowId()); - assertTrue(timeTaken < threshold, "Time taken was " + timeTaken); - } - - @Test - @DisplayName("Check sync workflow end with sub workflow task.") - public void testSyncWorkflowExecution5() throws ExecutionException, InterruptedException, TimeoutException { - - String workflowName = "sync_workflow_end_with_subworkflow_task"; - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName); - startWorkflowRequest.setVersion(1); - - CompletableFuture completableFuture = workflowClient.executeWorkflow(startWorkflowRequest, "http_sync"); - long start = System.currentTimeMillis(); - WorkflowRun workflowRun = completableFuture.get(11, TimeUnit.SECONDS); - long end = System.currentTimeMillis(); - long timeTaken = end-start; - System.out.println("WorkflowId " + workflowRun.getWorkflowId()); - assertTrue(timeTaken < threshold, "Time taken was " + timeTaken); - } - - @Test - @DisplayName("Check sync workflow end with failed case") - public void testSyncWorkflowExecution6() throws ExecutionException, InterruptedException, TimeoutException { - - String workflowName = "sync_workflow_failed_case"; - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName); - startWorkflowRequest.setVersion(1); - - CompletableFuture completableFuture = workflowClient.executeWorkflow(startWorkflowRequest, "http_fail"); - long start = System.currentTimeMillis(); - WorkflowRun workflowRun = completableFuture.get(11, TimeUnit.SECONDS); - long end = System.currentTimeMillis(); - long timeTaken = end-start; - assertTrue(timeTaken < threshold, "Time taken was " + timeTaken); - System.out.println("WorkflowId " + workflowRun.getWorkflowId()); - assertEquals(Workflow.WorkflowStatus.RUNNING, workflowRun.getStatus()); - workflowClient.terminateWorkflow(workflowRun.getWorkflowId(), "Terminated"); - } - - @Test - @DisplayName("Check sync workflow end with no poller") - public void testSyncWorkflowExecution7() throws 
ExecutionException, InterruptedException, TimeoutException { - - String workflowName = "sync_workflow_no_poller"; - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName); - startWorkflowRequest.setVersion(1); - - CompletableFuture completableFuture = workflowClient.executeWorkflow(startWorkflowRequest, "simple_task_pia0h_ref"); - long start = System.currentTimeMillis(); - WorkflowRun workflowRun = completableFuture.get(11, TimeUnit.SECONDS); - long end = System.currentTimeMillis(); - long timeTaken = end-start; - assertTrue(timeTaken < threshold, "Time taken was " + timeTaken); - System.out.println("WorkflowId " + workflowRun.getWorkflowId()); - assertEquals(Workflow.WorkflowStatus.RUNNING, workflowRun.getStatus()); - workflowClient.terminateWorkflow(workflowRun.getWorkflowId(), "Terminated"); - } - - @Test - @DisplayName("Check sync workflow end with set variable task. when wait until task is specified as null") - public void testSyncWorkflowExecution8() throws ExecutionException, InterruptedException, TimeoutException { - - String workflowName = "sync_workflow_end_with_set_variable_task"; - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName); - startWorkflowRequest.setVersion(1); - - CompletableFuture completableFuture = workflowClient.executeWorkflow(startWorkflowRequest, null); - long start = System.currentTimeMillis(); - WorkflowRun workflowRun = completableFuture.get(11, TimeUnit.SECONDS); - long end = System.currentTimeMillis(); - System.out.println("WorkflowId " + workflowRun.getWorkflowId()); - long timeTaken = end - start; - assertTrue(timeTaken < threshold, "Time taken was " + timeTaken); - } - - @Test - @DisplayName("Check sync workflow end with set variable task. when wrong wait until task is specified") - public void testSyncWorkflowExecution9() throws ExecutionException, InterruptedException, TimeoutException { - - String workflowName = "sync_workflow_end_with_set_variable_task"; - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName); - startWorkflowRequest.setVersion(1); - - CompletableFuture completableFuture = workflowClient.executeWorkflow(startWorkflowRequest, "no_such_Task_exist"); - long start = System.currentTimeMillis(); - WorkflowRun workflowRun = completableFuture.get(11, TimeUnit.SECONDS); - long end = System.currentTimeMillis(); - System.out.println("WorkflowId " + workflowRun.getWorkflowId()); - long timeTaken = end - start; - assertTrue(timeTaken < threshold, "Time taken was " + timeTaken); - } - - @Test - @DisplayName("Check sync workflow end with set variable task. 
The wait for duration is given.") - public void testSyncWorkflowExecution10() throws ExecutionException, InterruptedException, TimeoutException { - - String workflowName = "sync_workflow_end_with_set_variable_task"; - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName); - startWorkflowRequest.setVersion(1); - - CompletableFuture completableFuture = workflowClient.executeWorkflow(startWorkflowRequest, null, 1); - long start = System.currentTimeMillis(); - WorkflowRun workflowRun = completableFuture.get(1, TimeUnit.SECONDS); - long end = System.currentTimeMillis(); - System.out.println("WorkflowId " + workflowRun.getWorkflowId()); - long timeTaken = end - start; - assertTrue(timeTaken < threshold, "Time taken was " + timeTaken); - } - - @Test - @DisplayName("Check sync workflow with simple task. The wait for duration is given.") - public void testSyncWorkflowExecution11() throws ExecutionException, InterruptedException, TimeoutException { - - String workflowName = "sync_workflow_end_with_simple_task"; - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName); - startWorkflowRequest.setVersion(1); - - CompletableFuture completableFuture = workflowClient.executeWorkflow(startWorkflowRequest, null, 5); - long start = System.currentTimeMillis(); - WorkflowRun workflowRun = completableFuture.get(6, TimeUnit.SECONDS); - long end = System.currentTimeMillis(); - long timeTaken = end - start; - assertTrue(timeTaken < 6000, "Time taken was " + timeTaken); - assertEquals(Workflow.WorkflowStatus.RUNNING, workflowRun.getStatus()); - workflowClient.terminateWorkflow(workflowRun.getWorkflowId(), "Terminated"); - } - - @Test - @DisplayName("Check sync workflow with simple task. The wait for duration is not given.") - public void testSyncWorkflowExecution12() throws ExecutionException, InterruptedException, TimeoutException { - - String workflowName = "sync_workflow_end_with_simple_task"; - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName); - startWorkflowRequest.setVersion(1); - - CompletableFuture completableFuture = workflowClient.executeWorkflow(startWorkflowRequest, null, (Integer)null); - long start = System.currentTimeMillis(); - WorkflowRun workflowRun = completableFuture.get(11, TimeUnit.SECONDS); - long end = System.currentTimeMillis(); - long timeTaken = end-start; - System.out.println("WorkflowId " + workflowRun.getWorkflowId()); - assertTrue(timeTaken < 11000, "Time taken was " + timeTaken); - assertEquals(Workflow.WorkflowStatus.RUNNING, workflowRun.getStatus()); - workflowClient.terminateWorkflow(workflowRun.getWorkflowId(), "Terminated"); - } -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/TaskRateLimitTests.java b/src/test/java/io/orkes/conductor/client/e2e/TaskRateLimitTests.java deleted file mode 100644 index fc2e8613..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/TaskRateLimitTests.java +++ /dev/null @@ -1,274 +0,0 @@ -/* - * Copyright 2022 Orkes, Inc. - *
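// A condensed sketch of the synchronous-execution pattern used by the deleted tests above,
// assuming an authenticated ApiClient; workflow name, task reference and timeout are illustrative.
WorkflowClient workflowClient = new OrkesWorkflowClient(apiClient);

StartWorkflowRequest request = new StartWorkflowRequest();
request.setName("sync_workflow_end_with_simple_task");
request.setVersion(1);

// The second argument is the task reference name to wait for; the tests above also pass null.
CompletableFuture<WorkflowRun> future = workflowClient.executeWorkflow(request, "simple_task_rka0w_ref");
WorkflowRun run = future.get(11, TimeUnit.SECONDS); // throws TimeoutException if the server is slower
if (run.getStatus() == Workflow.WorkflowStatus.RUNNING) {
    workflowClient.terminateWorkflow(run.getWorkflowId(), "Terminated");
}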
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import java.util.Arrays; -import java.util.Map; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; - -import org.apache.commons.lang3.RandomStringUtils; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.Workflow; - -import io.orkes.conductor.client.ApiClient; -import io.orkes.conductor.client.MetadataClient; -import io.orkes.conductor.client.TaskClient; -import io.orkes.conductor.client.WorkflowClient; -import io.orkes.conductor.client.http.OrkesMetadataClient; -import io.orkes.conductor.client.http.OrkesTaskClient; -import io.orkes.conductor.client.http.OrkesWorkflowClient; -import io.orkes.conductor.client.model.TagObject; -import io.orkes.conductor.sdk.examples.ApiUtil; - -import com.google.common.util.concurrent.Uninterruptibles; - -import static org.junit.jupiter.api.Assertions.*; -import static org.testcontainers.shaded.org.awaitility.Awaitility.await; - -public class TaskRateLimitTests { - - @Test - @DisplayName("Check workflow with simple rate limit by name") - public void testRateLimitByPerFrequency() throws InterruptedException { - ApiClient apiClient = ApiUtil.getApiClientWithCredentials(); - WorkflowClient workflowClient = new OrkesWorkflowClient(apiClient); - MetadataClient metadataClient = new OrkesMetadataClient(apiClient); - TaskClient taskClient = new OrkesTaskClient(apiClient); - String workflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String taskName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - // Register workflow - registerWorkflowDef(workflowName, taskName, metadataClient, false); - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName); - //Start two workflows. Only first workflow task should be in_progress - String workflowId1 = workflowClient.startWorkflow(startWorkflowRequest); - String workflowId2 = workflowClient.startWorkflow(startWorkflowRequest); - - Workflow workflow1 = workflowClient.getWorkflow(workflowId1, true); - Workflow workflow2 = workflowClient.getWorkflow(workflowId1, true); - - // Assertions - Assertions.assertEquals(workflow1.getStatus(), Workflow.WorkflowStatus.RUNNING); - Assertions.assertEquals(workflow1.getTasks().size(), 1); - Assertions.assertEquals(workflow2.getStatus(), Workflow.WorkflowStatus.RUNNING); - Assertions.assertEquals(workflow2.getTasks().size(), 1); - - Task task1 = taskClient.pollTask(taskName, "test", null); - Task task2 = taskClient.pollTask(taskName, "test", null); - - // Task2 should be null. 
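// (The task definition used here sets rateLimitPerFrequency=1 with a 10 second window, so only
// one execution of the task is handed out per window and the second poll comes back empty.)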
- Task finalTask = task2; - await().atMost(1, TimeUnit.SECONDS).untilAsserted(() -> { - assertNull(finalTask); - assertNotNull(task1); - }); - - TaskResult taskResult = new TaskResult(); - taskResult.setTaskId(task1.getTaskId()); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskResult.setWorkflowInstanceId(task1.getWorkflowInstanceId()); - taskClient.updateTask(taskResult); - - // Task2 should not be pollable still. It should be available only after 10 seconds. - task2 = taskClient.pollTask(taskName, "test", null); - assertNull(task2); - - Uninterruptibles.sleepUninterruptibly(13, TimeUnit.SECONDS); - // Task2 should be available to poll - task2 = taskClient.pollTask(taskName, "test", null); - assertNotNull(task2); - taskResult = new TaskResult(); - taskResult.setTaskId(task2.getTaskId()); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskResult.setWorkflowInstanceId(task2.getWorkflowInstanceId()); - taskClient.updateTask(taskResult); - - await().atMost(1, TimeUnit.SECONDS).untilAsserted(() -> { - // Assert both workflows completed - assertEquals(workflowClient.getWorkflow(workflowId1, false).getStatus(), Workflow.WorkflowStatus.COMPLETED); - assertEquals(workflowClient.getWorkflow(workflowId2, false).getStatus(), Workflow.WorkflowStatus.COMPLETED); - }); - metadataClient.unregisterWorkflowDef(workflowName, 1); - metadataClient.unregisterTaskDef(taskName); - } - - @Test - @DisplayName("Check workflow with simple rate limit by name") - public void testConcurrentExeclimit() { - ApiClient apiClient = ApiUtil.getApiClientWithCredentials(); - WorkflowClient workflowClient = new OrkesWorkflowClient(apiClient); - MetadataClient metadataClient = new OrkesMetadataClient(apiClient); - TaskClient taskClient = new OrkesTaskClient(apiClient); - String workflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String taskName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - // Register workflow - registerWorkflowDef(workflowName, taskName, metadataClient, true); - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName); - //Start two workflows. Only first workflow task should be in_progress - String workflowId1 = workflowClient.startWorkflow(startWorkflowRequest); - String workflowId2 = workflowClient.startWorkflow(startWorkflowRequest); - - Workflow workflow1 = workflowClient.getWorkflow(workflowId1, true); - Workflow workflow2 = workflowClient.getWorkflow(workflowId1, true); - - // Assertions - Assertions.assertEquals(workflow1.getStatus(), Workflow.WorkflowStatus.RUNNING); - Assertions.assertEquals(workflow1.getTasks().size(), 1); - Assertions.assertEquals(workflow2.getStatus(), Workflow.WorkflowStatus.RUNNING); - Assertions.assertEquals(workflow2.getTasks().size(), 1); - - Task task1 = taskClient.pollTask(taskName, "test", null); - Task task2 = taskClient.pollTask(taskName, "test", null); - - // Task2 should be null. - Task finalTask = task2; - await().atMost(1, TimeUnit.SECONDS).untilAsserted(() -> { - assertNull(finalTask); - assertNotNull(task1); - }); - - TaskResult taskResult = new TaskResult(); - taskResult.setTaskId(task1.getTaskId()); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskResult.setWorkflowInstanceId(task1.getWorkflowInstanceId()); - taskClient.updateTask(taskResult); - Uninterruptibles.sleepUninterruptibly(60, TimeUnit.SECONDS); - - // Task2 should not be pollable still. It should be available only after 10 seconds. 
- await().atMost(10, TimeUnit.SECONDS).untilAsserted(() -> { - Task task3 = taskClient.pollTask(taskName, "test", null); - assertNotNull(task3); - TaskResult taskResult1 = new TaskResult(); - taskResult1.setTaskId(task3.getTaskId()); - taskResult1.setStatus(TaskResult.Status.COMPLETED); - taskResult1.setWorkflowInstanceId(task3.getWorkflowInstanceId()); - taskClient.updateTask(taskResult1); - }); - - await().atMost(1, TimeUnit.SECONDS).untilAsserted(() -> { - // Assert both workflows completed - assertEquals(workflowClient.getWorkflow(workflowId1, false).getStatus(), Workflow.WorkflowStatus.COMPLETED); - assertEquals(workflowClient.getWorkflow(workflowId2, false).getStatus(), Workflow.WorkflowStatus.COMPLETED); - }); - metadataClient.unregisterWorkflowDef(workflowName, 1); - metadataClient.unregisterTaskDef(taskName); - } - - @Test - @DisplayName("Check workflow with simple rate limit by correlationId") - public void testRateLimitByWorkflowCorrelationId() { - ApiClient apiClient = ApiUtil.getApiClientWithCredentials(); - WorkflowClient workflowClient = new OrkesWorkflowClient(apiClient); - MetadataClient metadataClient = new OrkesMetadataClient(apiClient); - TaskClient taskClient = new OrkesTaskClient(apiClient); - String workflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String taskName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - // Register workflow - registerWorkflowDef(workflowName, taskName, metadataClient, false); - TagObject tagObject = new TagObject(); - tagObject.setType(TagObject.TypeEnum.RATE_LIMIT); - tagObject.setKey("${workflow.correlationId}"); - tagObject.setValue(3); // Only 3 invocations are allowed for same correlationId - metadataClient.addWorkflowTag(tagObject, workflowName); - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setCorrelationId("rate_limited"); - startWorkflowRequest.setName(workflowName); - String workflowId1 = workflowClient.startWorkflow(startWorkflowRequest); - String workflowId2 = workflowClient.startWorkflow(startWorkflowRequest); - String workflowId3 = workflowClient.startWorkflow(startWorkflowRequest); - String workflowId4 = workflowClient.startWorkflow(startWorkflowRequest); - // Trigger workflow5 without correlationId. It should not get rate limited. - startWorkflowRequest.setCorrelationId(""); - String workflowId5 = workflowClient.startWorkflow(startWorkflowRequest); - - Workflow workflow1 = workflowClient.getWorkflow(workflowId1, true); - Workflow workflow2 = workflowClient.getWorkflow(workflowId2, true); - Workflow workflow3 = workflowClient.getWorkflow(workflowId3, true); - AtomicReference workflow4 = new AtomicReference<>(workflowClient.getWorkflow(workflowId4, true)); - AtomicReference workflow5 = new AtomicReference<>(workflowClient.getWorkflow(workflowId5, true)); - - // Assertions - Assertions.assertEquals(workflow1.getStatus(), Workflow.WorkflowStatus.RUNNING); - Assertions.assertEquals(workflow2.getStatus(), Workflow.WorkflowStatus.RUNNING); - Assertions.assertEquals(workflow3.getStatus(), Workflow.WorkflowStatus.RUNNING); - Assertions.assertEquals(workflow4.get().getStatus(), Workflow.WorkflowStatus.RUNNING); - Assertions.assertEquals(workflow5.get().getStatus(), Workflow.WorkflowStatus.RUNNING); - // Workflow4 and workflow5 tasks should not get scheduled. 
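// (workflow5 was started without the rate-limited correlationId, so its first task is scheduled
// right away; only workflow4 is held back, as the assertions below verify.)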
- Assertions.assertEquals(workflow1.getTasks().size(), 1); - Assertions.assertEquals(workflow2.getTasks().size(), 1); - Assertions.assertEquals(workflow3.getTasks().size(), 1); - Assertions.assertEquals(workflow4.get().getTasks().size(), 0); - Assertions.assertEquals(workflow5.get().getTasks().size(), 1); - - TaskResult taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId1); - taskResult.setTaskId(workflow1.getTasks().get(0).getTaskId()); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskClient.updateTask(taskResult); - - // Now workflow4 task get scheduled. Workflow5 tasks should not get scheduled. - // Wait for 1 second to let sweeper run - await().atMost(1, TimeUnit.SECONDS).untilAsserted(() -> { - workflow4.set(workflowClient.getWorkflow(workflowId4, true)); - assertEquals(workflow4.get().getTasks().size(), 1); - }); - metadataClient.unregisterWorkflowDef(workflowName, 1); - metadataClient.unregisterTaskDef(taskName); - } - - private static void registerWorkflowDef(String workflowName, String taskName, MetadataClient metadataClient, boolean isExecLimit) { - TaskDef taskDef = new TaskDef(taskName); - taskDef.setOwnerEmail("test@orkes.io"); - taskDef.setRetryCount(0); - if (isExecLimit) { - taskDef.setConcurrentExecLimit(1); - } else { - taskDef.setRateLimitPerFrequency(1); - taskDef.setRateLimitFrequencyInSeconds(10); - } - - WorkflowTask simpleTask = new WorkflowTask(); - simpleTask.setTaskReferenceName(taskName); - simpleTask.setName(taskName); - simpleTask.setTaskDefinition(taskDef); - simpleTask.setWorkflowTaskType(TaskType.SIMPLE); - simpleTask.setInputParameters(Map.of("value", "${workflow.input.value}", "order", "123")); - - - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName(workflowName); - workflowDef.setTimeoutSeconds(600); - workflowDef.setTimeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF); - workflowDef.setOwnerEmail("test@orkes.io"); - workflowDef.setInputParameters(Arrays.asList("value", "inlineValue")); - workflowDef.setDescription("Workflow to monitor order state"); - workflowDef.setTasks(Arrays.asList(simpleTask)); - metadataClient.registerWorkflowDef(workflowDef); - metadataClient.registerTaskDefs(Arrays.asList(taskDef)); - } -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/WaitTaskTest.java b/src/test/java/io/orkes/conductor/client/e2e/WaitTaskTest.java deleted file mode 100644 index e76e38b1..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/WaitTaskTest.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright 2023 Orkes, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import java.time.Duration; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; - -import org.junit.After; -import org.junit.jupiter.api.Test; - -import com.netflix.conductor.client.http.MetadataClient; -import com.netflix.conductor.client.http.TaskClient; -import com.netflix.conductor.client.http.WorkflowClient; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.sdk.workflow.def.ConductorWorkflow; -import com.netflix.conductor.sdk.workflow.def.tasks.Wait; -import com.netflix.conductor.sdk.workflow.executor.WorkflowExecutor; - -import io.orkes.conductor.client.ApiClient; -import io.orkes.conductor.client.OrkesClients; -import io.orkes.conductor.client.util.ApiUtil; - -import static java.time.temporal.ChronoUnit.SECONDS; -import static org.junit.jupiter.api.Assertions.*; - -public class WaitTaskTest { - - private WorkflowExecutor executor; - - @Test - public void testWaitTimeout() throws ExecutionException, InterruptedException, TimeoutException { - ApiClient apiClient = ApiUtil.getApiClientWithCredentials(); - TaskClient taskClient = new OrkesClients(apiClient).getTaskClient(); - WorkflowClient workflowClient = new OrkesClients(apiClient).getWorkflowClient(); - MetadataClient metadataClient = new OrkesClients(apiClient).getMetadataClient(); - executor = new WorkflowExecutor(taskClient, workflowClient, metadataClient, 1000); - - ConductorWorkflow> workflow = new ConductorWorkflow<>(executor); - workflow.setName("wait_task_test"); - workflow.setVersion(1); - workflow.setVariables(new HashMap<>()); - workflow.add(new Wait("wait_for_2_second", Duration.of(2, SECONDS))); - CompletableFuture future = workflow.executeDynamic(new HashMap<>()); - assertNotNull(future); - Workflow run = future.get(60, TimeUnit.SECONDS); - assertNotNull(run); - assertEquals(Workflow.WorkflowStatus.COMPLETED, run.getStatus()); - assertEquals(1, run.getTasks().size()); - long timeToExecute = run.getTasks().get(0).getEndTime() - run.getTasks().get(0).getScheduledTime(); - - //Ensure the wait completes within 500ms buffer - assertTrue(timeToExecute < 2500, "Wait task did not complete in time, took " + timeToExecute + " millis"); - } - - @After - public void cleanup() { - if(executor != null) { - executor.shutdown(); - } - } -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/WebhookConfig.java b/src/test/java/io/orkes/conductor/client/e2e/WebhookConfig.java deleted file mode 100644 index 8f73c5bf..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/WebhookConfig.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright 2020 Orkes, Inc. - *
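// A minimal sketch of the SDK-based pattern from the deleted WaitTaskTest, assuming a
// WorkflowExecutor built from the Orkes task/workflow/metadata clients.
ConductorWorkflow<Map<String, Object>> waitWorkflow = new ConductorWorkflow<>(executor);
waitWorkflow.setName("wait_task_test");
waitWorkflow.setVersion(1);
waitWorkflow.add(new Wait("wait_for_2_second", Duration.of(2, ChronoUnit.SECONDS)));

// executeDynamic registers the definition on the fly and starts it; the future completes when
// the workflow does, so the 2 second wait should finish well inside the timeout.
Workflow run = waitWorkflow.executeDynamic(new HashMap<>()).get(60, TimeUnit.SECONDS);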
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import java.util.Map; - -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.RequiredArgsConstructor; - -@Data -@RequiredArgsConstructor -@AllArgsConstructor -public class WebhookConfig { - - private String id; - - private String name; - - private Map receiverWorkflowNamesToVersions; - - private Map workflowsToStart; - - private Map headers; - - private Verifier verifier; - - private String sourcePlatform; - - public enum Verifier { - SLACK_BASED, - SIGNATURE_BASED, - HEADER_BASED, - TWITTER - } -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/WebhookTests.java b/src/test/java/io/orkes/conductor/client/e2e/WebhookTests.java deleted file mode 100644 index 71f051d4..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/WebhookTests.java +++ /dev/null @@ -1,245 +0,0 @@ -/* - * Copyright 2023 Orkes, Inc. - *
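// (The WebhookConfig class above is a plain request/response POJO: the tests that follow
// serialize it with Jackson and POST it to <basePath>/metadata/webhook to create header-verified
// webhooks, then exercise the webhook URLs built from the returned id.)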
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import java.util.*; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; - -import com.netflix.conductor.common.config.ObjectMapperProvider; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.run.WorkflowSummary; - -import io.orkes.conductor.client.ApiClient; -import io.orkes.conductor.client.MetadataClient; -import io.orkes.conductor.client.OrkesClients; -import io.orkes.conductor.client.WorkflowClient; -import io.orkes.conductor.client.http.OrkesWorkflowClient; -import io.orkes.conductor.sdk.examples.ApiUtil; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.squareup.okhttp.*; -import lombok.SneakyThrows; -import lombok.extern.slf4j.Slf4j; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.testcontainers.shaded.org.awaitility.Awaitility.await; - -@Slf4j -public class WebhookTests { - - private static ObjectMapper om = new ObjectMapperProvider().getObjectMapper(); - - private static String startWorkflowWebhookId; - - private static String receiveWebhookId; - - private static String webhookUrl; - - private static String receiveWebhookUrl; - - private static String webhookHeaderKey = UUID.randomUUID().toString(); - - private static String webhookHeaderValue = UUID.randomUUID().toString(); - - private static ApiClient client = ApiUtil.getApiClientWithCredentials();; - - private static final String WORKFLOW_NAME = "e2e-webhook-wf"; - - private static final OkHttpClient httpClient = new OkHttpClient(); - - private String correlationId = UUID.randomUUID().toString(); - - @SneakyThrows - @Test - public void testWebHook() { - WorkflowClient workflowClient = new OrkesWorkflowClient(client); - int count = 64; - String[] keys = new String[count]; - for (int i = 0; i < count; i++) { - String key = UUID.randomUUID().toString(); - keys[i] = key; - } - - sendWebhook(keys, webhookUrl); - List workflowIds = new ArrayList<>(); - await().pollInterval(1, TimeUnit.SECONDS).atMost(60, TimeUnit.SECONDS).untilAsserted(() ->{ - SearchResult workflows = workflowClient.search(0, count, "", correlationId, "status = 'RUNNING'"); - assertNotNull(workflows); - assertNotNull(workflows.getResults()); - assertEquals(count, workflows.getResults().size()); - workflowIds.addAll(workflows.getResults().stream().map(result -> result.getWorkflowId()).collect(Collectors.toList())); - log.info("Found {}", workflowIds); - }); - assertNotNull(workflowIds); - assertEquals(count, workflowIds.size()); - - for (int i = 0; i < count; i++) { - String key = keys[i]; - Map input = new HashMap<>(); - input.put("event", Map.of("id", key)); - sendWebhook(input, receiveWebhookUrl); - } - - await().pollInterval(1, TimeUnit.SECONDS).atMost(30, TimeUnit.SECONDS).untilAsserted(() ->{ - for 
(String wfId : workflowIds) { - Workflow workflow = workflowClient.getWorkflow(wfId, true); - assertNotNull(workflow); - assertEquals(2, workflow.getTasks().size()); - assertEquals(Task.Status.COMPLETED, workflow.getTasks().get(0).getStatus()); - assertEquals(Task.Status.IN_PROGRESS, workflow.getTasks().get(1).getStatus()); - Map event = (Map) workflow.getTasks().get(0).getOutputData().get("event"); - assertEquals(workflow.getInput().get("key"), event.get("id")); - } - }); - - for (int i = 0; i < count; i++) { - String key = keys[i]; - Map input = new HashMap<>(); - input.put("key", 12); - sendWebhook(input, receiveWebhookUrl + "?id=" + key); - } - - await().pollInterval(1, TimeUnit.SECONDS).atMost(30, TimeUnit.SECONDS).untilAsserted(() ->{ - for (String wfId : workflowIds) { - Workflow workflow = workflowClient.getWorkflow(wfId, true); - assertNotNull(workflow); - assertEquals(Workflow.WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - assertEquals(Task.Status.COMPLETED, workflow.getTasks().get(0).getStatus()); - assertEquals(Task.Status.COMPLETED, workflow.getTasks().get(1).getStatus()); - assertEquals(workflow.getInput().get("key"), workflow.getTasks().get(1).getOutputData().get("id")); - } - }); - - } - - private void sendWebhook(String[] keys, String url) { - for (int i = 0; i < keys.length; i++) { - Map input = new HashMap<>(); - input.put("key", keys[i]); - input.put("correlationId", correlationId); - sendWebhook(input, url); - } - } - - @SneakyThrows - private void sendWebhook(Map input, String url) { - String json = om.writeValueAsString(input); - - RequestBody requestBody = RequestBody.create(MediaType.parse("application/json"), json); - Request.Builder builder = new Request.Builder().addHeader(webhookHeaderKey, webhookHeaderValue).url(url); - Request request = builder.post(requestBody).build(); - Response response = httpClient.newCall(request).execute(); - assertEquals(200, response.code()); - } - - @SneakyThrows - @BeforeAll - public static void registerWebhook() { - - MetadataClient metadataClient = new OrkesClients(client).getMetadataClient(); - - - WebhookConfig config = new WebhookConfig(); - config.setName("e2e-webhook-test-start-wf"); - config.setWorkflowsToStart(Map.of(WORKFLOW_NAME, 1)); - config.setHeaders(Map.of(webhookHeaderKey,webhookHeaderValue)); - config.setVerifier(WebhookConfig.Verifier.HEADER_BASED); - config.setSourcePlatform("Custom"); - - WebhookConfig config2 = new WebhookConfig(); - config2.setName("e2e-webhook-test-receive-webhook"); - config2.setReceiverWorkflowNamesToVersions(Map.of(WORKFLOW_NAME, 1)); - config2.setHeaders(Map.of(webhookHeaderKey,webhookHeaderValue)); - config2.setVerifier(WebhookConfig.Verifier.HEADER_BASED); - config2.setSourcePlatform("Custom"); - - - WorkflowDef def = new WorkflowDef(); - def.setName(WORKFLOW_NAME); - def.setVersion(1); - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setType("WAIT_FOR_WEBHOOK"); - workflowTask.setName("wait_for_webhook"); - workflowTask.setTaskReferenceName("wait_for_webhook"); - workflowTask.getInputParameters().put("matches", Map.of("$['event']['id']", "${workflow.input.key}")); - - WorkflowTask workflowTask2 = new WorkflowTask(); - workflowTask2.setType("WAIT_FOR_WEBHOOK"); - workflowTask2.setName("wait_for_webhook2"); - workflowTask2.setTaskReferenceName("wait_for_webhook2"); - workflowTask2.getInputParameters().put("matches", Map.of("$['id']", "${workflow.input.key}")); - - def.getTasks().add(workflowTask); - 
def.getTasks().add(workflowTask2); - - metadataClient.updateWorkflowDefs(List.of(def)); - - startWorkflowWebhookId = registerWebHook(config); - webhookUrl = client.getBasePath().replaceFirst("api","webhook") + "/" + startWorkflowWebhookId; - - receiveWebhookId = registerWebHook(config2); - receiveWebhookUrl = client.getBasePath().replaceFirst("api","webhook") + "/" + receiveWebhookId; - - - log.info("webhookUrl URL {}", webhookUrl); - log.info("receiveWebhookUrl URL {}", receiveWebhookUrl); - - } - - @SneakyThrows - private static String registerWebHook(WebhookConfig config) { - String url = client.getBasePath() + "/metadata/webhook"; - String json = om.writeValueAsString(config); - RequestBody requestBody = RequestBody.create(MediaType.parse("application/json"), json); - Request.Builder builder = new Request.Builder().url(url); - Request request = builder.post(requestBody).addHeader("X-Authorization", client.getToken()).build(); - Response response = httpClient.newCall(request).execute(); - assertEquals(200, response.code()); - byte[] responseBytes = response.body().bytes(); - config = om.readValue(responseBytes, WebhookConfig.class); - return config.getId(); - } - - @SneakyThrows - @AfterAll - public static void cleanUp() { - if(startWorkflowWebhookId != null) { - String url = client.getBasePath() + "/metadata/webhook/" + startWorkflowWebhookId; - OkHttpClient httpClient = new OkHttpClient(); - Request.Builder builder = new Request.Builder().url(url); - Request request = builder.delete().addHeader("X-Authorization", client.getToken()).build(); - httpClient.newCall(request).execute(); - - } - - if(receiveWebhookUrl != null) { - String url = client.getBasePath() + "/metadata/webhook/" + receiveWebhookUrl; - OkHttpClient httpClient = new OkHttpClient(); - Request.Builder builder = new Request.Builder().url(url); - Request request = builder.delete().addHeader("X-Authorization", client.getToken()).build(); - httpClient.newCall(request).execute(); - - } - } -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/WorkerTaskPermissionTests.java b/src/test/java/io/orkes/conductor/client/e2e/WorkerTaskPermissionTests.java deleted file mode 100644 index d0cb0cd3..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/WorkerTaskPermissionTests.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright 2022 Orkes, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import java.util.List; -import java.util.concurrent.TimeUnit; - -import org.apache.commons.lang3.RandomStringUtils; -import org.junit.Assert; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -import io.orkes.conductor.client.*; -import io.orkes.conductor.client.http.*; -import io.orkes.conductor.client.model.*; -import io.orkes.conductor.client.util.ApiUtil; -import io.orkes.conductor.client.util.RegistrationUtil; - -import static io.orkes.conductor.client.util.ApiUtil.USER1_APP_ID; -import static io.orkes.conductor.client.util.ApiUtil.getEnv; -import static org.testcontainers.shaded.org.awaitility.Awaitility.await; - -public class WorkerTaskPermissionTests { - - @Test - public void testWorkerTaskPermissionForUser2() { - ApiClient apiUser1Client = ApiUtil.getUser1Client(); - MetadataClient user1MetadataClient = new OrkesMetadataClient(apiUser1Client); - - // Create user2 client and check access should not be there workflow1 - ApiClient apiUser2Client = ApiUtil.getUser2Client(); - MetadataClient user2MetadataClient = new OrkesMetadataClient(apiUser2Client); - - String taskName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String taskName2 = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String workflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String tagKey = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String tagValue = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - - TagObject tagObject = new TagObject().type(TagObject.TypeEnum.METADATA).key(tagKey).value(tagValue); - - // Register workflow - RegistrationUtil.registerWorkflowDef(workflowName, taskName, taskName2, user1MetadataClient); - - // Tag workflow and task - user1MetadataClient.addWorkflowTag(tagObject, workflowName); - user1MetadataClient.addTaskTag(tagObject, taskName); - - // User 2 should not have access to workflow or task. - Assert.assertThrows(ApiException.class, () -> user2MetadataClient.getWorkflowDef(workflowName, 1)); - - ApiClient adminClient = ApiUtil.getApiClientWithCredentials(); - AuthorizationClient authorizationClient = new OrkesAuthorizationClient(adminClient); - - String groupName = "worker-test-group"; - try { - authorizationClient.deleteGroup(groupName); - } catch (Exception e) { - // Group does not exist. 
- } - // Create group and add these two users in the group - Group group = authorizationClient.upsertGroup(getUpsertGroupRequest(), groupName); - authorizationClient.addUserToGroup(groupName, "conductoruser1@gmail.com"); - authorizationClient.addUserToGroup(groupName, "conductoruser2@gmail.com"); - - // Give permissions to tag in the group - AuthorizationRequest authorizationRequest = new AuthorizationRequest(); - authorizationRequest.setSubject(new SubjectRef().id(groupName).type(SubjectRef.TypeEnum.GROUP)); - authorizationRequest.setAccess(List.of(AuthorizationRequest.AccessEnum.READ, AuthorizationRequest.AccessEnum.EXECUTE, - AuthorizationRequest.AccessEnum.UPDATE, - AuthorizationRequest.AccessEnum.DELETE)); - authorizationRequest.setTarget(new TargetRef().id(tagKey + ":" + tagValue ).type(TargetRef.TypeEnum.TAG)); - authorizationClient.grantPermissions(authorizationRequest); - - //Grant permission to execute the task in user2 application. - authorizationRequest.setSubject(new SubjectRef().id(getEnv(USER1_APP_ID)).type(SubjectRef.TypeEnum.USER)); - authorizationClient.grantPermissions(authorizationRequest); - - // user2 should be able to access workflow definition - await().atMost(3, TimeUnit.SECONDS).untilAsserted(() -> { - try { - Assertions.assertNotNull(user2MetadataClient.getWorkflowDef(workflowName, 1)); - }catch(Exception e) { - // Server might take time to affect permission changes. - } - }); - - user1MetadataClient.unregisterWorkflowDef(workflowName, 1); - user1MetadataClient.unregisterTaskDef(taskName); - authorizationClient.deleteGroup(groupName); - authorizationClient.removePermissions(authorizationRequest); - } - - UpsertGroupRequest getUpsertGroupRequest() { - return new UpsertGroupRequest() - .description("Group used for SDK testing") - .roles(List.of(UpsertGroupRequest.RolesEnum.USER)); - } -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/WorkflowRateLimiterTests.java b/src/test/java/io/orkes/conductor/client/e2e/WorkflowRateLimiterTests.java deleted file mode 100644 index 70e794c4..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/WorkflowRateLimiterTests.java +++ /dev/null @@ -1,213 +0,0 @@ -/* - * Copyright 2022 Orkes, Inc. - *
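// A minimal sketch of the tag-based permission flow from the deleted test above, assuming an
// admin ApiClient; the group name, user email and "key:value" tag id are illustrative.
AuthorizationClient authorizationClient = new OrkesAuthorizationClient(adminClient);

authorizationClient.upsertGroup(
        new UpsertGroupRequest()
                .description("Group used for SDK testing")
                .roles(List.of(UpsertGroupRequest.RolesEnum.USER)),
        "worker-test-group");
authorizationClient.addUserToGroup("worker-test-group", "conductoruser1@gmail.com");

// Grant the group access to every workflow and task definition carrying the tag.
AuthorizationRequest grant = new AuthorizationRequest();
grant.setSubject(new SubjectRef().id("worker-test-group").type(SubjectRef.TypeEnum.GROUP));
grant.setAccess(List.of(AuthorizationRequest.AccessEnum.READ, AuthorizationRequest.AccessEnum.EXECUTE));
grant.setTarget(new TargetRef().id("TAG_KEY:TAG_VALUE").type(TargetRef.TypeEnum.TAG));
authorizationClient.grantPermissions(grant);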
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import java.util.Arrays; -import java.util.Map; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; - -import org.apache.commons.lang3.RandomStringUtils; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.Workflow; - -import io.orkes.conductor.client.ApiClient; -import io.orkes.conductor.client.MetadataClient; -import io.orkes.conductor.client.TaskClient; -import io.orkes.conductor.client.WorkflowClient; -import io.orkes.conductor.client.http.OrkesMetadataClient; -import io.orkes.conductor.client.http.OrkesTaskClient; -import io.orkes.conductor.client.http.OrkesWorkflowClient; -import io.orkes.conductor.client.model.TagObject; -import io.orkes.conductor.sdk.examples.ApiUtil; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.testcontainers.shaded.org.awaitility.Awaitility.await; - -public class WorkflowRateLimiterTests { - - @Test - @DisplayName("Check workflow with simple rate limit by name") - public void testRateLimitByWorkflowName() { - ApiClient apiClient = ApiUtil.getApiClientWithCredentials(); - WorkflowClient workflowClient = new OrkesWorkflowClient(apiClient); - MetadataClient metadataClient = new OrkesMetadataClient(apiClient); - TaskClient taskClient = new OrkesTaskClient(apiClient); - String workflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String taskName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - // Register workflow - registerWorkflowDef(workflowName, taskName, metadataClient); - TagObject tagObject = new TagObject(); - tagObject.setType(TagObject.TypeEnum.RATE_LIMIT); - tagObject.setKey(workflowName); - tagObject.setValue(3); // Only 3 invocations are allowed. 
- metadataClient.addWorkflowTag(tagObject, workflowName); - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName); - String workflowId1 = workflowClient.startWorkflow(startWorkflowRequest); - String workflowId2 = workflowClient.startWorkflow(startWorkflowRequest); - String workflowId3 = workflowClient.startWorkflow(startWorkflowRequest); - String workflowId4 = workflowClient.startWorkflow(startWorkflowRequest); - String workflowId5 = workflowClient.startWorkflow(startWorkflowRequest); - - Workflow workflow1 = workflowClient.getWorkflow(workflowId1, true); - Workflow workflow2 = workflowClient.getWorkflow(workflowId2, true); - Workflow workflow3 = workflowClient.getWorkflow(workflowId3, true); - AtomicReference workflow4 = new AtomicReference<>(workflowClient.getWorkflow(workflowId4, true)); - AtomicReference workflow5 = new AtomicReference<>(workflowClient.getWorkflow(workflowId5, true)); - - // Assertions - Assertions.assertEquals(workflow1.getStatus(), Workflow.WorkflowStatus.RUNNING); - Assertions.assertEquals(workflow2.getStatus(), Workflow.WorkflowStatus.RUNNING); - Assertions.assertEquals(workflow3.getStatus(), Workflow.WorkflowStatus.RUNNING); - Assertions.assertEquals(workflow4.get().getStatus(), Workflow.WorkflowStatus.RUNNING); - Assertions.assertEquals(workflow5.get().getStatus(), Workflow.WorkflowStatus.RUNNING); - // Workflow4 and workflow5 tasks should not get scheduled. - Assertions.assertEquals(workflow1.getTasks().size(), 1); - Assertions.assertEquals(workflow2.getTasks().size(), 1); - Assertions.assertEquals(workflow3.getTasks().size(), 1); - Assertions.assertEquals(workflow4.get().getTasks().size(), 0); - Assertions.assertEquals(workflow5.get().getTasks().size(), 0); - - // Complete workflow1. - TaskResult taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId1); - taskResult.setTaskId(workflow1.getTasks().get(0).getTaskId()); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskClient.updateTask(taskResult); - - // Now workflow4 task get scheduled. Workflow5 tasks should not get scheduled. 
- await().atMost(33, TimeUnit.SECONDS).pollInterval(2, TimeUnit.SECONDS).untilAsserted(() -> { - try { - workflow4.set(workflowClient.getWorkflow(workflowId4, true)); - assertEquals(workflow4.get().getTasks().size(), 1); - workflow5.set(workflowClient.getWorkflow(workflowId5, true)); - assertEquals(workflow5.get().getTasks().size(), 0); - }catch(Exception e) {} - }); - - // Complete workflow2 - taskResult.setWorkflowInstanceId(workflowId2); - taskResult.setTaskId(workflow2.getTasks().get(0).getTaskId()); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskClient.updateTask(taskResult); - - await().atMost(1, TimeUnit.SECONDS).untilAsserted(() -> { - workflow5.set(workflowClient.getWorkflow(workflowId5, true)); - assertEquals(workflow4.get().getTasks().size(), 1); - }); - metadataClient.unregisterWorkflowDef(workflowName, 1); - metadataClient.unregisterTaskDef(taskName); - } - - @Test - @DisplayName("Check workflow with simple rate limit by correlationId") - public void testRateLimitByWorkflowCorrelationId() { - ApiClient apiClient = ApiUtil.getApiClientWithCredentials(); - WorkflowClient workflowClient = new OrkesWorkflowClient(apiClient); - MetadataClient metadataClient = new OrkesMetadataClient(apiClient); - TaskClient taskClient = new OrkesTaskClient(apiClient); - String workflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String taskName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - // Register workflow - registerWorkflowDef(workflowName, taskName, metadataClient); - TagObject tagObject = new TagObject(); - tagObject.setType(TagObject.TypeEnum.RATE_LIMIT); - tagObject.setKey("${workflow.correlationId}"); - tagObject.setValue(3); // Only 3 invocations are allowed for same correlationId - metadataClient.addWorkflowTag(tagObject, workflowName); - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setCorrelationId("rate_limited"); - startWorkflowRequest.setName(workflowName); - String workflowId1 = workflowClient.startWorkflow(startWorkflowRequest); - String workflowId2 = workflowClient.startWorkflow(startWorkflowRequest); - String workflowId3 = workflowClient.startWorkflow(startWorkflowRequest); - String workflowId4 = workflowClient.startWorkflow(startWorkflowRequest); - // Triger workflow5 without correlationId. It should not get rate limited. - startWorkflowRequest.setCorrelationId(""); - String workflowId5 = workflowClient.startWorkflow(startWorkflowRequest); - - Workflow workflow1 = workflowClient.getWorkflow(workflowId1, true); - Workflow workflow2 = workflowClient.getWorkflow(workflowId2, true); - Workflow workflow3 = workflowClient.getWorkflow(workflowId3, true); - AtomicReference workflow4 = new AtomicReference<>(workflowClient.getWorkflow(workflowId4, true)); - AtomicReference workflow5 = new AtomicReference<>(workflowClient.getWorkflow(workflowId5, true)); - - // Assertions - Assertions.assertEquals(workflow1.getStatus(), Workflow.WorkflowStatus.RUNNING); - Assertions.assertEquals(workflow2.getStatus(), Workflow.WorkflowStatus.RUNNING); - Assertions.assertEquals(workflow3.getStatus(), Workflow.WorkflowStatus.RUNNING); - Assertions.assertEquals(workflow4.get().getStatus(), Workflow.WorkflowStatus.RUNNING); - Assertions.assertEquals(workflow5.get().getStatus(), Workflow.WorkflowStatus.RUNNING); - // Workflow4 and workflow5 tasks should not get scheduled. 
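// (The fifth workflow was started with an empty correlationId, so it is not subject to this
// rate limit and its task is scheduled immediately; only workflow4 is held back until one of the
// first three workflows completes, as the assertions below verify.)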
- Assertions.assertEquals(workflow1.getTasks().size(), 1); - Assertions.assertEquals(workflow2.getTasks().size(), 1); - Assertions.assertEquals(workflow3.getTasks().size(), 1); - Assertions.assertEquals(workflow4.get().getTasks().size(), 0); - Assertions.assertEquals(workflow5.get().getTasks().size(), 1); - - // Complete workflow1. - TaskResult taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId1); - taskResult.setTaskId(workflow1.getTasks().get(0).getTaskId()); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskClient.updateTask(taskResult); - - // Now workflow4 task get scheduled. Workflow5 tasks should not get scheduled. - // Wait for 1 second to let sweeper run - await().atMost(41, TimeUnit.SECONDS).pollInterval(1,TimeUnit.SECONDS).untilAsserted(() -> { - try { - workflow4.set(workflowClient.getWorkflow(workflowId4, true)); - assertEquals(workflow4.get().getTasks().size(), 1); - }catch(Exception e){} - }); - metadataClient.unregisterWorkflowDef(workflowName, 1); - metadataClient.unregisterTaskDef(taskName); - } - - private static void registerWorkflowDef(String workflowName, String taskName, MetadataClient metadataClient) { - TaskDef taskDef = new TaskDef(taskName); - taskDef.setOwnerEmail("test@orkes.io"); - taskDef.setRetryCount(0); - - WorkflowTask simpleTask = new WorkflowTask(); - simpleTask.setTaskReferenceName(taskName); - simpleTask.setName(taskName); - simpleTask.setTaskDefinition(taskDef); - simpleTask.setWorkflowTaskType(TaskType.SIMPLE); - simpleTask.setInputParameters(Map.of("value", "${workflow.input.value}", "order", "123")); - - - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName(workflowName); - workflowDef.setOwnerEmail("test@orkes.io"); - workflowDef.setInputParameters(Arrays.asList("value", "inlineValue")); - workflowDef.setDescription("Workflow to monitor order state"); - workflowDef.setTimeoutSeconds(600); - workflowDef.setTimeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF); - workflowDef.setTasks(Arrays.asList(simpleTask)); - metadataClient.registerWorkflowDef(workflowDef); - metadataClient.registerTaskDefs(Arrays.asList(taskDef)); - } -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/WorkflowRerunTests.java b/src/test/java/io/orkes/conductor/client/e2e/WorkflowRerunTests.java deleted file mode 100644 index 9f03a6bd..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/WorkflowRerunTests.java +++ /dev/null @@ -1,211 +0,0 @@ -/* - * Copyright 2022 Orkes, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.TimeUnit; - -import org.apache.commons.lang3.RandomStringUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.workflow.*; -import com.netflix.conductor.common.run.Workflow; - -import io.orkes.conductor.client.ApiClient; -import io.orkes.conductor.client.MetadataClient; -import io.orkes.conductor.client.TaskClient; -import io.orkes.conductor.client.WorkflowClient; -import io.orkes.conductor.client.http.OrkesMetadataClient; -import io.orkes.conductor.client.http.OrkesTaskClient; -import io.orkes.conductor.client.http.OrkesWorkflowClient; -import io.orkes.conductor.client.model.WorkflowStatus; -import io.orkes.conductor.client.util.ApiUtil; - -import static io.orkes.conductor.client.util.RegistrationUtil.registerWorkflowDef; -import static io.orkes.conductor.client.util.RegistrationUtil.registerWorkflowWithSubWorkflowDef; -import static org.junit.jupiter.api.Assertions.*; -import static org.testcontainers.shaded.org.awaitility.Awaitility.await; - -public class WorkflowRerunTests { - - static ApiClient apiClient; - static WorkflowClient workflowClient; - static TaskClient taskClient; - static MetadataClient metadataClient; - - List workflowNames = new ArrayList<>(); - - @BeforeAll - public static void init() { - apiClient = ApiUtil.getApiClientWithCredentials(); - workflowClient = new OrkesWorkflowClient(apiClient); - metadataClient =new OrkesMetadataClient(apiClient); - taskClient = new OrkesTaskClient(apiClient); - } - - @Before - public void initTest() { - workflowNames = new ArrayList<>(); - } - @After - public void cleanUp() { - try { - for (String workflowName : workflowNames) { - metadataClient.unregisterWorkflowDef(workflowName, 1); - } - } catch (Exception e) {} - } - - @Test - @DisplayName("Check workflow with simple task and rerun functionality") - public void testRerunSimpleWorkflow() { - - String workflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String taskName1 = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String taskName2 = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - // Register workflow - registerWorkflowDef(workflowName, taskName1, taskName2, metadataClient); - workflowNames.add(workflowName); - - // Trigger two workflows - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName); - startWorkflowRequest.setVersion(1); - - String workflowId = workflowClient.startWorkflow(startWorkflowRequest); - Workflow workflow = workflowClient.getWorkflow(workflowId, true); - // Fail the simple task - String taskId = workflow.getTasks().get(1).getTaskId(); - TaskResult taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setTaskId(taskId); - taskResult.setStatus(TaskResult.Status.FAILED); - 
taskClient.updateTask(taskResult); - - // Wait for workflow to get failed - await().atMost(1, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, false); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.FAILED.name()); - }); - - RerunWorkflowRequest rerunWorkflowRequest = new RerunWorkflowRequest(); - rerunWorkflowRequest.setReRunFromWorkflowId(workflowId); - rerunWorkflowRequest.setReRunFromTaskId(taskId); - // Retry the workflow - workflowClient.rerunWorkflow(workflowId, rerunWorkflowRequest); - // Check the workflow status and few other parameters - await().atMost(5, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, true); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.RUNNING.name()); - assertEquals(workflow1.getTasks().get(1).getStatus().name(), Task.Status.SCHEDULED.name()); - assertTrue(workflow1.getTasks().get(0).isExecuted()); - assertFalse(workflow1.getTasks().get(1).isExecuted()); - }); - - taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setTaskId(taskId); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskClient.updateTask(taskResult); - - // Wait for workflow to get completed - await().atMost(5, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, false); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.COMPLETED.name()); - }); - - try { - metadataClient.unregisterWorkflowDef(workflowName, 1); - metadataClient.unregisterTaskDef(taskName2); - metadataClient.unregisterTaskDef(taskName2); - }catch (Exception e){} - } - - @Test - @DisplayName("Check workflow with sub_workflow task and rerun functionality") - public void testRerunWithSubWorkflow() throws Exception { - - apiClient = ApiUtil.getApiClientWithCredentials(); - workflowClient = new OrkesWorkflowClient(apiClient); - metadataClient = new OrkesMetadataClient(apiClient); - taskClient = new OrkesTaskClient(apiClient); - String workflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String taskName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String subWorkflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - workflowNames.add(workflowName); - workflowNames.add(subWorkflowName); - - // Register workflow - registerWorkflowWithSubWorkflowDef(workflowName, subWorkflowName, taskName, metadataClient); - - // Trigger two workflows - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName); - startWorkflowRequest.setVersion(1); - - String workflowId = workflowClient.startWorkflow(startWorkflowRequest); - System.out.print("Workflow id is " + workflowId); - Workflow workflow = workflowClient.getWorkflow(workflowId, true); - // Fail the simple task - String subworkflowId = workflow.getTasks().get(0).getSubWorkflowId(); - Workflow subWorkflow = workflowClient.getWorkflow(subworkflowId, true); - String taskId = subWorkflow.getTasks().get(0).getTaskId(); - TaskResult taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(subworkflowId); - taskResult.setTaskId(taskId); - taskResult.setStatus(TaskResult.Status.FAILED); - taskClient.updateTask(taskResult); - - // Wait for parent workflow to get failed - await().atMost(3, TimeUnit.SECONDS).pollInterval(1, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, false); - 
assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.FAILED.name()); - }); - - // Retry the sub workflow. - RerunWorkflowRequest rerunWorkflowRequest = new RerunWorkflowRequest(); - rerunWorkflowRequest.setReRunFromWorkflowId(subworkflowId); - rerunWorkflowRequest.setReRunFromTaskId(taskId); - workflowClient.rerunWorkflow(subworkflowId, rerunWorkflowRequest); - // Check the workflow status and few other parameters - await().atMost(3, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(subworkflowId, true); - assertEquals(WorkflowStatus.StatusEnum.RUNNING.name(), workflow1.getStatus().name()); - assertEquals(workflow1.getTasks().get(0).getStatus().name(), Task.Status.SCHEDULED.name()); - }); - taskId = workflowClient.getWorkflow(subworkflowId, true).getTasks().get(0).getTaskId(); - - taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(subworkflowId); - taskResult.setTaskId(taskId); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskClient.updateTask(taskResult); - - await().atMost(3, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, false); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.COMPLETED.name()); - }); - - try { - metadataClient.unregisterWorkflowDef(workflowName, 1); - metadataClient.unregisterTaskDef(taskName); - } catch (Exception e){} - } -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/WorkflowRestartTests.java b/src/test/java/io/orkes/conductor/client/e2e/WorkflowRestartTests.java deleted file mode 100644 index 984d2b81..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/WorkflowRestartTests.java +++ /dev/null @@ -1,265 +0,0 @@ -/* - * Copyright 2022 Orkes, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import java.util.Arrays; -import java.util.concurrent.TimeUnit; - -import org.apache.commons.lang3.RandomStringUtils; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.*; -import com.netflix.conductor.common.run.Workflow; - -import io.orkes.conductor.client.ApiClient; -import io.orkes.conductor.client.MetadataClient; -import io.orkes.conductor.client.TaskClient; -import io.orkes.conductor.client.WorkflowClient; -import io.orkes.conductor.client.http.OrkesMetadataClient; -import io.orkes.conductor.client.http.OrkesTaskClient; -import io.orkes.conductor.client.http.OrkesWorkflowClient; -import io.orkes.conductor.client.model.WorkflowStatus; -import io.orkes.conductor.client.util.ApiUtil; - -import static io.orkes.conductor.client.util.RegistrationUtil.registerWorkflowDef; -import static io.orkes.conductor.client.util.RegistrationUtil.registerWorkflowWithSubWorkflowDef; -import static org.junit.jupiter.api.Assertions.*; -import static org.testcontainers.shaded.org.awaitility.Awaitility.await; - -public class WorkflowRestartTests { - - static ApiClient apiClient; - static WorkflowClient workflowClient; - static TaskClient taskClient; - static MetadataClient metadataClient; - - @BeforeAll - public static void init() { - apiClient = ApiUtil.getApiClientWithCredentials(); - workflowClient = new OrkesWorkflowClient(apiClient); - metadataClient =new OrkesMetadataClient(apiClient); - taskClient = new OrkesTaskClient(apiClient); - - } - - @Test - @DisplayName("Check workflow with simple task and restart functionality") - public void testRestartSimpleWorkflow() { - String workflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - - // Register workflow - registerWorkflowDef(workflowName, "simple", "inline", metadataClient); - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName); - startWorkflowRequest.setVersion(1); - - String workflowId = workflowClient.startWorkflow(startWorkflowRequest); - Workflow workflow = workflowClient.getWorkflow(workflowId, true); - // Fail the simple task - String taskId = workflow.getTasks().get(1).getTaskId(); - TaskResult taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setTaskId(taskId); - taskResult.setStatus(TaskResult.Status.FAILED); - taskClient.updateTask(taskResult); - - // Wait for workflow to get failed - await().atMost(1, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, false); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.FAILED.name()); - }); - - // Restart the workflow - workflowClient.restart(workflowId, false); - // Check the workflow status and few other parameters - await().atMost(5, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow 
workflow1 = workflowClient.getWorkflow(workflowId, true); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.RUNNING.name()); - assertEquals(workflow1.getTasks().get(1).getStatus().name(), Task.Status.SCHEDULED.name()); - assertTrue(workflow1.getTasks().get(0).isExecuted()); - assertFalse(workflow1.getTasks().get(1).isExecuted()); - }); - - taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setTaskId(workflowClient.getWorkflow(workflowId, true).getTasks().get(1).getTaskId()); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskClient.updateTask(taskResult); - - // Wait for workflow to get completed - await().atMost(5, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, false); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.COMPLETED.name()); - }); - - metadataClient.unregisterWorkflowDef(workflowName, 1); - metadataClient.unregisterTaskDef("simple"); - metadataClient.unregisterTaskDef("inline"); - } - - @Test - @DisplayName("Check workflow with simple task and restart functionality by changing workflow definition") - public void testRestartSimpleWorkflowChangeDefinition() { - String workflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - - // Register workflow - registerWorkflowDef(workflowName,"simple", "inline", metadataClient); - - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName); - startWorkflowRequest.setVersion(1); - - String workflowId = workflowClient.startWorkflow(startWorkflowRequest); - Workflow workflow = workflowClient.getWorkflow(workflowId, true); - // Fail the simple task - String taskId = workflow.getTasks().get(1).getTaskId(); - TaskResult taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setTaskId(taskId); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskClient.updateTask(taskResult); - - // Wait for workflow to get failed - await().atMost(1, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, false); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.COMPLETED.name()); - }); - - // Change the workflow definition. - WorkflowDef workflowDef = metadataClient.getWorkflowDef(workflowName, 1); - addTasksInWorkflowDef(workflowDef); - - // Restart the workflow - workflowClient.restart(workflowId, true); - // Check the workflow status and few other parameters - await().atMost(5, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, true); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.RUNNING.name()); - assertEquals(workflow1.getTasks().get(1).getStatus().name(), Task.Status.SCHEDULED.name()); - assertTrue(workflow1.getTasks().get(0).isExecuted()); - assertFalse(workflow1.getTasks().get(1).isExecuted()); - }); - taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setTaskId(workflowClient.getWorkflow(workflowId, true).getTasks().get(1).getTaskId()); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskClient.updateTask(taskResult); - - // Workflow will still be running since new simple task has been added. 
- await().atMost(5, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, false); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.RUNNING.name()); - }); - - // Complete the newly added simple task. - taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setTaskId(workflowClient.getWorkflow(workflowId, true).getTasks().get(2).getTaskId()); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskClient.updateTask(taskResult); - - await().atMost(5, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, false); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.COMPLETED.name()); - }); - - metadataClient.unregisterWorkflowDef(workflowName, 1); - metadataClient.unregisterTaskDef("simple"); - metadataClient.unregisterTaskDef("inline"); - } - - private void addTasksInWorkflowDef(WorkflowDef workflowDef) { - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setTaskReferenceName("added"); - workflowTask.setName("added"); - workflowTask.setType(TaskType.SIMPLE.name()); - - TaskDef task = new TaskDef(); - task.setName("added"); - task.setOwnerEmail("test@orkes.io"); - metadataClient.registerTaskDefs(Arrays.asList(task)); - workflowDef.getTasks().add(workflowTask); - metadataClient.registerWorkflowDef(workflowDef); - } - - @Test - @DisplayName("Check workflow with sub_workflow task and restart functionality") - public void testRestartWithSubWorkflow() { - - apiClient = ApiUtil.getApiClientWithCredentials(); - workflowClient = new OrkesWorkflowClient(apiClient); - metadataClient = new OrkesMetadataClient(apiClient); - taskClient = new OrkesTaskClient(apiClient); - String workflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String subWorkflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - - // Register workflow - registerWorkflowWithSubWorkflowDef(workflowName, subWorkflowName, "simple", metadataClient); - - // Trigger two workflows - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName); - startWorkflowRequest.setVersion(1); - - String workflowId = workflowClient.startWorkflow(startWorkflowRequest); - System.out.print("Workflow id is " + workflowId); - Workflow workflow = workflowClient.getWorkflow(workflowId, true); - // Fail the simple task - String subworkflowId = workflow.getTasks().get(0).getSubWorkflowId(); - Workflow subWorkflow = workflowClient.getWorkflow(subworkflowId, true); - String taskId = subWorkflow.getTasks().get(0).getTaskId(); - TaskResult taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(subworkflowId); - taskResult.setTaskId(taskId); - taskResult.setStatus(TaskResult.Status.FAILED); - taskClient.updateTask(taskResult); - - // Wait for parent workflow to get failed - await().atMost(3, TimeUnit.SECONDS).pollInterval(1, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, false); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.FAILED.name()); - }); - - // Restart the sub workflow. 
- workflowClient.restart(subworkflowId, false); - // Check the workflow status and few other parameters - await().atMost(3, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(subworkflowId, true); - assertEquals(WorkflowStatus.StatusEnum.RUNNING.name(), workflow1.getStatus().name()); - assertEquals(workflow1.getTasks().get(0).getStatus().name(), Task.Status.SCHEDULED.name()); - }); - taskId = workflowClient.getWorkflow(subworkflowId, true).getTasks().get(0).getTaskId(); - - taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(subworkflowId); - taskResult.setTaskId(taskId); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskClient.updateTask(taskResult); - - await().atMost(3, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, false); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.COMPLETED.name()); - }); - - metadataClient.unregisterWorkflowDef(workflowName, 1); - metadataClient.unregisterTaskDef("simple"); - } - -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/WorkflowRetryTests.java b/src/test/java/io/orkes/conductor/client/e2e/WorkflowRetryTests.java deleted file mode 100644 index bd6b82e0..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/WorkflowRetryTests.java +++ /dev/null @@ -1,196 +0,0 @@ -/* - * Copyright 2022 Orkes, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import java.util.UUID; -import java.util.concurrent.TimeUnit; - -import org.apache.commons.lang3.RandomStringUtils; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.run.Workflow; - -import io.orkes.conductor.client.*; -import io.orkes.conductor.client.http.OrkesMetadataClient; -import io.orkes.conductor.client.http.OrkesTaskClient; -import io.orkes.conductor.client.http.OrkesWorkflowClient; -import io.orkes.conductor.client.model.*; -import io.orkes.conductor.client.util.ApiUtil; - -import lombok.extern.slf4j.Slf4j; - -import static io.orkes.conductor.client.util.RegistrationUtil.registerWorkflowDef; -import static io.orkes.conductor.client.util.RegistrationUtil.registerWorkflowWithSubWorkflowDef; -import static org.junit.jupiter.api.Assertions.*; -import static org.testcontainers.shaded.org.awaitility.Awaitility.await; - -@Slf4j -public class WorkflowRetryTests { - - static ApiClient apiClient; - static WorkflowClient workflowClient; - static TaskClient taskClient; - static MetadataClient metadataClient; - - static String taskDefName = UUID.randomUUID().toString(); - - @BeforeAll - public static void init() { - apiClient = ApiUtil.getApiClientWithCredentials(); - workflowClient = new OrkesWorkflowClient(apiClient); - metadataClient =new OrkesMetadataClient(apiClient); - taskClient = new OrkesTaskClient(apiClient); - - } - - @Test - @DisplayName("Check workflow with simple task and retry functionality") - public void testRetrySimpleWorkflow() { - String workflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - - // Register workflow - registerWorkflowDef(workflowName, taskDefName, taskDefName, metadataClient); - - // Trigger two workflows - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName); - startWorkflowRequest.setVersion(1); - - String workflowId = workflowClient.startWorkflow(startWorkflowRequest); - Workflow workflow = workflowClient.getWorkflow(workflowId, true); - // Fail the simple task - String taskId = workflow.getTasks().get(1).getTaskId(); - TaskResult taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setTaskId(taskId); - taskResult.setStatus(TaskResult.Status.FAILED); - taskResult.setReasonForIncompletion("failed"); - taskClient.updateTask(taskResult); - - // Wait for workflow to get failed - await() - .atMost(30, TimeUnit.SECONDS) - .pollInterval(1, TimeUnit.SECONDS) - .untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, false); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.FAILED.name()); - }); - - // Retry the workflow - workflowClient.retryLastFailedTask(workflowId); - // Check the workflow status and few other parameters - await() - .atMost(5, TimeUnit.SECONDS) - .pollInterval(1, TimeUnit.SECONDS) - .untilAsserted(() -> { - Workflow 
workflow1 = workflowClient.getWorkflow(workflowId, true); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.RUNNING.name()); - assertTrue(workflow1.getLastRetriedTime() != 0L); - assertEquals(workflow1.getTasks().get(2).getStatus().name(), Task.Status.SCHEDULED.name()); - }); - - taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(workflowId); - taskResult.setTaskId(workflowClient.getWorkflow(workflowId, true).getTasks().get(2).getTaskId()); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskClient.updateTask(taskResult); - - // Wait for workflow to get completed - await() - .atMost(30, TimeUnit.SECONDS) - .pollInterval(10, TimeUnit.SECONDS) - .untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, false); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.COMPLETED.name()); - }); - - metadataClient.unregisterWorkflowDef(workflowName, 1); - unregisterTaskDef(); - } - - @Test - @DisplayName("Check workflow with sub_workflow task and retry functionality") - public void testRetryWithSubWorkflow() { - - apiClient = ApiUtil.getApiClientWithCredentials(); - workflowClient = new OrkesWorkflowClient(apiClient); - metadataClient = new OrkesMetadataClient(apiClient); - taskClient = new OrkesTaskClient(apiClient); - String workflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String subWorkflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - - // Register workflow - registerWorkflowWithSubWorkflowDef(workflowName, subWorkflowName, "simple", metadataClient); - - // Trigger two workflows - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName); - startWorkflowRequest.setVersion(1); - - String workflowId = workflowClient.startWorkflow(startWorkflowRequest); - System.out.print("Workflow id is " + workflowId); - Workflow workflow = workflowClient.getWorkflow(workflowId, true); - // Fail the simple task - String subworkflowId = workflow.getTasks().get(0).getSubWorkflowId(); - Workflow subWorkflow = workflowClient.getWorkflow(subworkflowId, true); - String taskId = subWorkflow.getTasks().get(0).getTaskId(); - TaskResult taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(subworkflowId); - taskResult.setTaskId(taskId); - taskResult.setStatus(TaskResult.Status.FAILED); - taskClient.updateTask(taskResult); - - // Wait for parent workflow to get failed - await().atMost(3, TimeUnit.SECONDS).pollInterval(1, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, false); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.FAILED.name()); - }); - - // Retry the sub workflow. 
- workflowClient.retryLastFailedTask(subworkflowId); - // Check the workflow status and few other parameters - await().atMost(3, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(subworkflowId, true); - assertEquals(WorkflowStatus.StatusEnum.RUNNING.name(), workflow1.getStatus().name()); - assertTrue(workflow1.getLastRetriedTime() != 0L); - assertEquals(workflow1.getTasks().get(0).getStatus().name(), Task.Status.FAILED.name()); - assertEquals(workflow1.getTasks().get(1).getStatus().name(), Task.Status.SCHEDULED.name()); - }); - taskId = workflowClient.getWorkflow(subworkflowId, true).getTasks().get(1).getTaskId(); - - taskResult = new TaskResult(); - taskResult.setWorkflowInstanceId(subworkflowId); - taskResult.setTaskId(taskId); - taskResult.setStatus(TaskResult.Status.COMPLETED); - taskClient.updateTask(taskResult); - - await().atMost(3, TimeUnit.SECONDS).untilAsserted(() -> { - Workflow workflow1 = workflowClient.getWorkflow(workflowId, false); - assertEquals(workflow1.getStatus().name(), WorkflowStatus.StatusEnum.COMPLETED.name()); - }); - - metadataClient.unregisterWorkflowDef(workflowName, 1); - unregisterTaskDef(); - } - - void unregisterTaskDef() { - try { - metadataClient.unregisterTaskDef(taskDefName); - }catch (Exception e) {} - } -} diff --git a/src/test/java/io/orkes/conductor/client/e2e/WorkflowSearchTests.java b/src/test/java/io/orkes/conductor/client/e2e/WorkflowSearchTests.java deleted file mode 100644 index 98c8a284..00000000 --- a/src/test/java/io/orkes/conductor/client/e2e/WorkflowSearchTests.java +++ /dev/null @@ -1,205 +0,0 @@ -/* - * Copyright 2022 Orkes, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package io.orkes.conductor.client.e2e; - -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; - -import org.apache.commons.lang3.RandomStringUtils; -import org.junit.jupiter.api.Test; - -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskType; -import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.WorkflowSummary; - -import io.orkes.conductor.client.ApiClient; -import io.orkes.conductor.client.AuthorizationClient; -import io.orkes.conductor.client.MetadataClient; -import io.orkes.conductor.client.WorkflowClient; -import io.orkes.conductor.client.http.OrkesAuthorizationClient; -import io.orkes.conductor.client.http.OrkesMetadataClient; -import io.orkes.conductor.client.http.OrkesWorkflowClient; -import io.orkes.conductor.client.model.*; -import io.orkes.conductor.client.util.ApiUtil; - -import com.google.common.util.concurrent.Uninterruptibles; - -import static io.orkes.conductor.client.util.ApiUtil.getEnv; -import static org.junit.jupiter.api.Assertions.*; -import static org.testcontainers.shaded.org.awaitility.Awaitility.await; - -public class WorkflowSearchTests { - - @Test - public void testWorkflowSearchPermissions() { - - ApiClient adminClient = ApiUtil.getApiClientWithCredentials(); - WorkflowClient workflowAdminClient = new OrkesWorkflowClient(adminClient); - MetadataClient metadataAdminClient =new OrkesMetadataClient(adminClient); - String taskName1 = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String workflowName1 = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - // Run workflow search it should return 0 result - AtomicReference> workflowSummarySearchResult = new AtomicReference<>(workflowAdminClient.search("workflowType IN (" + workflowName1 + ")")); - assertEquals(workflowSummarySearchResult.get().getResults().size(), 0); - - // Register workflow - registerWorkflowDef(workflowName1, taskName1, metadataAdminClient); - - // Trigger two workflows - StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName1); - startWorkflowRequest.setVersion(1); - - workflowAdminClient.startWorkflow(startWorkflowRequest); - workflowAdminClient.startWorkflow(startWorkflowRequest); - await().pollInterval(100, TimeUnit.MILLISECONDS).until(() -> - { - workflowSummarySearchResult.set(workflowAdminClient.search("workflowType IN (" + workflowName1 + ")")); - return workflowSummarySearchResult.get().getResults().size() == 2; - }); - - // Register another workflow - String taskName2 = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - String workflowName2 = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - registerWorkflowDef(workflowName2, taskName2, metadataAdminClient); - - startWorkflowRequest = new StartWorkflowRequest(); - startWorkflowRequest.setName(workflowName2); - 
startWorkflowRequest.setVersion(1); - - // Trigger workflow - workflowAdminClient.startWorkflow(startWorkflowRequest); - workflowAdminClient.startWorkflow(startWorkflowRequest); - // In search result when only this workflow searched 2 results should come - await().pollInterval(100, TimeUnit.MILLISECONDS).until(() -> - { - workflowSummarySearchResult.set(workflowAdminClient.search("workflowType IN (" + workflowName2 + ")")); - return workflowSummarySearchResult.get().getResults().size() == 2; - }); - - // In search result when both workflow searched then 4 results should come - await().pollInterval(100, TimeUnit.MILLISECONDS).until(() -> - { - workflowSummarySearchResult.set(workflowAdminClient.search("workflowType IN (" + workflowName1 + "," + workflowName2 + ")")); - return workflowSummarySearchResult.get().getResults().size() == 4; - }); - - // Terminate all the workflows - workflowSummarySearchResult.get().getResults().forEach(workflowSummary -> workflowAdminClient.terminateWorkflow(workflowSummary.getWorkflowId(), "test")); - String department = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); - TagObject tagObject = new TagObject().type(TagObject.TypeEnum.METADATA).key("department").value(department); - metadataAdminClient.addWorkflowTag(tagObject, workflowName1); - tagObject = new TagObject().type(TagObject.TypeEnum.METADATA).key("department2").value(department); - metadataAdminClient.addWorkflowTag(tagObject, workflowName1); - - AuthorizationClient authorizationClient = new OrkesAuthorizationClient(adminClient); - ApiClient apiClient2 = ApiUtil.getUser2Client(); - WorkflowClient workflowClient2 = new OrkesWorkflowClient(apiClient2); - try { - authorizationClient.deleteGroup("workflow-search-group"); - } catch (Exception e){} - // Create user2 client and check access should not be there workflow1 - - SearchResult workflowSummarySearchResult1 = workflowClient2.search("workflowType IN (" + workflowName1 + ")"); - // There should be no workflow in search. - assertTrue(workflowSummarySearchResult1.getResults().size() == 0); - - // Create group and add these two users in the group - Group group = authorizationClient.upsertGroup(getUpsertGroupRequest(), "workflow-search-group"); - authorizationClient.addUserToGroup("workflow-search-group", "app:"+ getEnv(ApiUtil.USER1_APP_ID)); - authorizationClient.addUserToGroup("workflow-search-group", "app:"+ getEnv(ApiUtil.USER2_APP_ID)); - - // Give permissions to tag in the group - AuthorizationRequest authorizationRequest = new AuthorizationRequest(); - authorizationRequest.setSubject(new SubjectRef().id("workflow-search-group").type(SubjectRef.TypeEnum.GROUP)); - authorizationRequest.setAccess(List.of(AuthorizationRequest.AccessEnum.READ)); - authorizationRequest.setTarget(new TargetRef().id("department:" + department).type(TargetRef.TypeEnum.TAG)); - authorizationClient.grantPermissions(authorizationRequest); - - Uninterruptibles.sleepUninterruptibly(1, TimeUnit.MINUTES); - - await().atMost(5, TimeUnit.SECONDS).untilAsserted(() -> { - // Search should give results - SearchResult finalWorkflowSummarySearchResult = workflowClient2.search("workflowType IN (" + workflowName1 + ")"); - // There should be 2 workflow in search. 
- assertTrue(finalWorkflowSummarySearchResult.getResults().size() == 2); - }); - - authorizationRequest = new AuthorizationRequest(); - authorizationRequest.setSubject(new SubjectRef().id(getEnv(ApiUtil.USER2_APP_ID)).type(SubjectRef.TypeEnum.USER)); - authorizationRequest.setAccess(List.of(AuthorizationRequest.AccessEnum.READ)); - authorizationRequest.setTarget(new TargetRef().id("department:" + department).type(TargetRef.TypeEnum.TAG)); - authorizationClient.grantPermissions(authorizationRequest); - authorizationRequest = new AuthorizationRequest(); - authorizationRequest.setSubject(new SubjectRef().id(getEnv(ApiUtil.USER2_APP_ID)).type(SubjectRef.TypeEnum.USER)); - authorizationRequest.setAccess(List.of(AuthorizationRequest.AccessEnum.READ)); - authorizationRequest.setTarget(new TargetRef().id("department2:" + department).type(TargetRef.TypeEnum.TAG)); - authorizationClient.grantPermissions(authorizationRequest); - authorizationRequest = new AuthorizationRequest(); - authorizationRequest.setSubject(new SubjectRef().id("workflow-search-group").type(SubjectRef.TypeEnum.GROUP)); - authorizationRequest.setAccess(List.of(AuthorizationRequest.AccessEnum.READ)); - authorizationRequest.setTarget(new TargetRef().id("department2:" + department).type(TargetRef.TypeEnum.TAG)); - authorizationClient.grantPermissions(authorizationRequest); - authorizationRequest.setSubject(new SubjectRef().id("workflow-search-group").type(SubjectRef.TypeEnum.GROUP)); - authorizationRequest.setAccess(List.of(AuthorizationRequest.AccessEnum.READ)); - authorizationRequest.setTarget(new TargetRef().id(workflowName1).type(TargetRef.TypeEnum.WORKFLOW_DEF)); - authorizationClient.grantPermissions(authorizationRequest); - - await().atMost(5, TimeUnit.SECONDS).untilAsserted(() -> { - // Search should give results - SearchResult finalWorkflowSummarySearchResult = workflowClient2.search("workflowType IN (" + workflowName1 + ")"); - // There should be 2 workflow in search. 
This will fail without distinct fix - assertTrue(finalWorkflowSummarySearchResult.getResults().size() == 2); - }); - - metadataAdminClient.unregisterWorkflowDef(workflowName1, 1); - metadataAdminClient.unregisterWorkflowDef(workflowName2, 1); - } - - UpsertGroupRequest getUpsertGroupRequest() { - return new UpsertGroupRequest() - .description("Group used for SDK testing") - .roles(List.of(UpsertGroupRequest.RolesEnum.USER)); - } - - List getAccessListAll() { - return List.of("CREATE", "READ", "UPDATE", "EXECUTE", "DELETE"); - } - - private void registerWorkflowDef(String workflowName, String taskName, MetadataClient metadataClient1) { - TaskDef taskDef = new TaskDef(taskName); - taskDef.setOwnerEmail("test@orkes.io"); - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setTaskReferenceName(taskName); - workflowTask.setName(taskName); - workflowTask.setTaskDefinition(taskDef); - workflowTask.setWorkflowTaskType(TaskType.SIMPLE); - workflowTask.setInputParameters(Map.of("value", "${workflow.input.value}", "order", "123")); - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName(workflowName); - workflowDef.setOwnerEmail("test@orkes.io"); - workflowDef.setInputParameters(Arrays.asList("value", "inlineValue")); - workflowDef.setDescription("Workflow to monitor order state"); - workflowDef.setTasks(Arrays.asList(workflowTask)); - metadataClient1.registerWorkflowDef(workflowDef); - metadataClient1.registerTaskDefs(Arrays.asList(taskDef)); - } - -} diff --git a/src/test/resources/sdk_test.json b/src/test/resources/sdk_test.json new file mode 100644 index 00000000..41686937 --- /dev/null +++ b/src/test/resources/sdk_test.json @@ -0,0 +1,252 @@ +{ + "createTime": 1685128933480, + "updateTime": 1685128981013, + "name": "sdk_test_workflow", + "version": 1, + "tasks": [ + { + "name": "x_test_worker_0", + "taskReferenceName": "simple_task_0", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "jq", + "taskReferenceName": "jq", + "inputParameters": { + "key1": { + "value1": [ + "a", + "b" + ] + }, + "queryExpression": "{ key3: (.key1.value1 + .key2.value2) }", + "value2": [ + "d", + "e" + ] + }, + "type": "JSON_JQ_TRANSFORM", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "set_state", + "taskReferenceName": "set_state", + "inputParameters": { + "call_made": true, + "number": "${simple_task_0.output.number}" + }, + "type": "SET_VARIABLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "fork_task_t7nhng", + "taskReferenceName": "fork_task_t7nhng_ref", + "inputParameters": {}, + "type": "FORK_JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [ + [ + { + "name": "there_is_no_worker", + "taskReferenceName": "no_worker_3", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + 
"onStateChange": {} + } + ], + [ + { + "name": "there_is_no_worker", + "taskReferenceName": "no_worker_2", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + [ + { + "name": "there_is_no_worker", + "taskReferenceName": "no_worker_1", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ] + ], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "join_task_y6nux", + "taskReferenceName": "join_task_y6nux_ref", + "inputParameters": {}, + "type": "JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [ + "no_worker_1", + "no_worker_2", + "no_worker_3" + ], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "fork", + "taskReferenceName": "fork", + "inputParameters": {}, + "type": "FORK_JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [ + [ + { + "name": "there_is_no_worker", + "taskReferenceName": "simple_task_5", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + [ + { + "name": "there_is_no_worker", + "taskReferenceName": "simple_task_2", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ] + ], + "startDelay": 0, + "joinOn": [ + "sub_flow_inline", + "simple_task_5" + ], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "fork_join", + "taskReferenceName": "fork_join", + "inputParameters": {}, + "type": "JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [ + "simple_task_2", + "simple_task_5" + ], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + "inputParameters": [], + "outputParameters": { + "task1": "${simple_task_0.output}", + "jq": "${jq.output}", + "inner_task": "${x_test_worker_1.output}" + }, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "ownerEmail": "viren@orkes.io", + "timeoutPolicy": "TIME_OUT_WF", + "timeoutSeconds": 120, + "variables": {}, + "inputTemplate": {}, + "onStateChange": {} +} \ No newline at end of file