From 9af2c48bb0f3d6244a0cb1fb9f755a1561207535 Mon Sep 17 00:00:00 2001
From: Augustin
Date: Mon, 16 Oct 2023 22:34:08 +0200
Subject: [PATCH] airbyte-ci: concurrent java tests (#31426)

---
 airbyte-ci/connectors/pipelines/README.md      |  1 +
 .../pipelines/tests/java_connectors.py         | 75 +++++++++++--------
 .../connectors/pipelines/pyproject.toml        |  2 +-
 3 files changed, 44 insertions(+), 34 deletions(-)

diff --git a/airbyte-ci/connectors/pipelines/README.md b/airbyte-ci/connectors/pipelines/README.md
index 351028cc3aa2..e905b0b05cb2 100644
--- a/airbyte-ci/connectors/pipelines/README.md
+++ b/airbyte-ci/connectors/pipelines/README.md
@@ -425,6 +425,7 @@ This command runs the Python tests for a airbyte-ci poetry package.
 ## Changelog
 | Version | PR                                                         | Description                                                                  |
 | ------- | ---------------------------------------------------------- | ---------------------------------------------------------------------------- |
+| 1.9.2   | [#31426](https://github.com/airbytehq/airbyte/pull/31426)  | Concurrent execution of java connectors tests.                               |
 | 1.9.1   | [#31455](https://github.com/airbytehq/airbyte/pull/31455)  | Fix `None` docker credentials on publish.                                    |
 | 1.9.0   | [#30520](https://github.com/airbytehq/airbyte/pull/30520)  | New commands: `bump-version`, `upgrade-base-image`, `migrate-to-base-image`. |
 | 1.8.0   | [#30520](https://github.com/airbytehq/airbyte/pull/30520)  | New commands: `bump-version`, `upgrade-base-image`, `migrate-to-base-image`. |
diff --git a/airbyte-ci/connectors/pipelines/pipelines/tests/java_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/tests/java_connectors.py
index 1859f3b5dd67..d668c47314ce 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/tests/java_connectors.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/tests/java_connectors.py
@@ -7,7 +7,8 @@
 from typing import List, Optional
 
 import anyio
-from dagger import File, QueryError
+import asyncer
+from dagger import Directory, File, QueryError
 from pipelines.actions import environments, secrets
 from pipelines.bases import StepResult, StepStatus
 from pipelines.builds.java_connectors import BuildConnectorDistributionTar, BuildConnectorImages, dist_tar_directory_path
@@ -76,40 +77,48 @@ async def run_all_tests(context: ConnectorContext) -> List[StepResult]:
     context.connector_secrets = await secrets.get_connector_secrets(context)
     step_results = []
-    build_distribution_tar_results = await BuildConnectorDistributionTar(context).run()
-    step_results.append(build_distribution_tar_results)
-    if build_distribution_tar_results.status is StepStatus.FAILURE:
+    build_distribution_tar_result = await BuildConnectorDistributionTar(context).run()
+    step_results.append(build_distribution_tar_result)
+    if build_distribution_tar_result.status is StepStatus.FAILURE:
         return step_results
-    dist_tar_dir = await build_distribution_tar_results.output_artifact.directory(dist_tar_directory_path(context))
-    build_connector_image_results = await BuildConnectorImages(context, LOCAL_BUILD_PLATFORM).run(dist_tar_dir)
-    step_results.append(build_connector_image_results)
-    if build_connector_image_results.status is StepStatus.FAILURE:
+    dist_tar_dir = build_distribution_tar_result.output_artifact.directory(dist_tar_directory_path(context))
+
+    async def run_docker_build_dependent_steps(dist_tar_dir: Directory) -> List[StepResult]:
+        step_results = []
+        build_connector_image_results = await BuildConnectorImages(context, LOCAL_BUILD_PLATFORM).run(dist_tar_dir)
+        step_results.append(build_connector_image_results)
+        if build_connector_image_results.status is StepStatus.FAILURE:
+            return step_results
+
+        if context.connector.supports_normalization:
+            normalization_image = f"{context.connector.normalization_repository}:dev"
+            context.logger.info(f"This connector supports normalization: will build {normalization_image}.")
+            build_normalization_results = await BuildOrPullNormalization(context, normalization_image, LOCAL_BUILD_PLATFORM).run()
+            normalization_container = build_normalization_results.output_artifact
+            normalization_tar_file, _ = await export_container_to_tarball(
+                context, normalization_container, tar_file_name=f"{context.connector.normalization_repository}_{context.git_revision}.tar"
+            )
+            step_results.append(build_normalization_results)
+        else:
+            normalization_tar_file = None
+
+        connector_container = build_connector_image_results.output_artifact[LOCAL_BUILD_PLATFORM]
+        connector_image_tar_file, _ = await export_container_to_tarball(context, connector_container)
+
+        async with asyncer.create_task_group() as docker_build_dependent_group:
+            soon_integration_tests_results = docker_build_dependent_group.soonify(IntegrationTests(context).run)(
+                connector_tar_file=connector_image_tar_file, normalization_tar_file=normalization_tar_file
+            )
+            soon_cat_results = docker_build_dependent_group.soonify(AcceptanceTests(context).run)(
+                connector_under_test_image_tar=connector_image_tar_file
+            )
+
+        step_results += [soon_cat_results.value, soon_integration_tests_results.value]
         return step_results
 
-    unit_tests_results = await UnitTests(context).run()
-    step_results.append(unit_tests_results)
-    if context.fail_fast and unit_tests_results.status is StepStatus.FAILURE:
-        return step_results
+    async with asyncer.create_task_group() as test_task_group:
+        soon_unit_tests_result = test_task_group.soonify(UnitTests(context).run)()
+        soon_docker_build_dependent_steps_results = test_task_group.soonify(run_docker_build_dependent_steps)(dist_tar_dir)
 
-    if context.connector.supports_normalization:
-        normalization_image = f"{context.connector.normalization_repository}:dev"
-        context.logger.info(f"This connector supports normalization: will build {normalization_image}.")
-        build_normalization_results = await BuildOrPullNormalization(context, normalization_image, LOCAL_BUILD_PLATFORM).run()
-        normalization_container = build_normalization_results.output_artifact
-        normalization_tar_file, _ = await export_container_to_tarball(
-            context, normalization_container, tar_file_name=f"{context.connector.normalization_repository}_{context.git_revision}.tar"
-        )
-        step_results.append(build_normalization_results)
-    else:
-        normalization_tar_file = None
-
-    connector_container = build_connector_image_results.output_artifact[LOCAL_BUILD_PLATFORM]
-    connector_image_tar_file, _ = await export_container_to_tarball(context, connector_container)
-
-    integration_tests_results = await IntegrationTests(context).run(connector_image_tar_file, normalization_tar_file)
-    step_results.append(integration_tests_results)
-
-    acceptance_tests_results = await AcceptanceTests(context).run(connector_image_tar_file)
-    step_results.append(acceptance_tests_results)
-    return step_results
+    return step_results + [soon_unit_tests_result.value] + soon_docker_build_dependent_steps_results.value
diff --git a/airbyte-ci/connectors/pipelines/pyproject.toml b/airbyte-ci/connectors/pipelines/pyproject.toml
index 51ea3dceb74e..8be684ab0dbb 100644
--- a/airbyte-ci/connectors/pipelines/pyproject.toml
+++ b/airbyte-ci/connectors/pipelines/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
 
 [tool.poetry]
 name = "pipelines"
-version = "1.9.1"
+version = "1.9.2"
 description = "Packaged maintained by the connector operations team to perform CI for connectors' pipelines"
 authors = ["Airbyte "]