From 99b9fc94fde1fc2fedd372165687aa4bbd8c4f28 Mon Sep 17 00:00:00 2001 From: Augustin Date: Sat, 14 Oct 2023 16:06:14 +0200 Subject: [PATCH] [airbyte-ci] new commands for migration to base image (#30520) --- airbyte-ci/connectors/pipelines/README.md | 57 +++- .../pipelines/actions/environments.py | 3 +- .../connectors/pipelines/pipelines/bases.py | 2 +- .../pipelines/commands/groups/connectors.py | 194 ++++++++++- .../pipelines/connector_changes/__init__.py | 3 + .../base_image_version_migration.py | 322 ++++++++++++++++++ .../metadata_change_helpers.py | 21 ++ .../connector_changes/version_bump.py | 175 ++++++++++ .../connectors/pipelines/pipelines/consts.py | 4 +- .../pipelines/pipelines/contexts.py | 9 + .../pipelines/pipelines/dagger_run.py | 3 +- airbyte-ci/connectors/pipelines/poetry.lock | 143 +++++++- .../connectors/pipelines/pyproject.toml | 3 +- 13 files changed, 916 insertions(+), 23 deletions(-) create mode 100644 airbyte-ci/connectors/pipelines/pipelines/connector_changes/__init__.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/connector_changes/base_image_version_migration.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/connector_changes/metadata_change_helpers.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/connector_changes/version_bump.py diff --git a/airbyte-ci/connectors/pipelines/README.md b/airbyte-ci/connectors/pipelines/README.md index 74217e48a040..477720409cfa 100644 --- a/airbyte-ci/connectors/pipelines/README.md +++ b/airbyte-ci/connectors/pipelines/README.md @@ -90,7 +90,9 @@ At this point you can run `airbyte-ci` commands from the root of the repository. 
- [`connectors publish` command](#connectors-publish-command) - [Examples](#examples) - [Options](#options-2) - * [What it runs](#what-it-runs-1) +- [`connectors bump-version` command](#connectors-bump-version-command) +- [`connectors upgrade-base-image` command](#connectors-upgrade-base-image-command) +- [`connectors migrate-to-base-image` command](#connectors-migrate-to-base-image-command) - [`metadata` command subgroup](#metadata-command-subgroup) - [`metadata validate` command](#metadata-validate-command) * [Example](#example) @@ -342,6 +344,48 @@ flowchart TD validate-->check-->build-->upload_spec-->push-->pull-->upload_metadata ``` + +### `connectors bump-version` command +Bump the version of the selected connectors. + +### Examples +Bump source-openweather: `airbyte-ci connectors --name=source-openweather bump-version patch <PULL_REQUEST_NUMBER> "<CHANGELOG_ENTRY>"` + +#### Arguments +| Argument | Description | | --------------------- | ---------------------------------------------------------------------- | +| `BUMP_TYPE` | major, minor or patch | +| `PULL_REQUEST_NUMBER` | The GitHub pull request number, used in the changelog entry | +| `CHANGELOG_ENTRY` | The changelog entry that will get added to the connector documentation | + +### `connectors upgrade-base-image` command +Modify the selected connector metadata to use the latest base image version. + +### Examples +Upgrade the base image for source-openweather: `airbyte-ci connectors --name=source-openweather upgrade-base-image` + +### Options +| Option | Required | Default | Mapped environment variable | Description | | ----------------------- | -------- | ------- | --------------------------- | --------------------------------------------------------------------------------------------------------------- | +| `--docker-hub-username` | True | | `DOCKER_HUB_USERNAME` | Your username to connect to DockerHub. It's used to read the base image registry. | +| `--docker-hub-password` | True | | `DOCKER_HUB_PASSWORD` | Your password to connect to DockerHub. 
It's used to read the base image registry. | +| `--set-if-not-exists` | False | True | | Whether to set or not the baseImage metadata if no connectorBuildOptions is declared in the connector metadata. | + +### `connectors migrate-to-base-image` command +Make a connector using a Dockerfile migrate to the base image by: +* Removing its Dockerfile +* Updating its metadata to use the latest base image version +* Updating its documentation to explain the build process +* Bumping by a patch version + +### Examples +Migrate source-openweather to use the base image: `airbyte-ci connectors --name=source-openweather migrate-to-base-image` + +### Arguments +| Argument | Description | +| --------------------- | ---------------------------------------------------------------------- | +| `PULL_REQUEST_NUMBER` | The GitHub pull request number, used in the changelog entry | + ### `metadata` command subgroup Available commands: @@ -381,11 +425,12 @@ This command runs the Python tests for a airbyte-ci poetry package. ## Changelog | Version | PR | Description | | ------- | ---------------------------------------------------------- | --------------------------------------------------------------------------------------------------------- | -| 1.8.0 | [#31369](https://github.com/airbytehq/airbyte/pull/31369) | Remove metadata test commands | -| 1.7.2 | [#31343](https://github.com/airbytehq/airbyte/pull/31343) | Bind Pytest integration tests to a dockerhost. | -| 1.7.1 | [#31332](https://github.com/airbytehq/airbyte/pull/31332) | Disable Gradle step caching on source-postgres. | -| 1.7.0 | [#30526](https://github.com/airbytehq/airbyte/pull/30526) | Implement pre/post install hooks support. | -| 1.6.0 | [#30474](https://github.com/airbytehq/airbyte/pull/30474) | Test connector inside their containers. | +| 1.9.0 | [#30520](https://github.com/airbytehq/airbyte/pull/30520) | New commands: `bump-version`, `upgrade-base-image`, `migrate-to-base-image`. 
| +| 1.8.0 | [#31369](https://github.com/airbytehq/airbyte/pull/31369) | Remove metadata test commands | +| 1.7.2 | [#31343](https://github.com/airbytehq/airbyte/pull/31343) | Bind Pytest integration tests to a dockerhost. | +| 1.7.1 | [#31332](https://github.com/airbytehq/airbyte/pull/31332) | Disable Gradle step caching on source-postgres. | +| 1.7.0 | [#30526](https://github.com/airbytehq/airbyte/pull/30526) | Implement pre/post install hooks support. | +| 1.6.0 | [#30474](https://github.com/airbytehq/airbyte/pull/30474) | Test connector inside their containers. | | 1.5.1 | [#31227](https://github.com/airbytehq/airbyte/pull/31227) | Use python 3.11 in amazoncorretto-bazed gradle containers, run 'test' gradle task instead of 'check'. | | 1.5.0 | [#30456](https://github.com/airbytehq/airbyte/pull/30456) | Start building Python connectors using our base images. | | 1.4.6 | [ #31087](https://github.com/airbytehq/airbyte/pull/31087) | Throw error if airbyte-ci tools is out of date | diff --git a/airbyte-ci/connectors/pipelines/pipelines/actions/environments.py b/airbyte-ci/connectors/pipelines/pipelines/actions/environments.py index 04912f55c1e2..28e019ef4997 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/actions/environments.py +++ b/airbyte-ci/connectors/pipelines/pipelines/actions/environments.py @@ -87,7 +87,7 @@ def with_testing_dependencies(context: PipelineContext) -> Container: ) -def with_git(dagger_client, ci_github_access_token_secret, ci_git_user) -> Container: +def with_git(dagger_client, ci_git_user: str = "octavia") -> Container: return ( dagger_client.container() .from_("alpine:latest") .with_exec( sh_dash_c( [ ] ) ) @@ -102,7 +102,6 @@ def with_git(dagger_client, ci_github_access_token_secret, ci_git_user) -> Conta ] ) ) - .with_secret_variable("GITHUB_TOKEN", ci_github_access_token_secret) .with_workdir("/ghcli") .with_exec( sh_dash_c( diff --git a/airbyte-ci/connectors/pipelines/pipelines/bases.py 
b/airbyte-ci/connectors/pipelines/pipelines/bases.py index 919443dd685d..cb1cbd6314da 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/bases.py +++ b/airbyte-ci/connectors/pipelines/pipelines/bases.py @@ -586,7 +586,7 @@ async def to_html(self) -> str: async def save(self) -> None: local_html_path = await self.save_local(self.html_report_file_name, await self.to_html()) absolute_path = await local_html_path.resolve() - if self.pipeline_context.is_local: + if self.pipeline_context.open_report_in_browser: self.pipeline_context.logger.info(f"HTML report saved locally: {absolute_path}") if self.pipeline_context.open_report_in_browser: self.pipeline_context.logger.info("Opening HTML report in browser.") diff --git a/airbyte-ci/connectors/pipelines/pipelines/commands/groups/connectors.py b/airbyte-ci/connectors/pipelines/pipelines/commands/groups/connectors.py index b4b8e0d7711e..88a74793866e 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/commands/groups/connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/commands/groups/connectors.py @@ -15,13 +15,20 @@ from pipelines import main_logger from pipelines.bases import ConnectorWithModifiedFiles from pipelines.builds import run_connector_build_pipeline +from pipelines.connector_changes.base_image_version_migration import ( + run_connector_base_image_upgrade_pipeline, + run_connector_migration_to_base_image_pipeline, +) +from pipelines.connector_changes.version_bump import run_connector_version_bump_pipeline from pipelines.contexts import ConnectorContext, ContextState, PublishConnectorContext -from pipelines.format import run_connectors_format_pipelines +from pipelines.format import run_connector_format_pipeline from pipelines.github import update_global_commit_status_check_for_tests from pipelines.pipelines.connectors import run_connectors_pipelines from pipelines.publish import reorder_contexts, run_connector_publish_pipeline from pipelines.tests import run_connector_test_pipeline from 
pipelines.utils import DaggerPipelineCommand, get_connector_modified_files, get_modified_connectors +from rich.table import Table +from rich.text import Text # HELPERS @@ -535,13 +542,188 @@ def format_code(ctx: click.Context) -> bool: ] anyio.run( - run_connectors_format_pipelines, + run_connectors_pipelines, + connectors_contexts, + run_connector_format_pipeline, + "Format connectors pipeline", + ctx.obj["concurrency"], + ctx.obj["dagger_logs_path"], + ctx.obj["execute_timeout"], + ) + + return True + + +@connectors.command(cls=DaggerPipelineCommand, help="Upgrades the base image version used by the selected connectors..") +@click.option("--set-if-not-exists", default=True) +@click.option( + "--docker-hub-username", + help="Your username to connect to DockerHub to read the registries.", + type=click.STRING, + required=True, + envvar="DOCKER_HUB_USERNAME", +) +@click.option( + "--docker-hub-password", + help="Your password to connect to DockerHub to read the registries.", + type=click.STRING, + required=True, + envvar="DOCKER_HUB_PASSWORD", +) +@click.pass_context +def upgrade_base_image(ctx: click.Context, set_if_not_exists: bool, docker_hub_username: str, docker_hub_password: str) -> bool: + """Upgrades the base image version used by the selected connectors.""" + + connectors_contexts = [ + ConnectorContext( + pipeline_name=f"Upgrade base image versions of connector {connector.technical_name}", + connector=connector, + is_local=ctx.obj["is_local"], + git_branch=ctx.obj["git_branch"], + git_revision=ctx.obj["git_revision"], + ci_report_bucket=ctx.obj["ci_report_bucket_name"], + report_output_prefix=ctx.obj["report_output_prefix"], + use_remote_secrets=ctx.obj["use_remote_secrets"], + gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"), + dagger_logs_url=ctx.obj.get("dagger_logs_url"), + pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"), + ci_context=ctx.obj.get("ci_context"), + ci_gcs_credentials=ctx.obj["ci_gcs_credentials"], + 
ci_git_user=ctx.obj["ci_git_user"], + ci_github_access_token=ctx.obj["ci_github_access_token"], + open_report_in_browser=False, + docker_hub_username=docker_hub_username, + docker_hub_password=docker_hub_password, + ) + for connector in ctx.obj["selected_connectors_with_modified_files"] + ] + + anyio.run( + run_connectors_pipelines, + connectors_contexts, + run_connector_base_image_upgrade_pipeline, + "Upgrade base image pipeline", + ctx.obj["concurrency"], + ctx.obj["dagger_logs_path"], + ctx.obj["execute_timeout"], + set_if_not_exists, + ) + + return True + + +@connectors.command(cls=DaggerPipelineCommand, help="Bump a connector version: update metadata.yaml and changelog.") +@click.argument("bump-type", type=click.Choice(["patch", "minor", "major"])) +@click.argument("pull-request-number", type=str) +@click.argument("changelog-entry", type=str) +@click.pass_context +def bump_version( + ctx: click.Context, + bump_type: str, + pull_request_number: str, + changelog_entry: str, +) -> bool: + """Bump a connector version: update metadata.yaml and changelog.""" + + connectors_contexts = [ + ConnectorContext( + pipeline_name=f"Upgrade base image versions of connector {connector.technical_name}", + connector=connector, + is_local=ctx.obj["is_local"], + git_branch=ctx.obj["git_branch"], + git_revision=ctx.obj["git_revision"], + ci_report_bucket=ctx.obj["ci_report_bucket_name"], + report_output_prefix=ctx.obj["report_output_prefix"], + use_remote_secrets=ctx.obj["use_remote_secrets"], + gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"), + dagger_logs_url=ctx.obj.get("dagger_logs_url"), + pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"), + ci_context=ctx.obj.get("ci_context"), + ci_gcs_credentials=ctx.obj["ci_gcs_credentials"], + ci_git_user=ctx.obj["ci_git_user"], + ci_github_access_token=ctx.obj["ci_github_access_token"], + open_report_in_browser=False, + ) + for connector in ctx.obj["selected_connectors_with_modified_files"] + ] + + anyio.run( + 
run_connectors_pipelines, connectors_contexts, - ctx.obj["ci_git_user"], - ctx.obj["ci_github_access_token"], - ctx.obj["git_branch"], - ctx.obj["is_local"], + run_connector_version_bump_pipeline, + "Version bump pipeline pipeline", + ctx.obj["concurrency"], + ctx.obj["dagger_logs_path"], + ctx.obj["execute_timeout"], + bump_type, + changelog_entry, + pull_request_number, + ) + + return True + + +@connectors.command( + cls=DaggerPipelineCommand, + help="Make the selected connectors use our base image: remove dockerfile, update metadata.yaml and update documentation.", +) +@click.argument("pull-request-number", type=str) +@click.option( + "--docker-hub-username", + help="Your username to connect to DockerHub to read the registries.", + type=click.STRING, + required=True, + envvar="DOCKER_HUB_USERNAME", +) +@click.option( + "--docker-hub-password", + help="Your password to connect to DockerHub to read the registries.", + type=click.STRING, + required=True, + envvar="DOCKER_HUB_PASSWORD", +) +@click.pass_context +def migrate_to_base_image( + ctx: click.Context, + pull_request_number: str, + docker_hub_username: str, + docker_hub_password: str, +) -> bool: + """Bump a connector version: update metadata.yaml, changelog and delete legacy files.""" + + connectors_contexts = [ + ConnectorContext( + pipeline_name=f"Upgrade base image versions of connector {connector.technical_name}", + connector=connector, + is_local=ctx.obj["is_local"], + git_branch=ctx.obj["git_branch"], + git_revision=ctx.obj["git_revision"], + ci_report_bucket=ctx.obj["ci_report_bucket_name"], + report_output_prefix=ctx.obj["report_output_prefix"], + use_remote_secrets=ctx.obj["use_remote_secrets"], + gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"), + dagger_logs_url=ctx.obj.get("dagger_logs_url"), + pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"), + ci_context=ctx.obj.get("ci_context"), + ci_gcs_credentials=ctx.obj["ci_gcs_credentials"], + ci_git_user=ctx.obj["ci_git_user"], + 
ci_github_access_token=ctx.obj["ci_github_access_token"], + open_report_in_browser=False, + docker_hub_username=docker_hub_username, + docker_hub_password=docker_hub_password, + ) + for connector in ctx.obj["selected_connectors_with_modified_files"] + ] + + anyio.run( + run_connectors_pipelines, + connectors_contexts, + run_connector_migration_to_base_image_pipeline, + "Migration to base image pipeline", + ctx.obj["concurrency"], + ctx.obj["dagger_logs_path"], ctx.obj["execute_timeout"], + pull_request_number, ) return True diff --git a/airbyte-ci/connectors/pipelines/pipelines/connector_changes/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/connector_changes/__init__.py new file mode 100644 index 000000000000..c941b3045795 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/connector_changes/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-ci/connectors/pipelines/pipelines/connector_changes/base_image_version_migration.py b/airbyte-ci/connectors/pipelines/pipelines/connector_changes/base_image_version_migration.py new file mode 100644 index 000000000000..25a683ad852f --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/connector_changes/base_image_version_migration.py @@ -0,0 +1,322 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# +import textwrap +from copy import deepcopy +from typing import Optional + +from base_images import version_registry +from connector_ops.utils import ConnectorLanguage +from dagger import Directory +from jinja2 import Template +from pipelines import consts +from pipelines.bases import ConnectorReport, Step, StepResult, StepStatus +from pipelines.connector_changes.version_bump import AddChangelogEntry, BumpDockerImageTagInMetadata, get_bumped_version +from pipelines.contexts import ConnectorContext, PipelineContext + +from . 
import metadata_change_helpers + + +class UpgradeBaseImageMetadata(Step): + title = "Upgrade the base image to the latest version in metadata.yaml" + + def __init__( + self, + context: ConnectorContext, + repo_dir: Directory, + set_if_not_exists: bool = True, + ): + super().__init__(context) + self.repo_dir = repo_dir + self.set_if_not_exists = set_if_not_exists + + async def get_latest_base_image_address(self) -> Optional[str]: + try: + version_registry_for_language = await version_registry.get_registry_for_language( + self.dagger_client, self.context.connector.language, (self.context.docker_hub_username, self.context.docker_hub_password) + ) + return version_registry_for_language.latest_not_pre_released_published_entry.published_docker_image.address + except NotImplementedError: + return None + + @staticmethod + def update_base_image_in_metadata(current_metadata: dict, latest_base_image_version_address: str) -> dict: + current_connector_build_options = current_metadata["data"].get("connectorBuildOptions", {}) + updated_metadata = deepcopy(current_metadata) + updated_metadata["data"]["connectorBuildOptions"] = { + **current_connector_build_options, + **{"baseImage": latest_base_image_version_address}, + } + return updated_metadata + + async def _run(self) -> StepResult: + latest_base_image_address = await self.get_latest_base_image_address() + if latest_base_image_address is None: + return StepResult( + self, + StepStatus.SKIPPED, + stdout="Could not find a base image for this connector language.", + output_artifact=self.repo_dir, + ) + + metadata_path = self.context.connector.metadata_file_path + current_metadata = await metadata_change_helpers.get_current_metadata(self.repo_dir, metadata_path) + current_base_image_address = current_metadata.get("data", {}).get("connectorBuildOptions", {}).get("baseImage") + + if current_base_image_address is None and not self.set_if_not_exists: + return StepResult( + self, + StepStatus.SKIPPED, + stdout="Connector does not have 
a base image metadata field.", + output_artifact=self.repo_dir, + ) + + if current_base_image_address == latest_base_image_address: + return StepResult( + self, + StepStatus.SKIPPED, + stdout="Connector already uses latest base image", + output_artifact=self.repo_dir, + ) + updated_metadata = self.update_base_image_in_metadata(current_metadata, latest_base_image_address) + updated_repo_dir = metadata_change_helpers.get_repo_dir_with_updated_metadata(self.repo_dir, metadata_path, updated_metadata) + + return StepResult( + self, + StepStatus.SUCCESS, + stdout=f"Updated base image to {latest_base_image_address} in {metadata_path}", + output_artifact=updated_repo_dir, + ) + + +class DeleteConnectorFile(Step): + def __init__( + self, + context: ConnectorContext, + file_to_delete: str, + ): + super().__init__(context) + self.file_to_delete = file_to_delete + + @property + def title(self): + return f"Delete {self.file_to_delete}" + + async def _run(self) -> StepResult: + file_to_delete_path = self.context.connector.code_directory / self.file_to_delete + if not file_to_delete_path.exists(): + return StepResult( + self, + StepStatus.SKIPPED, + stdout=f"Connector does not have a {self.file_to_delete}", + ) + # As this is a deletion of a file, this has to happen on the host fs + # Deleting the file in a Directory container would not work because the directory.export method would not export the deleted file from the Directory back to host. 
+ file_to_delete_path.unlink() + return StepResult( + self, + StepStatus.SUCCESS, + stdout=f"Deleted {file_to_delete_path}", + ) + + +class AddBuildInstructionsToDoc(Step): + title = "Add build instructions to doc" + + def __init__(self, context: PipelineContext, repo_dir: Directory) -> None: + super().__init__(context) + self.repo_dir = repo_dir + + async def _run(self) -> StepResult: + doc_path = self.context.connector.documentation_file_path + if not doc_path.exists(): + return StepResult( + self, + StepStatus.SKIPPED, + stdout="Connector does not have a documentation file.", + output_artifact=self.container_with_airbyte_repo, + ) + current_doc = await self.repo_dir.file(str(doc_path)).contents() + try: + updated_doc = self.add_build_instructions(current_doc) + except Exception as e: + return StepResult( + self, + StepStatus.FAILURE, + stderr=f"Could not add the build instructions: {e}", + output_artifact=self.container_with_airbyte_repo, + ) + updated_repo_dir = await self.repo_dir.with_new_file(str(doc_path), updated_doc) + return StepResult( + self, + StepStatus.SUCCESS, + stdout=f"Added changelog entry to {doc_path}", + output_artifact=updated_repo_dir, + ) + + def add_build_instructions(self, og_doc_content) -> str: + line_no_for_build_instructions = None + og_lines = og_doc_content.splitlines() + for line_no, line in enumerate(og_lines): + if "## Build instructions" in line: + return og_doc_content + if "## Changelog" in line: + line_no_for_build_instructions = line_no + if line_no_for_build_instructions is None: + line_no_for_build_instructions = len(og_lines) - 1 + + build_instructions_template = Template( + textwrap.dedent( + """ + ## Build instructions + ### Build your own connector image + This connector is built using our dynamic built process. + The base image used to build it is defined within the metadata.yaml file under the `connectorBuildOptions`. 
+ The build logic is defined using [Dagger](https://dagger.io/) [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/pipelines/builds/python_connectors.py). + It does not rely on a Dockerfile. + + If you would like to patch our connector and build your own a simple approach would be: + + 1. Create your own Dockerfile based on the latest version of the connector image. + ```Dockerfile + FROM {{ connector_image }}:latest + + COPY . ./airbyte/integration_code + RUN pip install ./airbyte/integration_code + + # The entrypoint and default env vars are already set in the base image + # ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" + # ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + ``` + Please use this as an example. This is not optimized. + + 2. Build your image: + ```bash + docker build -t {{ connector_image }}:dev . + # Running the spec command against your patched connector + docker run {{ connector_image }}:dev spec + ``` + + ### Customizing our build process + When contributing on our connector you might need to customize the build process to add a system dependency or set an env var. + You can customize our build process by adding a `build_customization.py` module to your connector. + This module should contain a `pre_connector_install` and `post_connector_install` async function that will mutate the base image and the connector container respectively. + It will be imported at runtime by our build process and the functions will be called if they exist. + + Here is an example of a `build_customization.py` module: + ```python + from __future__ import annotations + + from typing import TYPE_CHECKING + + if TYPE_CHECKING: + # Feel free to check the dagger documentation for more information on the Container object and its methods. 
+ # https://dagger-io.readthedocs.io/en/sdk-python-v0.6.4/ + from dagger import Container + + + async def pre_connector_install(base_image_container: Container) -> Container: + return await base_image_container.with_env_variable("MY_PRE_BUILD_ENV_VAR", "my_pre_build_env_var_value") + + async def post_connector_install(connector_container: Container) -> Container: + return await connector_container.with_env_variable("MY_POST_BUILD_ENV_VAR", "my_post_build_env_var_value") + ``` + """ + ) + ) + + build_instructions = build_instructions_template.render({"connector_image": self.context.connector.metadata["dockerRepository"]}) + + new_doc = "\n".join(og_lines[:line_no_for_build_instructions] + [build_instructions] + og_lines[line_no_for_build_instructions:]) + return new_doc + + +async def run_connector_base_image_upgrade_pipeline(context: ConnectorContext, semaphore, set_if_not_exists: bool) -> ConnectorReport: + """Run a pipeline to upgrade for a single connector to use our base image.""" + async with semaphore: + steps_results = [] + async with context: + og_repo_dir = await context.get_repo_dir() + update_base_image_in_metadata = UpgradeBaseImageMetadata( + context, + og_repo_dir, + set_if_not_exists=set_if_not_exists, + ) + update_base_image_in_metadata_result = await update_base_image_in_metadata.run() + steps_results.append(update_base_image_in_metadata_result) + final_repo_dir = update_base_image_in_metadata_result.output_artifact + await og_repo_dir.diff(final_repo_dir).export(str(consts.REPO_PATH)) + context.report = ConnectorReport(context, steps_results, name="BASE IMAGE UPGRADE RESULTS") + return context.report + + +async def run_connector_migration_to_base_image_pipeline(context: ConnectorContext, semaphore, pull_request_number: str): + async with semaphore: + steps_results = [] + async with context: + # DELETE DOCKERFILE + delete_docker_file = DeleteConnectorFile( + context, + "Dockerfile", + ) + delete_docker_file_result = await delete_docker_file.run() + 
steps_results.append(delete_docker_file_result) + + # DELETE BUILD.GRADLE IF NOT JAVA + if context.connector.language is not ConnectorLanguage.JAVA: + delete_gradle_file = DeleteConnectorFile( + context, + "build.gradle", + ) + delete_gradle_file_result = await delete_gradle_file.run() + steps_results.append(delete_gradle_file_result) + + og_repo_dir = await context.get_repo_dir() + + # UPDATE BASE IMAGE IN METADATA + update_base_image_in_metadata = UpgradeBaseImageMetadata( + context, + og_repo_dir, + set_if_not_exists=True, + ) + update_base_image_in_metadata_result = await update_base_image_in_metadata.run() + steps_results.append(update_base_image_in_metadata_result) + if update_base_image_in_metadata_result.status is not StepStatus.SUCCESS: + context.report = ConnectorReport(context, steps_results, name="BASE IMAGE UPGRADE RESULTS") + return context.report + + # BUMP CONNECTOR VERSION IN METADATA + new_version = get_bumped_version(context.connector.version, "patch") + bump_version_in_metadata = BumpDockerImageTagInMetadata( + context, + update_base_image_in_metadata_result.output_artifact, + new_version, + ) + bump_version_in_metadata_result = await bump_version_in_metadata.run() + steps_results.append(bump_version_in_metadata_result) + + # ADD CHANGELOG ENTRY + add_changelog_entry = AddChangelogEntry( + context, + bump_version_in_metadata_result.output_artifact, + new_version, + "Use our base image and remove Dockerfile", + pull_request_number, + ) + add_changelog_entry_result = await add_changelog_entry.run() + steps_results.append(add_changelog_entry_result) + + # UPDATE DOC + add_build_instructions_to_doc = AddBuildInstructionsToDoc( + context, + add_changelog_entry_result.output_artifact, + ) + add_build_instructions_to_doc_results = await add_build_instructions_to_doc.run() + steps_results.append(add_build_instructions_to_doc_results) + + # EXPORT MODIFIED FILES BACK TO HOST + final_repo_dir = add_build_instructions_to_doc_results.output_artifact + await 
og_repo_dir.diff(final_repo_dir).export(str(consts.REPO_PATH)) + + context.report = ConnectorReport(context, steps_results, name="MIGRATE TO BASE IMAGE RESULTS") + return context.report diff --git a/airbyte-ci/connectors/pipelines/pipelines/connector_changes/metadata_change_helpers.py b/airbyte-ci/connectors/pipelines/pipelines/connector_changes/metadata_change_helpers.py new file mode 100644 index 000000000000..179cfe4a6e7f --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/connector_changes/metadata_change_helpers.py @@ -0,0 +1,21 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# +from pathlib import Path + +import yaml +from dagger import Directory + +# Helpers + + +async def get_current_metadata(repo_dir: Directory, metadata_path: Path) -> dict: + return yaml.safe_load(await repo_dir.file(str(metadata_path)).contents()) + + +def get_repo_dir_with_updated_metadata(repo_dir: Directory, metadata_path: Path, updated_metadata: dict) -> Directory: + return repo_dir.with_new_file(str(metadata_path), yaml.safe_dump(updated_metadata)) + + +def get_current_version(current_metadata: dict) -> str: + return current_metadata.get("data", {}).get("dockerImageTag") diff --git a/airbyte-ci/connectors/pipelines/pipelines/connector_changes/version_bump.py b/airbyte-ci/connectors/pipelines/pipelines/connector_changes/version_bump.py new file mode 100644 index 000000000000..7aa500cc4bd3 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/connector_changes/version_bump.py @@ -0,0 +1,175 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# +import datetime +from copy import deepcopy + +import semver +from dagger import Container +from pipelines import consts +from pipelines.bases import ConnectorReport, Step, StepResult, StepStatus +from pipelines.contexts import ConnectorContext + +from . 
import metadata_change_helpers


class BumpDockerImageTagInMetadata(Step):
    """Step that bumps the dockerImageTag declared in the connector metadata.yaml."""

    title = "Upgrade the dockerImageTag to the latest version in metadata.yaml"

    def __init__(
        self,
        context: ConnectorContext,
        repo_dir: Container,  # NOTE(review): callers pass a dagger Directory, not a Container — confirm and fix the annotation/import
        new_version: str,
    ):
        super().__init__(context)
        self.repo_dir = repo_dir
        self.new_version = new_version

    @staticmethod
    def get_metadata_with_bumped_version(previous_version: str, new_version: str, current_metadata: dict) -> dict:
        """Return a copy of current_metadata with dockerImageTag set to new_version.

        If the cloud registry override pins exactly previous_version, it is bumped
        as well so the strict pin stays in sync with the new release.

        Args:
            previous_version (str): The version currently declared in the metadata.
            new_version (str): The version to write.
            current_metadata (dict): The parsed metadata.yaml content (not mutated).

        Returns:
            dict: A deep copy of current_metadata with the bumped version(s).
        """
        updated_metadata = deepcopy(current_metadata)
        updated_metadata["data"]["dockerImageTag"] = new_version
        # Bump strict versions
        if current_metadata["data"].get("registries", {}).get("cloud", {}).get("dockerImageTag") == previous_version:
            updated_metadata["data"]["registries"]["cloud"]["dockerImageTag"] = new_version
        return updated_metadata

    async def _run(self) -> StepResult:
        """Read the metadata, bump the version, and return the updated repo dir as artifact."""
        metadata_path = self.context.connector.metadata_file_path
        current_metadata = await metadata_change_helpers.get_current_metadata(self.repo_dir, metadata_path)
        current_version = metadata_change_helpers.get_current_version(current_metadata)
        if current_version is None:
            # Without a readable current version we cannot safely rewrite the metadata.
            return StepResult(
                self,
                StepStatus.SKIPPED,
                stdout="Can't retrieve the connector current version.",
                output_artifact=self.repo_dir,
            )
        updated_metadata = self.get_metadata_with_bumped_version(current_version, self.new_version, current_metadata)
        repo_dir_with_updated_metadata = metadata_change_helpers.get_repo_dir_with_updated_metadata(
            self.repo_dir, metadata_path, updated_metadata
        )

        return StepResult(
            self,
            StepStatus.SUCCESS,
            stdout=f"Updated dockerImageTag from {current_version} to {self.new_version} in {metadata_path}",
            output_artifact=repo_dir_with_updated_metadata,
        )


class AddChangelogEntry(Step):
    """Step that inserts a new row into the changelog table of the connector documentation."""

    title = "Add changelog entry"

    def __init__(
        self,
        context: ConnectorContext,
        repo_dir: Container,  # NOTE(review): same as above — this receives a dagger Directory
        new_version: str,
        changelog_entry: str,
        pull_request_number: str,
    ):
        super().__init__(context)
        self.repo_dir = repo_dir
        self.new_version = new_version
        self.changelog_entry = changelog_entry
        self.pull_request_number = pull_request_number

    async def _run(self) -> StepResult:
        """Insert the changelog row and return the updated repo dir as artifact."""
        doc_path = self.context.connector.documentation_file_path
        if not doc_path.exists():
            return StepResult(
                self,
                StepStatus.SKIPPED,
                stdout="Connector does not have a documentation file.",
                output_artifact=self.repo_dir,
            )
        try:
            updated_doc = self.add_changelog_entry(doc_path.read_text())
        except Exception as e:
            return StepResult(
                self,
                StepStatus.FAILURE,
                stdout=f"Could not add changelog entry: {e}",
                # Bug fix: the original returned self.container_with_airbyte_repo,
                # an attribute that does not exist on this step, so any failure to
                # insert the entry raised AttributeError and masked the real error.
                output_artifact=self.repo_dir,
            )
        updated_repo_dir = self.repo_dir.with_new_file(str(doc_path), updated_doc)
        return StepResult(
            self,
            StepStatus.SUCCESS,
            stdout=f"Added changelog entry to {doc_path}",
            output_artifact=updated_repo_dir,
        )

    def find_line_index_for_new_entry(self, markdown_text) -> int:
        """Return the insertion index for a new changelog row.

        The index is two lines below the table header (header row + markdown
        separator row), i.e. the position of the first data row.

        Raises:
            Exception: If no changelog table header is found in markdown_text.
        """
        lines = markdown_text.splitlines()
        for line_index, line in enumerate(lines):
            if "version" in line.lower() and "date" in line.lower() and "pull request" in line.lower() and "subject" in line.lower():
                return line_index + 2
        raise Exception("Could not find the changelog section table in the documentation file.")

    def add_changelog_entry(self, og_doc_content) -> str:
        """Return the documentation content with a new changelog row inserted at the top of the table."""
        today = datetime.date.today().strftime("%Y-%m-%d")
        lines = og_doc_content.splitlines()
        line_index_for_new_entry = self.find_line_index_for_new_entry(og_doc_content)
        new_entry = f"| {self.new_version} | {today} | [{self.pull_request_number}](https://github.com/airbytehq/airbyte/pull/{self.pull_request_number}) | {self.changelog_entry} |"
        lines.insert(line_index_for_new_entry, new_entry)
        return "\n".join(lines)


def get_bumped_version(version: str, bump_type: str) -> str:
    """Return version bumped according to bump_type.

    Args:
        version (str): A semver-parsable version string, e.g. "1.2.3".
        bump_type (str): One of "major", "minor" or "patch".

    Returns:
        str: The bumped version string.

    Raises:
        ValueError: If bump_type is not one of the supported values.
    """
    current_version = semver.VersionInfo.parse(version)
    if bump_type == "patch":
        new_version = current_version.bump_patch()
    elif bump_type == "minor":
        new_version = current_version.bump_minor()
    elif bump_type == "major":
        new_version = current_version.bump_major()
    else:
        raise ValueError(f"Unknown bump type: {bump_type}")
    return str(new_version)


async def run_connector_version_bump_pipeline(
    context: ConnectorContext,
    semaphore,
    bump_type: str,
    changelog_entry: str,
    pull_request_number: str,
) -> ConnectorReport:
    """Run a pipeline to bump the version of a single connector.

    Bumps dockerImageTag in metadata.yaml, adds a changelog row to the connector
    documentation, and exports the changed files back to the host repository.

    Args:
        context (ConnectorContext): The initialized connector context.
        semaphore: Semaphore bounding the number of concurrently running pipelines.
        bump_type (str): "major", "minor" or "patch".
        changelog_entry (str): Text of the changelog row to add to the documentation.
        pull_request_number (str): PR number referenced by the changelog row.

    Returns:
        ConnectorReport: The report holding the version bump results.
    """
    async with semaphore:
        steps_results = []
        async with context:
            og_repo_dir = await context.get_repo_dir()
            new_version = get_bumped_version(context.connector.version, bump_type)
            update_docker_image_tag_in_metadata = BumpDockerImageTagInMetadata(
                context,
                og_repo_dir,
                new_version,
            )
            update_docker_image_tag_in_metadata_result = await update_docker_image_tag_in_metadata.run()
            repo_dir_with_updated_metadata = update_docker_image_tag_in_metadata_result.output_artifact
            steps_results.append(update_docker_image_tag_in_metadata_result)

            add_changelog_entry = AddChangelogEntry(
                context,
                repo_dir_with_updated_metadata,
                new_version,
                changelog_entry,
                pull_request_number,
            )
            add_changelog_entry_result = await add_changelog_entry.run()
            steps_results.append(add_changelog_entry_result)
            # Export only the diff against the original directory back to the host repo.
            final_repo_dir = add_changelog_entry_result.output_artifact
            await og_repo_dir.diff(final_repo_dir).export(str(consts.REPO_PATH))
            context.report = ConnectorReport(context, steps_results, name="CONNECTOR VERSION BUMP RESULTS")
            return context.report
import platform -from pathlib import Path +import git from dagger import Platform PYPROJECT_TOML_FILE_PATH = "pyproject.toml" @@ -38,3 +38,5 @@ DOCKER_HOST_NAME = "global-docker-host" DOCKER_HOST_PORT = 2375 DOCKER_TMP_VOLUME_NAME = "shared-tmp" +REPO = git.Repo(search_parent_directories=True) +REPO_PATH = REPO.working_tree_dir diff --git a/airbyte-ci/connectors/pipelines/pipelines/contexts.py b/airbyte-ci/connectors/pipelines/pipelines/contexts.py index b38a4960c546..903f1116eedf 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/contexts.py +++ b/airbyte-ci/connectors/pipelines/pipelines/contexts.py @@ -74,6 +74,7 @@ def __init__( ci_gcs_credentials: Optional[str] = None, ci_git_user: Optional[str] = None, ci_github_access_token: Optional[str] = None, + open_report_in_browser: bool = True, ): """Initialize a pipeline context. @@ -116,6 +117,7 @@ def __init__( self.started_at = None self.stopped_at = None self.secrets_to_mask = [] + self.open_report_in_browser = open_report_in_browser update_commit_status_check(**self.github_commit_status) @property @@ -336,6 +338,8 @@ def __init__( use_local_cdk: bool = False, use_host_gradle_dist_tar: bool = False, open_report_in_browser: bool = True, + docker_hub_username: Optional[str] = None, + docker_hub_password: Optional[str] = None, ): """Initialize a connector context. @@ -359,6 +363,8 @@ def __init__( code_tests_only (bool, optional): Whether to ignore non-code tests like QA and metadata checks. Defaults to False. use_host_gradle_dist_tar (bool, optional): Used when developing java connectors with gradle. Defaults to False. open_report_in_browser (bool, optional): Open HTML report in browser window. Defaults to True. + docker_hub_username (Optional[str], optional): Docker Hub username to use to read registries. Defaults to None. + docker_hub_password (Optional[str], optional): Docker Hub password to use to read registries. Defaults to None. 
""" self.pipeline_name = pipeline_name @@ -376,6 +382,8 @@ def __init__( self.use_local_cdk = use_local_cdk self.use_host_gradle_dist_tar = use_host_gradle_dist_tar self.open_report_in_browser = open_report_in_browser + self.docker_hub_username = docker_hub_username + self.docker_hub_password = docker_hub_password super().__init__( pipeline_name=pipeline_name, @@ -393,6 +401,7 @@ def __init__( ci_gcs_credentials=ci_gcs_credentials, ci_git_user=ci_git_user, ci_github_access_token=ci_github_access_token, + open_report_in_browser=open_report_in_browser, ) @property diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger_run.py b/airbyte-ci/connectors/pipelines/pipelines/dagger_run.py index df84c79b35be..b1ab352896f8 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger_run.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger_run.py @@ -22,7 +22,8 @@ "_EXPERIMENTAL_DAGGER_CLOUD_TOKEN", "p.eyJ1IjogIjFiZjEwMmRjLWYyZmQtNDVhNi1iNzM1LTgxNzI1NGFkZDU2ZiIsICJpZCI6ICJlNjk3YzZiYy0yMDhiLTRlMTktODBjZC0yNjIyNGI3ZDBjMDEifQ.hT6eMOYt3KZgNoVGNYI3_v4CC-s19z8uQsBkGrBhU3k", ) -ARGS_DISABLING_TUI = ["--no-tui", "publish", "--version"] + +ARGS_DISABLING_TUI = ["--no-tui", "--version", "publish", "upgrade-base-image", "--help", "format", "bump-version", "migrate-to-base-image"] def get_dagger_path() -> Optional[str]: diff --git a/airbyte-ci/connectors/pipelines/poetry.lock b/airbyte-ci/connectors/pipelines/poetry.lock index 5ac03e3e04e6..6d235e4d85a9 100644 --- a/airbyte-ci/connectors/pipelines/poetry.lock +++ b/airbyte-ci/connectors/pipelines/poetry.lock @@ -1,5 +1,27 @@ # This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +[[package]] +name = "airbyte-connectors-base-images" +version = "0.1.0" +description = "This package is used to generate and publish the base images for Airbyte Connectors." 
+optional = false +python-versions = "^3.10" +files = [] +develop = true + +[package.dependencies] +connector-ops = {path = "../connector_ops", develop = true} +dagger-io = "0.6.4" +gitpython = "^3.1.35" +inquirer = "^3.1.3" +jinja2 = "^3.1.2" +rich = "^13.5.2" +semver = "^3.0.1" + +[package.source] +type = "directory" +url = "../base_images" + [[package]] name = "airbyte-protocol-models" version = "1.0.1" @@ -14,6 +36,17 @@ files = [ [package.dependencies] pydantic = ">=1.9.2,<1.10.0" +[[package]] +name = "ansicon" +version = "1.89.0" +description = "Python wrapper for loading Jason Hood's ANSICON" +optional = false +python-versions = "*" +files = [ + {file = "ansicon-1.89.0-py2.py3-none-any.whl", hash = "sha256:f1def52d17f65c2c9682cf8370c03f541f410c1752d6a14029f97318e4b9dfec"}, + {file = "ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1"}, +] + [[package]] name = "anyio" version = "3.7.1" @@ -106,6 +139,22 @@ doc-rtd = ["autoapi (>=0.9.0)", "pydata-sphinx-theme (<=0.7.2)", "sphinx (>=4.2. test-tox = ["mypy (>=0.800)", "numpy", "pandera", "pytest (>=4.0.0)", "sphinx", "typing-extensions (>=3.10.0.0)"] test-tox-coverage = ["coverage (>=5.5)"] +[[package]] +name = "blessed" +version = "1.20.0" +description = "Easy, practical library for making terminal apps, by providing an elegant, well-documented interface to Colors, Keyboard input, and screen Positioning capabilities." 
+optional = false +python-versions = ">=2.7" +files = [ + {file = "blessed-1.20.0-py2.py3-none-any.whl", hash = "sha256:0c542922586a265e699188e52d5f5ac5ec0dd517e5a1041d90d2bbf23f906058"}, + {file = "blessed-1.20.0.tar.gz", hash = "sha256:2cdd67f8746e048f00df47a2880f4d6acbcdb399031b604e34ba8f71d5787680"}, +] + +[package.dependencies] +jinxed = {version = ">=1.1.0", markers = "platform_system == \"Windows\""} +six = ">=1.9.0" +wcwidth = ">=0.1.4" + [[package]] name = "cachetools" version = "5.3.1" @@ -698,19 +747,20 @@ grpc = ["grpcio (>=1.38.0,<2.0dev)"] [[package]] name = "google-cloud-storage" -version = "2.11.0" +version = "2.12.0" description = "Google Cloud Storage API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-storage-2.11.0.tar.gz", hash = "sha256:6fbf62659b83c8f3a0a743af0d661d2046c97c3a5bfb587c4662c4bc68de3e31"}, - {file = "google_cloud_storage-2.11.0-py2.py3-none-any.whl", hash = "sha256:88cbd7fb3d701c780c4272bc26952db99f25eb283fb4c2208423249f00b5fe53"}, + {file = "google-cloud-storage-2.12.0.tar.gz", hash = "sha256:57c0bcda2f5e11f008a155d8636d8381d5abab46b58e0cae0e46dd5e595e6b46"}, + {file = "google_cloud_storage-2.12.0-py2.py3-none-any.whl", hash = "sha256:bc52563439d42981b6e21b071a76da2791672776eda3ba99d13a8061ebbd6e5e"}, ] [package.dependencies] google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" -google-auth = ">=1.25.0,<3.0dev" +google-auth = ">=2.23.3,<3.0dev" google-cloud-core = ">=2.3.0,<3.0dev" +google-crc32c = ">=1.0,<2.0dev" google-resumable-media = ">=2.6.0" requests = ">=2.18.0,<3.0.0dev" @@ -946,6 +996,22 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "inquirer" +version = "3.1.3" +description = "Collection of common interactive command line user interfaces, based on Inquirer.js" +optional = false +python-versions = ">=3.8" +files = [ + {file = "inquirer-3.1.3-py3-none-any.whl", 
hash = "sha256:a7441fd74d06fcac4385218a1f5e8703f7a113f7944e01af47b8c58e84f95ce5"}, + {file = "inquirer-3.1.3.tar.gz", hash = "sha256:aac309406f5b49d4b8ab7c6872117f43bf082a552dc256aa16bc95e16bb58bec"}, +] + +[package.dependencies] +blessed = ">=1.19.0" +python-editor = ">=1.0.4" +readchar = ">=3.0.6" + [[package]] name = "jinja2" version = "3.1.2" @@ -963,6 +1029,20 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "jinxed" +version = "1.2.0" +description = "Jinxed Terminal Library" +optional = false +python-versions = "*" +files = [ + {file = "jinxed-1.2.0-py2.py3-none-any.whl", hash = "sha256:cfc2b2e4e3b4326954d546ba6d6b9a7a796ddcb0aef8d03161d005177eb0d48b"}, + {file = "jinxed-1.2.0.tar.gz", hash = "sha256:032acda92d5c57cd216033cbbd53de731e6ed50deb63eb4781336ca55f72cda5"}, +] + +[package.dependencies] +ansicon = {version = "*", markers = "platform_system == \"Windows\""} + [[package]] name = "markdown-it-py" version = "3.0.0" @@ -1584,6 +1664,18 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "python-editor" +version = "1.0.4" +description = "Programmatically open an editor, capture the result." 
+optional = false +python-versions = "*" +files = [ + {file = "python-editor-1.0.4.tar.gz", hash = "sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b"}, + {file = "python_editor-1.0.4-py2-none-any.whl", hash = "sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8"}, + {file = "python_editor-1.0.4-py3-none-any.whl", hash = "sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d"}, +] + [[package]] name = "pytz" version = "2023.3.post1" @@ -1667,6 +1759,20 @@ files = [ {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] +[[package]] +name = "readchar" +version = "4.0.5" +description = "Library to easily read single chars and key strokes" +optional = false +python-versions = ">=3.7" +files = [ + {file = "readchar-4.0.5-py3-none-any.whl", hash = "sha256:76ec784a5dd2afac3b7da8003329834cdd9824294c260027f8c8d2e4d0a78f43"}, + {file = "readchar-4.0.5.tar.gz", hash = "sha256:08a456c2d7c1888cde3f4688b542621b676eb38cd6cfed7eb6cb2e2905ddc826"}, +] + +[package.dependencies] +setuptools = ">=41.0" + [[package]] name = "requests" version = "2.28.2" @@ -1776,6 +1882,22 @@ starlette = ["starlette (>=0.19.1)"] starlite = ["starlite (>=1.48)"] tornado = ["tornado (>=5)"] +[[package]] +name = "setuptools" +version = "68.2.2" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, + {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", 
"sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "simpleeval" version = "0.9.13" @@ -1894,6 +2016,17 @@ brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotl secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +[[package]] +name = "wcwidth" +version = "0.2.8" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.8-py2.py3-none-any.whl", hash = "sha256:77f719e01648ed600dfa5402c347481c0992263b81a027344f3e1ba25493a704"}, + {file = "wcwidth-0.2.8.tar.gz", hash = "sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"}, +] + [[package]] name = "websocket-client" version = "1.6.4" @@ -2084,4 +2217,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "~3.10" -content-hash = "1de89e7d2152377c7ac94a01965243c73fd779fe4b5dc08e94f826e0bb8eae94" +content-hash = "eb562583bcf251947d5cfc38f334bd3d2b694939b5604b86f7efae39e3b971ed" diff --git a/airbyte-ci/connectors/pipelines/pyproject.toml b/airbyte-ci/connectors/pipelines/pyproject.toml index 39144ba2ed02..790e19968c70 100644 --- 
a/airbyte-ci/connectors/pipelines/pyproject.toml +++ b/airbyte-ci/connectors/pipelines/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pipelines" -version = "1.8.0" +version = "1.9.0" description = "Packaged maintained by the connector operations team to perform CI for connectors' pipelines" authors = ["Airbyte "] @@ -20,6 +20,7 @@ airbyte-protocol-models = "*" tabulate = "^0.8.9" jinja2 = "^3.0.2" requests = "2.28.2" # Pinned as the requests 2.29.0 version is not compatible with the docker package +airbyte-connectors-base-images = {path = "../base_images", develop = true} connector-ops = {path = "../connector_ops", develop = true} toml = "^0.10.2" sentry-sdk = "^1.28.1"