From 342a7bfbc14cf846484f861de9da86767f350de7 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Tue, 24 Sep 2024 21:24:08 +0000
Subject: [PATCH] =?UTF-8?q?=F0=9F=8E=A8=20[pre-commit.ci]=20Auto=20format?=
 =?UTF-8?q?=20from=20pre-commit.com=20hooks?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 docs/getting_started/gcp-cloud-run-job.rst | 16 ++++++++--------
 tests/operators/test_gcp_cloud_run_job.py  |  6 +++---
 2 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/docs/getting_started/gcp-cloud-run-job.rst b/docs/getting_started/gcp-cloud-run-job.rst
index a9a4b2261..a378fbe20 100644
--- a/docs/getting_started/gcp-cloud-run-job.rst
+++ b/docs/getting_started/gcp-cloud-run-job.rst
@@ -16,14 +16,14 @@ Prerequisites
 4. Astronomer-cosmos package containing the dbt Cloud Run Job operators
 5. GCP account with:
    1. A GCP project (`setup guide `_)
-   2. IAM roles: 
-      * Basic Role: `Owner `_ (control over whole project) or 
+   2. IAM roles:
+      * Basic Role: `Owner `_ (control over whole project) or
       * Predefined Roles: `Artifact Registry Administrator `_, `Cloud Run Developer `_ (control over specific services)
    3. Enabled service APIs:
-      * Artifact Registry API 
+      * Artifact Registry API
       * Cloud Run Admin API
       * BigQuery API
-   4. A service account with BigQuery roles: `JobUser `_ and `DataEditor `_ 
+   4. A service account with BigQuery roles: `JobUser `_ and `DataEditor `_
 6. Docker image built with required dbt project and dbt DAG
 7. dbt DAG with Cloud Run Job operators in the Airflow DAGs directory to run in Airflow
 
@@ -85,7 +85,7 @@ In case BigQuery has never been used before in the project, run below command to
 
 **Setup Artifact Registry**
 
-In order to run a container in Cloud Run Job, it needs access to the container image. In our setup, we will use Artifact Registry repository that stores images. 
+In order to run a container in Cloud Run Job, it needs access to the container image. In our setup, we will use Artifact Registry repository that stores images.
 To use Artifact Registry, you need to enable the API first:
 
 .. code-block:: bash
@@ -177,7 +177,7 @@ First, enable Cloud Run Admin API using below command:
 
 .. code-block:: bash
 
-    gcloud services enable run.googleapis.com 
+    gcloud services enable run.googleapis.com
 
 Next, set default Cloud Run region to your GCP region:
 
@@ -243,7 +243,7 @@ You can also verify the tables that were created using dbt in BigQuery Studio:
 After the successfull tests, don't forget to delete Google Cloud resources to save up costs:
 
 .. code-block:: bash
-    
+
     # Delete Cloud Run Job instance
     gcloud run jobs delete $CLOUD_RUN_JOB_NAME
 
@@ -261,4 +261,4 @@ After the successfull tests, don't forget to delete Google Cloud resources to sa
 
     # Delete Artifact Registry repository with all images included
     gcloud artifacts repositories delete $REPO_NAME \
-        --location=$REGION
\ No newline at end of file
+        --location=$REGION
diff --git a/tests/operators/test_gcp_cloud_run_job.py b/tests/operators/test_gcp_cloud_run_job.py
index a6f16942a..1582456d6 100644
--- a/tests/operators/test_gcp_cloud_run_job.py
+++ b/tests/operators/test_gcp_cloud_run_job.py
@@ -9,14 +9,14 @@
 
 try:
     from cosmos.operators.gcp_cloud_run_job import (
+        DbtBuildGcpCloudRunJobOperator,
         DbtGcpCloudRunJobBaseOperator,
         DbtLSGcpCloudRunJobOperator,
         DbtRunGcpCloudRunJobOperator,
+        DbtRunOperationGcpCloudRunJobOperator,
         DbtSeedGcpCloudRunJobOperator,
-        DbtTestGcpCloudRunJobOperator,
-        DbtBuildGcpCloudRunJobOperator,
         DbtSnapshotGcpCloudRunJobOperator,
-        DbtRunOperationGcpCloudRunJobOperator,
+        DbtTestGcpCloudRunJobOperator,
     )
 
     class ConcreteDbtGcpCloudRunJobOperator(DbtGcpCloudRunJobBaseOperator):