diff --git a/cosmos/__init__.py b/cosmos/__init__.py
index 21df0a580..f76f18b7c 100644
--- a/cosmos/__init__.py
+++ b/cosmos/__init__.py
@@ -187,8 +187,7 @@
         "cosmos.operators.gcp_cloud_run_job.DbtRunGcpCloudRunJobOperator", "gcp-cloud-run-job"
     )
     DbtRunOperationGcpCloudRunJobOperator = MissingPackage(
-        "cosmos.operators.gcp_cloud_run_job.DbtRunOperationGcpCloudRunJobOperator",
-        "gcp-cloud-run-job"
+        "cosmos.operators.gcp_cloud_run_job.DbtRunOperationGcpCloudRunJobOperator", "gcp-cloud-run-job"
     )
     DbtSeedGcpCloudRunJobOperator = MissingPackage(
         "cosmos.operators.gcp_cloud_run_job.DbtSeedGcpCloudRunJobOperator", "gcp-cloud-run-job"
diff --git a/docs/getting_started/gcp-cloud-run-job.rst b/docs/getting_started/gcp-cloud-run-job.rst
index a9a4b2261..a378fbe20 100644
--- a/docs/getting_started/gcp-cloud-run-job.rst
+++ b/docs/getting_started/gcp-cloud-run-job.rst
@@ -16,14 +16,14 @@ Prerequisites
 4. Astronomer-cosmos package containing the dbt Cloud Run Job operators
 5. GCP account with:
     1. A GCP project (`setup guide `_)
-    2. IAM roles: 
-        * Basic Role: `Owner `_ (control over whole project) or 
+    2. IAM roles:
+        * Basic Role: `Owner `_ (control over whole project) or
         * Predefined Roles: `Artifact Registry Administrator `_, `Cloud Run Developer `_ (control over specific services)
     3. Enabled service APIs:
-        * Artifact Registry API 
+        * Artifact Registry API
         * Cloud Run Admin API
         * BigQuery API
-    4. A service account with BigQuery roles: `JobUser `_ and `DataEditor `_ 
+    4. A service account with BigQuery roles: `JobUser `_ and `DataEditor `_
 6. Docker image built with required dbt project and dbt DAG
 7. dbt DAG with Cloud Run Job operators in the Airflow DAGs directory to run in Airflow
 
@@ -85,7 +85,7 @@ In case BigQuery has never been used before in the project, run below command to
 
 **Setup Artifact Registry**
 
-In order to run a container in Cloud Run Job, it needs access to the container image. In our setup, we will use Artifact Registry repository that stores images. 
+In order to run a container in Cloud Run Job, it needs access to the container image. In our setup, we will use Artifact Registry repository that stores images.
 To use Artifact Registry, you need to enable the API first:
 
 .. code-block:: bash
@@ -177,7 +177,7 @@ First, enable Cloud Run Admin API using below command:
 
 .. code-block:: bash
 
-    gcloud services enable run.googleapis.com 
+    gcloud services enable run.googleapis.com
 
 Next, set default Cloud Run region to your GCP region:
 
@@ -243,7 +243,7 @@ You can also verify the tables that were created using dbt in BigQuery Studio:
 After the successfull tests, don't forget to delete Google Cloud resources to save up costs:
 
 .. code-block:: bash
-
+
     # Delete Cloud Run Job instance
     gcloud run jobs delete $CLOUD_RUN_JOB_NAME
 
@@ -261,4 +261,4 @@ After the successfull tests, don't forget to delete Google Cloud resources to sa
 
     # Delete Artifact Registry repository with all images included
     gcloud artifacts repositories delete $REPO_NAME \
-        --location=$REGION
\ No newline at end of file
+        --location=$REGION
diff --git a/tests/operators/test_gcp_cloud_run_job.py b/tests/operators/test_gcp_cloud_run_job.py
index a6f16942a..1582456d6 100644
--- a/tests/operators/test_gcp_cloud_run_job.py
+++ b/tests/operators/test_gcp_cloud_run_job.py
@@ -9,14 +9,14 @@
 
 try:
     from cosmos.operators.gcp_cloud_run_job import (
+        DbtBuildGcpCloudRunJobOperator,
         DbtGcpCloudRunJobBaseOperator,
         DbtLSGcpCloudRunJobOperator,
         DbtRunGcpCloudRunJobOperator,
+        DbtRunOperationGcpCloudRunJobOperator,
         DbtSeedGcpCloudRunJobOperator,
-        DbtTestGcpCloudRunJobOperator,
-        DbtBuildGcpCloudRunJobOperator,
         DbtSnapshotGcpCloudRunJobOperator,
-        DbtRunOperationGcpCloudRunJobOperator,
+        DbtTestGcpCloudRunJobOperator,
     )
 
     class ConcreteDbtGcpCloudRunJobOperator(DbtGcpCloudRunJobBaseOperator):