From 45491a9c29a30b54471de13bff7376a7412a2459 Mon Sep 17 00:00:00 2001
From: agarwalsh
Date: Tue, 8 Oct 2024 10:07:42 -0700
Subject: [PATCH 1/5] fix: rebalanced integration tests

---
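Note: all four HBASE TO GCS stages share the same shape: a stage guarded by a
JOB_TYPE check, with the template invocation wrapped in
retry(count: stageRetryCount). The hunks below only show fragments of the
surrounding pipeline, so as a rough sketch of the structure the moved stages
land in (the group name and exact nesting are assumptions, not spelled out by
the diff):

    stage('Parallel Execution 5'){
      parallel {
        stage('HBASE TO GCS (Automated)(avro)') {
          when {
            // skipped when the job targets an existing cluster
            expression { env.JOB_TYPE != "CLUSTER" }
          }
          steps{
            retry(count: stageRetryCount) {
              sh '''
              bin/start.sh -- --template HBASETOGCS ...
              '''
            }
          }
        }
        // ... sibling stages in the same group run concurrently ...
      }
    }

Spreading the avro variants into other parallel groups evens out the
wall-clock time of the groups, which is what this rebalancing is after.
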
"CLUSTER" } - } - steps{ - retry(count: stageRetryCount) { - sh ''' - export SKIP_BUILD=true - export CATALOG='{"table":{"namespace":"default","name":"my_table"},"rowkey":"key","columns":{"key":{"cf":"rowkey","col":"key","type":"string"},"name":{"cf":"cf","col":"name","type":"string"}}}' - export JARS="gs://deps-dataproc-template/hbase-spark-protocol-shaded-1.0.1-spark_3.2-scala_2.12.jar,gs://deps-dataproc-template/hbase-spark-1.0.1-spark_3.2-scala_2.12.jar,gs://deps-dataproc-template/protobuf-java-2.5.0.jar,gs://deps-dataproc-template/htrace-core4-4.2.0-incubating.jar" - - cd java - - bin/start.sh \ - --container-image=us-docker.pkg.dev/${GCP_PROJECT}/dataproc-cicd/hbase-to-gcs:1.0.3 \ - --properties='spark.dataproc.driverEnv.SPARK_EXTRA_CLASSPATH=/etc/hbase/conf/' \ - -- --template=HBASETOGCS \ - --templateProperty hbasetogcs.output.fileformat=csv \ - --templateProperty hbasetogcs.output.savemode=overwrite \ - --templateProperty hbasetogcs.output.path=gs://dataproc-templates/integration-testing/output/HBASETOGCS_manual/csv \ - --templateProperty hbasetogcs.table.catalog=$CATALOG - ''' - } - } - } - stage('HBASE TO GCS (Automated)') { - when { - // Run this stage only if JOB_TYPE is not set to CLUSTER - expression { env.JOB_TYPE != "CLUSTER" } - } - steps{ - retry(count: stageRetryCount) { - sh ''' - gsutil cp gs://python-dataproc-templates/surjitsh/hbase-site.xml . - gcloud auth configure-docker us-docker.pkg.dev - export CATALOG='{"table":{"namespace":"default","name":"my_table"},"rowkey":"key","columns":{"key":{"cf":"rowkey","col":"key","type":"string"},"name":{"cf":"cf","col":"name","type":"string"}}}' - export IMAGE_NAME_VERSION=hbase-to-gcs:1.0.3 - export HBASE_SITE_PATH=../hbase-site.xml - export IMAGE=us-docker.pkg.dev/${GCP_PROJECT}/dataproc-cicd/${IMAGE_NAME_VERSION} - export JARS="gs://deps-dataproc-template/hbase-spark-protocol-shaded-1.0.1-spark_3.2-scala_2.12.jar,gs://deps-dataproc-template/hbase-spark-1.0.1-spark_3.2-scala_2.12.jar,gs://deps-dataproc-template/protobuf-java-2.5.0.jar,gs://deps-dataproc-template/htrace-core4-4.2.0-incubating.jar" - - export SKIP_IMAGE_BUILD=TRUE - export SKIP_BUILD=true - - cd java - - bin/start.sh \ - --container-image=$IMAGE \ - --properties='spark.dataproc.driverEnv.SPARK_EXTRA_CLASSPATH=/etc/hbase/conf/' \ - -- --template HBASETOGCS \ - --templateProperty hbasetogcs.output.fileformat=csv \ - --templateProperty hbasetogcs.output.savemode=overwrite \ - --templateProperty hbasetogcs.output.path=gs://dataproc-templates/integration-testing/output/HBASETOGCS_automated/csv \ - --templateProperty hbasetogcs.table.catalog=$CATALOG - - ''' - } - } - } - stage('HBASE TO GCS (Manual)(avro)') { - when { - // Run this stage only if JOB_TYPE is not set to CLUSTER - expression { env.JOB_TYPE != "CLUSTER" } - } - steps{ - retry(count: stageRetryCount) { - sh ''' - export SKIP_BUILD=true - export CATALOG='{"table":{"namespace":"default","name":"my_table"},"rowkey":"key","columns":{"key":{"cf":"rowkey","col":"key","type":"string"},"name":{"cf":"cf","col":"name","type":"string"}}}' - export JARS="gs://deps-dataproc-template/hbase-spark-protocol-shaded-1.0.1-spark_3.2-scala_2.12.jar,gs://deps-dataproc-template/hbase-spark-1.0.1-spark_3.2-scala_2.12.jar,gs://deps-dataproc-template/protobuf-java-2.5.0.jar,gs://deps-dataproc-template/htrace-core4-4.2.0-incubating.jar" + stage('HBASE TO GCS (Manual)') { + when { + // Run this stage only if JOB_TYPE is not set to CLUSTER + expression { env.JOB_TYPE != "CLUSTER" } + } + steps{ + retry(count: stageRetryCount) { + sh 
 java/.ci/Jenkinsfile | 25 +++++++++++++++++++++++++
 1 file changed, 25 insertions(+)

diff --git a/java/.ci/Jenkinsfile b/java/.ci/Jenkinsfile
index 70bfac78..3a07ad51 100644
--- a/java/.ci/Jenkinsfile
+++ b/java/.ci/Jenkinsfile
@@ -27,6 +27,8 @@ pipeline {
       '''.stripIndent(),
       returnStdout: true
     ).trim()
+
+    S3_USER = credentials('aws-s3-ro-credentials')
   }
   stages {
     stage('Prepare Environment'){
@@ -333,6 +335,29 @@ EOF
                 }
               }
             }
+            stage('S3 TO BigQuery (avro)'){
+              steps {
+                retry(count: stageRetryCount) {
+                  sh '''
+                  export SKIP_BUILD=true
+
+                  cd java
+
+                  bin/start.sh \
+                  -- --template S3TOBIGQUERY \
+                  --templateProperty project.id=yadavaja-sandbox \
+                  --templateProperty s3.bq.access.key=$S3_USER \
+                  --templateProperty s3.bq.secret.key=$S3_USER_PSW \
+                  --templateProperty s3.bq.input.format=avro \
+                  --templateProperty s3.bq.input.location=s3a://dataproc-templates-integration-tests/cities.avro \
+                  --templateProperty s3.bq.output.dataset.name=dataproc_templates \
+                  --templateProperty s3.bq.output.table.name=s3_to_bq_avro \
+                  --templateProperty s3.bq.output.mode=Overwrite \
+                  --templateProperty s3.bq.ld.temp.bucket.name=dataproc-templates
+                  '''
+                }
+              }
+            }
           }
         }
         stage('Parallel Execution 3'){

From 65d34b661df4bf5c753038c82c277185c36f2aa7 Mon Sep 17 00:00:00 2001
From: agarwalsh
Date: Thu, 10 Oct 2024 09:17:31 -0700
Subject: [PATCH 3/5] fix: removed aws hadoop dependencies

---
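Note: compared with the environment-block binding from the previous patch,
withCredentials names each half of the credential explicitly and limits the
variables to the enclosing block. A minimal sketch of the pattern, reusing
the credential id from this pipeline:

    retry(count: stageRetryCount) {
      withCredentials([usernamePassword(credentialsId: 'aws-s3-ro-credentials',
          passwordVariable: 'S3_SECRET', usernameVariable: 'S3_KEY')]) {
        // S3_KEY and S3_SECRET exist only inside this block and are
        // masked if they ever show up in the console log.
        sh 'bin/start.sh -- --template S3TOBIGQUERY ...'
      }
    }

The pom.xml side drops the pinned hadoop-aws and aws-java-sdk artifacts; the
assumption here is that the template now relies on the S3A support already
shipped in the runtime image rather than bundling its own.
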
 java/.ci/Jenkinsfile |  9 +++++----
 java/pom.xml         | 31 -------------------------------
 2 files changed, 5 insertions(+), 35 deletions(-)

diff --git a/java/.ci/Jenkinsfile b/java/.ci/Jenkinsfile
index 3a07ad51..d320ddb1 100644
--- a/java/.ci/Jenkinsfile
+++ b/java/.ci/Jenkinsfile
@@ -27,8 +27,6 @@ pipeline {
       '''.stripIndent(),
       returnStdout: true
     ).trim()
-
-    S3_USER = credentials('aws-s3-ro-credentials')
   }
   stages {
     stage('Prepare Environment'){
@@ -338,6 +336,8 @@ EOF
             stage('S3 TO BigQuery (avro)'){
               steps {
                 retry(count: stageRetryCount) {
+                  withCredentials([usernamePassword(credentialsId: 'aws-s3-ro-credentials',
+                      passwordVariable: 'S3_SECRET', usernameVariable: 'S3_KEY')]) {
                   sh '''
                   export SKIP_BUILD=true
 
                   cd java
 
                   bin/start.sh \
                   -- --template S3TOBIGQUERY \
                   --templateProperty project.id=yadavaja-sandbox \
-                  --templateProperty s3.bq.access.key=$S3_USER \
-                  --templateProperty s3.bq.secret.key=$S3_USER_PSW \
+                  --templateProperty s3.bq.access.key=$S3_KEY \
+                  --templateProperty s3.bq.secret.key=$S3_SECRET \
                   --templateProperty s3.bq.input.format=avro \
                   --templateProperty s3.bq.input.location=s3a://dataproc-templates-integration-tests/cities.avro \
                   --templateProperty s3.bq.output.dataset.name=dataproc_templates \
@@ -355,6 +355,7 @@ EOF
                   --templateProperty s3.bq.output.mode=Overwrite \
                   --templateProperty s3.bq.ld.temp.bucket.name=dataproc-templates
                   '''
+                  }
                 }
               }
             }
diff --git a/java/pom.xml b/java/pom.xml
index d5f7fce7..f35f648b 100644
--- a/java/pom.xml
+++ b/java/pom.xml
@@ -94,10 +94,6 @@
     3.0.0
 
-
-    <hadoop.aws.version>3.4.0</hadoop.aws.version>
-    <aws.java.sdk.version>1.12.772</aws.java.sdk.version>
-
     1.0.1
     2.6.0-hadoop3
@@ -393,33 +389,6 @@
       <version>${spark.snowflake.connector.version}</version>
     </dependency>
 
-
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-common</artifactId>
-      <version>${hadoop.aws.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-aws</artifactId>
-      <version>${hadoop.aws.version}</version>
-      <exclusions>
-        <exclusion>
-          <groupId>org.slf4j</groupId>
-          <artifactId>slf4j-log4j12</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>log4j</groupId>
-          <artifactId>log4j</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>com.amazonaws</groupId>
-      <artifactId>aws-java-sdk</artifactId>
-      <version>${aws.java.sdk.version}</version>
-    </dependency>
-
     <dependency>
       <groupId>org.apache.hbase.connectors.spark</groupId>
       <artifactId>hbase-spark</artifactId>

From f6414378b24a7e2ce2b800e2bcc45d705720c265 Mon Sep 17 00:00:00 2001
From: agarwalsh
Date: Thu, 10 Oct 2024 13:50:23 -0700
Subject: [PATCH 4/5] docs: deprecated redshift template

---
 README.md      | 2 +-
 java/README.md | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index 9719016a..dc751e5d 100644
--- a/README.md
+++ b/README.md
@@ -47,7 +47,7 @@ Please refer to the [Dataproc Templates (Java - Spark) README](/java) for more i
 * [PubSubToBigTable](/java/src/main/java/com/google/cloud/dataproc/templates/pubsub#1-pubsub-to-bigtable) (blogpost [link](https://medium.com/google-cloud/stream-data-from-pub-sub-to-bigtable-using-dataproc-serverless-3142c1bcc22a))
 * [PubSubLiteToBigTable](/java/src/main/java/com/google/cloud/dataproc/templates/pubsublite#1-pubsublite-to-bigtable) (blogpost [link](https://medium.com/google-cloud/stream-data-from-pub-sub-lite-to-bigtable-using-dataproc-serverless-2c8816f40581)) **Deprecated and will be removed in Q1 2025**
 * [PubSubToGCS](/java/src/main/java/com/google/cloud/dataproc/templates/pubsub#2-pubsub-to-gcs) (blogpost [link](https://medium.com/google-cloud/stream-data-from-pub-sub-to-cloud-storage-using-dataproc-serverless-7a1e4823926e))
-* [RedshiftToGCS](/java/src/main/java/com/google/cloud/dataproc/templates/databases#executing-redshift-to-gcs-template) (blogpost [Link](https://medium.com/google-cloud/exporting-data-from-redshift-to-gcs-using-gcp-dataproc-serverless-java-5b82044d78e7))
+* [RedshiftToGCS](/java/src/main/java/com/google/cloud/dataproc/templates/databases#executing-redshift-to-gcs-template) (blogpost [Link](https://medium.com/google-cloud/exporting-data-from-redshift-to-gcs-using-gcp-dataproc-serverless-java-5b82044d78e7)) **Deprecated and will be removed in Q1 2025**
 * [S3ToBigQuery](/java/src/main/java/com/google/cloud/dataproc/templates/s3#1-s3-to-bigquery) (blogpost [link](https://medium.com/google-cloud/export-data-from-aws-s3-to-bigquery-using-dataproc-serverless-6dc7a9952fc4))
 * [SnowflakeToGCS](/java/src/main/java/com/google/cloud/dataproc/templates/snowflake#1-snowflake-to-gcs) (blogpost [link](https://medium.com/google-cloud/export-snowflake-query-results-to-gcs-using-dataproc-serverless-3d68f5a01ca9))
 * [SpannerToGCS](/java/src/main/java/com/google/cloud/dataproc/templates/databases#executing-spanner-to-gcs-template) (blogpost [link](https://medium.com/google-cloud/cloud-spanner-export-query-results-using-dataproc-serverless-6f2f65b583a4))
diff --git a/java/README.md b/java/README.md
index 0287a8e3..eeaf3bba 100644
--- a/java/README.md
+++ b/java/README.md
@@ -34,7 +34,7 @@ Please refer to the [Dataproc Templates (Java - Spark) README](java/README.md) f
 * [PubSubToBigTable](/java/src/main/java/com/google/cloud/dataproc/templates/pubsub#1-pubsub-to-bigtable) (blogpost [link](https://medium.com/google-cloud/stream-data-from-pub-sub-to-bigtable-using-dataproc-serverless-3142c1bcc22a))
 * [PubSubLiteToBigTable](/java/src/main/java/com/google/cloud/dataproc/templates/pubsublite#1-pubsublite-to-bigtable) (blogpost [link](https://medium.com/google-cloud/stream-data-from-pub-sub-lite-to-bigtable-using-dataproc-serverless-2c8816f40581)) **Deprecated and will be removed in Q1 2025**
 * [PubSubToGCS](/java/src/main/java/com/google/cloud/dataproc/templates/pubsub/README.md#2-pubsub-to-gcs) (blogpost [link](https://medium.com/google-cloud/stream-data-from-pub-sub-to-cloud-storage-using-dataproc-serverless-7a1e4823926e))
-* [RedshiftToGCS](/java/src/main/java/com/google/cloud/dataproc/templates/databases#executing-redshift-to-gcs-template)
+* [RedshiftToGCS](/java/src/main/java/com/google/cloud/dataproc/templates/databases#executing-redshift-to-gcs-template) **Deprecated and will be removed in Q1 2025**
 * [S3ToBigQuery](/java/src/main/java/com/google/cloud/dataproc/templates/s3#1-s3-to-bigquery) (blogpost [link](https://medium.com/google-cloud/export-data-from-aws-s3-to-bigquery-using-dataproc-serverless-6dc7a9952fc4))
 * [SnowflakeToGCS](/java/src/main/java/com/google/cloud/dataproc/templates/snowflake#1-snowflake-to-gcs) (blogpost [link](https://medium.com/google-cloud/export-snowflake-query-results-to-gcs-using-dataproc-serverless-3d68f5a01ca9))
 * [SpannerToGCS](/java/src/main/java/com/google/cloud/dataproc/templates/databases#executing-spanner-to-gcs-template) (blogpost [link](https://medium.com/google-cloud/cloud-spanner-export-query-results-using-dataproc-serverless-6f2f65b583a4))

From b1dc6020c44f129d1b3d78feedda2976e9c28231 Mon Sep 17 00:00:00 2001
From: agarwalsh
Date: Thu, 10 Oct 2024 14:11:52 -0700
Subject: [PATCH 5/5] ci: updated gcp project variable

---
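Note: GCP_PROJECT is presumably the project id resolved in the pipeline's
environment block, so the S3 test now targets the same project as the rest of
the suite instead of a hardcoded sandbox project.
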
 java/.ci/Jenkinsfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/java/.ci/Jenkinsfile b/java/.ci/Jenkinsfile
index c79ec3c8..a90d328b 100644
--- a/java/.ci/Jenkinsfile
+++ b/java/.ci/Jenkinsfile
@@ -345,7 +345,7 @@ EOF
 
                   bin/start.sh \
                   -- --template S3TOBIGQUERY \
-                  --templateProperty project.id=yadavaja-sandbox \
+                  --templateProperty project.id=$GCP_PROJECT \
                   --templateProperty s3.bq.access.key=$S3_KEY \
                   --templateProperty s3.bq.secret.key=$S3_SECRET \
                   --templateProperty s3.bq.input.format=avro \