From ab632dff2d22c22ad23eebdeb85d5a527c3c2f6f Mon Sep 17 00:00:00 2001 From: Charlie Wang Date: Fri, 15 Nov 2024 14:48:08 +0000 Subject: [PATCH 1/7] add uv required project table segment in toml file --- pyproject.toml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 98a6876a..e500633a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,6 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. +[project] +name = "marketing-analytics-jumpstart" +version = "1.0.0" +description = "Marketing Analytics Jumpstart" +authors = ["Marketing Analytics Solutions Architects "] +license = "Apache 2.0" +readme = "README.md" +requires-python = ">=3.8,<3.11" + [tool.poetry] name = "marketing-analytics-jumpstart" version = "1.0.0" From effe0d403bba72400921eec6d08bd2b1f1911b21 Mon Sep 17 00:00:00 2001 From: Charlie Wang Date: Fri, 15 Nov 2024 15:39:15 +0000 Subject: [PATCH 2/7] switch to uv in terraform deployment --- infrastructure/terraform/main.tf | 49 ++----- .../terraform/modules/activation/main.tf | 7 +- .../terraform/modules/activation/variables.tf | 9 +- .../terraform/modules/pipelines/pipelines.tf | 129 +++++++++--------- .../terraform/modules/pipelines/variables.tf | 9 +- infrastructure/terraform/variables.tf | 6 + 6 files changed, 88 insertions(+), 121 deletions(-) diff --git a/infrastructure/terraform/main.tf b/infrastructure/terraform/main.tf index a6ede8bc..1136cb91 100644 --- a/infrastructure/terraform/main.tf +++ b/infrastructure/terraform/main.tf @@ -69,8 +69,8 @@ locals { source_root_dir = "../.." # The config_file_name is the name of the config file. config_file_name = "config" - # The poetry_run_alias is the alias of the poetry command. - poetry_run_alias = "${var.poetry_cmd} run" + # The uv_run_alias is the alias of the uv run command. + uv_run_alias = "${var.uv_cmd} run" # The mds_dataset_suffix is the suffix of the marketing data store dataset. mds_dataset_suffix = var.create_staging_environment ? "staging" : var.create_dev_environment ? "dev" : "prod" # The project_toml_file_path is the path to the project.toml file. @@ -127,39 +127,22 @@ resource "local_file" "feature_store_configuration" { }) } -# Runs the poetry command to install the dependencies. -# The command is: poetry install -resource "null_resource" "poetry_install" { - triggers = { - create_command = "${var.poetry_cmd} lock && ${var.poetry_cmd} install" - source_contents_hash = local.project_toml_content_hash - } - - # Only run the command when `terraform apply` executes and the resource doesn't exist. - provisioner "local-exec" { - when = create - command = self.triggers.create_command - working_dir = local.source_root_dir - } -} - data "external" "check_ga4_property_type" { - program = ["bash", "-c", "${local.poetry_run_alias} ga4-setup --ga4_resource=check_property_type --ga4_property_id=${var.ga4_property_id} --ga4_stream_id=${var.ga4_stream_id}"] + program = ["bash", "-c", "${local.uv_run_alias} ga4-setup --ga4_resource=check_property_type --ga4_property_id=${var.ga4_property_id} --ga4_stream_id=${var.ga4_stream_id}"] working_dir = local.source_root_dir - depends_on = [null_resource.poetry_install] } -# Runs the poetry invoke command to generate the sql queries and procedures. +# Runs the uv invoke command to generate the sql queries and procedures. # This command is executed before the feature store is created. 
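# For reference, with the defaults above (uv_cmd = "uv", hence uv_run_alias = "uv run",
# and config_file_name = "config"), the create command below expands to roughly:
#   uv run inv apply-config-parameters-to-all-queries --env-name=config
#   uv run inv apply-config-parameters-to-all-procedures --env-name=config
# This is an illustrative expansion only, not an additional command introduced by this patch.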
resource "null_resource" "generate_sql_queries" { triggers = { # The create command generates the sql queries and procedures. - # The command is: poetry inv [function_name] --env-name=${local.config_file_name} + # The command is: uv inv [function_name] --env-name=${local.config_file_name} # The --env-name argument is the name of the configuration file. create_command = <<-EOT - ${local.poetry_run_alias} inv apply-config-parameters-to-all-queries --env-name=${local.config_file_name} - ${local.poetry_run_alias} inv apply-config-parameters-to-all-procedures --env-name=${local.config_file_name} + ${local.uv_run_alias} inv apply-config-parameters-to-all-queries --env-name=${local.config_file_name} + ${local.uv_run_alias} inv apply-config-parameters-to-all-procedures --env-name=${local.config_file_name} EOT # The destroy command removes the generated sql queries and procedures. @@ -171,10 +154,6 @@ resource "null_resource" "generate_sql_queries" { # The working directory is the root of the project. working_dir = local.source_root_dir - # The poetry_installed trigger is the ID of the null_resource.poetry_install resource. - # This is used to ensure that the poetry command is run before the generate_sql_queries command. - poetry_installed = null_resource.poetry_install.id - # The source_contents_hash trigger is the hash of the project.toml file. # This is used to ensure that the generate_sql_queries command is run only if the project.toml file has changed. # It also ensures that the generate_sql_queries command is run only if the sql queries and procedures have changed. @@ -415,15 +394,12 @@ module "pipelines" { # The source is the path to the pipelines module. source = "./modules/pipelines" config_file_path = local_file.feature_store_configuration.id != "" ? local_file.feature_store_configuration.filename : "" - poetry_run_alias = local.poetry_run_alias + uv_run_alias = local.uv_run_alias # The count determines if the pipelines are created or not. # If the count is 1, the pipelines are created. # If the count is 0, the pipelines are not created. # This is done to avoid creating the pipelines if the `deploy_pipelines` variable is set to false in the terraform.tfvars file. count = var.deploy_pipelines ? 1 : 0 - # The poetry_installed trigger is the ID of the null_resource.poetry_install resource. - # This is used to ensure that the poetry command is run before the pipelines module is created. - poetry_installed = null_resource.poetry_install.id # The project_id is the project in which the data is stored. # This is set to the data project ID in the terraform.tfvars file. mds_project_id = var.data_project_id @@ -454,9 +430,9 @@ module "activation" { # The trigger function is used to trigger the activation function. # The trigger function is created in the same region as the activation function. trigger_function_location = var.google_default_region - # The poetry_cmd is the poetry_cmd variable. - # This can be set on the poetry_cmd in the terraform.tfvars file. - poetry_cmd = var.poetry_cmd + # The uv_run_alias is the uv_run_alias variable. + # This can be set on the uv_cmd in the terraform.tfvars file. + uv_run_alias = local.uv_run_alias # The ga4_measurement_id is the ga4_measurement_id variable. # This can be set on the ga4_measurement_id in the terraform.tfvars file. ga4_measurement_id = var.ga4_measurement_id @@ -479,9 +455,6 @@ module "activation" { # This is done to avoid creating the activation function if the `deploy_activation` variable is set # to false in the terraform.tfvars file. 
count = var.deploy_activation ? 1 : 0 - # The poetry_installed is the ID of the null_resource poetry_install - # This is used to ensure that the poetry command is run before the activation module is created. - poetry_installed = null_resource.poetry_install.id mds_project_id = var.data_project_id mds_dataset_suffix = local.mds_dataset_suffix diff --git a/infrastructure/terraform/modules/activation/main.tf b/infrastructure/terraform/modules/activation/main.tf index c2c8f8b8..1bf35a9d 100644 --- a/infrastructure/terraform/modules/activation/main.tf +++ b/infrastructure/terraform/modules/activation/main.tf @@ -15,7 +15,6 @@ locals { app_prefix = "activation" source_root_dir = "../.." - poetry_run_alias = "${var.poetry_cmd} run" template_dir = "${local.source_root_dir}/templates" pipeline_source_dir = "${local.source_root_dir}/python/activation" trigger_function_dir = "${local.source_root_dir}/python/function" @@ -373,7 +372,7 @@ resource "null_resource" "create_custom_events" { } provisioner "local-exec" { command = <<-EOT - ${local.poetry_run_alias} ga4-setup --ga4_resource=custom_events --ga4_property_id=${var.ga4_property_id} --ga4_stream_id=${var.ga4_stream_id} + ${var.uv_run_alias} ga4-setup --ga4_resource=custom_events --ga4_property_id=${var.ga4_property_id} --ga4_stream_id=${var.ga4_stream_id} EOT working_dir = local.source_root_dir } @@ -391,7 +390,7 @@ resource "null_resource" "create_custom_dimensions" { } provisioner "local-exec" { command = <<-EOT - ${local.poetry_run_alias} ga4-setup --ga4_resource=custom_dimensions --ga4_property_id=${var.ga4_property_id} --ga4_stream_id=${var.ga4_stream_id} + ${var.uv_run_alias} ga4-setup --ga4_resource=custom_dimensions --ga4_property_id=${var.ga4_property_id} --ga4_stream_id=${var.ga4_stream_id} EOT working_dir = local.source_root_dir } @@ -447,7 +446,7 @@ module "trigger_function_account" { # a python command defined in the module ga4_setup. # This informatoin can then be used in other parts of the Terraform configuration to access the retrieved information. data "external" "ga4_measurement_properties" { - program = ["bash", "-c", "${local.poetry_run_alias} ga4-setup --ga4_resource=measurement_properties --ga4_property_id=${var.ga4_property_id} --ga4_stream_id=${var.ga4_stream_id}"] + program = ["bash", "-c", "${var.uv_run_alias} ga4-setup --ga4_resource=measurement_properties --ga4_property_id=${var.ga4_property_id} --ga4_stream_id=${var.ga4_stream_id}"] working_dir = local.source_root_dir # The count attribute specifies how many times the external data source should be executed. 
# This means that the external data source will be executed only if either the diff --git a/infrastructure/terraform/modules/activation/variables.tf b/infrastructure/terraform/modules/activation/variables.tf index 6c2d6428..5814361b 100644 --- a/infrastructure/terraform/modules/activation/variables.tf +++ b/infrastructure/terraform/modules/activation/variables.tf @@ -43,8 +43,8 @@ variable "trigger_function_location" { type = string } -variable "poetry_cmd" { - description = "alias for poetry command on the current system" +variable "uv_run_alias" { + description = "alias for uv run command on the current system" type = string } @@ -72,11 +72,6 @@ variable "ga4_stream_id" { type = string } -variable "poetry_installed" { - description = "Construct to specify dependency to poetry installed" - type = string -} - variable "mds_project_id" { type = string description = "MDS Project ID" diff --git a/infrastructure/terraform/modules/pipelines/pipelines.tf b/infrastructure/terraform/modules/pipelines/pipelines.tf index bd78fa84..a55e9136 100644 --- a/infrastructure/terraform/modules/pipelines/pipelines.tf +++ b/infrastructure/terraform/modules/pipelines/pipelines.tf @@ -309,13 +309,12 @@ resource "null_resource" "build_push_pipelines_components_image" { docker_repo_id = google_artifact_registry_repository.pipelines_docker_repo.id docker_repo_create_time = google_artifact_registry_repository.pipelines_docker_repo.create_time source_content_hash = local.component_image_content_hash - poetry_installed = var.poetry_installed } # The provisioner block specifies the command that will be executed to build and push the base component image. # This command will execute the build-push function in the base_component_image module, which will build and push the base component image to the specified Docker repository. provisioner "local-exec" { - command = "${var.poetry_run_alias} python -m base_component_image.build-push -c ${local.config_file_path_relative_python_run_dir}" + command = "${var.uv_run_alias} python -m base_component_image.build-push -c ${local.config_file_path_relative_python_run_dir}" working_dir = self.triggers.working_dir } } @@ -367,9 +366,9 @@ resource "null_resource" "compile_feature_engineering_auto_audience_segmentation # which will upload the pipeline YAML file to the specified Artifact Registry repository. The scheduler function will then schedule the pipeline to run on a regular basis. 
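# As an illustration, with the default uv_cmd = "uv" the three steps below render to:
#   uv run python -m pipelines.compiler  -c <config> -p vertex_ai.pipelines.feature-creation-auto-audience-segmentation.execution -o fe_auto_audience_segmentation.yaml
#   uv run python -m pipelines.uploader  -c <config> -f fe_auto_audience_segmentation.yaml -t <tag> -t latest
#   uv run python -m pipelines.scheduler -c <config> -p vertex_ai.pipelines.feature-creation-auto-audience-segmentation.execution -i fe_auto_audience_segmentation.yaml
# where <config> and <tag> are placeholders for the generated configuration file path and the
# image tag taken from the resource triggers. The same compile/upload/schedule pattern applies
# to every compile_* resource that follows.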
provisioner "local-exec" { command = <<-EOT - ${var.poetry_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-auto-audience-segmentation.execution -o fe_auto_audience_segmentation.yaml - ${var.poetry_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f fe_auto_audience_segmentation.yaml -t ${self.triggers.tag} -t latest - ${var.poetry_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-auto-audience-segmentation.execution -i fe_auto_audience_segmentation.yaml + ${var.uv_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-auto-audience-segmentation.execution -o fe_auto_audience_segmentation.yaml + ${var.uv_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f fe_auto_audience_segmentation.yaml -t ${self.triggers.tag} -t latest + ${var.uv_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-auto-audience-segmentation.execution -i fe_auto_audience_segmentation.yaml EOT working_dir = self.triggers.working_dir } @@ -391,9 +390,9 @@ resource "null_resource" "compile_feature_engineering_aggregated_value_based_bid # which will upload the pipeline YAML file to the specified Artifact Registry repository. The scheduler function will then schedule the pipeline to run on a regular basis. provisioner "local-exec" { command = <<-EOT - ${var.poetry_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-aggregated-value-based-bidding.execution -o fe_agg_vbb.yaml - ${var.poetry_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f fe_agg_vbb.yaml -t ${self.triggers.tag} -t latest - ${var.poetry_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-aggregated-value-based-bidding.execution -i fe_agg_vbb.yaml + ${var.uv_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-aggregated-value-based-bidding.execution -o fe_agg_vbb.yaml + ${var.uv_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f fe_agg_vbb.yaml -t ${self.triggers.tag} -t latest + ${var.uv_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-aggregated-value-based-bidding.execution -i fe_agg_vbb.yaml EOT working_dir = self.triggers.working_dir } @@ -415,9 +414,9 @@ resource "null_resource" "compile_feature_engineering_audience_segmentation_pipe # which will upload the pipeline YAML file to the specified Artifact Registry repository. The scheduler function will then schedule the pipeline to run on a regular basis. 
provisioner "local-exec" { command = <<-EOT - ${var.poetry_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-audience-segmentation.execution -o fe_audience_segmentation.yaml - ${var.poetry_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f fe_audience_segmentation.yaml -t ${self.triggers.tag} -t latest - ${var.poetry_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-audience-segmentation.execution -i fe_audience_segmentation.yaml + ${var.uv_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-audience-segmentation.execution -o fe_audience_segmentation.yaml + ${var.uv_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f fe_audience_segmentation.yaml -t ${self.triggers.tag} -t latest + ${var.uv_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-audience-segmentation.execution -i fe_audience_segmentation.yaml EOT working_dir = self.triggers.working_dir } @@ -439,9 +438,9 @@ resource "null_resource" "compile_feature_engineering_purchase_propensity_pipeli # which will upload the pipeline YAML file to the specified Artifact Registry repository. The scheduler function will then schedule the pipeline to run on a regular basis. provisioner "local-exec" { command = <<-EOT - ${var.poetry_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-purchase-propensity.execution -o fe_purchase_propensity.yaml - ${var.poetry_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f fe_purchase_propensity.yaml -t ${self.triggers.tag} -t latest - ${var.poetry_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-purchase-propensity.execution -i fe_purchase_propensity.yaml + ${var.uv_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-purchase-propensity.execution -o fe_purchase_propensity.yaml + ${var.uv_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f fe_purchase_propensity.yaml -t ${self.triggers.tag} -t latest + ${var.uv_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-purchase-propensity.execution -i fe_purchase_propensity.yaml EOT working_dir = self.triggers.working_dir } @@ -463,9 +462,9 @@ resource "null_resource" "compile_feature_engineering_churn_propensity_pipeline" # which will upload the pipeline YAML file to the specified Artifact Registry repository. The scheduler function will then schedule the pipeline to run on a regular basis. 
provisioner "local-exec" { command = <<-EOT - ${var.poetry_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-churn-propensity.execution -o fe_churn_propensity.yaml - ${var.poetry_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f fe_churn_propensity.yaml -t ${self.triggers.tag} -t latest - ${var.poetry_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-churn-propensity.execution -i fe_churn_propensity.yaml + ${var.uv_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-churn-propensity.execution -o fe_churn_propensity.yaml + ${var.uv_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f fe_churn_propensity.yaml -t ${self.triggers.tag} -t latest + ${var.uv_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-churn-propensity.execution -i fe_churn_propensity.yaml EOT working_dir = self.triggers.working_dir } @@ -487,9 +486,9 @@ resource "null_resource" "compile_feature_engineering_customer_lifetime_value_pi # which will upload the pipeline YAML file to the specified Artifact Registry repository. The scheduler function will then schedule the pipeline to run on a regular basis. provisioner "local-exec" { command = <<-EOT - ${var.poetry_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-customer-ltv.execution -o fe_customer_ltv.yaml - ${var.poetry_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f fe_customer_ltv.yaml -t ${self.triggers.tag} -t latest - ${var.poetry_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-customer-ltv.execution -i fe_customer_ltv.yaml + ${var.uv_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-customer-ltv.execution -o fe_customer_ltv.yaml + ${var.uv_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f fe_customer_ltv.yaml -t ${self.triggers.tag} -t latest + ${var.uv_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.feature-creation-customer-ltv.execution -i fe_customer_ltv.yaml EOT working_dir = self.triggers.working_dir } @@ -512,9 +511,9 @@ resource "null_resource" "compile_purchase_propensity_training_pipelines" { # which will upload the pipeline YAML file to the specified Artifact Registry repository. The scheduler function will then schedule the pipeline to run on a regular basis. 
provisioner "local-exec" { command = <<-EOT - ${var.poetry_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.purchase_propensity.training -o purchase_propensity_training.yaml - ${var.poetry_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f purchase_propensity_training.yaml -t ${self.triggers.tag} -t latest - ${var.poetry_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.purchase_propensity.training -i purchase_propensity_training.yaml + ${var.uv_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.purchase_propensity.training -o purchase_propensity_training.yaml + ${var.uv_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f purchase_propensity_training.yaml -t ${self.triggers.tag} -t latest + ${var.uv_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.purchase_propensity.training -i purchase_propensity_training.yaml EOT working_dir = self.triggers.working_dir } @@ -533,9 +532,9 @@ resource "null_resource" "compile_purchase_propensity_prediction_pipelines" { # which will upload the pipeline YAML file to the specified Artifact Registry repository. The scheduler function will then schedule the pipeline to run on a regular basis. provisioner "local-exec" { command = <<-EOT - ${var.poetry_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.purchase_propensity.prediction -o purchase_propensity_prediction.yaml - ${var.poetry_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f purchase_propensity_prediction.yaml -t ${self.triggers.tag} -t latest - ${var.poetry_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.purchase_propensity.prediction -i purchase_propensity_prediction.yaml + ${var.uv_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.purchase_propensity.prediction -o purchase_propensity_prediction.yaml + ${var.uv_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f purchase_propensity_prediction.yaml -t ${self.triggers.tag} -t latest + ${var.uv_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.purchase_propensity.prediction -i purchase_propensity_prediction.yaml EOT working_dir = self.triggers.working_dir } @@ -554,9 +553,9 @@ resource "null_resource" "compile_propensity_clv_training_pipelines" { # which will upload the pipeline YAML file to the specified Artifact Registry repository. The scheduler function will then schedule the pipeline to run on a regular basis. 
provisioner "local-exec" { command = <<-EOT - ${var.poetry_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.propensity_clv.training -o propensity_clv_training.yaml - ${var.poetry_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f propensity_clv_training.yaml -t ${self.triggers.tag} -t latest - ${var.poetry_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.propensity_clv.training -i propensity_clv_training.yaml + ${var.uv_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.propensity_clv.training -o propensity_clv_training.yaml + ${var.uv_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f propensity_clv_training.yaml -t ${self.triggers.tag} -t latest + ${var.uv_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.propensity_clv.training -i propensity_clv_training.yaml EOT working_dir = self.triggers.working_dir } @@ -575,9 +574,9 @@ resource "null_resource" "compile_clv_training_pipelines" { # which will upload the pipeline YAML file to the specified Artifact Registry repository. The scheduler function will then schedule the pipeline to run on a regular basis. provisioner "local-exec" { command = <<-EOT - ${var.poetry_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.clv.training -o clv_training.yaml - ${var.poetry_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f clv_training.yaml -t ${self.triggers.tag} -t latest - ${var.poetry_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.clv.training -i clv_training.yaml + ${var.uv_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.clv.training -o clv_training.yaml + ${var.uv_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f clv_training.yaml -t ${self.triggers.tag} -t latest + ${var.uv_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.clv.training -i clv_training.yaml EOT working_dir = self.triggers.working_dir } @@ -596,9 +595,9 @@ resource "null_resource" "compile_clv_prediction_pipelines" { # which will upload the pipeline YAML file to the specified Artifact Registry repository. The scheduler function will then schedule the pipeline to run on a regular basis. 
provisioner "local-exec" { command = <<-EOT - ${var.poetry_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.clv.prediction -o clv_prediction.yaml - ${var.poetry_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f clv_prediction.yaml -t ${self.triggers.tag} -t latest - ${var.poetry_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.clv.prediction -i clv_prediction.yaml + ${var.uv_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.clv.prediction -o clv_prediction.yaml + ${var.uv_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f clv_prediction.yaml -t ${self.triggers.tag} -t latest + ${var.uv_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.clv.prediction -i clv_prediction.yaml EOT working_dir = self.triggers.working_dir } @@ -617,9 +616,9 @@ resource "null_resource" "compile_segmentation_training_pipelines" { # which will upload the pipeline YAML file to the specified Artifact Registry repository. The scheduler function will then schedule the pipeline to run on a regular basis. provisioner "local-exec" { command = <<-EOT - ${var.poetry_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.segmentation.training -o segmentation_training.yaml - ${var.poetry_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f segmentation_training.yaml -t ${self.triggers.tag} -t latest - ${var.poetry_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.segmentation.training -i segmentation_training.yaml + ${var.uv_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.segmentation.training -o segmentation_training.yaml + ${var.uv_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f segmentation_training.yaml -t ${self.triggers.tag} -t latest + ${var.uv_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.segmentation.training -i segmentation_training.yaml EOT working_dir = self.triggers.working_dir } @@ -638,9 +637,9 @@ resource "null_resource" "compile_segmentation_prediction_pipelines" { # which will upload the pipeline YAML file to the specified Artifact Registry repository. The scheduler function will then schedule the pipeline to run on a regular basis. 
provisioner "local-exec" { command = <<-EOT - ${var.poetry_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.segmentation.prediction -o segmentation_prediction.yaml - ${var.poetry_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f segmentation_prediction.yaml -t ${self.triggers.tag} -t latest - ${var.poetry_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.segmentation.prediction -i segmentation_prediction.yaml + ${var.uv_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.segmentation.prediction -o segmentation_prediction.yaml + ${var.uv_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f segmentation_prediction.yaml -t ${self.triggers.tag} -t latest + ${var.uv_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.segmentation.prediction -i segmentation_prediction.yaml EOT working_dir = self.triggers.working_dir } @@ -659,9 +658,9 @@ resource "null_resource" "compile_auto_segmentation_training_pipelines" { # which will upload the pipeline YAML file to the specified Artifact Registry repository. The scheduler function will then schedule the pipeline to run on a regular basis. provisioner "local-exec" { command = <<-EOT - ${var.poetry_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.auto_segmentation.training -o auto_segmentation_training.yaml - ${var.poetry_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f auto_segmentation_training.yaml -t ${self.triggers.tag} -t latest - ${var.poetry_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.auto_segmentation.training -i auto_segmentation_training.yaml + ${var.uv_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.auto_segmentation.training -o auto_segmentation_training.yaml + ${var.uv_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f auto_segmentation_training.yaml -t ${self.triggers.tag} -t latest + ${var.uv_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.auto_segmentation.training -i auto_segmentation_training.yaml EOT working_dir = self.triggers.working_dir } @@ -680,9 +679,9 @@ resource "null_resource" "compile_auto_segmentation_prediction_pipelines" { # which will upload the pipeline YAML file to the specified Artifact Registry repository. The scheduler function will then schedule the pipeline to run on a regular basis. 
provisioner "local-exec" { command = <<-EOT - ${var.poetry_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.auto_segmentation.prediction -o auto_segmentation_prediction.yaml - ${var.poetry_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f auto_segmentation_prediction.yaml -t ${self.triggers.tag} -t latest - ${var.poetry_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.auto_segmentation.prediction -i auto_segmentation_prediction.yaml + ${var.uv_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.auto_segmentation.prediction -o auto_segmentation_prediction.yaml + ${var.uv_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f auto_segmentation_prediction.yaml -t ${self.triggers.tag} -t latest + ${var.uv_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.auto_segmentation.prediction -i auto_segmentation_prediction.yaml EOT working_dir = self.triggers.working_dir } @@ -701,9 +700,9 @@ resource "null_resource" "compile_value_based_bidding_training_pipelines" { # which will upload the pipeline YAML file to the specified Artifact Registry repository. The scheduler function will then schedule the pipeline to run on a regular basis. provisioner "local-exec" { command = <<-EOT - ${var.poetry_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.value_based_bidding.training -o vbb_training.yaml - ${var.poetry_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f vbb_training.yaml -t ${self.triggers.tag} -t latest - ${var.poetry_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.value_based_bidding.training -i vbb_training.yaml + ${var.uv_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.value_based_bidding.training -o vbb_training.yaml + ${var.uv_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f vbb_training.yaml -t ${self.triggers.tag} -t latest + ${var.uv_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.value_based_bidding.training -i vbb_training.yaml EOT working_dir = self.triggers.working_dir } @@ -722,9 +721,9 @@ resource "null_resource" "compile_value_based_bidding_explanation_pipelines" { # which will upload the pipeline YAML file to the specified Artifact Registry repository. The scheduler function will then schedule the pipeline to run on a regular basis. 
provisioner "local-exec" { command = <<-EOT - ${var.poetry_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.value_based_bidding.explanation -o vbb_explanation.yaml - ${var.poetry_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f vbb_explanation.yaml -t ${self.triggers.tag} -t latest - ${var.poetry_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.value_based_bidding.explanation -i vbb_explanation.yaml + ${var.uv_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.value_based_bidding.explanation -o vbb_explanation.yaml + ${var.uv_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f vbb_explanation.yaml -t ${self.triggers.tag} -t latest + ${var.uv_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.value_based_bidding.explanation -i vbb_explanation.yaml EOT working_dir = self.triggers.working_dir } @@ -743,9 +742,9 @@ resource "null_resource" "compile_churn_propensity_training_pipelines" { # which will upload the pipeline YAML file to the specified Artifact Registry repository. The scheduler function will then schedule the pipeline to run on a regular basis. provisioner "local-exec" { command = <<-EOT - ${var.poetry_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.churn_propensity.training -o churn_propensity_training.yaml - ${var.poetry_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f churn_propensity_training.yaml -t ${self.triggers.tag} -t latest - ${var.poetry_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.churn_propensity.training -i churn_propensity_training.yaml + ${var.uv_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.churn_propensity.training -o churn_propensity_training.yaml + ${var.uv_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f churn_propensity_training.yaml -t ${self.triggers.tag} -t latest + ${var.uv_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.churn_propensity.training -i churn_propensity_training.yaml EOT working_dir = self.triggers.working_dir } @@ -764,9 +763,9 @@ resource "null_resource" "compile_churn_propensity_prediction_pipelines" { # which will upload the pipeline YAML file to the specified Artifact Registry repository. The scheduler function will then schedule the pipeline to run on a regular basis. 
provisioner "local-exec" { command = <<-EOT - ${var.poetry_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.churn_propensity.prediction -o churn_propensity_prediction.yaml - ${var.poetry_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f churn_propensity_prediction.yaml -t ${self.triggers.tag} -t latest - ${var.poetry_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.churn_propensity.prediction -i churn_propensity_prediction.yaml + ${var.uv_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.churn_propensity.prediction -o churn_propensity_prediction.yaml + ${var.uv_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f churn_propensity_prediction.yaml -t ${self.triggers.tag} -t latest + ${var.uv_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.churn_propensity.prediction -i churn_propensity_prediction.yaml EOT working_dir = self.triggers.working_dir } @@ -785,9 +784,9 @@ resource "null_resource" "compile_reporting_preparation_aggregate_predictions_pi # which will upload the pipeline YAML file to the specified Artifact Registry repository. The scheduler function will then schedule the pipeline to run on a regular basis. provisioner "local-exec" { command = <<-EOT - ${var.poetry_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.reporting_preparation.execution -o reporting_preparation.yaml - ${var.poetry_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f reporting_preparation.yaml -t ${self.triggers.tag} -t latest - ${var.poetry_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.reporting_preparation.execution -i reporting_preparation.yaml + ${var.uv_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.reporting_preparation.execution -o reporting_preparation.yaml + ${var.uv_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f reporting_preparation.yaml -t ${self.triggers.tag} -t latest + ${var.uv_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.reporting_preparation.execution -i reporting_preparation.yaml EOT working_dir = self.triggers.working_dir } @@ -806,9 +805,9 @@ resource "null_resource" "compile_gemini_insights_pipelines" { # which will upload the pipeline YAML file to the specified Artifact Registry repository. The scheduler function will then schedule the pipeline to run on a regular basis. 
provisioner "local-exec" { command = <<-EOT - ${var.poetry_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.gemini_insights.execution -o gemini_insights.yaml - ${var.poetry_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f gemini_insights.yaml -t ${self.triggers.tag} -t latest - ${var.poetry_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.gemini_insights.execution -i gemini_insights.yaml + ${var.uv_run_alias} python -m pipelines.compiler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.gemini_insights.execution -o gemini_insights.yaml + ${var.uv_run_alias} python -m pipelines.uploader -c ${local.config_file_path_relative_python_run_dir} -f gemini_insights.yaml -t ${self.triggers.tag} -t latest + ${var.uv_run_alias} python -m pipelines.scheduler -c ${local.config_file_path_relative_python_run_dir} -p vertex_ai.pipelines.gemini_insights.execution -i gemini_insights.yaml EOT working_dir = self.triggers.working_dir } diff --git a/infrastructure/terraform/modules/pipelines/variables.tf b/infrastructure/terraform/modules/pipelines/variables.tf index 3afaaed3..3c618cac 100644 --- a/infrastructure/terraform/modules/pipelines/variables.tf +++ b/infrastructure/terraform/modules/pipelines/variables.tf @@ -17,13 +17,8 @@ variable "config_file_path" { description = "pipelines config file" } -variable "poetry_run_alias" { - description = "alias for poetry run command on the current system" - type = string -} - -variable "poetry_installed" { - description = "Construct to specify dependency to poetry installed" +variable "uv_run_alias" { + description = "alias for uv run command on the current system" type = string } diff --git a/infrastructure/terraform/variables.tf b/infrastructure/terraform/variables.tf index e479a7d2..0d4e0d09 100644 --- a/infrastructure/terraform/variables.tf +++ b/infrastructure/terraform/variables.tf @@ -231,6 +231,12 @@ variable "poetry_cmd" { default = "poetry" } +variable "uv_cmd" { + description = "alias for uv run command on the current system" + type = string + default = "uv" +} + variable "feature_store_project_id" { type = string description = "Project ID where feature store resources are created" From fa0c8ac9f6cd0269b6485df2796591336cdf54f3 Mon Sep 17 00:00:00 2001 From: Charlie Wang Date: Fri, 15 Nov 2024 16:07:44 +0000 Subject: [PATCH 3/7] switch to uv --- .../terraform/modules/feature-store/bigquery-procedures.tf | 2 +- infrastructure/terraform/modules/feature-store/main.tf | 1 - infrastructure/terraform/modules/feature-store/variables.tf | 6 +++--- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/infrastructure/terraform/modules/feature-store/bigquery-procedures.tf b/infrastructure/terraform/modules/feature-store/bigquery-procedures.tf index 96412569..ff043b77 100644 --- a/infrastructure/terraform/modules/feature-store/bigquery-procedures.tf +++ b/infrastructure/terraform/modules/feature-store/bigquery-procedures.tf @@ -1471,7 +1471,7 @@ resource "null_resource" "create_gemini_model" { provisioner "local-exec" { command = <<-EOT - ${local.poetry_run_alias} bq query --use_legacy_sql=false --max_rows=100 --maximum_bytes_billed=10000000 < ${data.local_file.create_gemini_model_file.filename} + ${var.uv_run_alias} bq query --use_legacy_sql=false --max_rows=100 --maximum_bytes_billed=10000000 < ${data.local_file.create_gemini_model_file.filename} EOT } 
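With `null_resource.poetry_install` removed, nothing in Terraform pre-builds the Python environment any more; `uv run` is expected to resolve and sync it on first invocation. A minimal sketch of the equivalent manual bootstrap, assuming the dependency and script declarations end up under the new `[project]` table (the tfvars path below is illustrative, not part of this patch):

```sh
# optionally pre-install an interpreter that satisfies requires-python = ">=3.8,<3.11"
uv python install 3.10

# `uv run` creates or updates the project environment before executing the command
uv run inv apply-config-parameters-to-all-queries --env-name=config

# if uv is not on the PATH seen by Terraform, override the new variable, e.g. in terraform.tfvars:
#   uv_cmd = "/home/<user>/.local/bin/uv"    # illustrative path
```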
diff --git a/infrastructure/terraform/modules/feature-store/main.tf b/infrastructure/terraform/modules/feature-store/main.tf index 25b6830e..78c0eb83 100644 --- a/infrastructure/terraform/modules/feature-store/main.tf +++ b/infrastructure/terraform/modules/feature-store/main.tf @@ -21,7 +21,6 @@ locals { config_bigquery = local.config_vars.bigquery feature_store_project_id = local.config_vars.bigquery.dataset.feature_store.project_id sql_dir = var.sql_dir_input - poetry_run_alias = "${var.poetry_cmd} run" builder_repository_id = "marketing-analytics-jumpstart-base-repo" purchase_propensity_project_id = null_resource.check_bigquery_api.id != "" ? local.config_vars.bigquery.dataset.purchase_propensity.project_id : local.feature_store_project_id churn_propensity_project_id = null_resource.check_bigquery_api.id != "" ? local.config_vars.bigquery.dataset.churn_propensity.project_id : local.feature_store_project_id diff --git a/infrastructure/terraform/modules/feature-store/variables.tf b/infrastructure/terraform/modules/feature-store/variables.tf index d20b92b7..a9bc07a5 100644 --- a/infrastructure/terraform/modules/feature-store/variables.tf +++ b/infrastructure/terraform/modules/feature-store/variables.tf @@ -37,8 +37,8 @@ variable "sql_dir_input" { description = "SQL queries directory" } -variable "poetry_cmd" { - description = "alias for poetry command on the current system" +variable "uv_run_alias" { + description = "alias for uv run command on the current system" type = string - default = "poetry" + default = "uv run" } From 6c3700961c962fe9f1d6f4ed95a730ae0d45a8fb Mon Sep 17 00:00:00 2001 From: Charlie Wang Date: Fri, 15 Nov 2024 16:18:33 +0000 Subject: [PATCH 4/7] remove poetry usage from terraform --- DEVELOPMENT.md | 2 +- infrastructure/cloudshell/tutorial.md | 40 +++--------------- infrastructure/terraform/README.md | 58 ++++++--------------------- infrastructure/terraform/variables.tf | 6 --- 4 files changed, 19 insertions(+), 87 deletions(-) diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md index 19c0800f..e72dee1b 100644 --- a/DEVELOPMENT.md +++ b/DEVELOPMENT.md @@ -3,7 +3,7 @@ Marketing Analytics Jumpstart consists of an easy, extensible and automated impl ## Developer pre-requisites Use Visual Studio Code to develop the solution. Install Gemini Code Assistant, Docker, GitHub, Hashicorp, Terraform, Jinja extensions. -You should have Python 3, Poetry, Terraform, Git and Docker installed in your developer terminal environment. +You should have Python 3, uv, Terraform, Git and Docker installed in your developer terminal environment. ## Preparing development environment diff --git a/infrastructure/cloudshell/tutorial.md b/infrastructure/cloudshell/tutorial.md index e0293e30..d4d2e519 100644 --- a/infrastructure/cloudshell/tutorial.md +++ b/infrastructure/cloudshell/tutorial.md @@ -12,45 +12,17 @@ export PROJECT_ID="" gcloud config set project $PROJECT_ID ``` -## Install or update Python3 -Install a compatible version of Python 3.8-3.10 and set the CLOUDSDK_PYTHON environment variable to point to it. -```sh -sudo apt-get install python3.10 -CLOUDSDK_PYTHON=python3.10 -``` +## Install update uv for running python scripts +Install [uv](https://docs.astral.sh/uv/) that manages the python version and dependecies for the solution. -## Install Python's Poetry and set Poetry to use Python 3.10 version -[Poetry](https://python-poetry.org/docs/) is a Python's tool for dependency management and packaging. 
-If you are installing on in Cloud Shell use the following commands: -```sh -pipx install poetry -``` -If you don't have pipx installed - follow the [Pipx installation guide](https://pipx.pypa.io/stable/installation/) -```sh -sudo apt update -sudo apt install pipx -pipx ensurepath -pipx install poetry -``` -Verify that `poetry` is on your $PATH variable: -```sh -poetry --version -``` -If it fails - add it to your $PATH variable: ```sh +curl -LsSf https://astral.sh/uv/install.sh | sh export PATH="$HOME/.local/bin:$PATH" ``` -Verify poetry is properly installed, run: -```sh -poetry --version -``` -Set poetry to use your latest python3 -```sh -poetry env use python3 -``` -Install python dependencies, run: + +Check uv installation ```sh -poetry install +uv --version ``` ## Authenticate with additional OAuth 2.0 scopes diff --git a/infrastructure/terraform/README.md b/infrastructure/terraform/README.md index e0ba4aa9..dd026e8b 100644 --- a/infrastructure/terraform/README.md +++ b/infrastructure/terraform/README.md @@ -43,52 +43,18 @@ Also, this method allows you to extend this solution and develop it to satisfy y gcloud config set project $PROJECT_ID ``` -1. Install or update Python3 - Install a compatible version of Python 3.8-3.10 and set the CLOUDSDK_PYTHON environment variable to point to it. - - ```bash - sudo apt-get install python3.10 - CLOUDSDK_PYTHON=python3.10 - ``` - If you are installing on a Mac: - ```shell - brew install python@3.10 - CLOUDSDK_PYTHON=python3.10 - ``` - -1. Install Python's Poetry and set Poetry to use Python 3.10 version - - [Poetry](https://python-poetry.org/docs/) is a Python's tool for dependency management and packaging. - - If you are installing on in Cloud Shell use the following commands: - ```shell - pipx install poetry - ``` - If you don't have pipx installed - follow the [Pipx installation guide](https://pipx.pypa.io/stable/installation/) - ```shell - sudo apt update - sudo apt install pipx - pipx ensurepath - pipx install poetry - ``` - Verify that `poetry` is on your $PATH variable: - ```shell - poetry --version - ``` - If it fails - add it to your $PATH variable: - ```shell - export PATH="$HOME/.local/bin:$PATH" - ``` - If you are installing on a Mac: - ```shell - brew install poetry - ``` - Set poetry to use your latest python3 - ```shell - SOURCE_ROOT=${HOME}/${REPO} - cd ${SOURCE_ROOT} - poetry env use python3 - ``` +1. ## Install update uv for running python scripts +Install [uv](https://docs.astral.sh/uv/) that manages the python version and dependecies for the solution. + +```sh +curl -LsSf https://astral.sh/uv/install.sh | sh +export PATH="$HOME/.local/bin:$PATH" +``` + +Check uv installation +```sh +uv --version +``` 1. 
Authenticate with additional OAuth 2.0 scopes needed to use the Google Analytics Admin API: ```shell diff --git a/infrastructure/terraform/variables.tf b/infrastructure/terraform/variables.tf index 0d4e0d09..cd1e84ae 100644 --- a/infrastructure/terraform/variables.tf +++ b/infrastructure/terraform/variables.tf @@ -225,12 +225,6 @@ variable "feature_store_config_env" { default = "config" } -variable "poetry_cmd" { - description = "alias for poetry run command on the current system" - type = string - default = "poetry" -} - variable "uv_cmd" { description = "alias for uv run command on the current system" type = string From 8feaf07c76cafd1bcec855a76d88b01550c28b40 Mon Sep 17 00:00:00 2001 From: Charlie Wang Date: Fri, 15 Nov 2024 16:20:23 +0000 Subject: [PATCH 5/7] format --- infrastructure/terraform/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/infrastructure/terraform/README.md b/infrastructure/terraform/README.md index dd026e8b..bea2a3dd 100644 --- a/infrastructure/terraform/README.md +++ b/infrastructure/terraform/README.md @@ -43,7 +43,7 @@ Also, this method allows you to extend this solution and develop it to satisfy y gcloud config set project $PROJECT_ID ``` -1. ## Install update uv for running python scripts +1. Install update uv for running python scripts Install [uv](https://docs.astral.sh/uv/) that manages the python version and dependecies for the solution. ```sh From 9ace77482f77b5cb40e005a336f6e36528d1eac1 Mon Sep 17 00:00:00 2001 From: Charlie Wang Date: Fri, 15 Nov 2024 16:25:16 +0000 Subject: [PATCH 6/7] remove poetry --- infrastructure/terraform/README.md | 18 +++++++++--------- scripts/generate-tf-backend.sh | 10 +++------- 2 files changed, 12 insertions(+), 16 deletions(-) diff --git a/infrastructure/terraform/README.md b/infrastructure/terraform/README.md index bea2a3dd..5d49e845 100644 --- a/infrastructure/terraform/README.md +++ b/infrastructure/terraform/README.md @@ -44,17 +44,17 @@ Also, this method allows you to extend this solution and develop it to satisfy y ``` 1. Install update uv for running python scripts -Install [uv](https://docs.astral.sh/uv/) that manages the python version and dependecies for the solution. + Install [uv](https://docs.astral.sh/uv/) that manages the python version and dependecies for the solution. -```sh -curl -LsSf https://astral.sh/uv/install.sh | sh -export PATH="$HOME/.local/bin:$PATH" -``` + ```sh + curl -LsSf https://astral.sh/uv/install.sh | sh + export PATH="$HOME/.local/bin:$PATH" + ``` -Check uv installation -```sh -uv --version -``` + Check uv installation: + ```sh + uv --version + ``` 1. Authenticate with additional OAuth 2.0 scopes needed to use the Google Analytics Admin API: ```shell diff --git a/scripts/generate-tf-backend.sh b/scripts/generate-tf-backend.sh index 5a1178fc..2611dc0e 100755 --- a/scripts/generate-tf-backend.sh +++ b/scripts/generate-tf-backend.sh @@ -19,15 +19,15 @@ set -o nounset . 
scripts/common.sh -section_open "Check if the necessary dependencies are available: gcloud, gsutil, terraform, poetry" +section_open "Check if the necessary dependencies are available: gcloud, gsutil, terraform, uv" check_exec_dependency "gcloud" check_exec_version "gcloud" check_exec_dependency "gsutil" check_exec_version "gsutil" check_exec_dependency "terraform" check_exec_version "terraform" - check_exec_dependency "poetry" - check_exec_version "poetry" + check_exec_dependency "uv" + check_exec_version "uv" section_close section_open "Check if the necessary variables are set: PROJECT_ID" @@ -51,10 +51,6 @@ section_open "Enable all the required APIs" enable_all_apis section_close -section_open "Install poetry libraries in the virtual environment for Terraform" - poetry install -section_close - section_open "Creating a new Google Cloud Storage bucket to store the Terraform state in ${TF_STATE_PROJECT} project, bucket: ${TF_STATE_BUCKET}" if gsutil ls -b gs://"${TF_STATE_BUCKET}" >/dev/null 2>&1; then printf "The ${TF_STATE_BUCKET} Google Cloud Storage bucket already exists. \n" From cdde58c5ccb53c7dd7c1ff4bcd8335ed8f0b4b17 Mon Sep 17 00:00:00 2001 From: Charlie Wang <2144018+kingman@users.noreply.github.com> Date: Fri, 15 Nov 2024 18:54:25 +0100 Subject: [PATCH 7/7] Add files via upload --- notebooks/quick_installation.ipynb | 18 ++++-------------- 1 file changed, 4 insertions(+), 14 deletions(-) diff --git a/notebooks/quick_installation.ipynb b/notebooks/quick_installation.ipynb index d7c31085..a2d1be9f 100644 --- a/notebooks/quick_installation.ipynb +++ b/notebooks/quick_installation.ipynb @@ -6,8 +6,7 @@ "provenance": [], "collapsed_sections": [ "DDGHqJNhq5Oi", - "mOISt4ShqIbc", - "US36yJ8lmqnP" + "mOISt4ShqIbc" ] }, "kernelspec": { @@ -305,19 +304,10 @@ "# @title\n", "%%capture\n", "%%bash\n", - "# prompt: install packages\n", - "apt-get install python3.10\n", - "CLOUDSDK_PYTHON=python3.10\n", - "\n", - "#pip3 install poetry\n", - "sudo apt update\n", - "sudo apt install pipx\n", - "pipx ensurepath\n", - "pipx install poetry\n", - "\n", + "# prompt: install uv\n", + "curl -LsSf https://astral.sh/uv/install.sh | sh\n", "export PATH=\"/root/.local/bin:$PATH\"\n", - "poetry env use python3.10\n", - "poetry --version\n", + "uv --version\n", "\n", "git clone --depth=1 https://github.com/tfutils/tfenv.git ~/.tfenv\n", "echo 'export PATH=\"~/.tfenv/bin:$PATH\"' >> ~/.bash_profile\n",
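End to end, the series leaves the local bootstrap looking roughly like the sketch below. The commands are drawn from the updated docs, scripts, and Terraform above; `<PROPERTY_ID>` and `<STREAM_ID>` are placeholders rather than values from any patch:

```sh
# install uv and expose it to the current shell (as in the updated tutorial.md / README.md)
curl -LsSf https://astral.sh/uv/install.sh | sh
export PATH="$HOME/.local/bin:$PATH"
uv --version

# the backend bootstrap script now checks for uv instead of poetry and no longer runs `poetry install`
./scripts/generate-tf-backend.sh

# Terraform shells out through "${var.uv_cmd} run", e.g. the GA4 property check:
uv run ga4-setup --ga4_resource=check_property_type --ga4_property_id=<PROPERTY_ID> --ga4_stream_id=<STREAM_ID>
```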