diff --git a/dataeng/jobs/analytics/DBTRun.groovy b/dataeng/jobs/analytics/DBTRun.groovy
index e30a6b88f..bf554a920 100644
--- a/dataeng/jobs/analytics/DBTRun.groovy
+++ b/dataeng/jobs/analytics/DBTRun.groovy
@@ -109,6 +109,7 @@ class DBTRun{
             wrappers common_wrappers(allVars)
             publishers common_publishers(allVars)
             steps {
+                shell(dslFactory.readFileFromWorkspace('dataeng/resources/secrets-manager-setup.sh'))
                 shell(dslFactory.readFileFromWorkspace('dataeng/resources/dbt-run.sh'))
             }
         }
diff --git a/dataeng/jobs/analytics/DBTSourceFreshness.groovy b/dataeng/jobs/analytics/DBTSourceFreshness.groovy
index 721ab1fcb..fd379e59c 100644
--- a/dataeng/jobs/analytics/DBTSourceFreshness.groovy
+++ b/dataeng/jobs/analytics/DBTSourceFreshness.groovy
@@ -44,6 +44,7 @@ class DBTSourceFreshness{
             }
             publishers common_publishers(allVars)
             steps {
+                shell(dslFactory.readFileFromWorkspace('dataeng/resources/secrets-manager-setup.sh'))
                 shell(dslFactory.readFileFromWorkspace('dataeng/resources/dbtsource-freshness.sh'))
             }
         }
diff --git a/dataeng/jobs/analytics/SnowflakeSchemaBuilder.groovy b/dataeng/jobs/analytics/SnowflakeSchemaBuilder.groovy
index 6e308d497..8c49e40b4 100644
--- a/dataeng/jobs/analytics/SnowflakeSchemaBuilder.groovy
+++ b/dataeng/jobs/analytics/SnowflakeSchemaBuilder.groovy
@@ -45,6 +45,7 @@
 
             steps {
                 // This will create python 3.8 venv inside shell script instead of using shiningpanda
+                shell(dslFactory.readFileFromWorkspace('dataeng/resources/secrets-manager-setup.sh'))
                 shell(dslFactory.readFileFromWorkspace('dataeng/resources/snowflake-schema-builder.sh'))
             }
         }
diff --git a/dataeng/jobs/analytics/WarehouseTransforms.groovy b/dataeng/jobs/analytics/WarehouseTransforms.groovy
index cad6f7768..47c4c497d 100644
--- a/dataeng/jobs/analytics/WarehouseTransforms.groovy
+++ b/dataeng/jobs/analytics/WarehouseTransforms.groovy
@@ -72,6 +72,7 @@ class WarehouseTransforms{
                 }
             }
             steps {
+                shell(dslFactory.readFileFromWorkspace('dataeng/resources/secrets-manager-setup.sh'))
                 shell(dslFactory.readFileFromWorkspace('dataeng/resources/opsgenie-enable-heartbeat.sh'))
                 shell(dslFactory.readFileFromWorkspace('dataeng/resources/warehouse-transforms.sh'))
             }
diff --git a/dataeng/jobs/analytics/WarehouseTransformsCI.groovy b/dataeng/jobs/analytics/WarehouseTransformsCI.groovy
index 02cbab598..abd8552b6 100644
--- a/dataeng/jobs/analytics/WarehouseTransformsCI.groovy
+++ b/dataeng/jobs/analytics/WarehouseTransformsCI.groovy
@@ -109,6 +109,7 @@ class WarehouseTransformsCI{
             }
             wrappers common_wrappers(allVars)
             steps {
+                shell(dslFactory.readFileFromWorkspace('dataeng/resources/secrets-manager-setup.sh'))
                 shell(dslFactory.readFileFromWorkspace('dataeng/resources/warehouse-transforms-ci.sh'))
             }
         }
diff --git a/dataeng/jobs/analytics/WarehouseTransformsCIManual.groovy b/dataeng/jobs/analytics/WarehouseTransformsCIManual.groovy
index b32dd9e4e..0a80f3e9c 100644
--- a/dataeng/jobs/analytics/WarehouseTransformsCIManual.groovy
+++ b/dataeng/jobs/analytics/WarehouseTransformsCIManual.groovy
@@ -89,8 +89,9 @@ class WarehouseTransformsCIManual{
             }
             wrappers common_wrappers(allVars)
             steps {
+                shell(dslFactory.readFileFromWorkspace('dataeng/resources/secrets-manager-setup.sh'))
                 shell(dslFactory.readFileFromWorkspace('dataeng/resources/warehouse-transforms-ci-manual.sh'))
             }
         }
     }
-}
\ No newline at end of file
+}
diff --git a/dataeng/resources/dbt-run.sh b/dataeng/resources/dbt-run.sh
index bd0e59b26..c95130c33 100644
--- a/dataeng/resources/dbt-run.sh
+++ b/dataeng/resources/dbt-run.sh
@@ -20,8 +20,15 @@ else
     IS_SCHEMA_BUILDER_PR="false"
 fi
 
+source $WORKSPACE/secrets-manager.sh
+# Fetch the secrets from AWS
+set +x
+get_secret_value warehouse-transforms/profiles/profiles DBT_PASSWORD
+set -x
+export DBT_PASSWORD
 
-DBT_PROFILE_ARGS="--profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ --profile $DBT_PROFILE --target $DBT_TARGET"
+
+DBT_PROFILE_ARGS="--profiles-dir $WORKSPACE/warehouse-transforms/profiles/ --profile $DBT_PROFILE --target $DBT_TARGET"
 
 if ! [ -z "$DBT_MODEL_INCLUDE" ]
 then
diff --git a/dataeng/resources/dbtsource-freshness.sh b/dataeng/resources/dbtsource-freshness.sh
index fb07dbcd2..4e1ed189e 100644
--- a/dataeng/resources/dbtsource-freshness.sh
+++ b/dataeng/resources/dbtsource-freshness.sh
@@ -14,11 +14,18 @@ pip install -r requirements.txt
 
 cd $WORKSPACE/warehouse-transforms/projects/reporting
 
-dbt clean --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ --profile $DBT_PROFILE --target $DBT_TARGET
-dbt deps --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ --profile $DBT_PROFILE --target $DBT_TARGET
+source $WORKSPACE/secrets-manager.sh
+# Fetch the secrets from AWS
+set +x
+get_secret_value warehouse-transforms/profiles/profiles DBT_PASSWORD
+set -x
+export DBT_PASSWORD
+
+dbt clean --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ --profile $DBT_PROFILE --target $DBT_TARGET
+dbt deps --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ --profile $DBT_PROFILE --target $DBT_TARGET
 
 # For dbt v0.21.0 or above, dbt source snapshot-freshness has been renamed to dbt source freshness.
 # Its node selection logic is now consistent with other tasks. In order to check freshness for a specific source,
 # use --select flag and you must prefix it with source: e.g. dbt source freshness --select source:snowplow
-dbt source freshness --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ --profile $DBT_PROFILE --target $DBT_TARGET
+dbt source freshness --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ --profile $DBT_PROFILE --target $DBT_TARGET
 
diff --git a/dataeng/resources/snowflake-schema-builder.sh b/dataeng/resources/snowflake-schema-builder.sh
index 2dfa003f3..c0f83c026 100644
--- a/dataeng/resources/snowflake-schema-builder.sh
+++ b/dataeng/resources/snowflake-schema-builder.sh
@@ -10,12 +10,19 @@ source "${PYTHON38_VENV}/bin/activate"
 cd $WORKSPACE/warehouse-transforms
 pip install --upgrade dbt-schema-builder
 
+source $WORKSPACE/secrets-manager.sh
+# Fetch the secrets from AWS
+set +x
+get_secret_value warehouse-transforms/profiles/profiles DBT_PASSWORD
+set -x
+export DBT_PASSWORD
+
 cd $WORKSPACE/warehouse-transforms/projects/$SOURCE_PROJECT
-dbt clean --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ --profile $DBT_PROFILE --target $DBT_TARGET
+dbt clean --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ --profile $DBT_PROFILE --target $DBT_TARGET
 
 # DESTINATION_PROJECT is always relative to SOURCE_PROJECT
 cd $DESTINATION_PROJECT
-dbt clean --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ --profile $DBT_PROFILE --target $DBT_TARGET
+dbt clean --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ --profile $DBT_PROFILE --target $DBT_TARGET
 
 cd $WORKSPACE/warehouse-transforms
 
@@ -26,7 +33,7 @@ git checkout -b "$branchname"
 
 # Run the dbt script to update schemas and sql, from the source project directory (necessary for dbt to run)
 cd $WORKSPACE/warehouse-transforms/projects/$SOURCE_PROJECT
-dbt_schema_builder build --destination-project $DESTINATION_PROJECT --profile $DBT_PROFILE --target $DBT_TARGET --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/
+dbt_schema_builder build --destination-project $DESTINATION_PROJECT --profile $DBT_PROFILE --target $DBT_TARGET --profiles-dir $WORKSPACE/warehouse-transforms/profiles/
 
 # Check if any files are added, deleted, or changed. If so, commit them and create a PR.
 if [[ -z $(git status -s) ]]
diff --git a/dataeng/resources/warehouse-transforms-ci-dbt.sh b/dataeng/resources/warehouse-transforms-ci-dbt.sh
index df2008247..27524f9bb 100644
--- a/dataeng/resources/warehouse-transforms-ci-dbt.sh
+++ b/dataeng/resources/warehouse-transforms-ci-dbt.sh
@@ -6,19 +6,26 @@ set -ex
 cd $WORKSPACE/warehouse-transforms/projects/$DBT_PROJECT_PATH
 
-dbt clean --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ --profile $DBT_PROFILE --target $DBT_TARGET
-dbt deps --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ --profile $DBT_PROFILE --target $DBT_TARGET
-dbt seed --full-refresh --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ --profile $DBT_PROFILE --target $DBT_TARGET
+source $WORKSPACE/secrets-manager.sh
+# Fetch the secrets from AWS
+set +x
+get_secret_value warehouse-transforms/profiles/profiles DBT_PASSWORD
+set -x
+export DBT_PASSWORD
+
+dbt clean --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ --profile $DBT_PROFILE --target $DBT_TARGET
+dbt deps --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ --profile $DBT_PROFILE --target $DBT_TARGET
+dbt seed --full-refresh --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ --profile $DBT_PROFILE --target $DBT_TARGET
 
 if [ "$WITH_SNAPSHOT" == "true" ]
 then
-    dbt snapshot --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ --profile $DBT_PROFILE --target $DBT_TARGET
+    dbt snapshot --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ --profile $DBT_PROFILE --target $DBT_TARGET
 fi
 
-dbt run $DBT_RUN_OPTIONS $DBT_RUN_EXCLUDE --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ --profile $DBT_PROFILE --target $DBT_TARGET
+dbt run $DBT_RUN_OPTIONS $DBT_RUN_EXCLUDE --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ --profile $DBT_PROFILE --target $DBT_TARGET
 
 # Jenkins jobs are marked as failed when any of command fails so writing the following test command with && true so it will give a chance to
 # evaluate its success or failure base on success or failure we can do further re-tries on failed tests
-dbt test $DBT_TEST_OPTIONS $DBT_TEST_EXCLUDE --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ --profile $DBT_PROFILE --target $DBT_TARGET && true
+dbt test $DBT_TEST_OPTIONS $DBT_TEST_EXCLUDE --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ --profile $DBT_PROFILE --target $DBT_TARGET && true
 if [ $? -eq 1 ]
 then
     if [ "$WITH_RETRY" == "true" ]
@@ -26,12 +33,12 @@ then
         pip install -r ../../tools/ci_scripts/requirements.txt
         if [ "$DBT_TEST_EXCLUDE" == "" ]
         then
-            python ../../tools/ci_scripts/rerun_flaky_tests.py --project-path . --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ --profile $DBT_PROFILE \
+            python ../../tools/ci_scripts/rerun_flaky_tests.py --project-path . --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ --profile $DBT_PROFILE \
             --target $DBT_TARGET --count $NO_OF_TRIES
         else
            PREFIX="--exclude "
            TEST_EXCLUSIONS=$(echo "$DBT_TEST_EXCLUDE" | sed -e "s/^$PREFIX//")
-            python ../../tools/ci_scripts/rerun_flaky_tests.py --project-path . --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ --profile $DBT_PROFILE \
+            python ../../tools/ci_scripts/rerun_flaky_tests.py --project-path . --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ --profile $DBT_PROFILE \
             --target $DBT_TARGET --exclusions $TEST_EXCLUSIONS --count $NO_OF_TRIES
         fi
     else
diff --git a/dataeng/resources/warehouse-transforms-ci-manual.sh b/dataeng/resources/warehouse-transforms-ci-manual.sh
index 469afe7d3..8c26cc2a4 100644
--- a/dataeng/resources/warehouse-transforms-ci-manual.sh
+++ b/dataeng/resources/warehouse-transforms-ci-manual.sh
@@ -45,13 +45,20 @@ python create_ci_schema.py --key_path $KEY_PATH --passphrase_path $PASSPHRASE_PA
 
 cd $WORKSPACE/warehouse-transforms/projects/$DBT_PROJECT_PATH
 
-dbt clean --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ --profile $DBT_PROFILE --target $DBT_TARGET
-dbt deps --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ --profile $DBT_PROFILE --target $DBT_TARGET
-dbt seed --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ --profile $DBT_PROFILE --target $DBT_TARGET
+source $WORKSPACE/secrets-manager.sh
+# Fetch the secrets from AWS
+set +x
+get_secret_value warehouse-transforms/profiles/profiles DBT_PASSWORD
+set -x
+export DBT_PASSWORD
+
+dbt clean --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ --profile $DBT_PROFILE --target $DBT_TARGET
+dbt deps --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ --profile $DBT_PROFILE --target $DBT_TARGET
+dbt seed --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ --profile $DBT_PROFILE --target $DBT_TARGET
 
 if [[ "$RUN_TESTS_ONLY" != "true" ]]
 then
-    dbt run $DBT_RUN_OPTIONS $DBT_RUN_EXCLUDE --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ --profile $DBT_PROFILE --target $DBT_TARGET
+    dbt run $DBT_RUN_OPTIONS $DBT_RUN_EXCLUDE --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ --profile $DBT_PROFILE --target $DBT_TARGET
 fi
 
-dbt test $DBT_TEST_OPTIONS $DBT_TEST_EXCLUDE --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ --profile $DBT_PROFILE --target $DBT_TARGET
\ No newline at end of file
+dbt test $DBT_TEST_OPTIONS $DBT_TEST_EXCLUDE --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ --profile $DBT_PROFILE --target $DBT_TARGET
diff --git a/dataeng/resources/warehouse-transforms.sh b/dataeng/resources/warehouse-transforms.sh
index bcd53ece6..b9e90ad15 100644
--- a/dataeng/resources/warehouse-transforms.sh
+++ b/dataeng/resources/warehouse-transforms.sh
@@ -13,6 +13,13 @@ pip install -r requirements.txt
 
 cd $WORKSPACE/warehouse-transforms/projects/$DBT_PROJECT
 
+source $WORKSPACE/secrets-manager.sh
+# Fetch the secrets from AWS
+set +x
+get_secret_value warehouse-transforms/profiles/profiles DBT_PASSWORD
+set -x
+export DBT_PASSWORD
+
 # Fails the job if a dbt command fails and uploads the dbt artifacts to Snowflake if the job is configured for it
 # First argument is the dbt operation name, second is the result code from the dbt command
 function postCommandChecks {
@@ -21,7 +28,7 @@ function postCommandChecks {
     if [ "$PUSH_ARTIFACTS_TO_SNOWFLAKE" = 'true' ]
     then
         # Errors from this operation are eaten as they are just telemetry data and not worth failing jobs over
-        dbt run-operation upload_dbt_run_artifacts --args '{operation: '$1'}' --profile $DBT_PROFILE --target $DBT_TARGET --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ || true
+        dbt run-operation upload_dbt_run_artifacts --args '{operation: '$1'}' --profile $DBT_PROFILE --target $DBT_TARGET --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ || true
     fi
 
     if [ 0 != $2 ];
@@ -46,15 +53,15 @@ then
 fi
 
 # These commands don't have artifacts to be uploaded so if they fail the job can just fail
-dbt clean --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ --profile $DBT_PROFILE --target $DBT_TARGET
-dbt deps --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ --profile $DBT_PROFILE --target $DBT_TARGET
+dbt clean --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ --profile $DBT_PROFILE --target $DBT_TARGET
+dbt deps --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ --profile $DBT_PROFILE --target $DBT_TARGET
 
 # Turn off automatic failure of this script if the command returns non-0 for the rest of these commands
 set +e
 
 if [ "$SKIP_SEED" != 'true' ]
 then
-    dbt seed --full-refresh --models "$SEED_SELECTOR" --profile $DBT_PROFILE --target $DBT_TARGET --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ ; ret=$?;
+    dbt seed --full-refresh --models "$SEED_SELECTOR" --profile $DBT_PROFILE --target $DBT_TARGET --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ ; ret=$?;
     postCommandChecks "seed" $ret ;
 fi
 
@@ -62,7 +69,7 @@ fi
 if [ "$TEST_SOURCES_FIRST" = 'true' ] && [ "$SKIP_TESTS" != 'true' ]
 then
     # Run the source tests, sadly not just the ones upstream from this tag
-    dbt test --models source:* --profile $DBT_PROFILE --target $DBT_TARGET --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ ; ret=$?;
+    dbt test --models source:* --profile $DBT_PROFILE --target $DBT_TARGET --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ ; ret=$?;
     postCommandChecks "source_test" $ret ;
 fi
 
@@ -86,12 +93,12 @@ then
     fi
 
     # This will only runs parents tests without running current models tests.
-    dbt test --models $MODEL_SELECTOR_WITH_PARENTS --exclude $EXCLUDE_MODELS --profile $DBT_PROFILE --target $DBT_TARGET --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ ; ret=$?;
+    dbt test --models $MODEL_SELECTOR_WITH_PARENTS --exclude $EXCLUDE_MODELS --profile $DBT_PROFILE --target $DBT_TARGET --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ ; ret=$?;
     postCommandChecks "parent_models_tests" $ret ;
 fi
 
 # Compile/build all models with this tag.
-dbt $DBT_COMMAND $FULL_REFRESH_ARG --models $MODEL_SELECTOR --profile $DBT_PROFILE --target $DBT_TARGET --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ ; ret=$?;
+dbt $DBT_COMMAND $FULL_REFRESH_ARG --models $MODEL_SELECTOR --profile $DBT_PROFILE --target $DBT_TARGET --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ ; ret=$?;
 postCommandChecks "run" $ret ;
 
 if [ "$SKIP_TESTS" != 'true' ]
@@ -114,6 +121,6 @@ then
     fi
 
     # Run all tests as specified.
-    dbt test --models $MODEL_SELECTOR $exclude_param $INDIRECT_SELECTION_PARAM --profile $DBT_PROFILE --target $DBT_TARGET --profiles-dir $WORKSPACE/analytics-secure/warehouse-transforms/ ; ret=$?;
+    dbt test --models $MODEL_SELECTOR $exclude_param $INDIRECT_SELECTION_PARAM --profile $DBT_PROFILE --target $DBT_TARGET --profiles-dir $WORKSPACE/warehouse-transforms/profiles/ ; ret=$?;
     postCommandChecks "test" $ret ;
 fi
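
Reviewer note: secrets-manager.sh itself is not part of this diff; it is placed into $WORKSPACE by the new secrets-manager-setup.sh build step that each job now runs first. For context, here is a minimal sketch of what the get_secret_value helper used above could look like. This is an assumption, not the actual helper: it presumes the AWS CLI and jq are on the worker's PATH and that the secret is stored as a JSON blob keyed by field name.

# Hypothetical sketch of the helper sourced from $WORKSPACE/secrets-manager.sh.
# Usage: get_secret_value <secret-id> <field>
# Reads <field> out of the named AWS Secrets Manager secret and binds it to a
# shell variable called <field>; callers export it themselves, as in the diff.
get_secret_value() {
    local secret_id="$1"
    local field="$2"
    local secret_json
    # --query SecretString extracts just the secret payload from the API response
    secret_json=$(aws secretsmanager get-secret-value \
        --secret-id "$secret_id" \
        --query SecretString \
        --output text)
    # printf -v assigns without echoing the value; the callers additionally wrap
    # the call in set +x / set -x so the secret never appears in the xtrace log.
    printf -v "$field" '%s' "$(jq -r --arg f "$field" '.[$f]' <<< "$secret_json")"
}

Under that assumption, the call sites above read as: fetch the DBT_PASSWORD field from the warehouse-transforms/profiles/profiles secret, then export it so dbt can resolve it via env_var() in the repo-local profiles directory that replaces analytics-secure.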