From 46c0e88ab8a836317c61648bac8aa6e08f9b6467 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Maciej=20Kara=C5=9B?=
Date: Wed, 16 Jul 2025 10:35:35 +0200
Subject: [PATCH] Move run_python.sh to scripts/dev/

---
 .evergreen-functions.yml                 | 12 ++++----
 Makefile                                 | 38 ++++++++++++------------
 scripts/{evergreen => dev}/run_python.sh |  0
 scripts/dev/switch_context_by_test.sh    |  2 +-
 scripts/evergreen/e2e/e2e.sh             |  2 +-
 5 files changed, 27 insertions(+), 27 deletions(-)
 rename scripts/{evergreen => dev}/run_python.sh (100%)

diff --git a/.evergreen-functions.yml b/.evergreen-functions.yml
index 00e95d8f1..9d5431546 100644
--- a/.evergreen-functions.yml
+++ b/.evergreen-functions.yml
@@ -392,7 +392,7 @@ functions:
         add_to_path:
           - ${workdir}/bin
         working_dir: src/github.com/mongodb/mongodb-kubernetes
-        binary: scripts/evergreen/run_python.sh scripts/update_supported_dockerfiles.py
+        binary: scripts/dev/run_python.sh scripts/update_supported_dockerfiles.py
   - command: subprocess.exec
     type: setup
     params:
@@ -486,7 +486,7 @@ functions:
         include_expansions_in_env:
           - image_version
           - rh_pyxis
-        binary: scripts/evergreen/run_python.sh scripts/preflight_images.py --image ${image_name} --submit "${preflight_submit}"
+        binary: scripts/dev/run_python.sh scripts/preflight_images.py --image ${image_name} --submit "${preflight_submit}"

   build_multi_cluster_binary:
     - command: subprocess.exec
@@ -538,7 +538,7 @@ functions:
         shell: bash
         <<: *e2e_include_expansions_in_env
         working_dir: src/github.com/mongodb/mongodb-kubernetes
-        binary: scripts/evergreen/run_python.sh pipeline.py --include ${image_name} --parallel --sign
+        binary: scripts/dev/run_python.sh pipeline.py --include ${image_name} --parallel --sign

   teardown_cloud_qa_all:
     - *switch_context
@@ -549,7 +549,7 @@ functions:
         working_dir: src/github.com/mongodb/mongodb-kubernetes
         script: |
           source .generated/context.export.env
-          scripts/evergreen/run_python.sh scripts/evergreen/e2e/setup_cloud_qa.py delete_all
+          scripts/dev/run_python.sh scripts/evergreen/e2e/setup_cloud_qa.py delete_all

   # Updates current expansions with variables from release.json file.
   # Use e.g. ${mongoDbOperator} afterwards.
@@ -596,7 +596,7 @@ functions:
         add_to_path:
           - ${workdir}/bin
       # Below script deletes agent images created for an Evergreen patch older than 1 day
-      command: scripts/evergreen/run_python.sh scripts/evergreen/periodic-cleanup-aws.py
+      command: scripts/dev/run_python.sh scripts/evergreen/periodic-cleanup-aws.py

 ### Test Functions ###

@@ -695,7 +695,7 @@ functions:
         working_dir: src/github.com/mongodb/mongodb-kubernetes
         script: |
           source .generated/context.export.env
-          scripts/evergreen/run_python.sh scripts/evergreen/e2e/performance/create_variants.py ${variant} ${size}> evergreen_tasks.json
+          scripts/dev/run_python.sh scripts/evergreen/e2e/performance/create_variants.py ${variant} ${size}> evergreen_tasks.json
           echo "tasks to run:"
           cat evergreen_tasks.json
     - command: generate.tasks
diff --git a/Makefile b/Makefile
index 9c45c1c3b..ea6097cc6 100644
--- a/Makefile
+++ b/Makefile
@@ -75,13 +75,13 @@ operator: configure-operator build-and-push-operator-image

 # build-push, (todo) restart database
 database: aws_login
-	@ scripts/evergreen/run_python.sh pipeline.py --include database
+	@ scripts/dev/run_python.sh pipeline.py --include database

 readiness_probe: aws_login
-	@ scripts/evergreen/run_python.sh pipeline.py --include readiness-probe
+	@ scripts/dev/run_python.sh pipeline.py --include readiness-probe

 upgrade_hook: aws_login
-	@ scripts/evergreen/run_python.sh pipeline.py --include upgrade-hook
+	@ scripts/dev/run_python.sh pipeline.py --include upgrade-hook

 # ensures cluster is up, cleans Kubernetes + OM, build-push-deploy operator,
 # push-deploy database, create secrets, config map, resources etc
@@ -90,7 +90,7 @@ full: build-and-push-images

 # build-push appdb image
 appdb: aws_login
-	@ scripts/evergreen/run_python.sh pipeline.py --include appdb
+	@ scripts/dev/run_python.sh pipeline.py --include appdb

 # runs the e2e test: make e2e test=e2e_sharded_cluster_pv. The Operator is redeployed before the test, the namespace is cleaned.
 # The e2e test image is built and pushed together with all main ones (operator, database, init containers)
@@ -112,7 +112,7 @@ mco-e2e: aws_login build-and-push-mco-test-image

 generate-env-file: ## generates a local-test.env for local testing
 	mkdir -p .generated
-	{ scripts/evergreen/run_python.sh mongodb-community-operator/scripts/dev/get_e2e_env_vars.py ".generated/config.json" | tee >(cut -d' ' -f2 > .generated/mco-test.env) ;} > .generated/mco-test.export.env
+	{ scripts/dev/run_python.sh mongodb-community-operator/scripts/dev/get_e2e_env_vars.py ".generated/config.json" | tee >(cut -d' ' -f2 > .generated/mco-test.env) ;} > .generated/mco-test.export.env
 	. .generated/mco-test.export.env

 reset-helm-leftovers: ## sometimes you didn't cleanly uninstall a helm release, this cleans the existing helm artifacts
@@ -154,19 +154,19 @@ aws_cleanup:
 	@ scripts/evergreen/prepare_aws.sh

 build-and-push-operator-image: aws_login
-	@ scripts/evergreen/run_python.sh pipeline.py --include operator-quick
+	@ scripts/dev/run_python.sh pipeline.py --include operator-quick

 build-and-push-database-image: aws_login
 	@ scripts/dev/build_push_database_image

 build-and-push-test-image: aws_login build-multi-cluster-binary
 	@ if [[ -z "$(local)" ]]; then \
-		scripts/evergreen/run_python.sh pipeline.py --include test; \
+		scripts/dev/run_python.sh pipeline.py --include test; \
 	fi

 build-and-push-mco-test-image: aws_login
 	@ if [[ -z "$(local)" ]]; then \
-		scripts/evergreen/run_python.sh pipeline.py --include mco-test; \
+		scripts/dev/run_python.sh pipeline.py --include mco-test; \
 	fi

 build-multi-cluster-binary:
@@ -181,27 +181,27 @@ build-and-push-images: build-and-push-operator-image appdb-init-image om-init-im
 build-and-push-init-images: appdb-init-image om-init-image database-init-image

 database-init-image:
-	@ scripts/evergreen/run_python.sh pipeline.py --include init-database
+	@ scripts/dev/run_python.sh pipeline.py --include init-database

 appdb-init-image:
-	@ scripts/evergreen/run_python.sh pipeline.py --include init-appdb
+	@ scripts/dev/run_python.sh pipeline.py --include init-appdb

 # Not setting a parallel-factor will default to 0 which will lead to using all CPUs, that can cause docker to die.
 # Here we are defaulting to 6, a higher value might work for you.
 agent-image:
-	@ scripts/evergreen/run_python.sh pipeline.py --include agent --all-agents --parallel --parallel-factor 6
+	@ scripts/dev/run_python.sh pipeline.py --include agent --all-agents --parallel --parallel-factor 6

 agent-image-slow:
-	@ scripts/evergreen/run_python.sh pipeline.py --include agent --parallel-factor 1
+	@ scripts/dev/run_python.sh pipeline.py --include agent --parallel-factor 1

 operator-image:
-	@ scripts/evergreen/run_python.sh pipeline.py --include operator
+	@ scripts/dev/run_python.sh pipeline.py --include operator

 om-init-image:
-	@ scripts/evergreen/run_python.sh pipeline.py --include init-ops-manager
+	@ scripts/dev/run_python.sh pipeline.py --include init-ops-manager

 om-image:
-	@ scripts/evergreen/run_python.sh pipeline.py --include ops-manager
+	@ scripts/dev/run_python.sh pipeline.py --include ops-manager

 configure-operator:
 	@ scripts/dev/configure_operator.sh
@@ -284,16 +284,16 @@ golang-tests-race:
 	USE_RACE=true scripts/evergreen/unit-tests.sh

 sbom-tests:
-	@ scripts/evergreen/run_python.sh -m pytest generate_ssdlc_report_test.py
+	@ scripts/dev/run_python.sh -m pytest generate_ssdlc_report_test.py

 # e2e tests are also in python and we will need to ignore them as they are in the docker/mongodb-kubernetes-tests folder
 # additionally, we have one lib which we want to test which is in the =docker/mongodb-kubernetes-tests folder.
 python-tests:
-	@ scripts/evergreen/run_python.sh -m pytest docker/mongodb-kubernetes-tests/kubeobject
-	@ scripts/evergreen/run_python.sh -m pytest --ignore=docker/mongodb-kubernetes-tests
+	@ scripts/dev/run_python.sh -m pytest docker/mongodb-kubernetes-tests/kubeobject
+	@ scripts/dev/run_python.sh -m pytest --ignore=docker/mongodb-kubernetes-tests

 generate-ssdlc-report:
-	@ scripts/evergreen/run_python.sh generate_ssdlc_report.py
+	@ scripts/dev/run_python.sh generate_ssdlc_report.py

 # test-race runs golang test with race enabled
 test-race: generate fmt vet manifests golang-tests-race
diff --git a/scripts/evergreen/run_python.sh b/scripts/dev/run_python.sh
similarity index 100%
rename from scripts/evergreen/run_python.sh
rename to scripts/dev/run_python.sh
diff --git a/scripts/dev/switch_context_by_test.sh b/scripts/dev/switch_context_by_test.sh
index 17e1f4428..f8ee90430 100755
--- a/scripts/dev/switch_context_by_test.sh
+++ b/scripts/dev/switch_context_by_test.sh
@@ -58,7 +58,7 @@ main() {
     find_variant_arg="--task-name"
   fi

-  if ! contexts=$(scripts/evergreen/run_python.sh scripts/python/find_test_variants.py "${find_variant_arg}" "${test}"); then
+  if ! contexts=$(scripts/dev/run_python.sh scripts/python/find_test_variants.py "${find_variant_arg}" "${test}"); then
     echo "Couldn't find any test contexts running test: ${test}"
     echo "${contexts}"
     exit 1
diff --git a/scripts/evergreen/e2e/e2e.sh b/scripts/evergreen/e2e/e2e.sh
index f0294787f..9f7ef1e98 100755
--- a/scripts/evergreen/e2e/e2e.sh
+++ b/scripts/evergreen/e2e/e2e.sh
@@ -21,7 +21,7 @@ run_e2e_mco_tests() {
   docker exec kind-control-plane mkdir -p /opt/data/mongo-data-{0..2} /opt/data/mongo-logs-{0..2}

   set +e # let's not fail here, such that we can still dump all information
-  scripts/evergreen/run_python.sh mongodb-community-operator/scripts/dev/e2e.py --test "${TEST_NAME}" --distro ubi --cluster-wide "${cluster_wide}"
+  scripts/dev/run_python.sh mongodb-community-operator/scripts/dev/e2e.py --test "${TEST_NAME}" --distro ubi --cluster-wide "${cluster_wide}"
   local test_results=$?
   set -e