From da61296de0466fc29b8618f18d7b652ebb369ae9 Mon Sep 17 00:00:00 2001 From: samugi Date: Wed, 3 Jan 2024 13:54:43 +0100 Subject: [PATCH] feat(ci): dynamic test scheduler / balancer This reverts commit e804fd4b10a78df58c758831347cdc5006ff4b0f effectively reapplying 543004ca259c86e463767b17e782f064e43aa6ea. Original commit message: This commit adds an automatic scheduler for running busted tests. It replaces the static, shell script based scheduler by a mechanism that distributes the load onto a number of runners. Each runner gets to work on a portion of the tests that need to be run. The scheduler uses historic run time information to distribute the work evenly across runners, with the goal of making them all run for the same amount of time. With the 7 runners configured in the PR, the overall time it takes to run tests is reduced from around 30 minutes to around 11 minutes. Previously, the scheduling for tests was defined by what the run_tests.sh shell script did. This has now changed so that the new JSON file `test_suites.json` is instead used to define the tests that need to run. Like before, each of the test suites can have its own set of environment variables and test exclusions. The test runner has been rewritten in JavaScript in order to make it easier to interface with the declarative configuration file and to facilitate reporting and interfacing with busted. It resides in the https://github.com/Kong/gateway-test-scheduler repository and provides its functionality through custom GitHub Actions. A couple of tests had to be changed to isolate them from other tests better. As the tests are no longer run in identical order every time, it has become more important that each test performs any required cleanup before it runs. 
--- .ci/run_tests.sh | 165 ------------ .ci/test_suites.json | 34 +++ .github/workflows/build_and_test.yml | 247 +++++++----------- .../update-test-runtime-statistics.yml | 35 +++ spec/busted-ci-helper.lua | 54 ++++ spec/busted-log-failed.lua | 33 --- 6 files changed, 216 insertions(+), 352 deletions(-) delete mode 100755 .ci/run_tests.sh create mode 100644 .ci/test_suites.json create mode 100644 .github/workflows/update-test-runtime-statistics.yml create mode 100644 spec/busted-ci-helper.lua delete mode 100644 spec/busted-log-failed.lua diff --git a/.ci/run_tests.sh b/.ci/run_tests.sh deleted file mode 100755 index 55f64dc03dd4..000000000000 --- a/.ci/run_tests.sh +++ /dev/null @@ -1,165 +0,0 @@ -#!/usr/bin/env bash -set -e - -function cyan() { - echo -e "\033[1;36m$*\033[0m" -} - -function red() { - echo -e "\033[1;31m$*\033[0m" -} - -function get_failed { - if [ ! -z "$FAILED_TEST_FILES_FILE" -a -s "$FAILED_TEST_FILES_FILE" ] - then - cat < $FAILED_TEST_FILES_FILE - else - echo "$@" - fi -} - -BUSTED_ARGS="--keep-going -o htest -v --exclude-tags=flaky,ipv6" -if [ ! 
-z "$FAILED_TEST_FILES_FILE" ] -then - BUSTED_ARGS="--helper=spec/busted-log-failed.lua $BUSTED_ARGS" -fi - -if [ "$KONG_TEST_DATABASE" == "postgres" ]; then - export TEST_CMD="bin/busted $BUSTED_ARGS,off" - - psql -v ON_ERROR_STOP=1 -h localhost --username "$KONG_TEST_PG_USER" <<-EOSQL - CREATE user ${KONG_TEST_PG_USER}_ro; - GRANT CONNECT ON DATABASE $KONG_TEST_PG_DATABASE TO ${KONG_TEST_PG_USER}_ro; - \c $KONG_TEST_PG_DATABASE; - GRANT USAGE ON SCHEMA public TO ${KONG_TEST_PG_USER}_ro; - ALTER DEFAULT PRIVILEGES FOR ROLE $KONG_TEST_PG_USER IN SCHEMA public GRANT SELECT ON TABLES TO ${KONG_TEST_PG_USER}_ro; -EOSQL - -elif [ "$KONG_TEST_DATABASE" == "cassandra" ]; then - echo "Cassandra is no longer supported" - exit 1 - -else - export TEST_CMD="bin/busted $BUSTED_ARGS,postgres,db" -fi - -if [ "$TEST_SUITE" == "integration" ]; then - if [[ "$TEST_SPLIT" == first* ]]; then - # GitHub Actions, run first batch of integration tests - files=$(ls -d spec/02-integration/* | sort | grep -v 05-proxy) - files=$(get_failed $files) - eval "$TEST_CMD" $files - - elif [[ "$TEST_SPLIT" == second* ]]; then - # GitHub Actions, run second batch of integration tests - # Note that the split here is chosen carefully to result - # in a similar run time between the two batches, and should - # be adjusted if imbalance become significant in the future - files=$(ls -d spec/02-integration/* | sort | grep 05-proxy) - files=$(get_failed $files) - eval "$TEST_CMD" $files - - else - # Non GitHub Actions - eval "$TEST_CMD" $(get_failed spec/02-integration/) - fi -fi - -if [ "$TEST_SUITE" == "dbless" ]; then - eval "$TEST_CMD" $(get_failed spec/02-integration/02-cmd \ - spec/02-integration/05-proxy \ - spec/02-integration/04-admin_api/02-kong_routes_spec.lua \ - spec/02-integration/04-admin_api/15-off_spec.lua \ - spec/02-integration/08-status_api/01-core_routes_spec.lua \ - spec/02-integration/08-status_api/03-readiness_endpoint_spec.lua \ - spec/02-integration/11-dbless \ - 
spec/02-integration/20-wasm) -fi -if [ "$TEST_SUITE" == "plugins" ]; then - set +ex - rm -f .failed - - if [[ "$TEST_SPLIT" == first* ]]; then - # GitHub Actions, run first batch of plugin tests - PLUGINS=$(get_failed $(ls -d spec/03-plugins/* | head -n22)) - - elif [[ "$TEST_SPLIT" == second* ]]; then - # GitHub Actions, run second batch of plugin tests - # Note that the split here is chosen carefully to result - # in a similar run time between the two batches, and should - # be adjusted if imbalance become significant in the future - PLUGINS=$(get_failed $(ls -d spec/03-plugins/* | tail -n+23)) - - else - # Non GitHub Actions - PLUGINS=$(get_failed $(ls -d spec/03-plugins/*)) - fi - - for p in $PLUGINS; do - echo - cyan "--------------------------------------" - cyan $(basename $p) - cyan "--------------------------------------" - echo - - $TEST_CMD $p || echo "* $p" >> .failed - - # the suite is run multiple times for plugins: collect partial failures - if [ ! -z "$FAILED_TEST_FILES_FILE" ] - then - cat "$FAILED_TEST_FILES_FILE" >> "$FAILED_TEST_FILES_FILE.tmp" - fi - done - - if [ ! 
-z "$FAILED_TEST_FILES_FILE.tmp" -a -s "$FAILED_TEST_FILES_FILE.tmp" ] - then - mv "$FAILED_TEST_FILES_FILE.tmp" "$FAILED_TEST_FILES_FILE" - fi - - if [[ "$TEST_SPLIT" != first* ]]; then - cat kong-*.rockspec | grep kong- | grep -v zipkin | grep -v sidecar | grep "~" | grep -v kong-prometheus-plugin | while read line ; do - REPOSITORY=`echo $line | sed "s/\"/ /g" | awk -F" " '{print $1}'` - VERSION=`luarocks show $REPOSITORY | grep $REPOSITORY | head -1 | awk -F" " '{print $2}' | cut -f1 -d"-"` - REPOSITORY=`echo $REPOSITORY | sed -e 's/kong-prometheus-plugin/kong-plugin-prometheus/g'` - REPOSITORY=`echo $REPOSITORY | sed -e 's/kong-proxy-cache-plugin/kong-plugin-proxy-cache/g'` - - echo - cyan "--------------------------------------" - cyan $REPOSITORY $VERSION - cyan "--------------------------------------" - echo - - git clone https://github.com/Kong/$REPOSITORY.git --branch $VERSION --single-branch /tmp/test-$REPOSITORY || \ - git clone https://github.com/Kong/$REPOSITORY.git --branch v$VERSION --single-branch /tmp/test-$REPOSITORY - sed -i 's/grpcbin:9000/localhost:15002/g' /tmp/test-$REPOSITORY/spec/*.lua - sed -i 's/grpcbin:9001/localhost:15003/g' /tmp/test-$REPOSITORY/spec/*.lua - cp -R /tmp/test-$REPOSITORY/spec/fixtures/* spec/fixtures/ || true - pushd /tmp/test-$REPOSITORY - luarocks make - popd - - $TEST_CMD /tmp/test-$REPOSITORY/spec/ || echo "* $REPOSITORY" >> .failed - - done - fi - - if [ -f .failed ]; then - echo - red "--------------------------------------" - red "Plugin tests failed:" - red "--------------------------------------" - cat .failed - exit 1 - else - exit 0 - fi -fi -if [ "$TEST_SUITE" == "pdk" ]; then - prove -I. 
-r t -fi -if [ "$TEST_SUITE" == "unit" ]; then - unset KONG_TEST_NGINX_USER KONG_PG_PASSWORD KONG_TEST_PG_PASSWORD - scripts/autodoc - bin/busted -v -o htest spec/01-unit - make lint -fi diff --git a/.ci/test_suites.json b/.ci/test_suites.json new file mode 100644 index 000000000000..eb6b15e5909e --- /dev/null +++ b/.ci/test_suites.json @@ -0,0 +1,34 @@ +[ + { + "name": "unit", + "exclude_tags": "flaky,ipv6", + "specs": ["spec/01-unit/"] + }, + { + "name": "integration", + "exclude_tags": "flaky,ipv6,off", + "environment": { + "KONG_TEST_DATABASE": "postgres" + }, + "specs": ["spec/02-integration/"] + }, + { + "name": "dbless", + "exclude_tags": "flaky,ipv6,postgres,db", + "specs": [ + "spec/02-integration/02-cmd/", + "spec/02-integration/05-proxy/", + "spec/02-integration/04-admin_api/02-kong_routes_spec.lua", + "spec/02-integration/04-admin_api/15-off_spec.lua", + "spec/02-integration/08-status_api/01-core_routes_spec.lua", + "spec/02-integration/08-status_api/03-readiness_endpoint_spec.lua", + "spec/02-integration/11-dbless/", + "spec/02-integration/20-wasm/" + ] + }, + { + "name": "plugins", + "exclude_tags": "flaky,ipv6", + "specs": ["spec/03-plugins/"] + } +] diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index 9ad8a072ebb0..1aa7fc23a580 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -33,6 +33,7 @@ concurrency: env: BUILD_ROOT: ${{ github.workspace }}/bazel-bin/build KONG_TEST_COVERAGE: ${{ inputs.coverage == true || github.event_name == 'schedule' }} + RUNNER_COUNT: 7 jobs: build: @@ -40,22 +41,11 @@ jobs: with: relative-build-root: bazel-bin/build - lint-doc-and-unit-tests: - name: Lint, Doc and Unit tests + lint-and-doc-tests: + name: Lint and Doc tests runs-on: ubuntu-22.04 needs: build - services: - postgres: - image: postgres:13 - env: - POSTGRES_USER: kong - POSTGRES_DB: kong - POSTGRES_HOST_AUTH_METHOD: trust - ports: - - 5432:5432 - options: --health-cmd 
pg_isready --health-interval 5s --health-timeout 5s --health-retries 8 - steps: - name: Bump max open files run: | @@ -100,41 +90,56 @@ jobs: - name: Check labeler configuration run: scripts/check-labeler.pl .github/labeler.yml - - name: Unit tests - env: - KONG_TEST_PG_DATABASE: kong - KONG_TEST_PG_USER: kong - run: | - source ${{ env.BUILD_ROOT }}/kong-dev-venv.sh - TEST_CMD="bin/busted -v -o htest spec/01-unit" - if [[ $KONG_TEST_COVERAGE = true ]]; then - TEST_CMD="$TEST_CMD --coverage" - fi - $TEST_CMD + schedule: + name: Schedule busted tests to run + runs-on: ubuntu-22.04 + needs: build - - name: Archive coverage stats file + env: + WORKFLOW_ID: ${{ github.run_id }} + + outputs: + runners: ${{ steps.generate-runner-array.outputs.RUNNERS }} + + steps: + - name: Checkout source code + uses: actions/checkout@v4 + + - name: Download runtimes file + uses: Kong/gh-storage/download@v1 + with: + repo-path: Kong/gateway-action-storage/main/.ci/runtimes.json + + - name: Schedule tests + uses: Kong/gateway-test-scheduler/schedule@b91bd7aec42bd13748652929f087be81d1d40843 # v1 + with: + test-suites-file: .ci/test_suites.json + test-file-runtime-file: .ci/runtimes.json + output-prefix: test-chunk. 
+ runner-count: ${{ env.RUNNER_COUNT }} + + - name: Upload schedule files uses: actions/upload-artifact@v3 - if: ${{ always() && (inputs.coverage == true || github.event_name == 'schedule') }} + continue-on-error: true with: - name: luacov-stats-out-${{ github.job }}-${{ github.run_id }} - retention-days: 1 - path: | - luacov.stats.out + name: schedule-test-files + path: test-chunk.* + retention-days: 7 - - name: Get kernel message - if: failure() + - name: Generate runner array + id: generate-runner-array run: | - sudo dmesg -T + echo "RUNNERS=[$(seq -s "," 1 $(( "$RUNNER_COUNT" )))]" >> "$GITHUB_OUTPUT" - integration-tests-postgres: - name: Postgres ${{ matrix.suite }} - ${{ matrix.split }} tests + busted-tests: + name: Busted test runner ${{ matrix.runner }} runs-on: ubuntu-22.04 - needs: build + needs: [build,schedule] + strategy: fail-fast: false matrix: - suite: [integration, plugins] - split: [first, second] + runner: ${{ fromJSON(needs.schedule.outputs.runners) }} services: postgres: @@ -193,7 +198,6 @@ jobs: echo "127.0.0.1 grpcs_2.test" | sudo tee -a /etc/hosts - name: Enable SSL for Redis - if: ${{ matrix.suite == 'plugins' }} run: | docker cp ${{ github.workspace }} kong_redis:/workspace docker cp ${{ github.workspace }}/spec/fixtures/redis/docker-entrypoint.sh kong_redis:/usr/local/bin/docker-entrypoint.sh @@ -216,47 +220,53 @@ jobs: docker logs opentelemetry-collector - name: Install AWS SAM cli tool - if: ${{ matrix.suite == 'plugins' }} run: | curl -L -s -o /tmp/aws-sam-cli.zip https://github.com/aws/aws-sam-cli/releases/latest/download/aws-sam-cli-linux-x86_64.zip unzip -o /tmp/aws-sam-cli.zip -d /tmp/aws-sam-cli sudo /tmp/aws-sam-cli/install --update - - name: Update PATH + - name: Create kong_ro user in Postgres run: | - echo "$BUILD_ROOT/kong-dev/bin" >> $GITHUB_PATH - echo "$BUILD_ROOT/kong-dev/openresty/nginx/sbin" >> $GITHUB_PATH - echo "$BUILD_ROOT/kong-dev/openresty/bin" >> $GITHUB_PATH - - - name: Debug (nginx) - run: | - echo nginx: 
$(which nginx) - nginx -V 2>&1 | sed -re 's/ --/\n--/g' - ldd $(which nginx) - - - name: Debug (luarocks) - run: | - echo luarocks: $(which luarocks) - luarocks --version - luarocks config + psql -v ON_ERROR_STOP=1 -h localhost --username kong <<\EOD + CREATE user kong_ro; + GRANT CONNECT ON DATABASE kong TO kong_ro; + \c kong; + GRANT USAGE ON SCHEMA public TO kong_ro; + ALTER DEFAULT PRIVILEGES FOR ROLE kong IN SCHEMA public GRANT SELECT ON TABLES TO kong_ro; + EOD - name: Tune up postgres max_connections run: | # arm64 runners may use more connections due to more worker cores psql -hlocalhost -Ukong kong -tAc 'alter system set max_connections = 5000;' - - name: Generate test rerun filename + - name: Download test schedule file + uses: actions/download-artifact@v3 + with: + name: schedule-test-files + + - name: Generate helper environment variables run: | - echo FAILED_TEST_FILES_FILE=$(echo '${{ github.run_id }}-${{ matrix.suite }}-${{ matrix.split }}' | tr A-Z a-z | sed -Ee 's/[^a-z0-9]+/-/g').txt >> $GITHUB_ENV + echo FAILED_TEST_FILES_FILE=failed-tests.json >> $GITHUB_ENV + echo TEST_FILE_RUNTIME_FILE=test-runtime.json >> $GITHUB_ENV + - name: Build & install dependencies + run: | + make dev - name: Download test rerun information uses: actions/download-artifact@v3 continue-on-error: true with: - name: ${{ env.FAILED_TEST_FILES_FILE }} + name: test-rerun-info-${{ matrix.runner }} - - name: Tests + - name: Download test runtime statistics from previous runs + uses: actions/download-artifact@v3 + continue-on-error: true + with: + name: test-runtime-statistics-${{ matrix.runner }} + + - name: Run Tests env: KONG_TEST_PG_DATABASE: kong KONG_TEST_PG_USER: kong @@ -264,115 +274,44 @@ jobs: KONG_SPEC_TEST_GRPCBIN_PORT: "15002" KONG_SPEC_TEST_GRPCBIN_SSL_PORT: "15003" KONG_SPEC_TEST_OTELCOL_FILE_EXPORTER_PATH: ${{ github.workspace }}/tmp/otel/file_exporter.json - TEST_SUITE: ${{ matrix.suite }} - TEST_SPLIT: ${{ matrix.split }} - run: | - make dev # required to 
install other dependencies like bin/grpcurl - source ${{ env.BUILD_ROOT }}/kong-dev-venv.sh - .ci/run_tests.sh + DD_ENV: ci + DD_SERVICE: kong-ce-ci + DD_CIVISIBILITY_MANUAL_API_ENABLED: 1 + DD_CIVISIBILITY_AGENTLESS_ENABLED: true + DD_TRACE_GIT_METADATA_ENABLED: true + DD_API_KEY: ${{ secrets.DATADOG_API_KEY }} + uses: Kong/gateway-test-scheduler/runner@b91bd7aec42bd13748652929f087be81d1d40843 # v1 + with: + tests-to-run-file: test-chunk.${{ matrix.runner }}.json + failed-test-files-file: ${{ env.FAILED_TEST_FILES_FILE }} + test-file-runtime-file: ${{ env.TEST_FILE_RUNTIME_FILE }} + setup-venv: . ${{ env.BUILD_ROOT }}/kong-dev-venv.sh - name: Upload test rerun information if: always() uses: actions/upload-artifact@v3 with: - name: ${{ env.FAILED_TEST_FILES_FILE }} + name: test-rerun-info-${{ matrix.runner }} path: ${{ env.FAILED_TEST_FILES_FILE }} retention-days: 2 - - name: Archive coverage stats file + - name: Upload test runtime statistics for offline scheduling + if: always() uses: actions/upload-artifact@v3 - if: ${{ always() && (inputs.coverage == true || github.event_name == 'schedule') }} with: - name: luacov-stats-out-${{ github.job }}-${{ github.run_id }}-${{ matrix.suite }}-${{ contains(matrix.split, 'first') && '1' || '2' }} - retention-days: 1 - path: | - luacov.stats.out - - - name: Get kernel message - if: failure() - run: | - sudo dmesg -T - - integration-tests-dbless: - name: DB-less integration tests - runs-on: ubuntu-22.04 - needs: build - - services: - grpcbin: - image: kong/grpcbin - ports: - - 15002:9000 - - 15003:9001 - - steps: - - name: Bump max open files - run: | - sudo echo 'kong soft nofile 65536' | sudo tee -a /etc/security/limits.d/kong-ci.conf - sudo echo 'kong hard nofile 65536' | sudo tee -a /etc/security/limits.d/kong-ci.conf - sudo echo "$(whoami) soft nofile 65536" | sudo tee -a /etc/security/limits.d/kong-ci.conf - sudo echo "$(whoami) hard nofile 65536" | sudo tee -a /etc/security/limits.d/kong-ci.conf - - - name: Checkout 
Kong source code - uses: actions/checkout@v4 - - - name: Lookup build cache - id: cache-deps - uses: actions/cache@v3 - with: - path: ${{ env.BUILD_ROOT }} - key: ${{ needs.build.outputs.cache-key }} - - - name: Build WASM Test Filters - uses: ./.github/actions/build-wasm-test-filters - - - name: Add gRPC test host names - run: | - echo "127.0.0.1 grpcs_1.test" | sudo tee -a /etc/hosts - echo "127.0.0.1 grpcs_2.test" | sudo tee -a /etc/hosts - - - name: Run OpenTelemetry Collector - run: | - mkdir -p ${{ github.workspace }}/tmp/otel - touch ${{ github.workspace }}/tmp/otel/file_exporter.json - sudo chmod 777 -R ${{ github.workspace }}/tmp/otel - docker run -p 4317:4317 -p 4318:4318 -p 55679:55679 \ - -v ${{ github.workspace }}/spec/fixtures/opentelemetry/otelcol.yaml:/etc/otel-collector-config.yaml \ - -v ${{ github.workspace }}/tmp/otel:/etc/otel \ - --name opentelemetry-collector -d \ - otel/opentelemetry-collector-contrib:0.52.0 \ - --config=/etc/otel-collector-config.yaml - sleep 2 - docker logs opentelemetry-collector - - - name: Tests - env: - KONG_TEST_PG_DATABASE: kong - KONG_TEST_PG_USER: kong - KONG_TEST_DATABASE: 'off' - KONG_SPEC_TEST_GRPCBIN_PORT: "15002" - KONG_SPEC_TEST_GRPCBIN_SSL_PORT: "15003" - KONG_SPEC_TEST_OTELCOL_FILE_EXPORTER_PATH: ${{ github.workspace }}/tmp/otel/file_exporter.json - TEST_SUITE: dbless - run: | - make dev # required to install other dependencies like bin/grpcurl - source ${{ env.BUILD_ROOT }}/kong-dev-venv.sh - .ci/run_tests.sh + name: test-runtime-statistics-${{ matrix.runner }} + path: ${{ env.TEST_FILE_RUNTIME_FILE }} + retention-days: 7 - name: Archive coverage stats file uses: actions/upload-artifact@v3 if: ${{ always() && (inputs.coverage == true || github.event_name == 'schedule') }} with: - name: luacov-stats-out-${{ github.job }}-${{ github.run_id }} + name: luacov-stats-out-${{ github.job }}-${{ github.run_id }}-${{ matrix.runner }} retention-days: 1 path: | luacov.stats.out - - name: Get kernel message - if: 
failure() - run: | - sudo dmesg -T - pdk-tests: name: PDK tests runs-on: ubuntu-22.04 @@ -416,7 +355,7 @@ jobs: export PDK_LUACOV=1 fi eval $(perl -I $HOME/perl5/lib/perl5/ -Mlocal::lib) - .ci/run_tests.sh + prove -I. -r t - name: Archive coverage stats file uses: actions/upload-artifact@v3 @@ -432,9 +371,9 @@ jobs: run: | sudo dmesg -T - aggregator: - needs: [lint-doc-and-unit-tests,pdk-tests,integration-tests-postgres,integration-tests-dbless] - name: Luacov stats aggregator + cleanup-and-aggregate-stats: + needs: [lint-and-doc-tests,pdk-tests,busted-tests] + name: Cleanup and Luacov stats aggregator if: ${{ always() && (inputs.coverage == true || github.event_name == 'schedule') }} runs-on: ubuntu-22.04 diff --git a/.github/workflows/update-test-runtime-statistics.yml b/.github/workflows/update-test-runtime-statistics.yml new file mode 100644 index 000000000000..43e4017a518a --- /dev/null +++ b/.github/workflows/update-test-runtime-statistics.yml @@ -0,0 +1,35 @@ +name: Update test runtime statistics file for test scheduling +on: + workflow_dispatch: + schedule: + - cron: "1 0 * * SAT" + # push rule below needed for testing only + push: + branches: + - feat/test-run-scheduler + +jobs: + process-statistics: + name: Download statistics from GitHub and combine them + runs-on: ubuntu-22.04 + steps: + - name: Checkout source code + uses: actions/checkout@v4 + with: + token: ${{ secrets.PAT }} + + - name: Process statistics + uses: Kong/gateway-test-scheduler/analyze@b91bd7aec42bd13748652929f087be81d1d40843 # v1 + env: + GITHUB_TOKEN: ${{ secrets.PAT }} + with: + workflow-name: build_and_test.yml + test-file-runtime-file: .ci/runtimes.json + artifact-name-regexp: "^test-runtime-statistics-\\d+$" + + - name: Upload new runtimes file + uses: Kong/gh-storage/upload@v1 + env: + GITHUB_TOKEN: ${{ secrets.PAT }} + with: + repo-path: Kong/gateway-action-storage/main/.ci/runtimes.json diff --git a/spec/busted-ci-helper.lua b/spec/busted-ci-helper.lua new file mode 100644 
index 000000000000..a28b2f367eff --- /dev/null +++ b/spec/busted-ci-helper.lua @@ -0,0 +1,54 @@ +-- busted-ci-helper.lua + +local busted = require 'busted' +local cjson = require 'cjson' +local socket_unix = require 'socket.unix' + +local busted_event_path = os.getenv("BUSTED_EVENT_PATH") + +-- Function to recursively copy a table, skipping keys associated with functions +local function copyTable(original, copied) + copied = copied or {} + + for key, value in pairs(original) do + if type(value) == "table" then + copied[key] = copyTable(value, {}) + elseif type(value) ~= "function" then + copied[key] = value + end + end + + return copied +end + +if busted_event_path then + local sock = assert(socket_unix()) + assert(sock:connect(busted_event_path)) + + local events = {{ 'suite', 'reset' }, + { 'suite', 'start' }, + { 'suite', 'end' }, + { 'file', 'start' }, + { 'file', 'end' }, + { 'test', 'start' }, + { 'test', 'end' }, + { 'pending' }, + { 'failure', 'it' }, + { 'error', 'it' }, + { 'failure' }, + { 'error' }} + for _, event in ipairs(events) do + busted.subscribe(event, function (...) + local args = {} + for i, original in ipairs{...} do + if type(original) == "table" then + args[i] = copyTable(original) + elseif type(original) ~= "function" then + args[i] = original + end + end + + sock:send(cjson.encode({ event = event[1] .. (event[2] and ":" .. event[2] or ""), args = args }) .. "\n") + end) + end +end diff --git a/spec/busted-log-failed.lua b/spec/busted-log-failed.lua deleted file mode 100644 index 7bfe6804b83f..000000000000 --- a/spec/busted-log-failed.lua +++ /dev/null @@ -1,33 +0,0 @@ --- busted-log-failed.lua - --- Log which test files run by busted had failures or errors in a --- file. The file to use for logging is specified in the --- FAILED_TEST_FILES_FILE environment variable. This is used to --- reduce test rerun times for flaky tests. 
- -local busted = require 'busted' -local failed_files_file = assert(os.getenv("FAILED_TEST_FILES_FILE"), - "FAILED_TEST_FILES_FILE environment variable not set") - -local FAILED_FILES = {} - -busted.subscribe({ 'failure' }, function(element, parent, message, debug) - FAILED_FILES[element.trace.source] = true -end) - -busted.subscribe({ 'error' }, function(element, parent, message, debug) - FAILED_FILES[element.trace.source] = true -end) - -busted.subscribe({ 'suite', 'end' }, function(suite, count, total) - local output = assert(io.open(failed_files_file, "w")) - if next(FAILED_FILES) then - for failed_file in pairs(FAILED_FILES) do - if failed_file:sub(1, 1) == '@' then - failed_file = failed_file:sub(2) - end - assert(output:write(failed_file .. "\n")) - end - end - output:close() -end)