From 0eea70299e5ed0f69c907f49cefed6a7f3a1017f Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 14 May 2024 01:22:17 -0700 Subject: [PATCH 01/79] update labels --- .github/workflows/pull_request.yml | 25 ++++++++++--------------- .github/workflows/reusable_build.yml | 27 +++++++++++++++++++++++++-- .github/workflows/reusable_docker.yml | 12 ++++++------ .github/workflows/reusable_test.yml | 27 +++++++++++++++++++++++++-- 4 files changed, 66 insertions(+), 25 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index fe21f043ab82..7be03fec36ef 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -17,14 +17,14 @@ on: # yamllint disable-line rule:truthy jobs: RunConfig: - runs-on: [self-hosted, style-checker-aarch64] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-app-docker-ce] outputs: data: ${{ steps.runconfig.outputs.CI_DATA }} steps: - name: DebugInfo uses: hmarr/debug-action@f7318c783045ac39ed9bb497e22ce835fdafbfe6 - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f with: clear-repository: true # to ensure correct digests fetch-depth: 0 # to get version @@ -75,25 +75,20 @@ jobs: test_name: Style check runner_type: style-checker run_command: | - python3 style_check.py + python3 style_check.py --no-push data: ${{ needs.RunConfig.outputs.data }} - secrets: - secret_envs: | - ROBOT_CLICKHOUSE_SSH_KEY<> ${GITHUB_OUTPUT} + env: + input: ${{ inputs.runner_type }} + Build: name: Build-${{inputs.build_name}} if: ${{ contains(fromJson(inputs.data).jobs_data.jobs_to_do, inputs.build_name) || inputs.force }} env: GITHUB_JOB_OVERRIDDEN: Build-${{inputs.build_name}} - runs-on: [self-hosted, '${{inputs.runner_type}}'] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f with: clear-repository: true ref: ${{ fromJson(inputs.data).git_ref }} diff --git a/.github/workflows/reusable_docker.yml b/.github/workflows/reusable_docker.yml index 3fe1a8883c60..8c9a3bb47e0b 100644 --- a/.github/workflows/reusable_docker.yml +++ b/.github/workflows/reusable_docker.yml @@ -13,12 +13,12 @@ name: Build docker images default: false jobs: DockerBuildAarch64: - runs-on: [self-hosted, style-checker-aarch64] + runs-on: [altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-app-docker-ce] if: | !failure() && !cancelled() && toJson(fromJson(inputs.data).docker_data.missing_aarch64) != '[]' steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f with: ref: ${{ fromJson(inputs.data).git_ref }} - name: Build images @@ -28,12 +28,12 @@ jobs: --image-tags '${{ toJson(fromJson(inputs.data).docker_data.images) }}' \ --missing-images '${{ toJson(fromJson(inputs.data).docker_data.missing_aarch64) }}' DockerBuildAmd64: - runs-on: [self-hosted, style-checker] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce] if: | !failure() && !cancelled() && toJson(fromJson(inputs.data).docker_data.missing_amd64) != '[]' steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f with: ref: ${{ 
fromJson(inputs.data).git_ref }} - name: Build images @@ -44,12 +44,12 @@ jobs: --missing-images '${{ toJson(fromJson(inputs.data).docker_data.missing_amd64) }}' DockerMultiArchManifest: needs: [DockerBuildAmd64, DockerBuildAarch64] - runs-on: [self-hosted, style-checker] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce] if: | !failure() && !cancelled() && (toJson(fromJson(inputs.data).docker_data.missing_multi) != '[]' || inputs.set_latest) steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f with: ref: ${{ fromJson(inputs.data).git_ref }} - name: Build images diff --git a/.github/workflows/reusable_test.yml b/.github/workflows/reusable_test.yml index e30ef863a862..9028205598e2 100644 --- a/.github/workflows/reusable_test.yml +++ b/.github/workflows/reusable_test.yml @@ -51,8 +51,31 @@ env: CHECK_NAME: ${{inputs.test_name}} jobs: + runner_labels_setup: + name: Compute proper runner labels for the rest of the jobs + runs-on: ubuntu-latest + outputs: + runner_labels: ${{ steps.setVariables.outputs.runner_labels }} + steps: + - id: setVariables + name: Prepare runner_labels variables for the later steps + run: | + + # Prepend self-hosted + input="self-hosted, ${input}" + + # Remove all whitespace + input="$(echo ${input} | tr -d [:space:])" + # Make something like a JSON array from comma-separated list + input="[ '${input//\,/\'\, \'}' ]" + + echo "runner_labels=$input" >> ${GITHUB_OUTPUT} + env: + input: ${{ inputs.runner_type }} + Test: - runs-on: [self-hosted, '${{inputs.runner_type}}'] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} if: ${{ !failure() && !cancelled() && contains(fromJson(inputs.data).jobs_data.jobs_to_do, inputs.test_name) }} name: ${{inputs.test_name}}${{ fromJson(inputs.data).jobs_data.jobs_params[inputs.test_name].num_batches > 1 && format('-{0}',matrix.batch) || '' }} env: @@ -63,7 +86,7 @@ jobs: batch: ${{ fromJson(inputs.data).jobs_data.jobs_params[inputs.test_name].batches }} steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f with: clear-repository: true ref: ${{ fromJson(inputs.data).git_ref }} From bcba7eca14f640083f680a859615f3baa3476d55 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 14 May 2024 03:00:26 -0700 Subject: [PATCH 02/79] Update dockerfiles to use altinityinfra, add sccache, and update release_branches --- .github/retry.sh | 22 + .github/workflows/regression.yml | 587 ++++++++++++++++++ .github/workflows/release_branches.yml | 151 +++-- .github/workflows/reusable_build.yml | 26 +- docker/packager/binary-builder/Dockerfile | 7 +- docker/packager/cctools/Dockerfile | 2 +- docker/packager/packager | 18 +- docker/test/base/Dockerfile | 2 +- docker/test/clickbench/Dockerfile | 2 +- docker/test/fasttest/Dockerfile | 4 +- docker/test/fuzzer/Dockerfile | 4 +- docker/test/integration/base/Dockerfile | 4 +- docker/test/keeper-jepsen/Dockerfile | 4 +- docker/test/libfuzzer/Dockerfile | 2 +- docker/test/performance-comparison/Dockerfile | 4 +- docker/test/server-jepsen/Dockerfile | 4 +- docker/test/sqllogic/Dockerfile | 4 +- docker/test/sqltest/Dockerfile | 4 +- docker/test/stateful/Dockerfile | 4 +- docker/test/stateless/Dockerfile | 4 +- docker/test/stress/Dockerfile | 4 +- docker/test/unit/Dockerfile | 2 +- docker/test/upgrade/Dockerfile | 2 +- 
tests/ci/build_check.py | 4 + tests/ci/env_helper.py | 8 +- 25 files changed, 781 insertions(+), 98 deletions(-) create mode 100755 .github/retry.sh create mode 100644 .github/workflows/regression.yml diff --git a/.github/retry.sh b/.github/retry.sh new file mode 100755 index 000000000000..566c2cf11315 --- /dev/null +++ b/.github/retry.sh @@ -0,0 +1,22 @@ +#!/bin/bash +# Execute command until exitcode is 0 or +# maximum number of retries is reached +# Example: +# ./retry +retries=$1 +delay=$2 +command="${@:3}" +exitcode=0 +try=0 +until [ "$try" -ge $retries ] +do + echo "$command" + eval "$command" + exitcode=$? + if [ $exitcode -eq 0 ]; then + break + fi + try=$((try+1)) + sleep $2 +done +exit $exitcode diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml new file mode 100644 index 000000000000..39e789e63e9a --- /dev/null +++ b/.github/workflows/regression.yml @@ -0,0 +1,587 @@ +name: Regression test workflow - Release +'on': + workflow_call: + inputs: + runner_type: + description: the label of runner to use, can be a simple string or a comma-separated list + required: true + type: string + commit: + description: commit hash of the regression tests. + required: true + type: string + arch: + description: arch to run the tests on. + required: true + type: string + timeout_minutes: + description: Maximum number of minutes to let workflow run before GitHub cancels it. + default: 210 + type: number + build_sha: + description: commit sha of the workflow run for artifact upload. + required: true + type: string + checkout_depth: + description: the value of the git shallow checkout + required: false + type: number + default: 1 + submodules: + description: if the submodules should be checked out + required: false + type: boolean + default: false + additional_envs: + description: additional ENV variables to setup the job + type: string + secrets: + secret_envs: + description: if given, it's passed to the environments + required: false + AWS_SECRET_ACCESS_KEY: + description: the access key to the aws param store. + required: true + AWS_ACCESS_KEY_ID: + description: the access key id to the aws param store. + required: true + AWS_DEFAULT_REGION: + description: the region of the aws param store. + required: true + AWS_REPORT_KEY_ID: + description: aws s3 key id used for regression test reports. + required: true + AWS_REPORT_SECRET_ACCESS_KEY: + description: aws s3 secret access key used for regression test reports. + required: true + AWS_REPORT_REGION: + description: aws s3 region used for regression test reports. + required: true + DOCKER_USERNAME: + description: username of the docker user. + required: true + DOCKER_PASSWORD: + description: password to the docker user. + required: true + REGRESSION_AWS_S3_BUCKET: + description: aws s3 bucket used for regression tests. + required: true + REGRESSION_AWS_S3_KEY_ID: + description: aws s3 key id used for regression tests. + required: true + REGRESSION_AWS_S3_SECRET_ACCESS_KEY: + description: aws s3 secret access key used for regression tests. + required: true + REGRESSION_AWS_S3_REGION: + description: aws s3 region used for regression tests. + required: true + REGRESSION_GCS_KEY_ID: + description: gcs key id used for regression tests. + required: true + REGRESSION_GCS_KEY_SECRET: + description: gcs key secret used for regression tests. + required: true + REGRESSION_GCS_URI: + description: gcs uri used for regression tests. 
+ required: true + +env: + # Force the stdout and stderr streams to be unbuffered + PYTHONUNBUFFERED: 1 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} + args: --test-to-end + --no-colors + --local + --collect-service-logs + --output classic + --parallel 1 + --log raw.log + artifacts: builds + artifact_paths: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + build_sha: ${{ inputs.build_sha }} + pr_number: ${{ github.event.number }} + event_name: ${{ github.event_name }} + +jobs: + runner_labels_setup: + name: Compute proper runner labels for the rest of the jobs + runs-on: ubuntu-latest + outputs: + runner_labels: ${{ steps.setVariables.outputs.runner_labels }} + steps: + - id: setVariables + name: Prepare runner_labels variables for the later steps + run: | + + # Prepend self-hosted + input="self-hosted, ${input}" + + # Remove all whitespace + input="$(echo ${input} | tr -d [:space:])" + # Make something like a JSON array from comma-separated list + input="[ '${input//\,/\'\, \'}' ]" + + echo "runner_labels=$input" >> ${GITHUB_OUTPUT} + env: + input: ${{ inputs.runner_type }} + + Common: + strategy: + fail-fast: false + matrix: + SUITE: [aes_encryption, aggregate_functions, alter, atomic_insert, base_58, clickhouse_keeper, data_types, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, key_value, lightweight_delete, memory, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=${{ matrix.SUITE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + name: build_urls_package_${{ inputs.arch }} + - name: Rename report + run: | + mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-${{ 
inputs.arch }}-artifacts + path: ${{ env.artifact_paths}} + + Alter: + strategy: + fail-fast: false + matrix: + ONLY: [replace, attach, move] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=alter-${{ matrix.ONLY }}_partition + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + name: build_urls_package_${{ inputs.arch }} + - name: Rename report + run: | + mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u alter/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --only "/alter/${{ matrix.ONLY }} partition/*" + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths}} + + Benchmark: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, aws_s3, gcs] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=ontime_benchmark + STORAGE=/${{ matrix.STORAGE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + name: build_urls_package_${{ inputs.arch }} + - name: Rename report + run: | + mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/benchmark.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --storage ${{ matrix.STORAGE }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ 
secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: benchmark-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths }} + + ClickHouseKeeperSSL: + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=clickhouse_keeper + STORAGE=/ssl + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + name: build_urls_package_${{ inputs.arch }} + - name: Rename report + run: | + mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --ssl + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-${{ inputs.arch }}-ssl-artifacts + path: ${{ env.artifact_paths }} + + LDAP: + strategy: + fail-fast: false + matrix: + SUITE: [authentication, external_user_directory, role_mapping] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=ldap/${{ matrix.SUITE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + name: build_urls_package_${{ inputs.arch }} + - name: Rename report + run: | + mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env 
$GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ldap-${{ matrix.SUITE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths }} + + Parquet: + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=parquet + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + name: build_urls_package_${{ inputs.arch }} + - name: Rename report + run: | + mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths }} + + ParquetS3: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, aws_s3] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=parquet + STORAGE=${{ matrix.STORAGE}} + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + name: build_urls_package_${{ inputs.arch }} + - name: Rename report + run: | + mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ 
env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --storage ${{ matrix.STORAGE }} + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-${{ env.STORAGE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths }} + + S3: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, aws_s3, gcs] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=s3 + STORAGE=/${{ matrix.STORAGE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + name: build_urls_package_${{ inputs.arch }} + - name: Rename report + run: | + mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --storage ${{ matrix.STORAGE }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths}} + + TieredStorage: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, s3amazon, s3gcs] + needs: 
[runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v3 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=tiered_storage + STORAGE=/${{ matrix.STORAGE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + name: build_urls_package_${{ inputs.arch }} + - name: Rename report + run: | + mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-uri https://s3.${{ secrets.REGRESSION_AWS_S3_REGION}}.amazonaws.com/${{ secrets.REGRESSION_AWS_S3_BUCKET }}/data/ + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --with-${{ matrix.STORAGE }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths}} diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 4d45c8d8d4b4..64e5f504fd79 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -6,20 +6,30 @@ env: PYTHONUNBUFFERED: 1 on: # yamllint disable-line rule:truthy + pull_request: + types: + - synchronize + - reopened + - opened + branches: + # Anything/24.3 (e.g customizations/24.3.x) + - '**/24.3*' + release: + types: + - published + - prereleased push: branches: - # 22.1 and 22.10 - - '2[1-9].[1-9][0-9]' - - '2[1-9].[1-9]' + - 'releases/24.3**' jobs: RunConfig: - runs-on: [self-hosted, style-checker-aarch64] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-app-docker-ce] outputs: data: ${{ steps.runconfig.outputs.CI_DATA }} steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f with: clear-repository: true # to ensure correct digests fetch-depth: 0 # to get version @@ -66,7 +76,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Compatibility check (amd64) - runner_type: style-checker + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} 
CompatibilityCheckAarch64: needs: [RunConfig, BuilderDebAarch64] @@ -74,7 +84,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Compatibility check (aarch64) - runner_type: style-checker + runner_type: altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} ######################################################################################### #################################### ORDINARY BUILDS #################################### @@ -163,7 +173,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Docker server image - runner_type: style-checker + runner_type: altinity-on-demand, altinity-type-cpx41, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} DockerKeeperImage: needs: [RunConfig, BuilderDebRelease, BuilderDebAarch64] @@ -171,7 +181,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Docker keeper image - runner_type: style-checker + runner_type: altinity-on-demand, altinity-type-cpx41, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} ############################################################################################ ##################################### BUILD REPORTER ####################################### @@ -191,7 +201,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: ClickHouse build check - runner_type: style-checker-aarch64 + runner_type: altinity-on-demand, altinity-type-cax11, altinity-in-hel1, altinity-image-arm-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} BuilderSpecialReport: # run report check for failed builds to indicate the CI error @@ -203,7 +213,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: ClickHouse special build check - runner_type: style-checker-aarch64 + runner_type: altinity-on-demand, altinity-type-cax11, altinity-in-hel1, altinity-image-arm-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} MarkReleaseReady: if: ${{ !failure() && !cancelled() }} @@ -230,7 +240,7 @@ jobs: run: exit 1 - name: Check out repository code if: ${{ ! (contains(needs.*.result, 'skipped') || contains(needs.*.result, 'failure')) }} - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f - name: Mark Commit Release Ready if: ${{ ! 
(contains(needs.*.result, 'skipped') || contains(needs.*.result, 'failure')) }} run: | @@ -245,7 +255,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Install packages (amd64) - runner_type: style-checker + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} run_command: | python3 install_check.py "$CHECK_NAME" @@ -255,7 +265,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Install packages (arm64) - runner_type: style-checker-aarch64 + runner_type: altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} run_command: | python3 install_check.py "$CHECK_NAME" @@ -268,7 +278,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Stateless tests (release) - runner_type: func-tester + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} FunctionalStatelessTestAarch64: needs: [RunConfig, BuilderDebAarch64] @@ -276,7 +286,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Stateless tests (aarch64) - runner_type: func-tester-aarch64 + runner_type: altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} FunctionalStatelessTestAsan: needs: [RunConfig, BuilderDebAsan] @@ -284,7 +294,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Stateless tests (asan) - runner_type: func-tester + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} FunctionalStatelessTestTsan: needs: [RunConfig, BuilderDebTsan] @@ -292,7 +302,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Stateless tests (tsan) - runner_type: func-tester + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} FunctionalStatelessTestMsan: needs: [RunConfig, BuilderDebMsan] @@ -300,7 +310,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Stateless tests (msan) - runner_type: func-tester + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} FunctionalStatelessTestUBsan: needs: [RunConfig, BuilderDebUBsan] @@ -308,7 +318,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Stateless tests (ubsan) - runner_type: func-tester + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} FunctionalStatelessTestDebug: needs: [RunConfig, BuilderDebDebug] @@ -316,7 +326,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Stateless tests (debug) - runner_type: func-tester + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} ############################################################################################## ############################ FUNCTIONAl STATEFUL TESTS ####################################### @@ -327,7 +337,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Stateful tests (release) - runner_type: func-tester + runner_type: altinity-on-demand, 
altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} FunctionalStatefulTestAarch64: needs: [RunConfig, BuilderDebAarch64] @@ -335,7 +345,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Stateful tests (aarch64) - runner_type: func-tester-aarch64 + runner_type: altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} FunctionalStatefulTestAsan: needs: [RunConfig, BuilderDebAsan] @@ -343,7 +353,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Stateful tests (asan) - runner_type: func-tester + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} FunctionalStatefulTestTsan: needs: [RunConfig, BuilderDebTsan] @@ -351,7 +361,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Stateful tests (tsan) - runner_type: func-tester + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} FunctionalStatefulTestMsan: needs: [RunConfig, BuilderDebMsan] @@ -359,7 +369,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Stateful tests (msan) - runner_type: func-tester + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} FunctionalStatefulTestUBsan: needs: [RunConfig, BuilderDebUBsan] @@ -367,7 +377,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Stateful tests (ubsan) - runner_type: func-tester + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} FunctionalStatefulTestDebug: needs: [RunConfig, BuilderDebDebug] @@ -375,7 +385,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Stateful tests (debug) - runner_type: func-tester + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} ############################################################################################## ######################################### STRESS TESTS ####################################### @@ -386,7 +396,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Stress test (asan) - runner_type: stress-tester + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} StressTestTsan: needs: [RunConfig, BuilderDebTsan] @@ -394,7 +404,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Stress test (tsan) - runner_type: stress-tester + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} StressTestMsan: needs: [RunConfig, BuilderDebMsan] @@ -402,7 +412,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Stress test (msan) - runner_type: stress-tester + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} StressTestUBsan: needs: [RunConfig, BuilderDebUBsan] @@ -410,7 +420,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Stress test (ubsan) - runner_type: stress-tester + 
runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} StressTestDebug: needs: [RunConfig, BuilderDebDebug] @@ -418,7 +428,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Stress test (debug) - runner_type: stress-tester + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} ############################################################################################# ############################# INTEGRATION TESTS ############################################# @@ -429,7 +439,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Integration tests (asan) - runner_type: stress-tester + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} IntegrationTestsAnalyzerAsan: needs: [RunConfig, BuilderDebAsan] @@ -437,7 +447,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Integration tests (asan, old analyzer) - runner_type: stress-tester + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} IntegrationTestsTsan: needs: [RunConfig, BuilderDebTsan] @@ -445,7 +455,7 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Integration tests (tsan) - runner_type: stress-tester + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} IntegrationTestsRelease: needs: [RunConfig, BuilderDebRelease] @@ -453,8 +463,68 @@ jobs: uses: ./.github/workflows/reusable_test.yml with: test_name: Integration tests (release) - runner_type: stress-tester + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce data: ${{ needs.RunConfig.outputs.data }} +############################################################################################# +##################################### REGRESSION TESTS ###################################### +############################################################################################# + RegressionTestsRelease: + needs: [BuilderReport] + uses: ./.github/workflows/regression.yml + secrets: inherit + with: + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-app-docker-ce, altinity-setup-regression + commit: 17a81c07fc1f41fbee651e0ef0ca4b44e537e5b1 + arch: release + build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} + RegressionTestsAarch64: + needs: [BuilderReport] + uses: ./.github/workflows/regression.yml + secrets: inherit + with: + runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-app-docker-ce, altinity-setup-regression + commit: 17a81c07fc1f41fbee651e0ef0ca4b44e537e5b1 + arch: aarch64 + build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} + SignRelease: + needs: [BuilderDebRelease] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx41, altinity-in-ash, altinity-image-x86-app-docker-ce] + timeout-minutes: 180 + steps: + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + TEMP_PATH=${{runner.temp}}/signed + REPORTS_PATH=${{runner.temp}}/reports_dir + EOF + - name: Clear repository + run: | + sudo rm -fr "$GITHUB_WORKSPACE" && 
mkdir "$GITHUB_WORKSPACE" + - name: Check out repository code + uses: actions/checkout@v2 + - name: Download json reports + uses: actions/download-artifact@v2 + with: + path: ${{ env.REPORTS_PATH }} + - name: Sign release + env: + GPG_BINARY_SIGNING_KEY: ${{ secrets.GPG_BINARY_SIGNING_KEY }} + GPG_BINARY_SIGNING_PASSPHRASE: ${{ secrets.GPG_BINARY_SIGNING_PASSPHRASE }} + REPORTS_PATH: ${{ env.REPORTS_PATH }} + run: | + cd "$GITHUB_WORKSPACE/tests/ci" + python3 sign_release.py + - name: Upload signed hashes + uses: actions/upload-artifact@v2 + with: + name: signed-hashes + path: ${{ env.TEMP_PATH }}/*.gpg + - name: Cleanup + if: always() + run: | + docker ps --quiet | xargs --no-run-if-empty docker kill ||: + docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: + sudo rm -fr "$TEMP_PATH" FinishCheck: if: ${{ !failure() && !cancelled() }} needs: @@ -487,10 +557,13 @@ jobs: - IntegrationTestsRelease - CompatibilityCheckX86 - CompatibilityCheckAarch64 - runs-on: [self-hosted, style-checker] + - RegressionTestsRelease + - RegressionTestsAarch64 + - SignRelease + runs-on: [self-hosted, altinity-on-demand, altinity-type-cax11, altinity-image-arm-app-docker-ce, altinity-setup-regression] steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f with: clear-repository: true - name: Finish label diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml index 6c15a0a383aa..d417ef795103 100644 --- a/.github/workflows/reusable_build.yml +++ b/.github/workflows/reusable_build.yml @@ -4,6 +4,7 @@ env: # Force the stdout and stderr streams to be unbuffered PYTHONUNBUFFERED: 1 + CLICKHOUSE_STABLE_VERSION_SUFFIX: altinitystable name: Build ClickHouse 'on': @@ -35,35 +36,12 @@ name: Build ClickHouse type: string jobs: - runner_labels_setup: - name: Compute proper runner labels for the rest of the jobs - runs-on: ubuntu-latest - outputs: - runner_labels: ${{ steps.setVariables.outputs.runner_labels }} - steps: - - id: setVariables - name: Prepare runner_labels variables for the later steps - run: | - - # Prepend self-hosted - input="self-hosted, ${input}" - - # Remove all whitespace - input="$(echo ${input} | tr -d [:space:])" - # Make something like a JSON array from comma-separated list - input="[ '${input//\,/\'\, \'}' ]" - - echo "runner_labels=$input" >> ${GITHUB_OUTPUT} - env: - input: ${{ inputs.runner_type }} - Build: name: Build-${{inputs.build_name}} if: ${{ contains(fromJson(inputs.data).jobs_data.jobs_to_do, inputs.build_name) || inputs.force }} env: GITHUB_JOB_OVERRIDDEN: Build-${{inputs.build_name}} - needs: [runner_labels_setup] - runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + runs-on: [self-hosted, altinity-setup-builder, altinity-type-ccx53, altinity-on-demand, altinity-in-ash, altinity-image-x86-app-docker-ce] steps: - name: Check out repository code uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f diff --git a/docker/packager/binary-builder/Dockerfile b/docker/packager/binary-builder/Dockerfile index 73ec4275f12b..44897d572195 100644 --- a/docker/packager/binary-builder/Dockerfile +++ b/docker/packager/binary-builder/Dockerfile @@ -1,11 +1,12 @@ -# docker build -t clickhouse/binary-builder . +# docker build -t altinityinfra/binary-builder . 
ARG FROM_TAG=latest -FROM clickhouse/fasttest:$FROM_TAG +FROM altinityinfra/fasttest:$FROM_TAG ENV CC=clang-${LLVM_VERSION} ENV CXX=clang++-${LLVM_VERSION} # If the cctools is updated, then first build it in the CI, then update here in a different commit -COPY --from=clickhouse/cctools:d9e3596e706b /cctools /cctools +# TODO: (mtkachenko) change tag here. Decide what tag to use. +COPY --from=altinityinfra/cctools:$FROM_TAG /cctools /cctools # Rust toolchain and libraries ENV RUSTUP_HOME=/rust/rustup diff --git a/docker/packager/cctools/Dockerfile b/docker/packager/cctools/Dockerfile index d986c6a3c86a..2f6dbba45df4 100644 --- a/docker/packager/cctools/Dockerfile +++ b/docker/packager/cctools/Dockerfile @@ -2,7 +2,7 @@ # It's based on the assumption that we don't care of the cctools version so much # It event does not depend on the clickhouse/fasttest in the `docker/images.json` ARG FROM_TAG=latest -FROM clickhouse/fasttest:$FROM_TAG as builder +FROM altinityinfra/fasttest:$FROM_TAG as builder ENV CC=clang-${LLVM_VERSION} ENV CXX=clang++-${LLVM_VERSION} diff --git a/docker/packager/packager b/docker/packager/packager index 23fc26bc1a41..aeb413f567d4 100755 --- a/docker/packager/packager +++ b/docker/packager/packager @@ -10,7 +10,7 @@ from typing import List, Optional SCRIPT_PATH = Path(__file__).absolute() IMAGE_TYPE = "binary-builder" -IMAGE_NAME = f"clickhouse/{IMAGE_TYPE}" +IMAGE_NAME = f"altinityinfra/{IMAGE_TYPE}" class BuildException(Exception): @@ -130,9 +130,11 @@ def parse_env_variables( sanitizer: str, package_type: str, cache: str, + s3_access_key_id: str, s3_bucket: str, s3_directory: str, s3_rw_access: bool, + s3_secret_access_key: str, clang_tidy: bool, version: str, official: bool, @@ -317,6 +319,10 @@ def parse_env_variables( result.append(f"SCCACHE_S3_KEY_PREFIX={sccache_dir}") if not s3_rw_access: result.append("SCCACHE_S3_NO_CREDENTIALS=true") + if s3_access_key_id: + result.append(f"AWS_ACCESS_KEY_ID={s3_access_key_id}") + if s3_secret_access_key: + result.append(f"AWS_SECRET_ACCESS_KEY={s3_secret_access_key}") if clang_tidy: # `CTCACHE_DIR` has the same purpose as the `CCACHE_DIR` above. @@ -442,6 +448,14 @@ def parse_args() -> argparse.Namespace: type=dir_name, help="a directory with ccache", ) + parser.add_argument( + "--s3-access-key-id", + help="an S3 access key id used for sscache bucket", + ) + parser.add_argument( + "--s3-secret-access-key", + help="an S3 secret access key used for sscache bucket", + ) parser.add_argument( "--s3-bucket", help="an S3 bucket used for sscache and clang-tidy-cache", @@ -525,9 +539,11 @@ def main() -> None: args.sanitizer, args.package_type, args.cache, + args.s3_access_key_id, args.s3_bucket, args.s3_directory, args.s3_rw_access, + args.s3_secret_access_key, args.clang_tidy, args.version, args.official, diff --git a/docker/test/base/Dockerfile b/docker/test/base/Dockerfile index 2317f84e0cbd..569db18788fb 100644 --- a/docker/test/base/Dockerfile +++ b/docker/test/base/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 # docker build -t clickhouse/test-base . 
ARG FROM_TAG=latest -FROM clickhouse/test-util:$FROM_TAG +FROM altinityinfra/test-util:$FROM_TAG RUN apt-get update \ && apt-get install \ diff --git a/docker/test/clickbench/Dockerfile b/docker/test/clickbench/Dockerfile index 0b6b1736e031..214191a8b488 100644 --- a/docker/test/clickbench/Dockerfile +++ b/docker/test/clickbench/Dockerfile @@ -1,5 +1,5 @@ ARG FROM_TAG=latest -FROM clickhouse/test-base:$FROM_TAG +FROM altinityinfra/test-base:$FROM_TAG ENV TZ=Europe/Amsterdam RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone diff --git a/docker/test/fasttest/Dockerfile b/docker/test/fasttest/Dockerfile index 912ff191e57e..62d7dec60531 100644 --- a/docker/test/fasttest/Dockerfile +++ b/docker/test/fasttest/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 -# docker build -t clickhouse/fasttest . +# docker build -t altinityinfra/fasttest . ARG FROM_TAG=latest -FROM clickhouse/test-util:$FROM_TAG +FROM altinityinfra/test-util:$FROM_TAG RUN apt-get update \ && apt-get install \ diff --git a/docker/test/fuzzer/Dockerfile b/docker/test/fuzzer/Dockerfile index d3f78ac1d95b..898f3c053ffa 100644 --- a/docker/test/fuzzer/Dockerfile +++ b/docker/test/fuzzer/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 -# docker build -t clickhouse/fuzzer . +# docker build -t altinityinfra/fuzzer . ARG FROM_TAG=latest -FROM clickhouse/test-base:$FROM_TAG +FROM altinityinfra/test-base:$FROM_TAG # ARG for quick switch to a given ubuntu mirror ARG apt_archive="http://archive.ubuntu.com" diff --git a/docker/test/integration/base/Dockerfile b/docker/test/integration/base/Dockerfile index 270b40e23a6d..a8c7c2e8257a 100644 --- a/docker/test/integration/base/Dockerfile +++ b/docker/test/integration/base/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 -# docker build -t clickhouse/integration-test . +# docker build -t altinityinfra/integration-test . ARG FROM_TAG=latest -FROM clickhouse/test-base:$FROM_TAG +FROM altinityinfra/test-base:$FROM_TAG SHELL ["/bin/bash", "-c"] diff --git a/docker/test/keeper-jepsen/Dockerfile b/docker/test/keeper-jepsen/Dockerfile index 3c5d0a6ecb42..d3080a526711 100644 --- a/docker/test/keeper-jepsen/Dockerfile +++ b/docker/test/keeper-jepsen/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 -# docker build -t clickhouse/keeper-jepsen-test . +# docker build -t altinityinfra/keeper-jepsen-test . ARG FROM_TAG=latest -FROM clickhouse/test-base:$FROM_TAG +FROM altinityinfra/test-base:$FROM_TAG ENV DEBIAN_FRONTEND=noninteractive ENV CLOJURE_VERSION=1.10.3.814 diff --git a/docker/test/libfuzzer/Dockerfile b/docker/test/libfuzzer/Dockerfile index c9802a0e44e7..3581d8605653 100644 --- a/docker/test/libfuzzer/Dockerfile +++ b/docker/test/libfuzzer/Dockerfile @@ -1,5 +1,5 @@ ARG FROM_TAG=latest -FROM clickhouse/test-base:$FROM_TAG +FROM altinityinfra/test-base:$FROM_TAG # ARG for quick switch to a given ubuntu mirror ARG apt_archive="http://archive.ubuntu.com" diff --git a/docker/test/performance-comparison/Dockerfile b/docker/test/performance-comparison/Dockerfile index 1835900b316c..a27e16587ff0 100644 --- a/docker/test/performance-comparison/Dockerfile +++ b/docker/test/performance-comparison/Dockerfile @@ -1,7 +1,7 @@ -# docker build -t clickhouse/performance-comparison . +# docker build -t altinityinfra/performance-comparison . 
ARG FROM_TAG=latest -FROM clickhouse/test-base:$FROM_TAG +FROM altinityinfra/test-base:$FROM_TAG RUN apt-get update \ && DEBIAN_FRONTEND=noninteractive apt-get install --yes --no-install-recommends \ diff --git a/docker/test/server-jepsen/Dockerfile b/docker/test/server-jepsen/Dockerfile index fd70fc457020..5207f31b953f 100644 --- a/docker/test/server-jepsen/Dockerfile +++ b/docker/test/server-jepsen/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 -# docker build -t clickhouse/server-jepsen-test . +# docker build -t altinityinfra/server-jepsen-test . ARG FROM_TAG=latest -FROM clickhouse/test-base:$FROM_TAG +FROM altinityinfra/test-base:$FROM_TAG ENV DEBIAN_FRONTEND=noninteractive ENV CLOJURE_VERSION=1.10.3.814 diff --git a/docker/test/sqllogic/Dockerfile b/docker/test/sqllogic/Dockerfile index 1ea1e52e6fab..993bdc191e8f 100644 --- a/docker/test/sqllogic/Dockerfile +++ b/docker/test/sqllogic/Dockerfile @@ -1,6 +1,6 @@ -# docker build -t clickhouse/sqllogic-test . +# docker build -t altinityinfra/sqllogic-test . ARG FROM_TAG=latest -FROM clickhouse/test-base:$FROM_TAG +FROM altinityinfra/test-base:$FROM_TAG RUN apt-get update --yes \ && env DEBIAN_FRONTEND=noninteractive \ diff --git a/docker/test/sqltest/Dockerfile b/docker/test/sqltest/Dockerfile index 7f59f65761fd..773172b8d5e5 100644 --- a/docker/test/sqltest/Dockerfile +++ b/docker/test/sqltest/Dockerfile @@ -1,6 +1,6 @@ -# docker build -t clickhouse/sqltest . +# docker build -t altinityinfra/sqltest . ARG FROM_TAG=latest -FROM clickhouse/test-base:$FROM_TAG +FROM altinityinfra/test-base:$FROM_TAG RUN apt-get update --yes \ && env DEBIAN_FRONTEND=noninteractive \ diff --git a/docker/test/stateful/Dockerfile b/docker/test/stateful/Dockerfile index 355e70f180e9..c40fbf46fff1 100644 --- a/docker/test/stateful/Dockerfile +++ b/docker/test/stateful/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #47031 -# docker build -t clickhouse/stateful-test . +# docker build -t altinityinfra/stateful-test . ARG FROM_TAG=latest -FROM clickhouse/stateless-test:$FROM_TAG +FROM altinityinfra/stateless-test:$FROM_TAG RUN apt-get update -y \ && env DEBIAN_FRONTEND=noninteractive \ diff --git a/docker/test/stateless/Dockerfile b/docker/test/stateless/Dockerfile index cd8864c62998..aabf4d02faa5 100644 --- a/docker/test/stateless/Dockerfile +++ b/docker/test/stateless/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 -# docker build -t clickhouse/stateless-test . +# docker build -t altinityinfra/stateless-test . ARG FROM_TAG=latest -FROM clickhouse/test-base:$FROM_TAG +FROM altinityinfra/test-base:$FROM_TAG ARG odbc_driver_url="https://github.com/ClickHouse/clickhouse-odbc/releases/download/v1.1.6.20200320/clickhouse-odbc-1.1.6-Linux.tar.gz" diff --git a/docker/test/stress/Dockerfile b/docker/test/stress/Dockerfile index 0f81a1cd07fb..507bf90bc2dd 100644 --- a/docker/test/stress/Dockerfile +++ b/docker/test/stress/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 -# docker build -t clickhouse/stress-test . +# docker build -t altinityinfra/stress-test . ARG FROM_TAG=latest -FROM clickhouse/stateful-test:$FROM_TAG +FROM altinityinfra/stateful-test:$FROM_TAG RUN apt-get update -y \ && env DEBIAN_FRONTEND=noninteractive \ diff --git a/docker/test/unit/Dockerfile b/docker/test/unit/Dockerfile index cf5ba1eec7fa..804441e1d4d7 100644 --- a/docker/test/unit/Dockerfile +++ b/docker/test/unit/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 # docker build -t clickhouse/unit-test . 
ARG FROM_TAG=latest -FROM clickhouse/stateless-test:$FROM_TAG +FROM altinityinfra/stateless-test:$FROM_TAG RUN apt-get install gdb diff --git a/docker/test/upgrade/Dockerfile b/docker/test/upgrade/Dockerfile index 78d912fd0312..22e0486e890b 100644 --- a/docker/test/upgrade/Dockerfile +++ b/docker/test/upgrade/Dockerfile @@ -1,7 +1,7 @@ # rebuild in #33610 # docker build -t clickhouse/upgrade-check . ARG FROM_TAG=latest -FROM clickhouse/stateful-test:$FROM_TAG +FROM altinityinfra/stateful-test:$FROM_TAG RUN apt-get update -y \ && env DEBIAN_FRONTEND=noninteractive \ diff --git a/tests/ci/build_check.py b/tests/ci/build_check.py index f2a2ffc667bf..fa738a9bd942 100644 --- a/tests/ci/build_check.py +++ b/tests/ci/build_check.py @@ -13,7 +13,9 @@ from env_helper import ( REPO_COPY, + S3_ACCESS_KEY_ID, S3_BUILDS_BUCKET, + S3_SECRET_ACCESS_KEY, TEMP_PATH, ) from git_helper import Git @@ -72,6 +74,8 @@ def get_packager_cmd( cmd += " --cache=sccache" cmd += " --s3-rw-access" cmd += f" --s3-bucket={S3_BUILDS_BUCKET}" + cmd += f" --s3-access-key-id={S3_ACCESS_KEY_ID}" + cmd += f" --s3-secret-access-key={S3_SECRET_ACCESS_KEY}" cmd += f" --cargo-cache-dir={cargo_cache_dir}" if build_config.additional_pkgs: diff --git a/tests/ci/env_helper.py b/tests/ci/env_helper.py index 155a1acaca56..a8a00b02882b 100644 --- a/tests/ci/env_helper.py +++ b/tests/ci/env_helper.py @@ -19,7 +19,7 @@ CLOUDFLARE_TOKEN = os.getenv("CLOUDFLARE_TOKEN") GITHUB_EVENT_PATH = os.getenv("GITHUB_EVENT_PATH", "") GITHUB_JOB = os.getenv("GITHUB_JOB_OVERRIDDEN", "") or os.getenv("GITHUB_JOB", "local") -GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY", "ClickHouse/ClickHouse") +GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY", "Altinity/ClickHouse") GITHUB_RUN_ID = os.getenv("GITHUB_RUN_ID", "0") GITHUB_SERVER_URL = os.getenv("GITHUB_SERVER_URL", "https://github.com") GITHUB_WORKSPACE = os.getenv("GITHUB_WORKSPACE", git_root) @@ -27,8 +27,10 @@ IMAGES_PATH = os.getenv("IMAGES_PATH", TEMP_PATH) REPO_COPY = os.getenv("REPO_COPY", GITHUB_WORKSPACE) RUNNER_TEMP = os.getenv("RUNNER_TEMP", p.abspath(p.join(module_dir, "./tmp"))) -S3_BUILDS_BUCKET = os.getenv("S3_BUILDS_BUCKET", "clickhouse-builds") -S3_TEST_REPORTS_BUCKET = os.getenv("S3_TEST_REPORTS_BUCKET", "clickhouse-test-reports") +S3_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID") +S3_BUILDS_BUCKET = os.getenv("S3_BUILDS_BUCKET", "altinity-build-artifacts") +S3_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY") +S3_TEST_REPORTS_BUCKET = os.getenv("S3_TEST_REPORTS_BUCKET", "altinity-build-artifacts") S3_URL = os.getenv("S3_URL", "https://s3.amazonaws.com") S3_DOWNLOAD = os.getenv("S3_DOWNLOAD", S3_URL) S3_ARTIFACT_DOWNLOAD_TEMPLATE = ( From 93d53faff27c2ef94dabc2933a896a9d1c625618 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 14 May 2024 03:07:16 -0700 Subject: [PATCH 03/79] fix typo in commit sha --- .github/workflows/release_branches.yml | 6 +++--- .github/workflows/reusable_build.yml | 2 +- .github/workflows/reusable_docker.yml | 6 +++--- .github/workflows/reusable_test.yml | 2 +- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 64e5f504fd79..0b2353980376 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -29,7 +29,7 @@ jobs: data: ${{ steps.runconfig.outputs.CI_DATA }} steps: - name: Check out repository code - uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f + uses: 
Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true # to ensure correct digests fetch-depth: 0 # to get version @@ -240,7 +240,7 @@ jobs: run: exit 1 - name: Check out repository code if: ${{ ! (contains(needs.*.result, 'skipped') || contains(needs.*.result, 'failure')) }} - uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 - name: Mark Commit Release Ready if: ${{ ! (contains(needs.*.result, 'skipped') || contains(needs.*.result, 'failure')) }} run: | @@ -563,7 +563,7 @@ jobs: runs-on: [self-hosted, altinity-on-demand, altinity-type-cax11, altinity-image-arm-app-docker-ce, altinity-setup-regression] steps: - name: Check out repository code - uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Finish label diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml index d417ef795103..45177ca2c132 100644 --- a/.github/workflows/reusable_build.yml +++ b/.github/workflows/reusable_build.yml @@ -44,7 +44,7 @@ jobs: runs-on: [self-hosted, altinity-setup-builder, altinity-type-ccx53, altinity-on-demand, altinity-in-ash, altinity-image-x86-app-docker-ce] steps: - name: Check out repository code - uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true ref: ${{ fromJson(inputs.data).git_ref }} diff --git a/.github/workflows/reusable_docker.yml b/.github/workflows/reusable_docker.yml index 8c9a3bb47e0b..e8d89691b75f 100644 --- a/.github/workflows/reusable_docker.yml +++ b/.github/workflows/reusable_docker.yml @@ -18,7 +18,7 @@ jobs: !failure() && !cancelled() && toJson(fromJson(inputs.data).docker_data.missing_aarch64) != '[]' steps: - name: Check out repository code - uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: ref: ${{ fromJson(inputs.data).git_ref }} - name: Build images @@ -33,7 +33,7 @@ jobs: !failure() && !cancelled() && toJson(fromJson(inputs.data).docker_data.missing_amd64) != '[]' steps: - name: Check out repository code - uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: ref: ${{ fromJson(inputs.data).git_ref }} - name: Build images @@ -49,7 +49,7 @@ jobs: !failure() && !cancelled() && (toJson(fromJson(inputs.data).docker_data.missing_multi) != '[]' || inputs.set_latest) steps: - name: Check out repository code - uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: ref: ${{ fromJson(inputs.data).git_ref }} - name: Build images diff --git a/.github/workflows/reusable_test.yml b/.github/workflows/reusable_test.yml index 9028205598e2..2f63ab69509d 100644 --- a/.github/workflows/reusable_test.yml +++ b/.github/workflows/reusable_test.yml @@ -86,7 +86,7 @@ jobs: batch: ${{ fromJson(inputs.data).jobs_data.jobs_params[inputs.test_name].batches }} steps: - name: Check out repository code - uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true ref: ${{ fromJson(inputs.data).git_ref }} From a4671e51063a52f45149f93d6e2b2e56303b0388 Mon Sep 17 00:00:00 2001 From: MyroTk Date: 
Tue, 14 May 2024 03:13:32 -0700 Subject: [PATCH 04/79] skip label check --- .github/workflows/release_branches.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 0b2353980376..7437e9ae56d8 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -34,10 +34,10 @@ jobs: clear-repository: true # to ensure correct digests fetch-depth: 0 # to get version filter: tree:0 - - name: Labels check - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 run_check.py + # - name: Labels check + # run: | + # cd "$GITHUB_WORKSPACE/tests/ci" + # python3 run_check.py - name: Python unit tests run: | cd "$GITHUB_WORKSPACE/tests/ci" From e9a865253529191aee3fc07bbd70d210d9ad1b0c Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 14 May 2024 03:20:44 -0700 Subject: [PATCH 05/79] add secrets to env --- .github/workflows/release_branches.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 7437e9ae56d8..92f93ca6dd7a 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -4,6 +4,11 @@ name: ReleaseBranchCI env: # Force the stdout and stderr streams to be unbuffered PYTHONUNBUFFERED: 1 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} on: # yamllint disable-line rule:truthy pull_request: From 87f44967c026d1ab06f72e9c5a855f80483cfa59 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 14 May 2024 03:28:15 -0700 Subject: [PATCH 06/79] update docker username and pwd --- tests/ci/docker_images_helper.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/ci/docker_images_helper.py b/tests/ci/docker_images_helper.py index 6ea679e05973..fe7b80a3b669 100644 --- a/tests/ci/docker_images_helper.py +++ b/tests/ci/docker_images_helper.py @@ -25,9 +25,10 @@ def docker_login(relogin: bool = True) -> None: ).returncode == 1 ): + logging.info('Doing docker login') subprocess.check_output( # pylint: disable=unexpected-keyword-arg - "docker login --username 'robotclickhouse' --password-stdin", - input=get_parameter_from_ssm("dockerhub_robot_password"), + "docker login --username 'altinityinfra' --password-stdin", + input=get_parameter_from_ssm("dockerhub-password"), encoding="utf-8", shell=True, ) From 20feef9262bdaec598a9aaf0d77b8a75cde75e46 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 14 May 2024 03:50:22 -0700 Subject: [PATCH 07/79] bring back our implementation of get_best_robot_token --- tests/ci/get_robot_token.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/tests/ci/get_robot_token.py b/tests/ci/get_robot_token.py index 3781cdc5cc80..5639b72fa3bc 100644 --- a/tests/ci/get_robot_token.py +++ b/tests/ci/get_robot_token.py @@ -55,8 +55,20 @@ def get_parameters_from_ssm( ROBOT_TOKEN = None # type: Optional[Token] +# NOTE(Arthur Passos): Original CI code uses the "_original" version of this method. Each robot token is rate limited +# and the original implementation selects the "best one". To make it simpler and iterate faster, +# we are using only one robot and keeping the method signature. 
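Both the docker_login() change above and the simplified get_best_robot_token() described in this note read their secrets through get_parameter_from_ssm(), whose implementation is not part of this diff. Assuming it is the usual thin wrapper around AWS Systems Manager Parameter Store — which the AWS_* credentials added to the workflow environment would make reachable — it might look roughly like the following sketch (names and defaults are assumptions, not the project's actual code):

import boto3

def get_parameter_from_ssm(name: str, decrypt: bool = True, client=None) -> str:
    # Fetch a single (usually SecureString) parameter, e.g. "dockerhub-password"
    # or "github_robot_token", from AWS Systems Manager Parameter Store.
    client = client or boto3.client("ssm")
    return client.get_parameter(Name=name, WithDecryption=decrypt)["Parameter"]["Value"]
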
In the future we might reconsider +# having multiple robot tokens +def get_best_robot_token(token_prefix_env_name="github_robot_token"): + # Re-use already fetched token (same as in get_best_robot_token_original) + # except here we assume it is always a string (since we use only one token and don't do token rotation) + global ROBOT_TOKEN + if ROBOT_TOKEN is not None: + return ROBOT_TOKEN + ROBOT_TOKEN = get_parameter_from_ssm(token_prefix_env_name) + return ROBOT_TOKEN -def get_best_robot_token(tokens_path: str = "/github-tokens") -> str: +def get_best_robot_token_original(tokens_path: str = "/github-tokens") -> str: global ROBOT_TOKEN if ROBOT_TOKEN is not None: return ROBOT_TOKEN.value From ec8b90b8c74c66c235d09a50e1e1c6e9e9add6d5 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 14 May 2024 05:20:44 -0700 Subject: [PATCH 08/79] pass secrets --- .github/workflows/release_branches.yml | 45 ++++++++++++++++++++++++-- 1 file changed, 42 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 92f93ca6dd7a..639215ae4429 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -7,8 +7,6 @@ env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} - DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} on: # yamllint disable-line rule:truthy pull_request: @@ -73,12 +71,14 @@ jobs: needs: [RunConfig] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_docker.yml + secrets: inherit with: data: ${{ needs.RunConfig.outputs.data }} CompatibilityCheckX86: needs: [RunConfig, BuilderDebRelease] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Compatibility check (amd64) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -87,6 +87,7 @@ jobs: needs: [RunConfig, BuilderDebAarch64] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Compatibility check (aarch64) runner_type: altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-app-docker-ce @@ -98,6 +99,7 @@ jobs: needs: [RunConfig, BuildDockers] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_build.yml + secrets: inherit with: build_name: package_release checkout_depth: 0 @@ -108,6 +110,7 @@ jobs: needs: [RunConfig, BuildDockers] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_build.yml + secrets: inherit with: build_name: package_aarch64 checkout_depth: 0 @@ -118,6 +121,7 @@ jobs: needs: [RunConfig, BuildDockers] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_build.yml + secrets: inherit with: build_name: package_asan data: ${{ needs.RunConfig.outputs.data }} @@ -125,6 +129,7 @@ jobs: needs: [RunConfig, BuildDockers] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_build.yml + secrets: inherit with: build_name: package_ubsan data: ${{ needs.RunConfig.outputs.data }} @@ -132,6 +137,7 @@ jobs: needs: [RunConfig, BuildDockers] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_build.yml + secrets: inherit with: build_name: package_tsan data: ${{ needs.RunConfig.outputs.data }} @@ -139,6 +145,7 @@ jobs: needs: [RunConfig, BuildDockers] if: 
${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_build.yml + secrets: inherit with: build_name: package_msan data: ${{ needs.RunConfig.outputs.data }} @@ -146,6 +153,7 @@ jobs: needs: [RunConfig, BuildDockers] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_build.yml + secrets: inherit with: build_name: package_debug data: ${{ needs.RunConfig.outputs.data }} @@ -153,6 +161,7 @@ jobs: needs: [RunConfig, BuildDockers] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_build.yml + secrets: inherit with: build_name: binary_darwin checkout_depth: 0 @@ -163,6 +172,7 @@ jobs: needs: [RunConfig, BuildDockers] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_build.yml + secrets: inherit with: build_name: binary_darwin_aarch64 checkout_depth: 0 @@ -176,6 +186,7 @@ jobs: needs: [RunConfig, BuilderDebRelease, BuilderDebAarch64] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Docker server image runner_type: altinity-on-demand, altinity-type-cpx41, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -184,6 +195,7 @@ jobs: needs: [RunConfig, BuilderDebRelease, BuilderDebAarch64] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Docker keeper image runner_type: altinity-on-demand, altinity-type-cpx41, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -204,6 +216,7 @@ jobs: - BuilderDebMsan - BuilderDebDebug uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: ClickHouse build check runner_type: altinity-on-demand, altinity-type-cax11, altinity-in-hel1, altinity-image-arm-app-docker-ce @@ -216,6 +229,7 @@ jobs: - BuilderBinDarwin - BuilderBinDarwinAarch64 uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: ClickHouse special build check runner_type: altinity-on-demand, altinity-type-cax11, altinity-in-hel1, altinity-image-arm-app-docker-ce @@ -227,7 +241,7 @@ jobs: - BuilderBinDarwinAarch64 - BuilderDebRelease - BuilderDebAarch64 - runs-on: [self-hosted, style-checker-aarch64] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cax11, altinity-in-hel1, altinity-image-arm-app-docker-ce] steps: - name: Debug run: | @@ -258,6 +272,7 @@ jobs: needs: [RunConfig, BuilderDebRelease] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Install packages (amd64) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -268,6 +283,7 @@ jobs: needs: [RunConfig, BuilderDebAarch64] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Install packages (arm64) runner_type: altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-app-docker-ce @@ -281,6 +297,7 @@ jobs: needs: [RunConfig, BuilderDebRelease] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Stateless tests (release) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -289,6 +306,7 @@ jobs: needs: [RunConfig, BuilderDebAarch64] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Stateless tests (aarch64) runner_type: altinity-on-demand, altinity-type-cax41, altinity-in-hel1, 
altinity-image-arm-app-docker-ce @@ -297,6 +315,7 @@ jobs: needs: [RunConfig, BuilderDebAsan] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Stateless tests (asan) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -305,6 +324,7 @@ jobs: needs: [RunConfig, BuilderDebTsan] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Stateless tests (tsan) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -313,6 +333,7 @@ jobs: needs: [RunConfig, BuilderDebMsan] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Stateless tests (msan) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -321,6 +342,7 @@ jobs: needs: [RunConfig, BuilderDebUBsan] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Stateless tests (ubsan) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -329,6 +351,7 @@ jobs: needs: [RunConfig, BuilderDebDebug] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Stateless tests (debug) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -340,6 +363,7 @@ jobs: needs: [RunConfig, BuilderDebRelease] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Stateful tests (release) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -348,6 +372,7 @@ jobs: needs: [RunConfig, BuilderDebAarch64] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Stateful tests (aarch64) runner_type: altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-app-docker-ce @@ -356,6 +381,7 @@ jobs: needs: [RunConfig, BuilderDebAsan] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Stateful tests (asan) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -364,6 +390,7 @@ jobs: needs: [RunConfig, BuilderDebTsan] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Stateful tests (tsan) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -372,6 +399,7 @@ jobs: needs: [RunConfig, BuilderDebMsan] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Stateful tests (msan) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -380,6 +408,7 @@ jobs: needs: [RunConfig, BuilderDebUBsan] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Stateful tests (ubsan) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -388,6 +417,7 @@ jobs: needs: [RunConfig, BuilderDebDebug] if: ${{ !failure() && !cancelled() }} uses: 
./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Stateful tests (debug) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -399,6 +429,7 @@ jobs: needs: [RunConfig, BuilderDebAsan] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Stress test (asan) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -407,6 +438,7 @@ jobs: needs: [RunConfig, BuilderDebTsan] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Stress test (tsan) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -415,6 +447,7 @@ jobs: needs: [RunConfig, BuilderDebMsan] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Stress test (msan) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -423,6 +456,7 @@ jobs: needs: [RunConfig, BuilderDebUBsan] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Stress test (ubsan) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -431,6 +465,7 @@ jobs: needs: [RunConfig, BuilderDebDebug] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Stress test (debug) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -442,6 +477,7 @@ jobs: needs: [RunConfig, BuilderDebAsan] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Integration tests (asan) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -450,6 +486,7 @@ jobs: needs: [RunConfig, BuilderDebAsan] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Integration tests (asan, old analyzer) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -458,6 +495,7 @@ jobs: needs: [RunConfig, BuilderDebTsan] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Integration tests (tsan) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce @@ -466,6 +504,7 @@ jobs: needs: [RunConfig, BuilderDebRelease] if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml + secrets: inherit with: test_name: Integration tests (release) runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce From 3f224dc4d7e8f203c527ec2f2d6bd8d86bd1d85d Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 14 May 2024 05:31:56 -0700 Subject: [PATCH 09/79] add secrets to env --- .github/workflows/reusable_build.yml | 13 +++++++++++++ .github/workflows/reusable_docker.yml | 16 ++++++++++++++++ .github/workflows/reusable_test.yml | 10 +++++++++- 3 files changed, 38 insertions(+), 1 deletion(-) diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml index 45177ca2c132..27c87f8bc2a9 100644 --- a/.github/workflows/reusable_build.yml +++ 
b/.github/workflows/reusable_build.yml @@ -5,6 +5,9 @@ env: # Force the stdout and stderr streams to be unbuffered PYTHONUNBUFFERED: 1 CLICKHOUSE_STABLE_VERSION_SUFFIX: altinitystable + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} name: Build ClickHouse 'on': @@ -34,6 +37,16 @@ name: Build ClickHouse additional_envs: description: additional ENV variables to setup the job type: string + secrets: + secret_envs: + description: if given, it's passed to the environments + required: false + AWS_SECRET_ACCESS_KEY: + description: the access key to the aws param store. + required: true + AWS_ACCESS_KEY_ID: + description: the access key id to the aws param store. + required: true jobs: Build: diff --git a/.github/workflows/reusable_docker.yml b/.github/workflows/reusable_docker.yml index e8d89691b75f..3c344a63f126 100644 --- a/.github/workflows/reusable_docker.yml +++ b/.github/workflows/reusable_docker.yml @@ -11,6 +11,22 @@ name: Build docker images required: false type: boolean default: false + secrets: + secret_envs: + description: if given, it's passed to the environments + required: false + AWS_SECRET_ACCESS_KEY: + description: the access key to the aws param store. + required: true + AWS_ACCESS_KEY_ID: + description: the access key id to the aws param store. + required: true + +env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + jobs: DockerBuildAarch64: runs-on: [altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-app-docker-ce] diff --git a/.github/workflows/reusable_test.yml b/.github/workflows/reusable_test.yml index 2f63ab69509d..31822916868f 100644 --- a/.github/workflows/reusable_test.yml +++ b/.github/workflows/reusable_test.yml @@ -43,12 +43,20 @@ name: Testing workflow secret_envs: description: if given, it's passed to the environments required: false - + AWS_SECRET_ACCESS_KEY: + description: the access key to the aws param store. + required: true + AWS_ACCESS_KEY_ID: + description: the access key id to the aws param store. 
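These declarations are the reusable-workflow side of the same plumbing: the caller passes repository secrets down with secrets: inherit (added throughout release_branches.yml above), the reusable workflows re-export them as AWS_* job environment variables, and the Python CI code turns them into the --s3-access-key-id / --s3-secret-access-key packager flags introduced in build_check.py earlier in this series. A condensed sketch of that last hop, simplified from get_packager_cmd() with unrelated flags left out:

import os

# What env_helper.py now reads from the job environment.
S3_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID")
S3_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY")
S3_BUILDS_BUCKET = os.getenv("S3_BUILDS_BUCKET", "altinity-build-artifacts")

def sccache_args() -> str:
    # Mirrors the lines this series adds to get_packager_cmd(); the rest of
    # the packager command line is omitted here.
    return (
        " --cache=sccache"
        " --s3-rw-access"
        f" --s3-bucket={S3_BUILDS_BUCKET}"
        f" --s3-access-key-id={S3_ACCESS_KEY_ID}"
        f" --s3-secret-access-key={S3_SECRET_ACCESS_KEY}"
    )
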
+ required: true env: # Force the stdout and stderr streams to be unbuffered PYTHONUNBUFFERED: 1 CHECK_NAME: ${{inputs.test_name}} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} jobs: runner_labels_setup: From 95b479ba1f3fab4f4652b9d4ec006de6ad209cf6 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 14 May 2024 05:47:45 -0700 Subject: [PATCH 10/79] update images to be altinityinfra --- docker/images.json | 78 +++++++++++++++++++++++----------------------- 1 file changed, 39 insertions(+), 39 deletions(-) diff --git a/docker/images.json b/docker/images.json index 7439517379b4..03da98789250 100644 --- a/docker/images.json +++ b/docker/images.json @@ -1,126 +1,126 @@ { "docker/packager/binary-builder": { - "name": "clickhouse/binary-builder", + "name": "altinityinfra/binary-builder", "dependent": [] }, "docker/packager/cctools": { - "name": "clickhouse/cctools", + "name": "altinityinfra/cctools", "dependent": [] }, "docker/test/compatibility/centos": { - "name": "clickhouse/test-old-centos", + "name": "altinityinfra/test-old-centos", "dependent": [] }, "docker/test/compatibility/ubuntu": { - "name": "clickhouse/test-old-ubuntu", + "name": "altinityinfra/test-old-ubuntu", "dependent": [] }, "docker/test/integration/base": { - "name": "clickhouse/integration-test", + "name": "altinityinfra/integration-test", "dependent": [] }, "docker/test/fuzzer": { - "name": "clickhouse/fuzzer", + "name": "altinityinfra/fuzzer", "dependent": [] }, "docker/test/libfuzzer": { - "name": "clickhouse/libfuzzer", + "name": "altinityinfra/libfuzzer", "dependent": [] }, "docker/test/performance-comparison": { - "name": "clickhouse/performance-comparison", + "name": "altinityinfra/performance-comparison", "dependent": [] }, "docker/test/util": { - "name": "clickhouse/test-util", + "name": "altinityinfra/test-util", "dependent": [ "docker/test/base", "docker/test/fasttest" ] }, "docker/test/stateless": { - "name": "clickhouse/stateless-test", + "name": "altinityinfra/stateless-test", "dependent": [ "docker/test/stateful", "docker/test/unit" ] }, "docker/test/stateful": { - "name": "clickhouse/stateful-test", + "name": "altinityinfra/stateful-test", "dependent": [ "docker/test/stress", "docker/test/upgrade" ] }, "docker/test/unit": { - "name": "clickhouse/unit-test", + "name": "altinityinfra/unit-test", "dependent": [] }, "docker/test/stress": { - "name": "clickhouse/stress-test", + "name": "altinityinfra/stress-test", "dependent": [] }, "docker/test/upgrade": { - "name": "clickhouse/upgrade-check", + "name": "altinityinfra/upgrade-check", "dependent": [] }, "docker/test/integration/runner": { - "name": "clickhouse/integration-tests-runner", + "name": "altinityinfra/integration-tests-runner", "dependent": [] }, "docker/test/fasttest": { - "name": "clickhouse/fasttest", + "name": "altinityinfra/fasttest", "dependent": [ "docker/packager/binary-builder" ] }, "docker/test/style": { - "name": "clickhouse/style-test", + "name": "altinityinfra/style-test", "dependent": [] }, "docker/test/integration/s3_proxy": { - "name": "clickhouse/s3-proxy", + "name": "altinityinfra/s3-proxy", "dependent": [] }, "docker/test/integration/resolver": { - "name": "clickhouse/python-bottle", + "name": "altinityinfra/python-bottle", "dependent": [] }, "docker/test/integration/helper_container": { - "name": "clickhouse/integration-helper", + "name": "altinityinfra/integration-helper", "dependent": [] }, 
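Each entry in docker/images.json maps a Dockerfile directory to the image name built from it and lists, under "dependent", the directories whose images are built FROM it and therefore have to be rebuilt afterwards; later patches in this series adjust those lists so that, for example, cctools is rebuilt before binary-builder. A small illustrative walk of such a file, assuming (as in the entries here) that every image has at most one parent — the real ordering logic lives elsewhere in tests/ci and is not shown in this hunk:

import json

def build_order(images_json_path: str) -> list:
    # Emit image names so that every parent comes before the images that
    # its "dependent" array says are built on top of it.
    with open(images_json_path) as f:
        images = json.load(f)

    children = {c for entry in images.values() for c in entry.get("dependent", [])}
    roots = [path for path in images if path not in children]

    order, seen = [], set()

    def visit(path: str) -> None:
        if path in seen or path not in images:
            return
        seen.add(path)
        order.append(images[path]["name"])
        for child in images[path].get("dependent", []):
            visit(child)

    for root in roots:
        visit(root)
    return order
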
"docker/test/integration/mysql_golang_client": { - "name": "clickhouse/mysql-golang-client", + "name": "altinityinfra/mysql-golang-client", "dependent": [] }, "docker/test/integration/dotnet_client": { - "name": "clickhouse/dotnet-client", + "name": "altinityinfra/dotnet-client", "dependent": [] }, "docker/test/integration/mysql_java_client": { - "name": "clickhouse/mysql-java-client", + "name": "altinityinfra/mysql-java-client", "dependent": [] }, "docker/test/integration/mysql_js_client": { - "name": "clickhouse/mysql-js-client", + "name": "altinityinfra/mysql-js-client", "dependent": [] }, "docker/test/integration/mysql_php_client": { - "name": "clickhouse/mysql-php-client", + "name": "altinityinfra/mysql-php-client", "dependent": [] }, "docker/test/integration/postgresql_java_client": { - "name": "clickhouse/postgresql-java-client", + "name": "altinityinfra/postgresql-java-client", "dependent": [] }, "docker/test/integration/kerberos_kdc": { "only_amd64": true, - "name": "clickhouse/kerberos-kdc", + "name": "altinityinfra/kerberos-kdc", "dependent": [] }, "docker/test/base": { - "name": "clickhouse/test-base", + "name": "altinityinfra/test-base", "dependent": [ "docker/test/fuzzer", "docker/test/libfuzzer", @@ -135,47 +135,47 @@ }, "docker/test/integration/kerberized_hadoop": { "only_amd64": true, - "name": "clickhouse/kerberized-hadoop", + "name": "altinityinfra/kerberized-hadoop", "dependent": [] }, "docker/test/sqlancer": { - "name": "clickhouse/sqlancer-test", + "name": "altinityinfra/sqlancer-test", "dependent": [] }, "docker/test/keeper-jepsen": { - "name": "clickhouse/keeper-jepsen-test", + "name": "altinityinfra/keeper-jepsen-test", "dependent": [] }, "docker/test/server-jepsen": { - "name": "clickhouse/server-jepsen-test", + "name": "altinityinfra/server-jepsen-test", "dependent": [] }, "docker/test/clickbench": { - "name": "clickhouse/clickbench", + "name": "altinityinfra/clickbench", "dependent": [] }, "docker/test/install/deb": { - "name": "clickhouse/install-deb-test", + "name": "altinityinfra/install-deb-test", "dependent": [] }, "docker/test/install/rpm": { - "name": "clickhouse/install-rpm-test", + "name": "altinityinfra/install-rpm-test", "dependent": [] }, "docker/docs/builder": { - "name": "clickhouse/docs-builder", + "name": "altinityinfra/docs-builder", "dependent": [] }, "docker/test/sqllogic": { - "name": "clickhouse/sqllogic-test", + "name": "altinityinfra/sqllogic-test", "dependent": [] }, "docker/test/sqltest": { - "name": "clickhouse/sqltest", + "name": "altinityinfra/sqltest", "dependent": [] }, "docker/test/integration/nginx_dav": { - "name": "clickhouse/nginx-dav", + "name": "altinityinfra/nginx-dav", "dependent": [] } } From ca387efb83c48c3c49fb23338d153f7f638107fc Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 14 May 2024 05:56:15 -0700 Subject: [PATCH 11/79] more docker image renaming --- tests/ci/build_check.py | 2 +- tests/ci/ci_config.py | 30 ++++----- tests/ci/tests/docker_images_for_tests.json | 70 ++++++++++----------- 3 files changed, 51 insertions(+), 51 deletions(-) diff --git a/tests/ci/build_check.py b/tests/ci/build_check.py index fa738a9bd942..6bd1fb9bd293 100644 --- a/tests/ci/build_check.py +++ b/tests/ci/build_check.py @@ -31,7 +31,7 @@ ) from stopwatch import Stopwatch -IMAGE_NAME = "clickhouse/binary-builder" +IMAGE_NAME = "altinityinfra/binary-builder" BUILD_LOG_NAME = "build_log.log" diff --git a/tests/ci/ci_config.py b/tests/ci/ci_config.py index 8d1dcefcf1bd..3c6ea5cf0281 100644 --- a/tests/ci/ci_config.py +++ 
b/tests/ci/ci_config.py @@ -269,7 +269,7 @@ class JobConfig: "./tests/performance", ], exclude_files=[".md"], - docker=["clickhouse/binary-builder"], + docker=["altinityinfra/binary-builder"], git_submodules=True, ), run_command="build_check.py $BUILD_NAME", @@ -338,11 +338,11 @@ class TestConfig: # common digests configs compatibility_check_digest = DigestConfig( include_paths=["./tests/ci/compatibility_check.py"], - docker=["clickhouse/test-old-ubuntu", "clickhouse/test-old-centos"], + docker=["altinityinfra/test-old-ubuntu", "altinityinfra/test-old-centos"], ) install_check_digest = DigestConfig( include_paths=["./tests/ci/install_check.py"], - docker=["clickhouse/install-deb-test", "clickhouse/install-rpm-test"], + docker=["altinityinfra/install-deb-test", "altinityinfra/install-rpm-test"], ) stateless_check_digest = DigestConfig( include_paths=[ @@ -353,7 +353,7 @@ class TestConfig: "./tests/*.txt", ], exclude_files=[".md"], - docker=["clickhouse/stateless-test"], + docker=["altinityinfra/stateless-test"], ) stateful_check_digest = DigestConfig( include_paths=[ @@ -364,7 +364,7 @@ class TestConfig: "./tests/*.txt", ], exclude_files=[".md"], - docker=["clickhouse/stateful-test"], + docker=["altinityinfra/stateful-test"], ) stress_check_digest = DigestConfig( @@ -376,13 +376,13 @@ class TestConfig: "./tests/*.txt", ], exclude_files=[".md"], - docker=["clickhouse/stress-test"], + docker=["altinityinfra/stress-test"], ) # FIXME: which tests are upgrade? just python? upgrade_check_digest = DigestConfig( include_paths=["./tests/ci/upgrade_check.py"], exclude_files=[".md"], - docker=["clickhouse/upgrade-check"], + docker=["altinityinfra/upgrade-check"], ) integration_check_digest = DigestConfig( include_paths=[ @@ -402,7 +402,7 @@ class TestConfig: unit_check_digest = DigestConfig( include_paths=["./tests/ci/unit_tests_check.py"], exclude_files=[".md"], - docker=["clickhouse/unit-test"], + docker=["altinityinfra/unit-test"], ) perf_check_digest = DigestConfig( include_paths=[ @@ -410,7 +410,7 @@ class TestConfig: "./tests/performance/", ], exclude_files=[".md"], - docker=["clickhouse/performance-comparison"], + docker=["altinityinfra/performance-comparison"], ) sqllancer_check_digest = DigestConfig( # include_paths=["./tests/ci/sqlancer_check.py"], @@ -420,12 +420,12 @@ class TestConfig: sqllogic_check_digest = DigestConfig( include_paths=["./tests/ci/sqllogic_test.py"], exclude_files=[".md"], - docker=["clickhouse/sqllogic-test"], + docker=["altinityinfra/sqllogic-test"], ) sqltest_check_digest = DigestConfig( include_paths=["./tests/ci/sqltest.py"], exclude_files=[".md"], - docker=["clickhouse/sqltest"], + docker=["altinityinfra/sqltest"], ) bugfix_validate_check = DigestConfig( include_paths=[ @@ -437,7 +437,7 @@ class TestConfig: exclude_files=[".md"], docker=IMAGES.copy() + [ - "clickhouse/stateless-test", + "altinityinfra/stateless-test", ], ) # common test params @@ -513,7 +513,7 @@ class TestConfig: include_paths=[ "tests/ci/clickbench.py", ], - docker=["clickhouse/clickbench"], + docker=["altinityinfra/clickbench"], ), "run_command": 'clickbench.py "$CHECK_NAME"', } @@ -1074,7 +1074,7 @@ def validate(self) -> None: job_config=JobConfig( digest=DigestConfig( include_paths=["**/*.md", "./docs", "tests/ci/docs_check.py"], - docker=["clickhouse/docs-builder"], + docker=["altinityinfra/docs-builder"], ), run_command="docs_check.py", ), @@ -1086,7 +1086,7 @@ def validate(self) -> None: digest=DigestConfig( include_paths=["./tests/queries/0_stateless/"], exclude_files=[".md"], - 
docker=["clickhouse/fasttest"], + docker=["altinityinfra/fasttest"], ), ), ), diff --git a/tests/ci/tests/docker_images_for_tests.json b/tests/ci/tests/docker_images_for_tests.json index 70db87605616..e0fead2968d6 100644 --- a/tests/ci/tests/docker_images_for_tests.json +++ b/tests/ci/tests/docker_images_for_tests.json @@ -1,120 +1,120 @@ { "docker/packager/deb": { - "name": "clickhouse/deb-builder", + "name": "altinityinfra/deb-builder", "dependent": [] }, "docker/packager/binary": { - "name": "clickhouse/binary-builder", + "name": "altinityinfra/binary-builder", "dependent": [ "docker/test/codebrowser" ] }, "docker/test/compatibility/centos": { - "name": "clickhouse/test-old-centos", + "name": "altinityinfra/test-old-centos", "dependent": [] }, "docker/test/compatibility/ubuntu": { - "name": "clickhouse/test-old-ubuntu", + "name": "altinityinfra/test-old-ubuntu", "dependent": [] }, "docker/test/integration/base": { - "name": "clickhouse/integration-test", + "name": "altinityinfra/integration-test", "dependent": [] }, "docker/test/fuzzer": { - "name": "clickhouse/fuzzer", + "name": "altinityinfra/fuzzer", "dependent": [] }, "docker/test/performance-comparison": { - "name": "clickhouse/performance-comparison", + "name": "altinityinfra/performance-comparison", "dependent": [] }, "docker/test/util": { - "name": "clickhouse/test-util", + "name": "altinityinfra/test-util", "dependent": [ "docker/test/base", "docker/test/fasttest" ] }, "docker/test/stateless": { - "name": "clickhouse/stateless-test", + "name": "altinityinfra/stateless-test", "dependent": [ "docker/test/stateful", "docker/test/unit" ] }, "docker/test/stateful": { - "name": "clickhouse/stateful-test", + "name": "altinityinfra/stateful-test", "dependent": [ "docker/test/stress" ] }, "docker/test/unit": { - "name": "clickhouse/unit-test", + "name": "altinityinfra/unit-test", "dependent": [] }, "docker/test/stress": { - "name": "clickhouse/stress-test", + "name": "altinityinfra/stress-test", "dependent": [] }, "docker/test/codebrowser": { - "name": "clickhouse/codebrowser", + "name": "altinityinfra/codebrowser", "dependent": [] }, "docker/test/integration/runner": { - "name": "clickhouse/integration-tests-runner", + "name": "altinityinfra/integration-tests-runner", "dependent": [] }, "docker/test/fasttest": { - "name": "clickhouse/fasttest", + "name": "altinityinfra/fasttest", "dependent": [] }, "docker/test/style": { - "name": "clickhouse/style-test", + "name": "altinityinfra/style-test", "dependent": [] }, "docker/test/integration/s3_proxy": { - "name": "clickhouse/s3-proxy", + "name": "altinityinfra/s3-proxy", "dependent": [] }, "docker/test/integration/resolver": { - "name": "clickhouse/python-bottle", + "name": "altinityinfra/python-bottle", "dependent": [] }, "docker/test/integration/helper_container": { - "name": "clickhouse/integration-helper", + "name": "altinityinfra/integration-helper", "dependent": [] }, "docker/test/integration/mysql_golang_client": { - "name": "clickhouse/mysql-golang-client", + "name": "altinityinfra/mysql-golang-client", "dependent": [] }, "docker/test/integration/dotnet_client": { - "name": "clickhouse/dotnet-client", + "name": "altinityinfra/dotnet-client", "dependent": [] }, "docker/test/integration/mysql_java_client": { - "name": "clickhouse/mysql-java-client", + "name": "altinityinfra/mysql-java-client", "dependent": [] }, "docker/test/integration/mysql_js_client": { - "name": "clickhouse/mysql-js-client", + "name": "altinityinfra/mysql-js-client", "dependent": [] }, 
"docker/test/integration/mysql_php_client": { - "name": "clickhouse/mysql-php-client", + "name": "altinityinfra/mysql-php-client", "dependent": [] }, "docker/test/integration/postgresql_java_client": { - "name": "clickhouse/postgresql-java-client", + "name": "altinityinfra/postgresql-java-client", "dependent": [] }, "docker/test/integration/kerberos_kdc": { - "name": "clickhouse/kerberos-kdc", + "name": "altinityinfra/kerberos-kdc", "dependent": [] }, "docker/test/base": { - "name": "clickhouse/test-base", + "name": "altinityinfra/test-base", "dependent": [ "docker/test/stateless", "docker/test/integration/base", @@ -124,19 +124,19 @@ ] }, "docker/test/integration/kerberized_hadoop": { - "name": "clickhouse/kerberized-hadoop", + "name": "altinityinfra/kerberized-hadoop", "dependent": [] }, "docker/test/sqlancer": { - "name": "clickhouse/sqlancer-test", + "name": "altinityinfra/sqlancer-test", "dependent": [] }, "docker/test/keeper-jepsen": { - "name": "clickhouse/keeper-jepsen-test", + "name": "altinityinfra/keeper-jepsen-test", "dependent": [] }, "docker/docs/builder": { - "name": "clickhouse/docs-builder", + "name": "altinityinfra/docs-builder", "only_amd64": true, "dependent": [ "docker/docs/check", @@ -144,19 +144,19 @@ ] }, "docker/docs/check": { - "name": "clickhouse/docs-check", + "name": "altinityinfra/docs-check", "dependent": [] }, "docker/docs/release": { - "name": "clickhouse/docs-release", + "name": "altinityinfra/docs-release", "dependent": [] }, "docker/test/sqllogic": { - "name": "clickhouse/sqllogic-test", + "name": "altinityinfra/sqllogic-test", "dependent": [] }, "docker/test/sqltest": { - "name": "clickhouse/sqltest", + "name": "altinityinfra/sqltest", "dependent": [] } } From fd4d4ac4f933a49be1ceace3576f2481fd00aeea Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 14 May 2024 06:09:03 -0700 Subject: [PATCH 12/79] docker image renaming --- tests/ci/integration_test_images.py | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/tests/ci/integration_test_images.py b/tests/ci/integration_test_images.py index 8148ac611817..be760fe853e4 100644 --- a/tests/ci/integration_test_images.py +++ b/tests/ci/integration_test_images.py @@ -1,19 +1,19 @@ #!/usr/bin/env python3 IMAGES_ENV = { - "clickhouse/dotnet-client": "DOCKER_DOTNET_CLIENT_TAG", - "clickhouse/integration-helper": "DOCKER_HELPER_TAG", - "clickhouse/integration-test": "DOCKER_BASE_TAG", - "clickhouse/integration-tests-runner": "", - "clickhouse/kerberized-hadoop": "DOCKER_KERBERIZED_HADOOP_TAG", - "clickhouse/kerberos-kdc": "DOCKER_KERBEROS_KDC_TAG", - "clickhouse/mysql-golang-client": "DOCKER_MYSQL_GOLANG_CLIENT_TAG", - "clickhouse/mysql-java-client": "DOCKER_MYSQL_JAVA_CLIENT_TAG", - "clickhouse/mysql-js-client": "DOCKER_MYSQL_JS_CLIENT_TAG", - "clickhouse/mysql-php-client": "DOCKER_MYSQL_PHP_CLIENT_TAG", - "clickhouse/nginx-dav": "DOCKER_NGINX_DAV_TAG", - "clickhouse/postgresql-java-client": "DOCKER_POSTGRESQL_JAVA_CLIENT_TAG", - "clickhouse/python-bottle": "DOCKER_PYTHON_BOTTLE_TAG", + "altinityinfra/dotnet-client": "DOCKER_DOTNET_CLIENT_TAG", + "altinityinfra/integration-helper": "DOCKER_HELPER_TAG", + "altinityinfra/integration-test": "DOCKER_BASE_TAG", + "altinityinfra/integration-tests-runner": "", + "altinityinfra/kerberized-hadoop": "DOCKER_KERBERIZED_HADOOP_TAG", + "altinityinfra/kerberos-kdc": "DOCKER_KERBEROS_KDC_TAG", + "altinityinfra/mysql-golang-client": "DOCKER_MYSQL_GOLANG_CLIENT_TAG", + "altinityinfra/mysql-java-client": "DOCKER_MYSQL_JAVA_CLIENT_TAG", + 
"altinityinfra/mysql-js-client": "DOCKER_MYSQL_JS_CLIENT_TAG", + "altinityinfra/mysql-php-client": "DOCKER_MYSQL_PHP_CLIENT_TAG", + "altinityinfra/nginx-dav": "DOCKER_NGINX_DAV_TAG", + "altinityinfra/postgresql-java-client": "DOCKER_POSTGRESQL_JAVA_CLIENT_TAG", + "altinityinfra/python-bottle": "DOCKER_PYTHON_BOTTLE_TAG", } IMAGES = list(IMAGES_ENV.keys()) From 86fd4bf3f2540f1ad1203d73edc039966e0275de Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 14 May 2024 06:54:51 -0700 Subject: [PATCH 13/79] remove version from cctools --- docker/packager/binary-builder/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/packager/binary-builder/Dockerfile b/docker/packager/binary-builder/Dockerfile index 44897d572195..faa0dff49e13 100644 --- a/docker/packager/binary-builder/Dockerfile +++ b/docker/packager/binary-builder/Dockerfile @@ -6,7 +6,7 @@ ENV CXX=clang++-${LLVM_VERSION} # If the cctools is updated, then first build it in the CI, then update here in a different commit # TODO: (mtkachenko) change tag here. Decide what tag to use. -COPY --from=altinityinfra/cctools:$FROM_TAG /cctools /cctools +COPY --from=altinityinfra/cctools /cctools /cctools # Rust toolchain and libraries ENV RUSTUP_HOME=/rust/rustup From 1b43ccbc4d5b3d725138087c1c4e2d866246c136 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 14 May 2024 07:12:00 -0700 Subject: [PATCH 14/79] set binary-builder as cctools dependant temporarily --- docker/images.json | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docker/images.json b/docker/images.json index 03da98789250..03d11c5c6f48 100644 --- a/docker/images.json +++ b/docker/images.json @@ -5,7 +5,9 @@ }, "docker/packager/cctools": { "name": "altinityinfra/cctools", - "dependent": [] + "dependent": [ + "docker/packager/binary-builder" + ] }, "docker/test/compatibility/centos": { "name": "altinityinfra/test-old-centos", From 9e7baf9d40e9ade1c36a58ac62e049b1a2535751 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 14 May 2024 07:25:33 -0700 Subject: [PATCH 15/79] build images in correct order --- docker/images.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/images.json b/docker/images.json index 03d11c5c6f48..96086047ea11 100644 --- a/docker/images.json +++ b/docker/images.json @@ -73,7 +73,7 @@ "docker/test/fasttest": { "name": "altinityinfra/fasttest", "dependent": [ - "docker/packager/binary-builder" + "docker/packager/cctools" ] }, "docker/test/style": { From 74059ea2bc3d36cca31e35c8d3dd31693716ebad Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 14 May 2024 07:38:05 -0700 Subject: [PATCH 16/79] more docker image renaming --- .github/workflows/release.yml | 4 ++-- .github/workflows/tags_stable.yml | 2 +- docker/packager/cctools/Dockerfile | 4 ++-- docker/test/base/Dockerfile | 2 +- docker/test/compatibility/centos/Dockerfile | 2 +- docker/test/compatibility/ubuntu/Dockerfile | 2 +- docker/test/fuzzer/Dockerfile | 2 +- docker/test/integration/helper_container/Dockerfile | 2 +- docker/test/integration/kerberized_hadoop/Dockerfile | 2 +- docker/test/integration/kerberos_kdc/Dockerfile | 2 +- docker/test/integration/mysql_golang_client/Dockerfile | 2 +- docker/test/integration/mysql_java_client/Dockerfile | 2 +- docker/test/integration/mysql_js_client/Dockerfile | 2 +- docker/test/integration/mysql_php_client/Dockerfile | 2 +- docker/test/integration/postgresql_java_client/Dockerfile | 2 +- docker/test/integration/resolver/Dockerfile | 2 +- docker/test/integration/runner/Dockerfile | 2 +- 
docker/test/integration/s3_proxy/Dockerfile | 2 +- docker/test/sqlancer/Dockerfile | 2 +- docker/test/style/Dockerfile | 2 +- docker/test/unit/Dockerfile | 2 +- docker/test/upgrade/Dockerfile | 2 +- docker/test/util/Dockerfile | 2 +- tests/ci/ast_fuzzer_check.py | 2 +- tests/ci/ci.py | 2 +- tests/ci/ci_config.py | 4 ++-- tests/ci/clickbench.py | 2 +- tests/ci/compatibility_check.py | 4 ++-- tests/ci/docker_images_check.py | 2 +- tests/ci/docker_manifests_merge.py | 2 +- tests/ci/docker_server.py | 4 ++-- tests/ci/docs_check.py | 2 +- tests/ci/fast_test_check.py | 2 +- tests/ci/functional_test_check.py | 4 ++-- tests/ci/install_check.py | 4 ++-- tests/ci/integration_tests_runner.py | 2 +- tests/ci/jepsen_check.py | 4 ++-- tests/ci/libfuzzer_test_check.py | 2 +- tests/ci/performance_comparison_check.py | 6 +++--- tests/ci/sqlancer_check.py | 2 +- tests/ci/sqllogic_test.py | 2 +- tests/ci/sqltest.py | 2 +- tests/ci/stress_check.py | 2 +- tests/ci/style_check.py | 2 +- tests/ci/unit_tests_check.py | 2 +- tests/ci/upgrade_check.py | 2 +- tests/integration/compose/docker_compose_clickhouse.yml | 2 +- tests/integration/compose/docker_compose_dotnet_client.yml | 2 +- tests/integration/compose/docker_compose_jdbc_bridge.yml | 3 ++- tests/integration/compose/docker_compose_keeper.yml | 6 +++--- .../integration/compose/docker_compose_kerberized_hdfs.yml | 4 ++-- .../integration/compose/docker_compose_kerberized_kafka.yml | 2 +- tests/integration/compose/docker_compose_kerberos_kdc.yml | 2 +- tests/integration/compose/docker_compose_minio.yml | 6 +++--- .../compose/docker_compose_mysql_golang_client.yml | 2 +- .../compose/docker_compose_mysql_java_client.yml | 2 +- .../integration/compose/docker_compose_mysql_js_client.yml | 2 +- .../integration/compose/docker_compose_mysql_php_client.yml | 2 +- tests/integration/compose/docker_compose_nginx.yml | 2 +- .../compose/docker_compose_postgresql_java_client.yml | 2 +- tests/integration/helpers/cluster.py | 6 +++--- tests/integration/helpers/network.py | 2 +- tests/integration/runner | 2 +- 63 files changed, 81 insertions(+), 80 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 3bd6dfae6cac..b79f2b41985a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -51,12 +51,12 @@ jobs: fetch-depth: 0 # otherwise we will have no version info filter: tree:0 ref: ${{ env.GITHUB_TAG }} - - name: Check docker clickhouse/clickhouse-server building + - name: Check docker altinityinfra/clickhouse-server building run: | cd "$GITHUB_WORKSPACE/tests/ci" export CHECK_NAME="Docker server image" python3 docker_server.py --release-type auto --version "$GITHUB_TAG" --check-name "$CHECK_NAME" --push - - name: Check docker clickhouse/clickhouse-keeper building + - name: Check docker altinityinfra/clickhouse-keeper building run: | cd "$GITHUB_WORKSPACE/tests/ci" export CHECK_NAME="Docker keeper image" diff --git a/.github/workflows/tags_stable.yml b/.github/workflows/tags_stable.yml index e4fc9f0b1d38..4c55630bd66b 100644 --- a/.github/workflows/tags_stable.yml +++ b/.github/workflows/tags_stable.yml @@ -47,7 +47,7 @@ jobs: ./utils/list-versions/update-docker-version.sh GID=$(id -g "${UID}") docker run -u "${UID}:${GID}" -e PYTHONUNBUFFERED=1 \ - --volume="${GITHUB_WORKSPACE}:/ClickHouse" clickhouse/style-test \ + --volume="${GITHUB_WORKSPACE}:/ClickHouse" altinityinfra/style-test \ /ClickHouse/utils/changelog/changelog.py -v --debug-helpers \ --gh-user-or-token="$GITHUB_TOKEN" --jobs=5 \ 
--output="/ClickHouse/docs/changelogs/${GITHUB_TAG}.md" "${GITHUB_TAG}" diff --git a/docker/packager/cctools/Dockerfile b/docker/packager/cctools/Dockerfile index 2f6dbba45df4..4ea729f852f7 100644 --- a/docker/packager/cctools/Dockerfile +++ b/docker/packager/cctools/Dockerfile @@ -1,6 +1,6 @@ -# This is a hack to significantly reduce the build time of the clickhouse/binary-builder +# This is a hack to significantly reduce the build time of the altinityinfra/binary-builder # It's based on the assumption that we don't care of the cctools version so much -# It event does not depend on the clickhouse/fasttest in the `docker/images.json` +# It event does not depend on the altinityinfra/fasttest in the `docker/images.json` ARG FROM_TAG=latest FROM altinityinfra/fasttest:$FROM_TAG as builder diff --git a/docker/test/base/Dockerfile b/docker/test/base/Dockerfile index 569db18788fb..261627c3660f 100644 --- a/docker/test/base/Dockerfile +++ b/docker/test/base/Dockerfile @@ -1,5 +1,5 @@ # rebuild in #33610 -# docker build -t clickhouse/test-base . +# docker build -t altinityinfra/test-base . ARG FROM_TAG=latest FROM altinityinfra/test-util:$FROM_TAG diff --git a/docker/test/compatibility/centos/Dockerfile b/docker/test/compatibility/centos/Dockerfile index 628609e374f6..1edb42422b1f 100644 --- a/docker/test/compatibility/centos/Dockerfile +++ b/docker/test/compatibility/centos/Dockerfile @@ -1,4 +1,4 @@ -# docker build -t clickhouse/test-old-centos . +# docker build -t altinityinfra/test-old-centos . FROM centos:5 CMD /bin/sh -c "/clickhouse server --config /config/config.xml > /var/log/clickhouse-server/stderr.log 2>&1 & \ diff --git a/docker/test/compatibility/ubuntu/Dockerfile b/docker/test/compatibility/ubuntu/Dockerfile index ddd0a76bd446..0eb283ff3daf 100644 --- a/docker/test/compatibility/ubuntu/Dockerfile +++ b/docker/test/compatibility/ubuntu/Dockerfile @@ -1,4 +1,4 @@ -# docker build -t clickhouse/test-old-ubuntu . +# docker build -t altinityinfra/test-old-ubuntu . FROM ubuntu:12.04 CMD /bin/sh -c "/clickhouse server --config /config/config.xml > /var/log/clickhouse-server/stderr.log 2>&1 & \ diff --git a/docker/test/fuzzer/Dockerfile b/docker/test/fuzzer/Dockerfile index 898f3c053ffa..05bdb6a95eb8 100644 --- a/docker/test/fuzzer/Dockerfile +++ b/docker/test/fuzzer/Dockerfile @@ -40,5 +40,5 @@ CMD set -o pipefail \ && cd /workspace \ && timeout -s 9 1h /run-fuzzer.sh 2>&1 | ts "$(printf '%%Y-%%m-%%d %%H:%%M:%%S\t')" | tee main.log -# docker run --network=host --volume :/workspace -e PR_TO_TEST=<> -e SHA_TO_TEST=<> clickhouse/fuzzer +# docker run --network=host --volume :/workspace -e PR_TO_TEST=<> -e SHA_TO_TEST=<> altinityinfra/fuzzer diff --git a/docker/test/integration/helper_container/Dockerfile b/docker/test/integration/helper_container/Dockerfile index 49a3d3cd84b8..a453533ce852 100644 --- a/docker/test/integration/helper_container/Dockerfile +++ b/docker/test/integration/helper_container/Dockerfile @@ -1,4 +1,4 @@ -# docker build -t clickhouse/integration-helper . +# docker build -t altinityinfra/integration-helper . # Helper docker container to run iptables without sudo FROM alpine:3.18 diff --git a/docker/test/integration/kerberized_hadoop/Dockerfile b/docker/test/integration/kerberized_hadoop/Dockerfile index 592c3e36ef7f..76cd955ef163 100644 --- a/docker/test/integration/kerberized_hadoop/Dockerfile +++ b/docker/test/integration/kerberized_hadoop/Dockerfile @@ -1,4 +1,4 @@ -# docker build -t clickhouse/kerberized-hadoop . +# docker build -t altinityinfra/kerberized-hadoop . 
FROM sequenceiq/hadoop-docker:2.7.0 diff --git a/docker/test/integration/kerberos_kdc/Dockerfile b/docker/test/integration/kerberos_kdc/Dockerfile index a203c33a3313..a7f989bf4a56 100644 --- a/docker/test/integration/kerberos_kdc/Dockerfile +++ b/docker/test/integration/kerberos_kdc/Dockerfile @@ -1,4 +1,4 @@ -# docker build -t clickhouse/kerberos-kdc . +# docker build -t altinityinfra/kerberos-kdc . FROM centos:6 RUN sed -i '/^mirrorlist/s/^/#/;/^#baseurl/{s/#//;s/mirror.centos.org\/centos\/$releasever/vault.centos.org\/6.10/}' /etc/yum.repos.d/*B* diff --git a/docker/test/integration/mysql_golang_client/Dockerfile b/docker/test/integration/mysql_golang_client/Dockerfile index 5281f786ae2d..52be68126e47 100644 --- a/docker/test/integration/mysql_golang_client/Dockerfile +++ b/docker/test/integration/mysql_golang_client/Dockerfile @@ -1,4 +1,4 @@ -# docker build -t clickhouse/mysql-golang-client . +# docker build -t altinityinfra/mysql-golang-client . # MySQL golang client docker container FROM golang:1.17 diff --git a/docker/test/integration/mysql_java_client/Dockerfile b/docker/test/integration/mysql_java_client/Dockerfile index 38fefac070e7..5826ee77d501 100644 --- a/docker/test/integration/mysql_java_client/Dockerfile +++ b/docker/test/integration/mysql_java_client/Dockerfile @@ -1,4 +1,4 @@ -# docker build -t clickhouse/mysql-java-client . +# docker build -t altinityinfra/mysql-java-client . # MySQL Java client docker container FROM openjdk:8-jdk-alpine diff --git a/docker/test/integration/mysql_js_client/Dockerfile b/docker/test/integration/mysql_js_client/Dockerfile index 4c9df10ace1c..2b821f243234 100644 --- a/docker/test/integration/mysql_js_client/Dockerfile +++ b/docker/test/integration/mysql_js_client/Dockerfile @@ -1,4 +1,4 @@ -# docker build -t clickhouse/mysql-js-client . +# docker build -t altinityinfra/mysql-js-client . # MySQL JavaScript client docker container FROM node:16.14.2 diff --git a/docker/test/integration/mysql_php_client/Dockerfile b/docker/test/integration/mysql_php_client/Dockerfile index 0e11ae023e63..b060e93f70a3 100644 --- a/docker/test/integration/mysql_php_client/Dockerfile +++ b/docker/test/integration/mysql_php_client/Dockerfile @@ -1,4 +1,4 @@ -# docker build -t clickhouse/mysql-php-client . +# docker build -t altinityinfra/mysql-php-client . # MySQL PHP client docker container FROM php:8-cli-alpine diff --git a/docker/test/integration/postgresql_java_client/Dockerfile b/docker/test/integration/postgresql_java_client/Dockerfile index c5583085ef37..5a7458cc1d2f 100644 --- a/docker/test/integration/postgresql_java_client/Dockerfile +++ b/docker/test/integration/postgresql_java_client/Dockerfile @@ -1,4 +1,4 @@ -# docker build -t clickhouse/postgresql-java-client . +# docker build -t altinityinfra/postgresql-java-client . # PostgreSQL Java client docker container FROM ubuntu:18.04 diff --git a/docker/test/integration/resolver/Dockerfile b/docker/test/integration/resolver/Dockerfile index 01b9b7776142..26bcec16c546 100644 --- a/docker/test/integration/resolver/Dockerfile +++ b/docker/test/integration/resolver/Dockerfile @@ -1,4 +1,4 @@ -# docker build -t clickhouse/python-bottle . +# docker build -t altinityinfra/python-bottle . 
# Helper docker container to run python bottle apps FROM python:3 diff --git a/docker/test/integration/runner/Dockerfile b/docker/test/integration/runner/Dockerfile index 8297a7100d15..4fad08e7d5b0 100644 --- a/docker/test/integration/runner/Dockerfile +++ b/docker/test/integration/runner/Dockerfile @@ -1,4 +1,4 @@ -# docker build -t clickhouse/integration-tests-runner . +# docker build -t altinityinfra/integration-tests-runner . FROM ubuntu:22.04 # ARG for quick switch to a given ubuntu mirror diff --git a/docker/test/integration/s3_proxy/Dockerfile b/docker/test/integration/s3_proxy/Dockerfile index 5858218e4e4c..df8d8f00f216 100644 --- a/docker/test/integration/s3_proxy/Dockerfile +++ b/docker/test/integration/s3_proxy/Dockerfile @@ -1,4 +1,4 @@ -# docker build -t clickhouse/s3-proxy . +# docker build -t altinityinfra/s3-proxy . FROM nginx:alpine COPY run.sh /run.sh diff --git a/docker/test/sqlancer/Dockerfile b/docker/test/sqlancer/Dockerfile index 82fc25983972..412865be803e 100644 --- a/docker/test/sqlancer/Dockerfile +++ b/docker/test/sqlancer/Dockerfile @@ -1,4 +1,4 @@ -# docker build -t clickhouse/sqlancer-test . +# docker build -t altinityinfra/sqlancer-test . FROM ubuntu:22.04 # ARG for quick switch to a given ubuntu mirror diff --git a/docker/test/style/Dockerfile b/docker/test/style/Dockerfile index 5d53d03606fe..8f594cec11b2 100644 --- a/docker/test/style/Dockerfile +++ b/docker/test/style/Dockerfile @@ -1,4 +1,4 @@ -# docker build -t clickhouse/style-test . +# docker build -t altinityinfra/style-test . FROM ubuntu:22.04 ARG ACT_VERSION=0.2.33 ARG ACTIONLINT_VERSION=1.6.22 diff --git a/docker/test/unit/Dockerfile b/docker/test/unit/Dockerfile index 804441e1d4d7..1106bda38c81 100644 --- a/docker/test/unit/Dockerfile +++ b/docker/test/unit/Dockerfile @@ -1,5 +1,5 @@ # rebuild in #33610 -# docker build -t clickhouse/unit-test . +# docker build -t altinityinfra/unit-test . ARG FROM_TAG=latest FROM altinityinfra/stateless-test:$FROM_TAG diff --git a/docker/test/upgrade/Dockerfile b/docker/test/upgrade/Dockerfile index 22e0486e890b..c66868c2a046 100644 --- a/docker/test/upgrade/Dockerfile +++ b/docker/test/upgrade/Dockerfile @@ -1,5 +1,5 @@ # rebuild in #33610 -# docker build -t clickhouse/upgrade-check . +# docker build -t altinityinfra/upgrade-check . ARG FROM_TAG=latest FROM altinityinfra/stateful-test:$FROM_TAG diff --git a/docker/test/util/Dockerfile b/docker/test/util/Dockerfile index 5446adf37934..91316346069d 100644 --- a/docker/test/util/Dockerfile +++ b/docker/test/util/Dockerfile @@ -1,4 +1,4 @@ -# docker build -t clickhouse/test-util . +# docker build -t altinityinfra/test-util . 
FROM ubuntu:22.04 # ARG for quick switch to a given ubuntu mirror diff --git a/tests/ci/ast_fuzzer_check.py b/tests/ci/ast_fuzzer_check.py index b88a9476a6d9..1757dc0ec2db 100644 --- a/tests/ci/ast_fuzzer_check.py +++ b/tests/ci/ast_fuzzer_check.py @@ -15,7 +15,7 @@ from stopwatch import Stopwatch from tee_popen import TeePopen -IMAGE_NAME = "clickhouse/fuzzer" +IMAGE_NAME = "altinityinfra/fuzzer" def get_run_command( diff --git a/tests/ci/ci.py b/tests/ci/ci.py index 0f57d3ba9911..bba20f4806dd 100644 --- a/tests/ci/ci.py +++ b/tests/ci/ci.py @@ -2111,7 +2111,7 @@ def main() -> int: job_report.check_name or _get_ext_check_name(args.job_name), ) ch_helper.insert_events_into( - db="default", table="checks", events=prepared_events + db="gh-data", table="checks", events=prepared_events ) else: # no job report diff --git a/tests/ci/ci_config.py b/tests/ci/ci_config.py index 3c6ea5cf0281..9cd7b32feaf7 100644 --- a/tests/ci/ci_config.py +++ b/tests/ci/ci_config.py @@ -397,7 +397,7 @@ class TestConfig: ast_fuzzer_check_digest = DigestConfig( # include_paths=["./tests/ci/ast_fuzzer_check.py"], # exclude_files=[".md"], - # docker=["clickhouse/fuzzer"], + # docker=["altinityinfra/fuzzer"], ) unit_check_digest = DigestConfig( include_paths=["./tests/ci/unit_tests_check.py"], @@ -415,7 +415,7 @@ class TestConfig: sqllancer_check_digest = DigestConfig( # include_paths=["./tests/ci/sqlancer_check.py"], # exclude_files=[".md"], - # docker=["clickhouse/sqlancer-test"], + # docker=["altinityinfra/sqlancer-test"], ) sqllogic_check_digest = DigestConfig( include_paths=["./tests/ci/sqllogic_test.py"], diff --git a/tests/ci/clickbench.py b/tests/ci/clickbench.py index a1988abb1f54..3d995289c530 100644 --- a/tests/ci/clickbench.py +++ b/tests/ci/clickbench.py @@ -22,7 +22,7 @@ def get_image_name() -> str: - return "clickhouse/clickbench" + return "altinityinfra/clickbench" def get_run_command( diff --git a/tests/ci/compatibility_check.py b/tests/ci/compatibility_check.py index e7fee827320d..015a31b5347e 100644 --- a/tests/ci/compatibility_check.py +++ b/tests/ci/compatibility_check.py @@ -16,8 +16,8 @@ from report import FAILURE, SUCCESS, JobReport, TestResult, TestResults from stopwatch import Stopwatch -IMAGE_UBUNTU = "clickhouse/test-old-ubuntu" -IMAGE_CENTOS = "clickhouse/test-old-centos" +IMAGE_UBUNTU = "altinityinfra/test-old-ubuntu" +IMAGE_CENTOS = "altinityinfra/test-old-centos" DOWNLOAD_RETRIES_COUNT = 5 diff --git a/tests/ci/docker_images_check.py b/tests/ci/docker_images_check.py index 786a529e0a98..452fff2b7911 100644 --- a/tests/ci/docker_images_check.py +++ b/tests/ci/docker_images_check.py @@ -273,7 +273,7 @@ def main(): NAME, ) ch_helper = ClickHouseHelper() - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) if status == FAILURE: sys.exit(1) diff --git a/tests/ci/docker_manifests_merge.py b/tests/ci/docker_manifests_merge.py index 6c6a88330eab..ec25e4cb9570 100644 --- a/tests/ci/docker_manifests_merge.py +++ b/tests/ci/docker_manifests_merge.py @@ -210,7 +210,7 @@ def main(): NAME, ) ch_helper = ClickHouseHelper() - ch_helper.insert_events_into(db="default", table="checks", events=prepared_events) + ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events) if status == FAILURE: sys.exit(1) diff --git a/tests/ci/docker_server.py b/tests/ci/docker_server.py index 230f3e561108..b1a99d40677d 100644 --- a/tests/ci/docker_server.py +++ 
b/tests/ci/docker_server.py @@ -346,10 +346,10 @@ def main(): assert not args.image_path and not args.image_repo if "server image" in args.check_name: image_path = "docker/server" - image_repo = "clickhouse/clickhouse-server" + image_repo = "altinityinfra/clickhouse-server" elif "keeper image" in args.check_name: image_path = "docker/keeper" - image_repo = "clickhouse/clickhouse-keeper" + image_repo = "altinityinfra/clickhouse-keeper" else: assert False, "Invalid --check-name" else: diff --git a/tests/ci/docs_check.py b/tests/ci/docs_check.py index 6bd4ef496755..b850c28ab2b1 100644 --- a/tests/ci/docs_check.py +++ b/tests/ci/docs_check.py @@ -61,7 +61,7 @@ def main(): elif args.force: logging.info("Check the docs because of force flag") - docker_image = pull_image(get_docker_image("clickhouse/docs-builder")) + docker_image = pull_image(get_docker_image("altinityinfra/docs-builder")) test_output = temp_path / "docs_check_log" test_output.mkdir(parents=True, exist_ok=True) diff --git a/tests/ci/fast_test_check.py b/tests/ci/fast_test_check.py index 383f5b340c78..481359e03a53 100644 --- a/tests/ci/fast_test_check.py +++ b/tests/ci/fast_test_check.py @@ -110,7 +110,7 @@ def main(): pr_info = PRInfo() - docker_image = pull_image(get_docker_image("clickhouse/fasttest")) + docker_image = pull_image(get_docker_image("altinityinfra/fasttest")) workspace = temp_path / "fasttest-workspace" workspace.mkdir(parents=True, exist_ok=True) diff --git a/tests/ci/functional_test_check.py b/tests/ci/functional_test_check.py index a0c977ea25fb..8f1ffb05ac3c 100644 --- a/tests/ci/functional_test_check.py +++ b/tests/ci/functional_test_check.py @@ -50,9 +50,9 @@ def get_additional_envs( def get_image_name(check_name: str) -> str: if "stateless" in check_name.lower(): - return "clickhouse/stateless-test" + return "altinityinfra/stateless-test" if "stateful" in check_name.lower(): - return "clickhouse/stateful-test" + return "altinityinfra/stateful-test" raise ValueError(f"Cannot deduce image name based on check name {check_name}") diff --git a/tests/ci/install_check.py b/tests/ci/install_check.py index 71e2198f5331..9c3455df9812 100644 --- a/tests/ci/install_check.py +++ b/tests/ci/install_check.py @@ -21,8 +21,8 @@ from ci_utils import set_job_timeout -RPM_IMAGE = "clickhouse/install-rpm-test" -DEB_IMAGE = "clickhouse/install-deb-test" +RPM_IMAGE = "altinityinfra/install-rpm-test" +DEB_IMAGE = "altinityinfra/install-deb-test" TEMP_PATH = Path(TEMP) LOGS_PATH = TEMP_PATH / "tests_logs" diff --git a/tests/ci/integration_tests_runner.py b/tests/ci/integration_tests_runner.py index ead30bacc887..79ec911612ab 100755 --- a/tests/ci/integration_tests_runner.py +++ b/tests/ci/integration_tests_runner.py @@ -503,7 +503,7 @@ def _get_runner_image_cmd(self, repo_path): "--docker-image-version", ): for img in IMAGES: - if img == "clickhouse/integration-tests-runner": + if img == "altinityinfra/integration-tests-runner": runner_version = self.get_image_version(img) logging.info( "Can run with custom docker image version %s", runner_version diff --git a/tests/ci/jepsen_check.py b/tests/ci/jepsen_check.py index 011ecff635ea..47f063ee4dc1 100644 --- a/tests/ci/jepsen_check.py +++ b/tests/ci/jepsen_check.py @@ -30,10 +30,10 @@ KEEPER_DESIRED_INSTANCE_COUNT = 3 SERVER_DESIRED_INSTANCE_COUNT = 4 -KEEPER_IMAGE_NAME = "clickhouse/keeper-jepsen-test" +KEEPER_IMAGE_NAME = "altinityinfra/keeper-jepsen-test" KEEPER_CHECK_NAME = "ClickHouse Keeper Jepsen" -SERVER_IMAGE_NAME = "clickhouse/server-jepsen-test" +SERVER_IMAGE_NAME = 
"altinityinfra/server-jepsen-test" SERVER_CHECK_NAME = "ClickHouse Server Jepsen" diff --git a/tests/ci/libfuzzer_test_check.py b/tests/ci/libfuzzer_test_check.py index 4bb390109784..5c630710f8df 100644 --- a/tests/ci/libfuzzer_test_check.py +++ b/tests/ci/libfuzzer_test_check.py @@ -112,7 +112,7 @@ def main(): run_by_hash_num = 0 run_by_hash_total = 0 - docker_image = pull_image(get_docker_image("clickhouse/libfuzzer")) + docker_image = pull_image(get_docker_image("altinityinfra/libfuzzer")) fuzzers_path = temp_path / "fuzzers" fuzzers_path.mkdir(parents=True, exist_ok=True) diff --git a/tests/ci/performance_comparison_check.py b/tests/ci/performance_comparison_check.py index 0c779b515bdf..685007257cc6 100644 --- a/tests/ci/performance_comparison_check.py +++ b/tests/ci/performance_comparison_check.py @@ -31,7 +31,7 @@ from stopwatch import Stopwatch from tee_popen import TeePopen -IMAGE_NAME = "clickhouse/performance-comparison" +IMAGE_NAME = "altinityinfra/performance-comparison" def get_run_command( @@ -100,7 +100,7 @@ def main(): if pr_info.number == 0: pr_link = commit.html_url else: - pr_link = f"https://github.com/ClickHouse/ClickHouse/pull/{pr_info.number}" + pr_link = f"https://github.com/altinityinfra/altinityinfra/pull/{pr_info.number}" docker_env += ( f' -e CHPC_ADD_REPORT_LINKS="' @@ -188,7 +188,7 @@ def main(): def too_many_slow(msg): match = re.search(r"(|.* )(\d+) slower.*", msg) # This threshold should be synchronized with the value in - # https://github.com/ClickHouse/ClickHouse/blob/master/docker/test/performance-comparison/report.py#L629 + # https://github.com/altinityinfra/altinityinfra/blob/master/docker/test/performance-comparison/report.py#L629 threshold = 5 return int(match.group(2).strip()) > threshold if match else False diff --git a/tests/ci/sqlancer_check.py b/tests/ci/sqlancer_check.py index 9d33c4805985..56cc1005ffc4 100644 --- a/tests/ci/sqlancer_check.py +++ b/tests/ci/sqlancer_check.py @@ -13,7 +13,7 @@ from stopwatch import Stopwatch from tee_popen import TeePopen -IMAGE_NAME = "clickhouse/sqlancer-test" +IMAGE_NAME = "altinityinfra/sqlancer-test" def get_run_command(download_url: str, workspace_path: Path, image: DockerImage) -> str: diff --git a/tests/ci/sqllogic_test.py b/tests/ci/sqllogic_test.py index 6ea6fa19d91d..d9fe614e8e32 100755 --- a/tests/ci/sqllogic_test.py +++ b/tests/ci/sqllogic_test.py @@ -26,7 +26,7 @@ from tee_popen import TeePopen NO_CHANGES_MSG = "Nothing to run" -IMAGE_NAME = "clickhouse/sqllogic-test" +IMAGE_NAME = "altinityinfra/sqllogic-test" def get_run_command( diff --git a/tests/ci/sqltest.py b/tests/ci/sqltest.py index c8c2adbbd564..b893ddb22635 100644 --- a/tests/ci/sqltest.py +++ b/tests/ci/sqltest.py @@ -13,7 +13,7 @@ from report import SUCCESS, JobReport, TestResult from stopwatch import Stopwatch -IMAGE_NAME = "clickhouse/sqltest" +IMAGE_NAME = "altinityinfra/sqltest" def get_run_command(pr_number, sha, download_url, workspace_path, image): diff --git a/tests/ci/stress_check.py b/tests/ci/stress_check.py index 0f2c0cdd222d..6be914dc707b 100644 --- a/tests/ci/stress_check.py +++ b/tests/ci/stress_check.py @@ -194,4 +194,4 @@ def run_stress_test(docker_image_name: str) -> None: if __name__ == "__main__": - run_stress_test("clickhouse/stress-test") + run_stress_test("altinityinfra/stress-test") diff --git a/tests/ci/style_check.py b/tests/ci/style_check.py index 4c78effa98dc..9703e70a32e7 100644 --- a/tests/ci/style_check.py +++ b/tests/ci/style_check.py @@ -154,7 +154,7 @@ def main(): run_shell_check = any(is_shell(file) 
for file in pr_info.changed_files) run_python_check = any(is_python(file) for file in pr_info.changed_files) - IMAGE_NAME = "clickhouse/style-test" + IMAGE_NAME = "altinityinfra/style-test" image = pull_image(get_docker_image(IMAGE_NAME)) docker_command = ( f"docker run -u $(id -u ${{USER}}):$(id -g ${{USER}}) --cap-add=SYS_PTRACE " diff --git a/tests/ci/unit_tests_check.py b/tests/ci/unit_tests_check.py index b66a4312657b..bc1000709178 100644 --- a/tests/ci/unit_tests_check.py +++ b/tests/ci/unit_tests_check.py @@ -15,7 +15,7 @@ from stopwatch import Stopwatch from tee_popen import TeePopen -IMAGE_NAME = "clickhouse/unit-test" +IMAGE_NAME = "altinityinfra/unit-test" def get_test_name(line): diff --git a/tests/ci/upgrade_check.py b/tests/ci/upgrade_check.py index 83b6f9e299fd..f84451cad81d 100644 --- a/tests/ci/upgrade_check.py +++ b/tests/ci/upgrade_check.py @@ -1,4 +1,4 @@ import stress_check if __name__ == "__main__": - stress_check.run_stress_test("clickhouse/upgrade-check") + stress_check.run_stress_test("altinityinfra/upgrade-check") diff --git a/tests/integration/compose/docker_compose_clickhouse.yml b/tests/integration/compose/docker_compose_clickhouse.yml index fdd124ede91a..ff4523c5b0d7 100644 --- a/tests/integration/compose/docker_compose_clickhouse.yml +++ b/tests/integration/compose/docker_compose_clickhouse.yml @@ -2,4 +2,4 @@ version: '2.3' # Used to pre-pull images with docker-compose services: clickhouse1: - image: clickhouse/integration-test + image: altinityinfra/integration-test diff --git a/tests/integration/compose/docker_compose_dotnet_client.yml b/tests/integration/compose/docker_compose_dotnet_client.yml index b63dac51522c..e5746fa209fb 100644 --- a/tests/integration/compose/docker_compose_dotnet_client.yml +++ b/tests/integration/compose/docker_compose_dotnet_client.yml @@ -1,6 +1,6 @@ version: '2.3' services: dotnet1: - image: clickhouse/dotnet-client:${DOCKER_DOTNET_CLIENT_TAG:-latest} + image: altinityinfra/dotnet-client:${DOCKER_DOTNET_CLIENT_TAG:-latest} # to keep container running command: sleep infinity diff --git a/tests/integration/compose/docker_compose_jdbc_bridge.yml b/tests/integration/compose/docker_compose_jdbc_bridge.yml index b3686adc21c4..3f8edfb88268 100644 --- a/tests/integration/compose/docker_compose_jdbc_bridge.yml +++ b/tests/integration/compose/docker_compose_jdbc_bridge.yml @@ -1,6 +1,7 @@ version: '2.3' services: bridge1: + # NOTE(vnemkov): not produced by CI/CD, so must not be replaced with altinityinfra/jdbc-bridge image: clickhouse/jdbc-bridge command: | /bin/bash -c 'cat << EOF > config/datasources/self.json @@ -24,4 +25,4 @@ services: volumes: - type: ${JDBC_BRIDGE_FS:-tmpfs} source: ${JDBC_BRIDGE_LOGS:-} - target: /app/logs \ No newline at end of file + target: /app/logs diff --git a/tests/integration/compose/docker_compose_keeper.yml b/tests/integration/compose/docker_compose_keeper.yml index 91010c4aa83d..fba5bc728f88 100644 --- a/tests/integration/compose/docker_compose_keeper.yml +++ b/tests/integration/compose/docker_compose_keeper.yml @@ -1,7 +1,7 @@ version: '2.3' services: zoo1: - image: ${image:-clickhouse/integration-test} + image: ${image:-altinityinfra/integration-test} restart: always user: ${user:-} volumes: @@ -37,7 +37,7 @@ services: - inet6 - rotate zoo2: - image: ${image:-clickhouse/integration-test} + image: ${image:-altinityinfra/integration-test} restart: always user: ${user:-} volumes: @@ -73,7 +73,7 @@ services: - inet6 - rotate zoo3: - image: ${image:-clickhouse/integration-test} + image: 
${image:-altinityinfra/integration-test} restart: always user: ${user:-} volumes: diff --git a/tests/integration/compose/docker_compose_kerberized_hdfs.yml b/tests/integration/compose/docker_compose_kerberized_hdfs.yml index e955a14eb3df..58d321177c0d 100644 --- a/tests/integration/compose/docker_compose_kerberized_hdfs.yml +++ b/tests/integration/compose/docker_compose_kerberized_hdfs.yml @@ -4,7 +4,7 @@ services: kerberizedhdfs1: cap_add: - DAC_READ_SEARCH - image: clickhouse/kerberized-hadoop:${DOCKER_KERBERIZED_HADOOP_TAG:-latest} + image: altinityinfra/kerberized-hadoop:${DOCKER_KERBERIZED_HADOOP_TAG:-latest} hostname: kerberizedhdfs1 restart: always volumes: @@ -24,7 +24,7 @@ services: net.ipv4.ip_local_port_range: '55000 65535' hdfskerberos: - image: clickhouse/kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG:-latest} + image: altinityinfra/kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG:-latest} hostname: hdfskerberos volumes: - ${KERBERIZED_HDFS_DIR}/secrets:/tmp/keytab diff --git a/tests/integration/compose/docker_compose_kerberized_kafka.yml b/tests/integration/compose/docker_compose_kerberized_kafka.yml index 49d4c1db90fe..7ae1011b1876 100644 --- a/tests/integration/compose/docker_compose_kerberized_kafka.yml +++ b/tests/integration/compose/docker_compose_kerberized_kafka.yml @@ -52,7 +52,7 @@ services: net.ipv4.ip_local_port_range: '55000 65535' kafka_kerberos: - image: clickhouse/kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG:-latest} + image: altinityinfra/kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG:-latest} hostname: kafka_kerberos volumes: - ${KERBERIZED_KAFKA_DIR}/secrets:/tmp/keytab diff --git a/tests/integration/compose/docker_compose_kerberos_kdc.yml b/tests/integration/compose/docker_compose_kerberos_kdc.yml index 3ce9a6df1fb6..062bdace6e9c 100644 --- a/tests/integration/compose/docker_compose_kerberos_kdc.yml +++ b/tests/integration/compose/docker_compose_kerberos_kdc.yml @@ -2,7 +2,7 @@ version: '2.3' services: kerberoskdc: - image: clickhouse/kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG:-latest} + image: altinityinfra/kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG:-latest} hostname: kerberoskdc volumes: - ${KERBEROS_KDC_DIR}/secrets:/tmp/keytab diff --git a/tests/integration/compose/docker_compose_minio.yml b/tests/integration/compose/docker_compose_minio.yml index 4255a529f6d1..53031fb1d94e 100644 --- a/tests/integration/compose/docker_compose_minio.yml +++ b/tests/integration/compose/docker_compose_minio.yml @@ -19,14 +19,14 @@ services: # HTTP proxies for Minio. proxy1: - image: clickhouse/s3-proxy + image: altinityinfra/s3-proxy expose: - "8080" # Redirect proxy port - "80" # Reverse proxy port - "443" # Reverse proxy port (secure) proxy2: - image: clickhouse/s3-proxy + image: altinityinfra/s3-proxy expose: - "8080" - "80" @@ -34,7 +34,7 @@ services: # Empty container to run proxy resolver. 
resolver: - image: clickhouse/python-bottle:${DOCKER_PYTHON_BOTTLE_TAG:-latest} + image: altinityinfra/python-bottle:${DOCKER_PYTHON_BOTTLE_TAG:-latest} expose: - "8080" tty: true diff --git a/tests/integration/compose/docker_compose_mysql_golang_client.yml b/tests/integration/compose/docker_compose_mysql_golang_client.yml index 56cc04105740..09154b584244 100644 --- a/tests/integration/compose/docker_compose_mysql_golang_client.yml +++ b/tests/integration/compose/docker_compose_mysql_golang_client.yml @@ -1,6 +1,6 @@ version: '2.3' services: golang1: - image: clickhouse/mysql-golang-client:${DOCKER_MYSQL_GOLANG_CLIENT_TAG:-latest} + image: altinityinfra/mysql-golang-client:${DOCKER_MYSQL_GOLANG_CLIENT_TAG:-latest} # to keep container running command: sleep infinity diff --git a/tests/integration/compose/docker_compose_mysql_java_client.yml b/tests/integration/compose/docker_compose_mysql_java_client.yml index 529974dd4bfe..ffda321958cf 100644 --- a/tests/integration/compose/docker_compose_mysql_java_client.yml +++ b/tests/integration/compose/docker_compose_mysql_java_client.yml @@ -1,6 +1,6 @@ version: '2.3' services: java1: - image: clickhouse/mysql-java-client:${DOCKER_MYSQL_JAVA_CLIENT_TAG:-latest} + image: altinityinfra/mysql-java-client:${DOCKER_MYSQL_JAVA_CLIENT_TAG:-latest} # to keep container running command: sleep 1d diff --git a/tests/integration/compose/docker_compose_mysql_js_client.yml b/tests/integration/compose/docker_compose_mysql_js_client.yml index 90939449c5f3..b46eb2706c47 100644 --- a/tests/integration/compose/docker_compose_mysql_js_client.yml +++ b/tests/integration/compose/docker_compose_mysql_js_client.yml @@ -1,6 +1,6 @@ version: '2.3' services: mysqljs1: - image: clickhouse/mysql-js-client:${DOCKER_MYSQL_JS_CLIENT_TAG:-latest} + image: altinityinfra/mysql-js-client:${DOCKER_MYSQL_JS_CLIENT_TAG:-latest} # to keep container running command: sleep infinity diff --git a/tests/integration/compose/docker_compose_mysql_php_client.yml b/tests/integration/compose/docker_compose_mysql_php_client.yml index 408b8ff089a9..662783a00a1f 100644 --- a/tests/integration/compose/docker_compose_mysql_php_client.yml +++ b/tests/integration/compose/docker_compose_mysql_php_client.yml @@ -1,6 +1,6 @@ version: '2.3' services: php1: - image: clickhouse/mysql-php-client:${DOCKER_MYSQL_PHP_CLIENT_TAG:-latest} + image: altinityinfra/mysql-php-client:${DOCKER_MYSQL_PHP_CLIENT_TAG:-latest} # to keep container running command: sleep infinity diff --git a/tests/integration/compose/docker_compose_nginx.yml b/tests/integration/compose/docker_compose_nginx.yml index 38d2a6d84c84..9d4403f283fb 100644 --- a/tests/integration/compose/docker_compose_nginx.yml +++ b/tests/integration/compose/docker_compose_nginx.yml @@ -5,7 +5,7 @@ services: # Files will be put into /usr/share/nginx/files. 
nginx: - image: clickhouse/nginx-dav:${DOCKER_NGINX_DAV_TAG:-latest} + image: altinityinfra/nginx-dav:${DOCKER_NGINX_DAV_TAG:-latest} restart: always ports: - 80:80 diff --git a/tests/integration/compose/docker_compose_postgresql_java_client.yml b/tests/integration/compose/docker_compose_postgresql_java_client.yml index 904bfffdfd5b..5c8673ae3eeb 100644 --- a/tests/integration/compose/docker_compose_postgresql_java_client.yml +++ b/tests/integration/compose/docker_compose_postgresql_java_client.yml @@ -1,6 +1,6 @@ version: '2.2' services: java: - image: clickhouse/postgresql-java-client:${DOCKER_POSTGRESQL_JAVA_CLIENT_TAG:-latest} + image: altinityinfra/postgresql-java-client:${DOCKER_POSTGRESQL_JAVA_CLIENT_TAG:-latest} # to keep container running command: sleep infinity diff --git a/tests/integration/helpers/cluster.py b/tests/integration/helpers/cluster.py index 1f29dfe8eee8..f65f88bccb84 100644 --- a/tests/integration/helpers/cluster.py +++ b/tests/integration/helpers/cluster.py @@ -994,7 +994,7 @@ def setup_keeper_cmd(self, instance, env_variables, docker_compose_yml_dir): env_variables["keeper_binary"] = binary_path env_variables["keeper_cmd_prefix"] = keeper_cmd_prefix - env_variables["image"] = "clickhouse/integration-test:" + self.docker_base_tag + env_variables["image"] = "altinityinfra/integration-test:" + self.docker_base_tag env_variables["user"] = str(os.getuid()) env_variables["keeper_fs"] = "bind" for i in range(1, 4): @@ -1605,7 +1605,7 @@ def add_instance( hostname=None, env_variables=None, instance_env_variables=False, - image="clickhouse/integration-test", + image="altinityinfra/integration-test", tag=None, stay_alive=False, ipv4_address=None, @@ -3278,7 +3278,7 @@ def __init__( hostname=None, env_variables=None, instance_env_variables=False, - image="clickhouse/integration-test", + image="altinityinfra/integration-test", tag="latest", stay_alive=False, ipv4_address=None, diff --git a/tests/integration/helpers/network.py b/tests/integration/helpers/network.py index e6e79dc79478..3689bb409d15 100644 --- a/tests/integration/helpers/network.py +++ b/tests/integration/helpers/network.py @@ -243,7 +243,7 @@ def __init__( def _ensure_container(self): if self._container is None or self._container_expire_time <= time.time(): - image_name = "clickhouse/integration-helper:" + os.getenv( + image_name = "altinityinfra/integration-helper:" + os.getenv( "DOCKER_HELPER_TAG", "latest" ) for i in range(5): diff --git a/tests/integration/runner b/tests/integration/runner index 97d06c2b78cd..95a498c8dd92 100755 --- a/tests/integration/runner +++ b/tests/integration/runner @@ -29,7 +29,7 @@ CONFIG_DIR_IN_REPO = "programs/server" INTEGRATION_DIR_IN_REPO = "tests/integration" UTILS_DIR_IN_REPO = "utils" -DIND_INTEGRATION_TESTS_IMAGE_NAME = "clickhouse/integration-tests-runner" +DIND_INTEGRATION_TESTS_IMAGE_NAME = "altinityinfra/integration-tests-runner" def check_args_and_update_paths(args): From 1bc0e6ee583d49a472fd01f0e9960a11bf240c6c Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 15 May 2024 00:17:12 -0700 Subject: [PATCH 17/79] temporary workaround to get all docker images built --- .github/workflows/release_branches.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 639215ae4429..4457a7b2aa31 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -54,7 +54,7 @@ jobs: id: runconfig run: | echo "::group::configure CI run" - python3 
"$GITHUB_WORKSPACE/tests/ci/ci.py" --configure --outfile ${{ runner.temp }}/ci_run_data.json + python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --docker-digest-or-latest --configure --outfile ${{ runner.temp }}/ci_run_data.json echo "::endgroup::" echo "::group::CI run configure results" python3 -m json.tool ${{ runner.temp }}/ci_run_data.json From 5d1ec8ed976b5dfc00e38bf403a456eed4026d89 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 15 May 2024 03:38:35 -0700 Subject: [PATCH 18/79] remove scaffolding --- .github/workflows/release_branches.yml | 2 +- docker/images.json | 6 ++---- docker/packager/binary-builder/Dockerfile | 4 ++-- docker/test/libfuzzer/Dockerfile | 2 +- docker/test/performance-comparison/Dockerfile | 2 +- 5 files changed, 7 insertions(+), 9 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 4457a7b2aa31..639215ae4429 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -54,7 +54,7 @@ jobs: id: runconfig run: | echo "::group::configure CI run" - python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --docker-digest-or-latest --configure --outfile ${{ runner.temp }}/ci_run_data.json + python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --configure --outfile ${{ runner.temp }}/ci_run_data.json echo "::endgroup::" echo "::group::CI run configure results" python3 -m json.tool ${{ runner.temp }}/ci_run_data.json diff --git a/docker/images.json b/docker/images.json index 96086047ea11..03da98789250 100644 --- a/docker/images.json +++ b/docker/images.json @@ -5,9 +5,7 @@ }, "docker/packager/cctools": { "name": "altinityinfra/cctools", - "dependent": [ - "docker/packager/binary-builder" - ] + "dependent": [] }, "docker/test/compatibility/centos": { "name": "altinityinfra/test-old-centos", @@ -73,7 +71,7 @@ "docker/test/fasttest": { "name": "altinityinfra/fasttest", "dependent": [ - "docker/packager/cctools" + "docker/packager/binary-builder" ] }, "docker/test/style": { diff --git a/docker/packager/binary-builder/Dockerfile b/docker/packager/binary-builder/Dockerfile index faa0dff49e13..2315ef37bd23 100644 --- a/docker/packager/binary-builder/Dockerfile +++ b/docker/packager/binary-builder/Dockerfile @@ -5,8 +5,8 @@ ENV CC=clang-${LLVM_VERSION} ENV CXX=clang++-${LLVM_VERSION} # If the cctools is updated, then first build it in the CI, then update here in a different commit -# TODO: (mtkachenko) change tag here. Decide what tag to use. 
-COPY --from=altinityinfra/cctools /cctools /cctools +# NOTE: a9e509c9bbc0 created from commit 1bc0e6ee583d49a472fd01f0e9960a11bf240c6c +COPY --from=altinityinfra/cctools:a9e509c9bbc0 /cctools /cctools # Rust toolchain and libraries ENV RUSTUP_HOME=/rust/rustup diff --git a/docker/test/libfuzzer/Dockerfile b/docker/test/libfuzzer/Dockerfile index 3581d8605653..09578af226d7 100644 --- a/docker/test/libfuzzer/Dockerfile +++ b/docker/test/libfuzzer/Dockerfile @@ -39,5 +39,5 @@ SHELL ["/bin/bash", "-c"] CMD set -o pipefail \ && timeout -s 9 1h /run_libfuzzer.py 2>&1 | ts "$(printf '%%Y-%%m-%%d %%H:%%M:%%S\t')" | tee main.log -# docker run --network=host --volume :/workspace -e PR_TO_TEST=<> -e SHA_TO_TEST=<> clickhouse/libfuzzer +# docker run --network=host --volume :/workspace -e PR_TO_TEST=<> -e SHA_TO_TEST=<> altinityinfra/libfuzzer diff --git a/docker/test/performance-comparison/Dockerfile b/docker/test/performance-comparison/Dockerfile index a27e16587ff0..aa31bd7b3ba4 100644 --- a/docker/test/performance-comparison/Dockerfile +++ b/docker/test/performance-comparison/Dockerfile @@ -43,4 +43,4 @@ COPY run.sh / CMD ["bash", "/run.sh"] -# docker run --network=host --volume :/workspace --volume=:/output -e PR_TO_TEST=<> -e SHA_TO_TEST=<> clickhouse/performance-comparison +# docker run --network=host --volume :/workspace --volume=:/output -e PR_TO_TEST=<> -e SHA_TO_TEST=<> altinityinfra/performance-comparison From 9c2d698912c5a57ea04d6da91b57619ee76e7565 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Thu, 16 May 2024 00:24:41 -0700 Subject: [PATCH 19/79] update versioning scripts --- cmake/version.cmake | 2 +- tests/ci/version_helper.py | 36 ++++++++++++++++++++++++++++-------- utils/clickhouse-docker | 6 +++--- 3 files changed, 32 insertions(+), 12 deletions(-) diff --git a/cmake/version.cmake b/cmake/version.cmake index 9ca21556f4d4..06fb783b88f2 100644 --- a/cmake/version.cmake +++ b/cmake/version.cmake @@ -19,5 +19,5 @@ set (VERSION_STRING_SHORT "${VERSION_MAJOR}.${VERSION_MINOR}") math (EXPR VERSION_INTEGER "${VERSION_PATCH} + ${VERSION_MINOR}*1000 + ${VERSION_MAJOR}*1000000") if(CLICKHOUSE_OFFICIAL_BUILD) - set(VERSION_OFFICIAL " (official build)") + set(VERSION_OFFICIAL " (altinity build)") endif() diff --git a/tests/ci/version_helper.py b/tests/ci/version_helper.py index 30b0c2d96be2..d609fb408402 100755 --- a/tests/ci/version_helper.py +++ b/tests/ci/version_helper.py @@ -48,6 +48,7 @@ def __init__( revision: Union[int, str], git: Optional[Git], tweak: Optional[str] = None, + flavour: Optional[str] = None, ): self._major = int(major) self._minor = int(minor) @@ -135,9 +136,12 @@ def description(self) -> str: @property def string(self): - return ".".join( + version_as_string = ".".join( (str(self.major), str(self.minor), str(self.patch), str(self.tweak)) ) + if self._flavour: + version_as_string = f"{version_as_string}.{self._flavour}" + return version_as_string @property def is_lts(self) -> bool: @@ -163,7 +167,10 @@ def with_description(self, version_type): if version_type not in VersionType.VALID: raise ValueError(f"version type {version_type} not in {VersionType.VALID}") self._description = version_type - self._describe = f"v{self.string}-{version_type}" + if version_type == self._flavour: + self._describe = f"v{self.string}" + else: + self._describe = f"v{self.string}-{version_type}" def __eq__(self, other: Any) -> bool: if not isinstance(self, type(other)): @@ -206,16 +213,17 @@ def __repr__(self): class VersionType: LTS = "lts" PRESTABLE = "prestable" - STABLE = "stable" + STABLE = 
"altinitystable" TESTING = "testing" VALID = (TESTING, PRESTABLE, STABLE, LTS) def validate_version(version: str) -> None: + # NOTE(vnemkov): minor but imporant fixes, so versions with 'flavour' are threated as valid (e.g. 22.8.8.4.altinitystable) parts = version.split(".") - if len(parts) != 4: + if len(parts) < 4: raise ValueError(f"{version} does not contain 4 parts") - for part in parts: + for part in parts[:4]: int(part) @@ -255,6 +263,9 @@ def get_version_from_repo( versions["patch"], versions["revision"], git, + # Explicitly use tweak value from version file + tweak=versions.get("tweak", versions["revision"]), + flavour=versions.get("flavour", None) ) @@ -262,8 +273,17 @@ def get_version_from_string( version: str, git: Optional[Git] = None ) -> ClickHouseVersion: validate_version(version) - parts = version.split(".") - return ClickHouseVersion(parts[0], parts[1], parts[2], -1, git, parts[3]) + # dict for simple handling of missing parts with parts.get(index, default) + parts = dict(enumerate(version.split("."))) + return ClickHouseVersion( + parts[0], + parts[1], + parts[2], + -1, + git, + parts.get(3, None), + parts.get(4, None) + ) def get_version_from_tag(tag: str) -> ClickHouseVersion: @@ -371,7 +391,7 @@ def update_contributors( cfd.write(content) -def update_version_local(version, version_type="testing"): +def update_version_local(version : ClickHouseVersion, version_type="testing"): update_contributors() version.with_description(version_type) update_cmake_version(version) diff --git a/utils/clickhouse-docker b/utils/clickhouse-docker index cfe515f1de54..62d980f2f407 100755 --- a/utils/clickhouse-docker +++ b/utils/clickhouse-docker @@ -24,13 +24,13 @@ param="$1" if [ "${param}" = "list" ] then # https://stackoverflow.com/a/39454426/1555175 - wget -nv https://registry.hub.docker.com/v1/repositories/clickhouse/clickhouse-server/tags -O - | sed -e 's/[][]//g' -e 's/"//g' -e 's/ //g' | tr '}' '\n' | awk -F: '{print $3}' + wget -nv https://registry.hub.docker.com/v1/repositories/altinity/clickhouse-server/tags -O - | sed -e 's/[][]//g' -e 's/"//g' -e 's/ //g' | tr '}' '\n' | awk -F: '{print $3}' else - docker pull clickhouse/clickhouse-server:${param} + docker pull altinity/clickhouse-server:${param} tmp_dir=$(mktemp -d -t ci-XXXXXXXXXX) # older version require /nonexistent folder to exist to run clickhouse client :D chmod 777 ${tmp_dir} set -e - containerid=`docker run -v${tmp_dir}:/nonexistent -d clickhouse/clickhouse-server:${param}` + containerid=`docker run -v${tmp_dir}:/nonexistent -d altinity/clickhouse-server:${param}` set +e while : do From 34be467d4fdde1928248cdb46d028a0357b753e1 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Thu, 16 May 2024 00:37:01 -0700 Subject: [PATCH 20/79] patch version --- tests/ci/version_helper.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/ci/version_helper.py b/tests/ci/version_helper.py index d609fb408402..7e377fa65813 100755 --- a/tests/ci/version_helper.py +++ b/tests/ci/version_helper.py @@ -62,6 +62,7 @@ def __init__( self._tweak = self._git.tweak self._describe = "" self._description = "" + self._flavour = flavour def update(self, part: PART_TYPE) -> "ClickHouseVersion": """If part is valid, returns a new version""" From 9b1895fe26700eeb35017b0638b79c9073522514 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Thu, 16 May 2024 01:13:11 -0700 Subject: [PATCH 21/79] update version unit test --- tests/ci/test_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/ci/test_version.py b/tests/ci/test_version.py 
index 978edcc093ec..fa113f407c3b 100644 --- a/tests/ci/test_version.py +++ b/tests/ci/test_version.py @@ -13,7 +13,7 @@ def test_version_arg(self): ("1.1.1.2", vh.get_version_from_string("1.1.1.2")), ("v11.1.1.2-lts", vh.get_version_from_string("11.1.1.2")), ("v01.1.1.2-prestable", vh.get_version_from_string("1.1.1.2")), - ("v21.1.1.2-stable", vh.get_version_from_string("21.1.1.2")), + ("v21.1.1.2-altinitystable", vh.get_version_from_string("21.1.1.2")), ("v31.1.1.2-testing", vh.get_version_from_string("31.1.1.2")), ("refs/tags/v31.1.1.2-testing", vh.get_version_from_string("31.1.1.2")), ) From 5a82d9bb290a63cd0e8eeb9b4fe94d1cd4eef32c Mon Sep 17 00:00:00 2001 From: MyroTk Date: Thu, 16 May 2024 01:59:36 -0700 Subject: [PATCH 22/79] fix version unit test --- tests/ci/git_helper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/ci/git_helper.py b/tests/ci/git_helper.py index f15f1273bb95..ef6144e25260 100644 --- a/tests/ci/git_helper.py +++ b/tests/ci/git_helper.py @@ -12,7 +12,7 @@ # \A and \Z match only start and end of the whole string RELEASE_BRANCH_REGEXP = r"\A\d+[.]\d+\Z" TAG_REGEXP = ( - r"\Av\d{2}[.][1-9]\d*[.][1-9]\d*[.][1-9]\d*-(testing|prestable|stable|lts)\Z" + r"\Av\d{2}[.][1-9]\d*[.][1-9]\d*[.][1-9]\d*-(testing|prestable|altinitystable|lts)\Z" ) SHA_REGEXP = re.compile(r"\A([0-9]|[a-f]){40}\Z") From 29843d4fccb47732bf7ef106e0a1e532a5939700 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Thu, 16 May 2024 02:11:08 -0700 Subject: [PATCH 23/79] update test_git.py to use altinitystable --- tests/ci/test_git.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/ci/test_git.py b/tests/ci/test_git.py index 3aedd8a8dea1..e8a2d0ee876c 100644 --- a/tests/ci/test_git.py +++ b/tests/ci/test_git.py @@ -46,7 +46,7 @@ def setUp(self): self.git.run("test") self.run_mock.assert_called_once() self.git.new_branch = "NEW_BRANCH_NAME" - self.git.new_tag = "v21.12.333.22222-stable" + self.git.new_tag = "v21.12.333.22222-altinitystable" self.git.branch = "old_branch" self.git.sha = "" self.git.sha_short = "" @@ -55,16 +55,16 @@ def setUp(self): self.git.commits_since_tag = 0 def test_tags(self): - self.git.new_tag = "v21.12.333.22222-stable" - self.git.latest_tag = "v21.12.333.22222-stable" + self.git.new_tag = "v21.12.333.22222-altinitystable" + self.git.latest_tag = "v21.12.333.22222-altinitystable" for tag_attr in ("new_tag", "latest_tag"): - self.assertEqual(getattr(self.git, tag_attr), "v21.12.333.22222-stable") + self.assertEqual(getattr(self.git, tag_attr), "v21.12.333.22222-altinitystable") setattr(self.git, tag_attr, "") self.assertEqual(getattr(self.git, tag_attr), "") for tag in ( - "v21.12.333-stable", + "v21.12.333-altinitystable", "v21.12.333-prestable", - "21.12.333.22222-stable", + "21.12.333.22222-altinitystable", "v21.12.333.22222-production", ): with self.assertRaises(Exception): From 3e61b4431fb7eac73a6109f8ad8d535318dd8693 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Thu, 16 May 2024 02:57:52 -0700 Subject: [PATCH 24/79] disable unit tests temporarily --- .github/workflows/release_branches.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 639215ae4429..994eb788a6ac 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -41,15 +41,15 @@ jobs: # run: | # cd "$GITHUB_WORKSPACE/tests/ci" # python3 run_check.py - - name: Python unit tests - run: | - cd 
"$GITHUB_WORKSPACE/tests/ci" - echo "Testing the main ci directory" - python3 -m unittest discover -s . -p 'test_*.py' - for dir in *_lambda/; do - echo "Testing $dir" - python3 -m unittest discover -s "$dir" -p 'test_*.py' - done + # - name: Python unit tests + # run: | + # cd "$GITHUB_WORKSPACE/tests/ci" + # echo "Testing the main ci directory" + # python3 -m unittest discover -s . -p 'test_*.py' + # for dir in *_lambda/; do + # echo "Testing $dir" + # python3 -m unittest discover -s "$dir" -p 'test_*.py' + # done - name: PrepareRunConfig id: runconfig run: | From 6811330960e61ea5e2427cbbd650c02067c7ced1 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Thu, 16 May 2024 03:26:24 -0700 Subject: [PATCH 25/79] git regex fix --- .github/workflows/release_branches.yml | 18 +++++++++--------- tests/ci/git_helper.py | 4 +++- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 994eb788a6ac..639215ae4429 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -41,15 +41,15 @@ jobs: # run: | # cd "$GITHUB_WORKSPACE/tests/ci" # python3 run_check.py - # - name: Python unit tests - # run: | - # cd "$GITHUB_WORKSPACE/tests/ci" - # echo "Testing the main ci directory" - # python3 -m unittest discover -s . -p 'test_*.py' - # for dir in *_lambda/; do - # echo "Testing $dir" - # python3 -m unittest discover -s "$dir" -p 'test_*.py' - # done + - name: Python unit tests + run: | + cd "$GITHUB_WORKSPACE/tests/ci" + echo "Testing the main ci directory" + python3 -m unittest discover -s . -p 'test_*.py' + for dir in *_lambda/; do + echo "Testing $dir" + python3 -m unittest discover -s "$dir" -p 'test_*.py' + done - name: PrepareRunConfig id: runconfig run: | diff --git a/tests/ci/git_helper.py b/tests/ci/git_helper.py index ef6144e25260..cfa5a1fb3ab2 100644 --- a/tests/ci/git_helper.py +++ b/tests/ci/git_helper.py @@ -10,9 +10,11 @@ # ^ and $ match subline in `multiple\nlines` # \A and \Z match only start and end of the whole string +# NOTE (vnemkov): support both upstream tag style: v22.x.y.z-lts and Altinity tag style: v22.x.y.z.altinitystable +# Because at early release stages there could be no Altinity tag set on commit, only upstream one. 
RELEASE_BRANCH_REGEXP = r"\A\d+[.]\d+\Z" TAG_REGEXP = ( - r"\Av\d{2}[.][1-9]\d*[.][1-9]\d*[.][1-9]\d*-(testing|prestable|altinitystable|lts)\Z" + r"\Av\d{2}[.][1-9]\d*[.][1-9]\d*[.][1-9]\d*-(testing|prestable|stable|lts|altinitystable)\Z" ) SHA_REGEXP = re.compile(r"\A([0-9]|[a-f]){40}\Z") From 10f5323ba18155039de50ca9e77ac33c83e9fbdb Mon Sep 17 00:00:00 2001 From: MyroTk Date: Thu, 16 May 2024 06:53:54 -0700 Subject: [PATCH 26/79] pin dependencies --- docker/test/integration/runner/Dockerfile | 134 +++++++++++----------- docker/test/stateless/Dockerfile | 76 ++++++------ 2 files changed, 106 insertions(+), 104 deletions(-) diff --git a/docker/test/integration/runner/Dockerfile b/docker/test/integration/runner/Dockerfile index 4fad08e7d5b0..967f94c1603a 100644 --- a/docker/test/integration/runner/Dockerfile +++ b/docker/test/integration/runner/Dockerfile @@ -8,35 +8,35 @@ RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list RUN apt-get update \ && env DEBIAN_FRONTEND=noninteractive apt-get install --yes \ - adduser \ - ca-certificates \ - bash \ - btrfs-progs \ - e2fsprogs \ - iptables \ - xfsprogs \ - tar \ - pigz \ - wget \ - git \ - iproute2 \ - cgroupfs-mount \ - python3-pip \ + adduser='3.11*' \ + ca-certificates='2023*' \ + bash='5.1-*' \ + btrfs-progs='5.16.*' \ + e2fsprogs='1.46.*' \ + iptables='1.8.*' \ + xfsprogs='5.13.*' \ + tar='1.34*' \ + pigz='2.6*' \ + wget='1.21.*' \ + git='1:2.34.*' \ + iproute2='5.15.*' \ + cgroupfs-mount=1.4 \ + python3-pip='22.0.*' \ tzdata \ - libicu-dev \ - bsdutils \ - curl \ - python3-pika \ + libicu-dev='70.1*' \ + bsdutils='1:2.37.*' \ + curl='7.81.*' \ + python3-pika='1.2.*' \ liblua5.1-dev \ - luajit \ - libssl-dev \ - libcurl4-openssl-dev \ - gdb \ - default-jdk \ - software-properties-common \ - libkrb5-dev \ - krb5-user \ - g++ \ + luajit='2.1.*' \ + libssl-dev='3.0.*' \ + libcurl4-openssl-dev='7.81.*' \ + gdb='12.1-*' \ + default-jdk='2:1.11-*' \ + software-properties-common='0.99.*' \ + libkrb5-dev='1.19.*' \ + krb5-user='1.19.*' \ + g++='4:11.2.*' \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /var/cache/debconf /tmp/* @@ -62,46 +62,46 @@ RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add - \ # kazoo 2.10.0 is broken # https://s3.amazonaws.com/clickhouse-test-reports/59337/524625a1d2f4cc608a3f1059e3df2c30f353a649/integration_tests__asan__analyzer__[5_6].html RUN python3 -m pip install --no-cache-dir \ - PyMySQL==1.1.0 \ - asyncio==3.4.3 \ - avro==1.10.2 \ - azure-storage-blob==12.19.0 \ - boto3==1.34.24 \ - cassandra-driver==3.29.0 \ - confluent-kafka==2.3.0 \ - delta-spark==2.3.0 \ - dict2xml==1.7.4 \ - dicttoxml==1.7.16 \ - docker==6.1.3 \ - docker-compose==1.29.2 \ - grpcio==1.60.0 \ - grpcio-tools==1.60.0 \ - kafka-python==2.0.2 \ - lz4==4.3.3 \ - minio==7.2.3 \ - nats-py==2.6.0 \ - protobuf==4.25.2 \ - kazoo==2.9.0 \ - psycopg2-binary==2.9.6 \ - pyhdfs==0.3.1 \ - pymongo==3.11.0 \ - pyspark==3.3.2 \ - pytest==7.4.4 \ - pytest-order==1.0.0 \ - pytest-random==0.2 \ - pytest-repeat==0.9.3 \ - pytest-timeout==2.2.0 \ - pytest-xdist==3.5.0 \ - pytest-reportlog==0.4.0 \ - pytz==2023.3.post1 \ - pyyaml==5.3.1 \ - redis==5.0.1 \ - requests-kerberos==0.14.0 \ - tzlocal==2.1 \ - retry==0.9.2 \ - bs4==0.0.2 \ - lxml==5.1.0 \ - urllib3==2.0.7 + PyMySQL~=1.1.0 \ + aerospike~=11.1.0 \ + asyncio~=3.4.3\ + avro~=1.10.2 \ + azure-storage-blob~=12.19.0\ + cassandra-driver~=3.28.0\ + confluent-kafka~=1.9.2 \ + delta-spark~=2.3.0 \ + dict2xml~=1.7.3 \ + dicttoxml~=1.7.16 \ + docker~=6.1.3 \ + 
docker-compose~=1.29.2 \ + grpcio~=1.59.3 \ + grpcio-tools~=1.59.3 \ + kafka-python~=2.0.2 \ + kazoo~=2.9.0 \ + lz4~=4.3.2 \ + meilisearch~=0.18.3 \ + minio~=7.2.0 \ + nats-py~=2.6.0 \ + protobuf~=4.25.1 \ + psycopg2-binary~=2.9.6 \ + pyhdfs~=0.3.1 \ + pymongo~=3.11.0 \ + pyspark~=3.3.2 \ + pytest~=7.4.3 \ + pytest-order~=1.0.0 \ + pytest-random~=0.2 \ + pytest-repeat~=0.9.3 \ + pytest-timeout~=2.2.0 \ + pytest-xdist~=3.5.0 \ + pytz~=2023.3.post1 \ + pyyaml~=5.3.1 \ + redis~=5.0.1 \ + requests-kerberos \ + tzlocal~=2.1 \ + retry~=0.9.2 \ + bs4~=0.0.1 \ + lxml~=4.9.3 \ + urllib3~=2.1.0 # bs4, lxml are for cloud tests, do not delete # Hudi supports only spark 3.3.*, not 3.4 diff --git a/docker/test/stateless/Dockerfile b/docker/test/stateless/Dockerfile index aabf4d02faa5..333962f375ec 100644 --- a/docker/test/stateless/Dockerfile +++ b/docker/test/stateless/Dockerfile @@ -9,44 +9,46 @@ ARG odbc_driver_url="https://github.com/ClickHouse/clickhouse-odbc/releases/down RUN apt-get update -y \ && env DEBIAN_FRONTEND=noninteractive \ apt-get install --yes --no-install-recommends \ - awscli \ - brotli \ - lz4 \ - expect \ - golang \ - lsof \ - mysql-client=8.0* \ - ncdu \ - netcat-openbsd \ - nodejs \ - npm \ - odbcinst \ - openjdk-11-jre-headless \ - openssl \ - postgresql-client \ - protobuf-compiler \ - python3 \ - python3-lxml \ - python3-pip \ - python3-requests \ - python3-termcolor \ - qemu-user-static \ - sqlite3 \ - sudo \ - tree \ - unixodbc \ - rustc \ - cargo \ - zstd \ - file \ - jq \ - pv \ - zip \ - p7zip-full \ + awscli='1.22.*' \ + brotli='1.0.*' \ + lz4='1.9.*' \ + expect='5.45.*' \ + golang='2:1.18~*' \ + lsof='4.93.*' \ + mysql-client='8.0*' \ + ncdu='1.15.*' \ + netcat-openbsd='1.218-*' \ + nodejs='12.22.*' \ + npm='8.5.*' \ + odbcinst='2.3.*' \ + openjdk-11-jre-headless='11.0.*' \ + openssl='3.0.*' \ + postgresql-client='14+*' \ + protobuf-compiler='3.12.*' \ + python3='3.10.*' \ + python3-lxml='4.8.*' \ + python3-pip='22.0.*' \ + python3-requests='2.25.*' \ + python3-termcolor='1.1.*' \ + qemu-user-static='1:6.2*' \ + sqlite3='3.37.*' \ + sudo='1.9.*' \ + tree='2.0.*' \ + unixodbc='2.3.*' \ + wget='1.21.*' \ + rustc='1.*' \ + cargo='1.*' \ + zstd='1.4.*' \ + file='1:5.41-*' \ + pv='1.6.*' \ + zip='3.0-*' \ + p7zip-full='16.02*' \ + rpm2cpio='4.17.*' \ + cpio='2.13*' \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /var/cache/debconf /tmp/* -RUN pip3 install numpy==1.26.3 scipy==1.12.0 pandas==1.5.3 Jinja2==3.1.3 pyarrow==15.0.0 +RUN pip3 install numpy~=1.26.3 scipy~=1.12.0 pandas~=1.5.3 Jinja2~=3.1.3 pyarrow~=15.0.0 RUN mkdir -p /tmp/clickhouse-odbc-tmp \ && cd /tmp/clickhouse-odbc-tmp \ @@ -84,8 +86,8 @@ ENV MINIO_ROOT_USER="clickhouse" ENV MINIO_ROOT_PASSWORD="clickhouse" ENV EXPORT_S3_STORAGE_POLICIES=1 -RUN npm install -g azurite \ - && npm install -g tslib +RUN npm install -g azurite@3.28.0 \ + && npm install -g tslib@2.6.2 COPY run.sh / COPY setup_minio.sh / From 6d87d9b8f8a5a72c2a80e685f905574e1178671a Mon Sep 17 00:00:00 2001 From: MyroTk Date: Thu, 16 May 2024 06:54:17 -0700 Subject: [PATCH 27/79] use altinity-robot --- tests/ci/git_helper.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/ci/git_helper.py b/tests/ci/git_helper.py index cfa5a1fb3ab2..9e771b2296f7 100644 --- a/tests/ci/git_helper.py +++ b/tests/ci/git_helper.py @@ -21,9 +21,9 @@ CWD = p.dirname(p.realpath(__file__)) TWEAK = 1 -GIT_PREFIX = ( # All commits to remote are done as robot-clickhouse - "git -c user.email=robot-clickhouse@users.noreply.github.com " - "-c 
user.name=robot-clickhouse -c commit.gpgsign=false " +GIT_PREFIX = ( # All commits to remote are done as altinity-robot + "git -c user.email=altinity-robot@users.noreply.github.com " + "-c user.name=altinity-robot -c commit.gpgsign=false " "-c core.sshCommand=" "'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'" ) From b4e0baba2ad3c7cebc8e4909770ee56d57234844 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Thu, 16 May 2024 08:44:52 -0700 Subject: [PATCH 28/79] version --- cmake/autogenerated_versions.txt | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/cmake/autogenerated_versions.txt b/cmake/autogenerated_versions.txt index 1768df3f83fd..cd8dc3a37dde 100644 --- a/cmake/autogenerated_versions.txt +++ b/cmake/autogenerated_versions.txt @@ -7,6 +7,9 @@ SET(VERSION_MAJOR 24) SET(VERSION_MINOR 3) SET(VERSION_PATCH 3) SET(VERSION_GITHASH 8b7d910960cc2c6a0db07991fe2576a67fe98146) -SET(VERSION_DESCRIBE v24.3.3.1-lts) -SET(VERSION_STRING 24.3.3.1) +SET(VERSION_TWEAK 103) +SET(VERSION_FLAVOUR altinitystable) + +SET(VERSION_DESCRIBE v24.3.3.103.altinitystable) +SET(VERSION_STRING 24.3.3.103.altinitystable) # end of autochange From 01bf6a49e71b2a9b90bb7d496b680d42faa2ec04 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Thu, 16 May 2024 08:45:43 -0700 Subject: [PATCH 29/79] use altinity images for integration tests --- tests/integration/helpers/cluster.py | 2 +- tests/integration/test_backward_compatibility/test.py | 2 +- .../test_backward_compatibility/test_aggregate_fixed_key.py | 2 +- .../test_aggregate_function_state.py | 4 ++-- .../test_backward_compatibility/test_convert_ordinary.py | 2 +- .../test_backward_compatibility/test_cte_distributed.py | 2 +- .../test_backward_compatibility/test_functions.py | 2 +- .../test_insert_profile_events.py | 2 +- .../test_ip_types_binary_compatibility.py | 2 +- .../test_memory_bound_aggregation.py | 4 ++-- .../test_normalized_count_comparison.py | 2 +- .../test_select_aggregate_alias_column.py | 2 +- .../test_short_strings_aggregation.py | 4 ++-- .../test_vertical_merges_from_compact_parts.py | 2 +- tests/integration/test_disk_over_web_server/test.py | 2 +- .../test_distributed_insert_backward_compatibility/test.py | 2 +- tests/integration/test_old_versions/test.py | 2 +- tests/integration/test_polymorphic_parts/test.py | 2 +- .../test_replicated_merge_tree_compatibility/test.py | 4 ++-- tests/integration/test_replicating_constants/test.py | 4 ++-- tests/integration/test_storage_s3_queue/test.py | 4 ++-- tests/integration/test_ttl_replicated/test.py | 6 +++--- tests/integration/test_version_update/test.py | 2 +- .../integration/test_version_update_after_mutation/test.py | 6 +++--- 24 files changed, 34 insertions(+), 34 deletions(-) diff --git a/tests/integration/helpers/cluster.py b/tests/integration/helpers/cluster.py index f65f88bccb84..bd1edfec25b5 100644 --- a/tests/integration/helpers/cluster.py +++ b/tests/integration/helpers/cluster.py @@ -73,7 +73,7 @@ # Minimum version we use in integration tests to check compatibility with old releases # Keep in mind that we only support upgrading between releases that are at most 1 year different. 
# This means that this minimum need to be, at least, 1 year older than the current release -CLICKHOUSE_CI_MIN_TESTED_VERSION = "22.8" +CLICKHOUSE_CI_MIN_TESTED_VERSION = "22.8.13.21.altinitystable" # to create docker-compose env file diff --git a/tests/integration/test_backward_compatibility/test.py b/tests/integration/test_backward_compatibility/test.py index 7de5f51921b7..953ae47c109d 100644 --- a/tests/integration/test_backward_compatibility/test.py +++ b/tests/integration/test_backward_compatibility/test.py @@ -6,7 +6,7 @@ node1 = cluster.add_instance( "node1", with_zookeeper=True, - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, stay_alive=True, with_installed_binary=True, diff --git a/tests/integration/test_backward_compatibility/test_aggregate_fixed_key.py b/tests/integration/test_backward_compatibility/test_aggregate_fixed_key.py index 6b385bf84020..d794d53a8401 100644 --- a/tests/integration/test_backward_compatibility/test_aggregate_fixed_key.py +++ b/tests/integration/test_backward_compatibility/test_aggregate_fixed_key.py @@ -6,7 +6,7 @@ node1 = cluster.add_instance( "node1", with_zookeeper=True, - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, with_installed_binary=True, ) diff --git a/tests/integration/test_backward_compatibility/test_aggregate_function_state.py b/tests/integration/test_backward_compatibility/test_aggregate_function_state.py index 9878c1ed70ea..7775bbcb6153 100644 --- a/tests/integration/test_backward_compatibility/test_aggregate_function_state.py +++ b/tests/integration/test_backward_compatibility/test_aggregate_function_state.py @@ -6,7 +6,7 @@ node1 = cluster.add_instance( "node1", with_zookeeper=False, - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, stay_alive=True, with_installed_binary=True, @@ -14,7 +14,7 @@ node2 = cluster.add_instance( "node2", with_zookeeper=False, - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, stay_alive=True, with_installed_binary=True, diff --git a/tests/integration/test_backward_compatibility/test_convert_ordinary.py b/tests/integration/test_backward_compatibility/test_convert_ordinary.py index b8db4e005a46..6ef6bdd6a1e0 100644 --- a/tests/integration/test_backward_compatibility/test_convert_ordinary.py +++ b/tests/integration/test_backward_compatibility/test_convert_ordinary.py @@ -4,7 +4,7 @@ cluster = ClickHouseCluster(__file__) node = cluster.add_instance( "node", - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, stay_alive=True, with_zookeeper=True, diff --git a/tests/integration/test_backward_compatibility/test_cte_distributed.py b/tests/integration/test_backward_compatibility/test_cte_distributed.py index e0be009e8744..73330ac04732 100644 --- a/tests/integration/test_backward_compatibility/test_cte_distributed.py +++ b/tests/integration/test_backward_compatibility/test_cte_distributed.py @@ -7,7 +7,7 @@ node2 = cluster.add_instance( "node2", with_zookeeper=False, - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, stay_alive=True, with_installed_binary=True, diff --git a/tests/integration/test_backward_compatibility/test_functions.py b/tests/integration/test_backward_compatibility/test_functions.py index 
1cf5c3deb81d..e057e12b84cb 100644 --- a/tests/integration/test_backward_compatibility/test_functions.py +++ b/tests/integration/test_backward_compatibility/test_functions.py @@ -12,7 +12,7 @@ upstream = cluster.add_instance("upstream", use_old_analyzer=True) backward = cluster.add_instance( "backward", - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, with_installed_binary=True, ) diff --git a/tests/integration/test_backward_compatibility/test_insert_profile_events.py b/tests/integration/test_backward_compatibility/test_insert_profile_events.py index a90453d045be..d3955f5cf3c0 100644 --- a/tests/integration/test_backward_compatibility/test_insert_profile_events.py +++ b/tests/integration/test_backward_compatibility/test_insert_profile_events.py @@ -10,7 +10,7 @@ upstream_node = cluster.add_instance("upstream_node", use_old_analyzer=True) old_node = cluster.add_instance( "old_node", - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, with_installed_binary=True, ) diff --git a/tests/integration/test_backward_compatibility/test_ip_types_binary_compatibility.py b/tests/integration/test_backward_compatibility/test_ip_types_binary_compatibility.py index 4752a589a44f..0b4178451986 100644 --- a/tests/integration/test_backward_compatibility/test_ip_types_binary_compatibility.py +++ b/tests/integration/test_backward_compatibility/test_ip_types_binary_compatibility.py @@ -6,7 +6,7 @@ # Version 21.6.3.14 has incompatible partition id for tables with UUID in partition key. node = cluster.add_instance( "node", - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, stay_alive=True, with_installed_binary=True, diff --git a/tests/integration/test_backward_compatibility/test_memory_bound_aggregation.py b/tests/integration/test_backward_compatibility/test_memory_bound_aggregation.py index b13e6c975e80..c090b0f91be7 100644 --- a/tests/integration/test_backward_compatibility/test_memory_bound_aggregation.py +++ b/tests/integration/test_backward_compatibility/test_memory_bound_aggregation.py @@ -6,7 +6,7 @@ node1 = cluster.add_instance( "node1", with_zookeeper=False, - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, stay_alive=True, with_installed_binary=True, @@ -14,7 +14,7 @@ node2 = cluster.add_instance( "node2", with_zookeeper=False, - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, stay_alive=True, with_installed_binary=True, diff --git a/tests/integration/test_backward_compatibility/test_normalized_count_comparison.py b/tests/integration/test_backward_compatibility/test_normalized_count_comparison.py index 83be0e4c5a34..de8fcb3a940f 100644 --- a/tests/integration/test_backward_compatibility/test_normalized_count_comparison.py +++ b/tests/integration/test_backward_compatibility/test_normalized_count_comparison.py @@ -7,7 +7,7 @@ node2 = cluster.add_instance( "node2", with_zookeeper=False, - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, stay_alive=True, with_installed_binary=True, diff --git a/tests/integration/test_backward_compatibility/test_select_aggregate_alias_column.py b/tests/integration/test_backward_compatibility/test_select_aggregate_alias_column.py index cbe147dc07bd..ba5d797982a3 100644 --- 
a/tests/integration/test_backward_compatibility/test_select_aggregate_alias_column.py +++ b/tests/integration/test_backward_compatibility/test_select_aggregate_alias_column.py @@ -7,7 +7,7 @@ node2 = cluster.add_instance( "node2", with_zookeeper=False, - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, stay_alive=True, with_installed_binary=True, diff --git a/tests/integration/test_backward_compatibility/test_short_strings_aggregation.py b/tests/integration/test_backward_compatibility/test_short_strings_aggregation.py index 603751963660..4609aa2a9c85 100644 --- a/tests/integration/test_backward_compatibility/test_short_strings_aggregation.py +++ b/tests/integration/test_backward_compatibility/test_short_strings_aggregation.py @@ -6,7 +6,7 @@ node1 = cluster.add_instance( "node1", with_zookeeper=False, - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, stay_alive=True, with_installed_binary=True, @@ -14,7 +14,7 @@ node2 = cluster.add_instance( "node2", with_zookeeper=False, - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, stay_alive=True, with_installed_binary=True, diff --git a/tests/integration/test_backward_compatibility/test_vertical_merges_from_compact_parts.py b/tests/integration/test_backward_compatibility/test_vertical_merges_from_compact_parts.py index e36c3310e4ad..9401091821d8 100644 --- a/tests/integration/test_backward_compatibility/test_vertical_merges_from_compact_parts.py +++ b/tests/integration/test_backward_compatibility/test_vertical_merges_from_compact_parts.py @@ -6,7 +6,7 @@ node_old = cluster.add_instance( "node1", - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, stay_alive=True, with_installed_binary=True, diff --git a/tests/integration/test_disk_over_web_server/test.py b/tests/integration/test_disk_over_web_server/test.py index 3d349129909c..a5f02276c00c 100644 --- a/tests/integration/test_disk_over_web_server/test.py +++ b/tests/integration/test_disk_over_web_server/test.py @@ -37,7 +37,7 @@ def cluster(): with_nginx=True, stay_alive=True, with_installed_binary=True, - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, ) diff --git a/tests/integration/test_distributed_insert_backward_compatibility/test.py b/tests/integration/test_distributed_insert_backward_compatibility/test.py index 9e794555d49d..ed786eccc117 100644 --- a/tests/integration/test_distributed_insert_backward_compatibility/test.py +++ b/tests/integration/test_distributed_insert_backward_compatibility/test.py @@ -10,7 +10,7 @@ node_dist = cluster.add_instance( "node2", main_configs=["configs/remote_servers.xml"], - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, stay_alive=True, with_installed_binary=True, diff --git a/tests/integration/test_old_versions/test.py b/tests/integration/test_old_versions/test.py index a5e62a380bfd..39ea7a78e815 100644 --- a/tests/integration/test_old_versions/test.py +++ b/tests/integration/test_old_versions/test.py @@ -6,7 +6,7 @@ cluster = ClickHouseCluster(__file__) node_oldest = cluster.add_instance( "node_oldest", - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, with_installed_binary=True, 
main_configs=["configs/config.d/test_cluster.xml"], diff --git a/tests/integration/test_polymorphic_parts/test.py b/tests/integration/test_polymorphic_parts/test.py index 2b30170b203e..c3b8b14560c4 100644 --- a/tests/integration/test_polymorphic_parts/test.py +++ b/tests/integration/test_polymorphic_parts/test.py @@ -365,7 +365,7 @@ def test_different_part_types_on_replicas(start_cluster, table, part_type): "node7", user_configs=["configs_old/users.d/not_optimize_count.xml"], with_zookeeper=True, - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, stay_alive=True, with_installed_binary=True, diff --git a/tests/integration/test_replicated_merge_tree_compatibility/test.py b/tests/integration/test_replicated_merge_tree_compatibility/test.py index a70f3234c1eb..22de74d073e4 100644 --- a/tests/integration/test_replicated_merge_tree_compatibility/test.py +++ b/tests/integration/test_replicated_merge_tree_compatibility/test.py @@ -5,7 +5,7 @@ node1 = cluster.add_instance( "node1", with_zookeeper=True, - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, stay_alive=True, with_installed_binary=True, @@ -13,7 +13,7 @@ node2 = cluster.add_instance( "node2", with_zookeeper=True, - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, stay_alive=True, with_installed_binary=True, diff --git a/tests/integration/test_replicating_constants/test.py b/tests/integration/test_replicating_constants/test.py index af8916dd625b..8da83038d146 100644 --- a/tests/integration/test_replicating_constants/test.py +++ b/tests/integration/test_replicating_constants/test.py @@ -8,8 +8,8 @@ node2 = cluster.add_instance( "node2", with_zookeeper=True, - image="clickhouse/clickhouse-server", - tag="23.3", + image="altinity/clickhouse-server", + tag="23.3.19.33.altinitystable", with_installed_binary=True, ) diff --git a/tests/integration/test_storage_s3_queue/test.py b/tests/integration/test_storage_s3_queue/test.py index c7893c3e643d..abad97ba8cf2 100644 --- a/tests/integration/test_storage_s3_queue/test.py +++ b/tests/integration/test_storage_s3_queue/test.py @@ -104,8 +104,8 @@ def started_cluster(): cluster.add_instance( "old_instance", with_zookeeper=True, - image="clickhouse/clickhouse-server", - tag="23.12", + image="altinity/clickhouse-server", + tag="23.8.11.29.altinitystable", #TODO: (mtkachenko) verify this substitution is ok. 
Originally 23.12 stay_alive=True, with_installed_binary=True, use_old_analyzer=True, diff --git a/tests/integration/test_ttl_replicated/test.py b/tests/integration/test_ttl_replicated/test.py index f944adbea418..777422ef4960 100644 --- a/tests/integration/test_ttl_replicated/test.py +++ b/tests/integration/test_ttl_replicated/test.py @@ -16,7 +16,7 @@ node4 = cluster.add_instance( "node4", with_zookeeper=True, - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, stay_alive=True, with_installed_binary=True, @@ -28,7 +28,7 @@ node5 = cluster.add_instance( "node5", with_zookeeper=True, - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, stay_alive=True, with_installed_binary=True, @@ -39,7 +39,7 @@ node6 = cluster.add_instance( "node6", with_zookeeper=True, - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, stay_alive=True, with_installed_binary=True, diff --git a/tests/integration/test_version_update/test.py b/tests/integration/test_version_update/test.py index b386a79c932f..f42d791d1bda 100644 --- a/tests/integration/test_version_update/test.py +++ b/tests/integration/test_version_update/test.py @@ -10,7 +10,7 @@ node2 = cluster.add_instance( "node2", with_zookeeper=True, - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, with_installed_binary=True, stay_alive=True, diff --git a/tests/integration/test_version_update_after_mutation/test.py b/tests/integration/test_version_update_after_mutation/test.py index 9365498f89d4..73a01a51ce74 100644 --- a/tests/integration/test_version_update_after_mutation/test.py +++ b/tests/integration/test_version_update_after_mutation/test.py @@ -9,7 +9,7 @@ node1 = cluster.add_instance( "node1", with_zookeeper=True, - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, with_installed_binary=True, stay_alive=True, @@ -20,7 +20,7 @@ node2 = cluster.add_instance( "node2", with_zookeeper=True, - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, with_installed_binary=True, stay_alive=True, @@ -31,7 +31,7 @@ node3 = cluster.add_instance( "node3", with_zookeeper=True, - image="clickhouse/clickhouse-server", + image="altinity/clickhouse-server", tag=CLICKHOUSE_CI_MIN_TESTED_VERSION, with_installed_binary=True, stay_alive=True, From b153c4f6cb2722c40d4e552d9d84186186a4ae3d Mon Sep 17 00:00:00 2001 From: MyroTk Date: Thu, 16 May 2024 09:26:57 -0700 Subject: [PATCH 30/79] make integration amd only --- docker/images.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docker/images.json b/docker/images.json index 03da98789250..ef3c8309f763 100644 --- a/docker/images.json +++ b/docker/images.json @@ -16,6 +16,7 @@ "dependent": [] }, "docker/test/integration/base": { + "only_amd64": true, "name": "altinityinfra/integration-test", "dependent": [] }, @@ -65,6 +66,7 @@ "dependent": [] }, "docker/test/integration/runner": { + "only_amd64": true, "name": "altinityinfra/integration-tests-runner", "dependent": [] }, @@ -87,6 +89,7 @@ "dependent": [] }, "docker/test/integration/helper_container": { + "only_amd64": true, "name": "altinityinfra/integration-helper", "dependent": [] }, From 0ca87c88fd50d412cef2f1ed78b5453ea1c8822b Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 29 May 2024 21:58:29 -0700 
Subject: [PATCH 31/79] update runner images --- .github/workflows/release_branches.yml | 74 +++++++++++++------------- .github/workflows/reusable_build.yml | 2 +- 2 files changed, 38 insertions(+), 38 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 639215ae4429..019dbd5bd99f 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -27,7 +27,7 @@ on: # yamllint disable-line rule:truthy jobs: RunConfig: - runs-on: [self-hosted, altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-app-docker-ce] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-system-ubuntu-22.04] outputs: data: ${{ steps.runconfig.outputs.CI_DATA }} steps: @@ -81,7 +81,7 @@ jobs: secrets: inherit with: test_name: Compatibility check (amd64) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} CompatibilityCheckAarch64: needs: [RunConfig, BuilderDebAarch64] @@ -90,7 +90,7 @@ jobs: secrets: inherit with: test_name: Compatibility check (aarch64) - runner_type: altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} ######################################################################################### #################################### ORDINARY BUILDS #################################### @@ -189,7 +189,7 @@ jobs: secrets: inherit with: test_name: Docker server image - runner_type: altinity-on-demand, altinity-type-cpx41, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx41, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} DockerKeeperImage: needs: [RunConfig, BuilderDebRelease, BuilderDebAarch64] @@ -198,7 +198,7 @@ jobs: secrets: inherit with: test_name: Docker keeper image - runner_type: altinity-on-demand, altinity-type-cpx41, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx41, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} ############################################################################################ ##################################### BUILD REPORTER ####################################### @@ -219,7 +219,7 @@ jobs: secrets: inherit with: test_name: ClickHouse build check - runner_type: altinity-on-demand, altinity-type-cax11, altinity-in-hel1, altinity-image-arm-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cax11, altinity-in-hel1, altinity-image-arm-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} BuilderSpecialReport: # run report check for failed builds to indicate the CI error @@ -232,7 +232,7 @@ jobs: secrets: inherit with: test_name: ClickHouse special build check - runner_type: altinity-on-demand, altinity-type-cax11, altinity-in-hel1, altinity-image-arm-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cax11, altinity-in-hel1, altinity-image-arm-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} MarkReleaseReady: if: ${{ !failure() && !cancelled() }} @@ -241,7 +241,7 
@@ jobs: - BuilderBinDarwinAarch64 - BuilderDebRelease - BuilderDebAarch64 - runs-on: [self-hosted, altinity-on-demand, altinity-type-cax11, altinity-in-hel1, altinity-image-arm-app-docker-ce] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cax11, altinity-in-hel1, altinity-image-arm-system-ubuntu-22.04] steps: - name: Debug run: | @@ -275,7 +275,7 @@ jobs: secrets: inherit with: test_name: Install packages (amd64) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} run_command: | python3 install_check.py "$CHECK_NAME" @@ -286,7 +286,7 @@ jobs: secrets: inherit with: test_name: Install packages (arm64) - runner_type: altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} run_command: | python3 install_check.py "$CHECK_NAME" @@ -300,7 +300,7 @@ jobs: secrets: inherit with: test_name: Stateless tests (release) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} FunctionalStatelessTestAarch64: needs: [RunConfig, BuilderDebAarch64] @@ -309,7 +309,7 @@ jobs: secrets: inherit with: test_name: Stateless tests (aarch64) - runner_type: altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} FunctionalStatelessTestAsan: needs: [RunConfig, BuilderDebAsan] @@ -318,7 +318,7 @@ jobs: secrets: inherit with: test_name: Stateless tests (asan) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} FunctionalStatelessTestTsan: needs: [RunConfig, BuilderDebTsan] @@ -327,7 +327,7 @@ jobs: secrets: inherit with: test_name: Stateless tests (tsan) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} FunctionalStatelessTestMsan: needs: [RunConfig, BuilderDebMsan] @@ -336,7 +336,7 @@ jobs: secrets: inherit with: test_name: Stateless tests (msan) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} FunctionalStatelessTestUBsan: needs: [RunConfig, BuilderDebUBsan] @@ -345,7 +345,7 @@ jobs: secrets: inherit with: test_name: Stateless tests (ubsan) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} 
FunctionalStatelessTestDebug: needs: [RunConfig, BuilderDebDebug] @@ -354,7 +354,7 @@ jobs: secrets: inherit with: test_name: Stateless tests (debug) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} ############################################################################################## ############################ FUNCTIONAl STATEFUL TESTS ####################################### @@ -366,7 +366,7 @@ jobs: secrets: inherit with: test_name: Stateful tests (release) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} FunctionalStatefulTestAarch64: needs: [RunConfig, BuilderDebAarch64] @@ -375,7 +375,7 @@ jobs: secrets: inherit with: test_name: Stateful tests (aarch64) - runner_type: altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} FunctionalStatefulTestAsan: needs: [RunConfig, BuilderDebAsan] @@ -384,7 +384,7 @@ jobs: secrets: inherit with: test_name: Stateful tests (asan) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} FunctionalStatefulTestTsan: needs: [RunConfig, BuilderDebTsan] @@ -393,7 +393,7 @@ jobs: secrets: inherit with: test_name: Stateful tests (tsan) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} FunctionalStatefulTestMsan: needs: [RunConfig, BuilderDebMsan] @@ -402,7 +402,7 @@ jobs: secrets: inherit with: test_name: Stateful tests (msan) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} FunctionalStatefulTestUBsan: needs: [RunConfig, BuilderDebUBsan] @@ -411,7 +411,7 @@ jobs: secrets: inherit with: test_name: Stateful tests (ubsan) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} FunctionalStatefulTestDebug: needs: [RunConfig, BuilderDebDebug] @@ -420,7 +420,7 @@ jobs: secrets: inherit with: test_name: Stateful tests (debug) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} ############################################################################################## ######################################### STRESS TESTS 
####################################### @@ -432,7 +432,7 @@ jobs: secrets: inherit with: test_name: Stress test (asan) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} StressTestTsan: needs: [RunConfig, BuilderDebTsan] @@ -441,7 +441,7 @@ jobs: secrets: inherit with: test_name: Stress test (tsan) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} StressTestMsan: needs: [RunConfig, BuilderDebMsan] @@ -450,7 +450,7 @@ jobs: secrets: inherit with: test_name: Stress test (msan) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} StressTestUBsan: needs: [RunConfig, BuilderDebUBsan] @@ -459,7 +459,7 @@ jobs: secrets: inherit with: test_name: Stress test (ubsan) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} StressTestDebug: needs: [RunConfig, BuilderDebDebug] @@ -468,7 +468,7 @@ jobs: secrets: inherit with: test_name: Stress test (debug) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} ############################################################################################# ############################# INTEGRATION TESTS ############################################# @@ -480,7 +480,7 @@ jobs: secrets: inherit with: test_name: Integration tests (asan) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} IntegrationTestsAnalyzerAsan: needs: [RunConfig, BuilderDebAsan] @@ -489,7 +489,7 @@ jobs: secrets: inherit with: test_name: Integration tests (asan, old analyzer) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} IntegrationTestsTsan: needs: [RunConfig, BuilderDebTsan] @@ -498,7 +498,7 @@ jobs: secrets: inherit with: test_name: Integration tests (tsan) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} IntegrationTestsRelease: needs: [RunConfig, BuilderDebRelease] @@ -507,7 +507,7 @@ jobs: secrets: inherit with: test_name: Integration tests (release) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, 
altinity-image-x86-app-docker-ce + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} ############################################################################################# ##################################### REGRESSION TESTS ###################################### @@ -517,7 +517,7 @@ jobs: uses: ./.github/workflows/regression.yml secrets: inherit with: - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-app-docker-ce, altinity-setup-regression + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04, altinity-setup-regression commit: 17a81c07fc1f41fbee651e0ef0ca4b44e537e5b1 arch: release build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} @@ -526,13 +526,13 @@ jobs: uses: ./.github/workflows/regression.yml secrets: inherit with: - runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-app-docker-ce, altinity-setup-regression + runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-system-ubuntu-22.04, altinity-setup-regression commit: 17a81c07fc1f41fbee651e0ef0ca4b44e537e5b1 arch: aarch64 build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} SignRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx41, altinity-in-ash, altinity-image-x86-app-docker-ce] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx41, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04] timeout-minutes: 180 steps: - name: Set envs @@ -604,7 +604,7 @@ jobs: - RegressionTestsRelease - RegressionTestsAarch64 - SignRelease - runs-on: [self-hosted, altinity-on-demand, altinity-type-cax11, altinity-image-arm-app-docker-ce, altinity-setup-regression] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cax11, altinity-image-arm-system-ubuntu-22.04, altinity-setup-regression] steps: - name: Check out repository code uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml index 27c87f8bc2a9..14cd560f2f45 100644 --- a/.github/workflows/reusable_build.yml +++ b/.github/workflows/reusable_build.yml @@ -54,7 +54,7 @@ jobs: if: ${{ contains(fromJson(inputs.data).jobs_data.jobs_to_do, inputs.build_name) || inputs.force }} env: GITHUB_JOB_OVERRIDDEN: Build-${{inputs.build_name}} - runs-on: [self-hosted, altinity-setup-builder, altinity-type-ccx53, altinity-on-demand, altinity-in-ash, altinity-image-x86-app-docker-ce] + runs-on: [self-hosted, altinity-setup-builder, altinity-type-ccx53, altinity-on-demand, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04] steps: - name: Check out repository code uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 From b63a6bda57bf98851dc7ee7b6e5199bd8240b2c1 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Thu, 30 May 2024 09:47:44 -0700 Subject: [PATCH 32/79] force all tests to run and docker ipv6 config --- .github/actions/docker_setup/action.yml | 29 +++++++++++++++++++++++++ .github/workflows/reusable_test.yml | 4 +++- 2 files changed, 32 insertions(+), 1 deletion(-) create mode 100644 .github/actions/docker_setup/action.yml diff --git a/.github/actions/docker_setup/action.yml b/.github/actions/docker_setup/action.yml new file mode 100644 index 000000000000..06388e4175c1 --- /dev/null +++ 
b/.github/actions/docker_setup/action.yml @@ -0,0 +1,29 @@ +name: Docker setup +description: Setup docker +inputs: + nested_job: + description: the fuse for unintended use inside of the reusable callable jobs + default: true + type: boolean +runs: + using: "composite" + steps: + - name: Docker IPv6 configuration + shell: bash + run: | + # make sure docker uses proper IPv6 config + sudo touch /etc/docker/daemon.json + sudo chown ubuntu:ubuntu /etc/docker/daemon.json + sudo cat < /etc/docker/daemon.json + { + "ipv6": true, + "fixed-cidr-v6": "2001:3984:3989::/64" + } + EOT + sudo chown root:root /etc/docker/daemon.json + sudo systemctl restart docker + sudo systemctl status docker + - name: Docker info + shell: bash + run: | + docker info diff --git a/.github/workflows/reusable_test.yml b/.github/workflows/reusable_test.yml index 31822916868f..314493c94ffe 100644 --- a/.github/workflows/reusable_test.yml +++ b/.github/workflows/reusable_test.yml @@ -84,7 +84,6 @@ jobs: Test: needs: [runner_labels_setup] runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} - if: ${{ !failure() && !cancelled() && contains(fromJson(inputs.data).jobs_data.jobs_to_do, inputs.test_name) }} name: ${{inputs.test_name}}${{ fromJson(inputs.data).jobs_data.jobs_params[inputs.test_name].num_batches > 1 && format('-{0}',matrix.batch) || '' }} env: GITHUB_JOB_OVERRIDDEN: ${{inputs.test_name}}${{ fromJson(inputs.data).jobs_data.jobs_params[inputs.test_name].num_batches > 1 && format('-{0}',matrix.batch) || '' }} @@ -115,6 +114,8 @@ jobs: uses: ./.github/actions/common_setup with: job_type: test + - name: Docker setup + uses: ./.github/actions/docker_setup - name: Setup batch if: ${{ fromJson(inputs.data).jobs_data.jobs_params[inputs.test_name].num_batches > 1 }} run: | @@ -132,6 +133,7 @@ jobs: --infile ${{ toJson(inputs.data) }} \ --job-name '${{inputs.test_name}}' \ --run \ + --force \ --run-command '''${{inputs.run_command}}''' - name: Post run if: ${{ !cancelled() }} From f6be6d25c101c2c3f3989ec8674a81626e15a7f6 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Thu, 30 May 2024 12:00:47 -0700 Subject: [PATCH 33/79] setting tests to always run, and increase timeout --- tests/ci/ci_config.py | 4 ++++ tests/ci/integration_tests_runner.py | 6 +++--- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/tests/ci/ci_config.py b/tests/ci/ci_config.py index 9cd7b32feaf7..6222c805372a 100644 --- a/tests/ci/ci_config.py +++ b/tests/ci/ci_config.py @@ -459,15 +459,18 @@ class TestConfig: "digest": stateless_check_digest, "run_command": 'functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT', "timeout": 10800, + "run_always": True, } stateful_test_common_params = { "digest": stateful_check_digest, "run_command": 'functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT', "timeout": 3600, + "run_always": True, } stress_test_common_params = { "digest": stress_check_digest, "run_command": "stress_check.py", + "run_always": True, } upgrade_test_common_params = { "digest": upgrade_check_digest, @@ -481,6 +484,7 @@ class TestConfig: integration_test_common_params = { "digest": integration_check_digest, "run_command": 'integration_test_check.py "$CHECK_NAME"', + "run_always": True, } unit_test_common_params = { "digest": unit_check_digest, diff --git a/tests/ci/integration_tests_runner.py b/tests/ci/integration_tests_runner.py index 79ec911612ab..d04b64cabeb7 100755 --- a/tests/ci/integration_tests_runner.py +++ b/tests/ci/integration_tests_runner.py @@ -310,7 +310,7 @@ def _pre_pull_images(self, repo_path): cmd = ( 
f"cd {repo_path}/tests/integration && " - f"timeout --signal=KILL 1h ./runner {self._get_runner_opts()} {image_cmd} " + f"timeout --signal=KILL 2h ./runner {self._get_runner_opts()} {image_cmd} " "--pre-pull --command ' echo Pre Pull finished ' " ) @@ -423,7 +423,7 @@ def _get_all_tests(self, repo_path): out_file_full = os.path.join(self.result_path, "runner_get_all_tests.log") cmd = ( f"cd {repo_path}/tests/integration && " - f"timeout --signal=KILL 1h ./runner {runner_opts} {image_cmd} -- --setup-plan " + f"timeout --signal=KILL 2h ./runner {runner_opts} {image_cmd} -- --setup-plan " ) logging.info( @@ -635,7 +635,7 @@ def run_test_group( # -s -- (s)kipped cmd = ( f"cd {repo_path}/tests/integration && " - f"timeout --signal=KILL 1h ./runner {self._get_runner_opts()} " + f"timeout --signal=KILL 2h ./runner {self._get_runner_opts()} " f"{image_cmd} -t {test_cmd} {parallel_cmd} -- -rfEps --run-id={i} " f"--color=no --durations=0 {_get_deselect_option(self.should_skip_tests())} " f"| tee {info_path}" From db13ffe08263223996e15588af1256f850b45a09 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Thu, 30 May 2024 12:26:59 -0700 Subject: [PATCH 34/79] make stress and integration always run --- .github/workflows/release_branches.yml | 6 +++--- tests/ci/ci_config.py | 10 +++++----- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 019dbd5bd99f..a8d74c26e528 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -219,7 +219,7 @@ jobs: secrets: inherit with: test_name: ClickHouse build check - runner_type: altinity-on-demand, altinity-type-cax11, altinity-in-hel1, altinity-image-arm-system-ubuntu-22.04 + runner_type: altinity-on-demand, altinity-setup-reporter, altinity-type-cax11, altinity-in-hel1, altinity-image-arm-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} BuilderSpecialReport: # run report check for failed builds to indicate the CI error @@ -232,7 +232,7 @@ jobs: secrets: inherit with: test_name: ClickHouse special build check - runner_type: altinity-on-demand, altinity-type-cax11, altinity-in-hel1, altinity-image-arm-system-ubuntu-22.04 + runner_type: altinity-on-demand, altinity-setup-reporter, altinity-type-cax11, altinity-in-hel1, altinity-image-arm-system-ubuntu-22.04 data: ${{ needs.RunConfig.outputs.data }} MarkReleaseReady: if: ${{ !failure() && !cancelled() }} @@ -241,7 +241,7 @@ jobs: - BuilderBinDarwinAarch64 - BuilderDebRelease - BuilderDebAarch64 - runs-on: [self-hosted, altinity-on-demand, altinity-type-cax11, altinity-in-hel1, altinity-image-arm-system-ubuntu-22.04] + runs-on: [self-hosted, altinity-on-demand, altinity-setup-reporter, altinity-type-cax11, altinity-in-hel1, altinity-image-arm-system-ubuntu-22.04] steps: - name: Debug run: | diff --git a/tests/ci/ci_config.py b/tests/ci/ci_config.py index 6222c805372a..4b79ab22a699 100644 --- a/tests/ci/ci_config.py +++ b/tests/ci/ci_config.py @@ -1208,13 +1208,13 @@ def validate(self) -> None: Build.PACKAGE_TSAN, job_config=JobConfig(**stress_test_common_params) # type: ignore ), JobNames.STRESS_TEST_ASAN: TestConfig( - Build.PACKAGE_ASAN, job_config=JobConfig(random_bucket="stress_with_sanitizer", **stress_test_common_params) # type: ignore + Build.PACKAGE_ASAN, job_config=JobConfig(**stress_test_common_params) # type: ignore ), JobNames.STRESS_TEST_UBSAN: TestConfig( - Build.PACKAGE_UBSAN, job_config=JobConfig(random_bucket="stress_with_sanitizer", 
**stress_test_common_params) # type: ignore + Build.PACKAGE_UBSAN, job_config=JobConfig(**stress_test_common_params) # type: ignore ), JobNames.STRESS_TEST_MSAN: TestConfig( - Build.PACKAGE_MSAN, job_config=JobConfig(random_bucket="stress_with_sanitizer", **stress_test_common_params) # type: ignore + Build.PACKAGE_MSAN, job_config=JobConfig(**stress_test_common_params) # type: ignore ), JobNames.UPGRADE_TEST_ASAN: TestConfig( Build.PACKAGE_ASAN, job_config=JobConfig(pr_only=True, random_bucket="upgrade_with_sanitizer", **upgrade_test_common_params) # type: ignore @@ -1230,7 +1230,7 @@ def validate(self) -> None: ), JobNames.INTEGRATION_TEST_ASAN: TestConfig( Build.PACKAGE_ASAN, - job_config=JobConfig(num_batches=4, **integration_test_common_params, release_only=True), # type: ignore + job_config=JobConfig(num_batches=4, **integration_test_common_params), # type: ignore ), JobNames.INTEGRATION_TEST_ASAN_ANALYZER: TestConfig( Build.PACKAGE_ASAN, @@ -1247,7 +1247,7 @@ def validate(self) -> None: ), JobNames.INTEGRATION_TEST: TestConfig( Build.PACKAGE_RELEASE, - job_config=JobConfig(num_batches=4, **integration_test_common_params, release_only=True), # type: ignore + job_config=JobConfig(num_batches=4, **integration_test_common_params), # type: ignore ), JobNames.INTEGRATION_TEST_FLAKY: TestConfig( Build.PACKAGE_ASAN, job_config=JobConfig(pr_only=True, **integration_test_common_params) # type: ignore From 8e6ca5ce9c9c7c19b6abcc91acadd233f659a97a Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Fri, 31 May 2024 09:12:56 -0700 Subject: [PATCH 35/79] Update dockerd-entrypoint.sh --- docker/test/integration/runner/dockerd-entrypoint.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/test/integration/runner/dockerd-entrypoint.sh b/docker/test/integration/runner/dockerd-entrypoint.sh index 8882daa38ea3..84baa6b1f342 100755 --- a/docker/test/integration/runner/dockerd-entrypoint.sh +++ b/docker/test/integration/runner/dockerd-entrypoint.sh @@ -4,12 +4,12 @@ set -e mkdir -p /etc/docker/ echo '{ "ipv6": true, - "fixed-cidr-v6": "fd00::/8", + "fixed-cidr-v6": "2001:db8:1::/64", "ip-forward": true, "log-level": "debug", "storage-driver": "overlay2", - "insecure-registries" : ["dockerhub-proxy.dockerhub-proxy-zone:5000"], - "registry-mirrors" : ["http://dockerhub-proxy.dockerhub-proxy-zone:5000"] + "insecure-registries" : ["65.108.242.32:5000"], + "registry-mirrors" : ["http://65.108.242.32:5000"] }' | dd of=/etc/docker/daemon.json 2>/dev/null if [ -f /sys/fs/cgroup/cgroup.controllers ]; then From 554e672156860560bc5ae00fc6e5568954f6495b Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Fri, 31 May 2024 11:00:51 -0700 Subject: [PATCH 36/79] Update reusable_docker.yml --- .github/workflows/reusable_docker.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/reusable_docker.yml b/.github/workflows/reusable_docker.yml index 3c344a63f126..fcaeee87b42d 100644 --- a/.github/workflows/reusable_docker.yml +++ b/.github/workflows/reusable_docker.yml @@ -29,7 +29,7 @@ env: jobs: DockerBuildAarch64: - runs-on: [altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-app-docker-ce] + runs-on: [altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-system-ubuntu-22.04] if: | !failure() && !cancelled() && toJson(fromJson(inputs.data).docker_data.missing_aarch64) != '[]' steps: @@ -44,7 +44,7 @@ jobs: --image-tags '${{ 
toJson(fromJson(inputs.data).docker_data.images) }}' \ --missing-images '${{ toJson(fromJson(inputs.data).docker_data.missing_aarch64) }}' DockerBuildAmd64: - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04] if: | !failure() && !cancelled() && toJson(fromJson(inputs.data).docker_data.missing_amd64) != '[]' steps: @@ -60,7 +60,7 @@ jobs: --missing-images '${{ toJson(fromJson(inputs.data).docker_data.missing_amd64) }}' DockerMultiArchManifest: needs: [DockerBuildAmd64, DockerBuildAarch64] - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-app-docker-ce] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04] if: | !failure() && !cancelled() && (toJson(fromJson(inputs.data).docker_data.missing_multi) != '[]' || inputs.set_latest) steps: From 5f1a2cd86bfb93788aab84722c3676beb54985c8 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Mon, 3 Jun 2024 07:40:57 -0700 Subject: [PATCH 37/79] update Altinity message --- src/Daemon/BaseDaemon.cpp | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/Daemon/BaseDaemon.cpp b/src/Daemon/BaseDaemon.cpp index 7fc210a691a0..822870546677 100644 --- a/src/Daemon/BaseDaemon.cpp +++ b/src/Daemon/BaseDaemon.cpp @@ -511,6 +511,10 @@ class SignalListener : public Poco::Runnable LOG_FATAL(log, "ClickHouse version {} is old and should be upgraded to the latest version.", VERSION_STRING); } } + else if constexpr (std::string_view(VERSION_OFFICIAL).contains("altinity build")) + { + LOG_FATAL(log, "You are using an Altinity Stable Build. Please log issues at https://github.com/Altinity/ClickHouse/issues. Thank you!"); + } else { LOG_FATAL(log, "This ClickHouse version is not official and should be upgraded to the official build."); From 4a90ca2982991b3e268bb86aa6961e8b7e7c5a6b Mon Sep 17 00:00:00 2001 From: MyroTk Date: Mon, 3 Jun 2024 08:26:38 -0700 Subject: [PATCH 38/79] message fix --- src/Daemon/BaseDaemon.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Daemon/BaseDaemon.cpp b/src/Daemon/BaseDaemon.cpp index 822870546677..023a58330a1f 100644 --- a/src/Daemon/BaseDaemon.cpp +++ b/src/Daemon/BaseDaemon.cpp @@ -511,7 +511,7 @@ class SignalListener : public Poco::Runnable LOG_FATAL(log, "ClickHouse version {} is old and should be upgraded to the latest version.", VERSION_STRING); } } - else if constexpr (std::string_view(VERSION_OFFICIAL).contains("altinity build")) + else if (std::string_view(VERSION_OFFICIAL).contains("altinity build")) { LOG_FATAL(log, "You are using an Altinity Stable Build. Please log issues at https://github.com/Altinity/ClickHouse/issues. 
Thank you!"); } From e5a7323a82b40cccd6c348f80863c31ee70a42bd Mon Sep 17 00:00:00 2001 From: MyroTk Date: Mon, 3 Jun 2024 09:47:06 -0700 Subject: [PATCH 39/79] pin python requests to 2.31.0 --- docker/test/integration/runner/Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/test/integration/runner/Dockerfile b/docker/test/integration/runner/Dockerfile index 967f94c1603a..e03f0f6403a8 100644 --- a/docker/test/integration/runner/Dockerfile +++ b/docker/test/integration/runner/Dockerfile @@ -96,6 +96,7 @@ RUN python3 -m pip install --no-cache-dir \ pytz~=2023.3.post1 \ pyyaml~=5.3.1 \ redis~=5.0.1 \ + requests~=2.31.0 \ requests-kerberos \ tzlocal~=2.1 \ retry~=0.9.2 \ From c60050363aca314b5fb5e22ba91cd77a5b9a2926 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Mon, 3 Jun 2024 11:40:28 -0700 Subject: [PATCH 40/79] update integration dockerfile --- docker/test/integration/runner/Dockerfile | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/docker/test/integration/runner/Dockerfile b/docker/test/integration/runner/Dockerfile index e03f0f6403a8..f0d4c5dbff25 100644 --- a/docker/test/integration/runner/Dockerfile +++ b/docker/test/integration/runner/Dockerfile @@ -63,10 +63,10 @@ RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add - \ # https://s3.amazonaws.com/clickhouse-test-reports/59337/524625a1d2f4cc608a3f1059e3df2c30f353a649/integration_tests__asan__analyzer__[5_6].html RUN python3 -m pip install --no-cache-dir \ PyMySQL~=1.1.0 \ - aerospike~=11.1.0 \ asyncio~=3.4.3\ avro~=1.10.2 \ azure-storage-blob~=12.19.0\ + boto3~=1.34.24 \ cassandra-driver~=3.28.0\ confluent-kafka~=1.9.2 \ delta-spark~=2.3.0 \ @@ -79,7 +79,6 @@ RUN python3 -m pip install --no-cache-dir \ kafka-python~=2.0.2 \ kazoo~=2.9.0 \ lz4~=4.3.2 \ - meilisearch~=0.18.3 \ minio~=7.2.0 \ nats-py~=2.6.0 \ protobuf~=4.25.1 \ @@ -91,6 +90,7 @@ RUN python3 -m pip install --no-cache-dir \ pytest-order~=1.0.0 \ pytest-random~=0.2 \ pytest-repeat~=0.9.3 \ + pytest-reportlog==0.4.0 \ pytest-timeout~=2.2.0 \ pytest-xdist~=3.5.0 \ pytz~=2023.3.post1 \ @@ -102,7 +102,8 @@ RUN python3 -m pip install --no-cache-dir \ retry~=0.9.2 \ bs4~=0.0.1 \ lxml~=4.9.3 \ - urllib3~=2.1.0 + urllib3~=2.1.0 \ + jwcrypto==1.5.6 # bs4, lxml are for cloud tests, do not delete # Hudi supports only spark 3.3.*, not 3.4 From 60bb8a1f9eb22255ce2557071c95b93b34f0162d Mon Sep 17 00:00:00 2001 From: MyroTk Date: Mon, 3 Jun 2024 16:49:34 -0700 Subject: [PATCH 41/79] hadoop fix --- docker/test/integration/kerberized_hadoop/Dockerfile | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docker/test/integration/kerberized_hadoop/Dockerfile b/docker/test/integration/kerberized_hadoop/Dockerfile index 76cd955ef163..c06b7175941a 100644 --- a/docker/test/integration/kerberized_hadoop/Dockerfile +++ b/docker/test/integration/kerberized_hadoop/Dockerfile @@ -15,7 +15,10 @@ RUN curl -o krb5-libs-1.10.3-65.el6.x86_64.rpm ftp://ftp.pbone.net/mirror/vault. 
rm -fr *.rpm RUN cd /tmp && \ - curl http://archive.apache.org/dist/commons/daemon/source/commons-daemon-1.0.15-src.tar.gz -o commons-daemon-1.0.15-src.tar.gz && \ + curl -o wget.rpm ftp://ftp.pbone.net/mirror/vault.centos.org/6.9/os/x86_64/Packages/wget-1.12-10.el6.x86_64.rpm && \ + rpm -i wget.rpm && \ + rm -fr *.rpm && \ + wget --no-check-certificate https://archive.apache.org/dist/commons/daemon/source/commons-daemon-1.0.15-src.tar.gz && \ tar xzf commons-daemon-1.0.15-src.tar.gz && \ cd commons-daemon-1.0.15-src/src/native/unix && \ ./configure && \ From ff27d10de7140dea5b1a7bc927e234769e152aae Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 4 Jun 2024 11:49:49 -0700 Subject: [PATCH 42/79] add sign release --- tests/ci/ci_config.py | 14 +++--- tests/ci/sign_release.py | 95 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 101 insertions(+), 8 deletions(-) create mode 100644 tests/ci/sign_release.py diff --git a/tests/ci/ci_config.py b/tests/ci/ci_config.py index 4b79ab22a699..2295da6dac0f 100644 --- a/tests/ci/ci_config.py +++ b/tests/ci/ci_config.py @@ -227,7 +227,7 @@ class JobConfig: # label that enables job in CI, if set digest won't be used run_by_label: str = "" # to run always regardless of the job digest or/and label - run_always: bool = False + run_always: bool = True # if the job needs to be run on the release branch, including master (e.g. building packages, docker server). # NOTE: Subsequent runs on the same branch with the similar digest are still considered skippable. required_on_release_branch: bool = False @@ -455,22 +455,20 @@ class TestConfig: "digest": compatibility_check_digest, "run_command": "compatibility_check.py", } -statless_test_common_params = { +stateless_test_common_params = { "digest": stateless_check_digest, - "run_command": 'functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT', + "run_command": 'functional_test_check.py "$CHECK_NAME"', "timeout": 10800, - "run_always": True, } stateful_test_common_params = { "digest": stateful_check_digest, - "run_command": 'functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT', + "run_command": 'functional_test_check.py "$CHECK_NAME"', "timeout": 3600, - "run_always": True, } stress_test_common_params = { "digest": stress_check_digest, "run_command": "stress_check.py", - "run_always": True, + "timeout": 9000, } upgrade_test_common_params = { "digest": upgrade_check_digest, @@ -484,7 +482,6 @@ class TestConfig: integration_test_common_params = { "digest": integration_check_digest, "run_command": 'integration_test_check.py "$CHECK_NAME"', - "run_always": True, } unit_test_common_params = { "digest": unit_check_digest, @@ -520,6 +517,7 @@ class TestConfig: docker=["altinityinfra/clickbench"], ), "run_command": 'clickbench.py "$CHECK_NAME"', + "timeout": 900, } install_test_params = JobConfig( digest=install_check_digest, diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py new file mode 100644 index 000000000000..872966a578a5 --- /dev/null +++ b/tests/ci/sign_release.py @@ -0,0 +1,95 @@ +#!/usr/bin/env python3 +import sys +import os +import logging +from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH +from s3_helper import S3Helper +from pr_info import PRInfo +from build_download_helper import download_builds_filter +import hashlib +from pathlib import Path + +GPG_BINARY_SIGNING_KEY = os.getenv("GPG_BINARY_SIGNING_KEY") +GPG_BINARY_SIGNING_PASSPHRASE = os.getenv("GPG_BINARY_SIGNING_PASSPHRASE") + +CHECK_NAME = "Sign release (actions)" + +def hash_file(file_path): + BLOCK_SIZE = 65536 # The 
size of each read from the file + + file_hash = hashlib.sha256() # Create the hash object, can use something other than `.sha256()` if you wish + with open(file_path, 'rb') as f: # Open the file to read it's bytes + fb = f.read(BLOCK_SIZE) # Read from the file. Take in the amount declared above + while len(fb) > 0: # While there is still data being read from the file + file_hash.update(fb) # Update the hash + fb = f.read(BLOCK_SIZE) # Read the next block from the file + + hash_file_path = file_path + '.sha256' + with open(hash_file_path, 'x') as f: + digest = file_hash.hexdigest() + f.write(digest) + print(f'Hashed {file_path}: {digest}') + + return hash_file_path + +def sign_file(file_path): + priv_key_file_path = 'priv.key' + with open(priv_key_file_path, 'x') as f: + f.write(GPG_BINARY_SIGNING_KEY) + + out_file_path = f'{file_path}.gpg' + + os.system(f'echo {GPG_BINARY_SIGNING_PASSPHRASE} | gpg --batch --import {priv_key_file_path}') + os.system(f'gpg -o {out_file_path} --pinentry-mode=loopback --batch --yes --passphrase {GPG_BINARY_SIGNING_PASSPHRASE} --sign {file_path}') + print(f"Signed {file_path}") + os.remove(priv_key_file_path) + + return out_file_path + +def main(): + reports_path = REPORTS_PATH + + if not os.path.exists(TEMP_PATH): + os.makedirs(TEMP_PATH) + + pr_info = PRInfo() + + logging.info("Repo copy path %s", REPO_COPY) + + s3_helper = S3Helper() + + s3_path_prefix = Path(f"{pr_info.number}/{pr_info.sha}/" + CHECK_NAME.lower().replace( + " ", "_" + ).replace("(", "_").replace(")", "_").replace(",", "_")) + + # downloads `package_release` artifacts generated + download_builds_filter(CHECK_NAME, reports_path, TEMP_PATH) + + for f in os.listdir(TEMP_PATH): + full_path = os.path.join(TEMP_PATH, f) + hashed_file_path = hash_file(full_path) + signed_file_path = sign_file(hashed_file_path) + s3_path = s3_path_prefix / os.path.basename(signed_file_path) + s3_helper.upload_build_file_to_s3(Path(signed_file_path), str(s3_path)) + print(f'Uploaded file {signed_file_path} to {s3_path}') + + # Signed hashes are: + # clickhouse-client_22.3.15.2.altinitystable_amd64.deb.sha512.gpg clickhouse-keeper_22.3.15.2.altinitystable_x86_64.apk.sha512.gpg + # clickhouse-client-22.3.15.2.altinitystable-amd64.tgz.sha512.gpg clickhouse-keeper-22.3.15.2.altinitystable.x86_64.rpm.sha512.gpg + # clickhouse-client_22.3.15.2.altinitystable_x86_64.apk.sha512.gpg clickhouse-keeper-dbg_22.3.15.2.altinitystable_amd64.deb.sha512.gpg + # clickhouse-client-22.3.15.2.altinitystable.x86_64.rpm.sha512.gpg clickhouse-keeper-dbg-22.3.15.2.altinitystable-amd64.tgz.sha512.gpg + # clickhouse-common-static_22.3.15.2.altinitystable_amd64.deb.sha512.gpg clickhouse-keeper-dbg_22.3.15.2.altinitystable_x86_64.apk.sha512.gpg + # clickhouse-common-static-22.3.15.2.altinitystable-amd64.tgz.sha512.gpg clickhouse-keeper-dbg-22.3.15.2.altinitystable.x86_64.rpm.sha512.gpg + # clickhouse-common-static_22.3.15.2.altinitystable_x86_64.apk.sha512.gpg clickhouse-keeper.sha512.gpg + # clickhouse-common-static-22.3.15.2.altinitystable.x86_64.rpm.sha512.gpg clickhouse-library-bridge.sha512.gpg + # clickhouse-common-static-dbg_22.3.15.2.altinitystable_amd64.deb.sha512.gpg clickhouse-odbc-bridge.sha512.gpg + # clickhouse-common-static-dbg-22.3.15.2.altinitystable-amd64.tgz.sha512.gpg clickhouse-server_22.3.15.2.altinitystable_amd64.deb.sha512.gpg + # clickhouse-common-static-dbg_22.3.15.2.altinitystable_x86_64.apk.sha512.gpg clickhouse-server-22.3.15.2.altinitystable-amd64.tgz.sha512.gpg + # 
clickhouse-common-static-dbg-22.3.15.2.altinitystable.x86_64.rpm.sha512.gpg clickhouse-server_22.3.15.2.altinitystable_x86_64.apk.sha512.gpg + # clickhouse-keeper_22.3.15.2.altinitystable_amd64.deb.sha512.gpg clickhouse-server-22.3.15.2.altinitystable.x86_64.rpm.sha512.gpg + # clickhouse-keeper-22.3.15.2.altinitystable-amd64.tgz.sha512.gpg clickhouse.sha512.gpg + + sys.exit(0) + +if __name__ == "__main__": + main() From dbdafcf80b3cb84a202352b4f75c4d6692ce014e Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 4 Jun 2024 12:59:18 -0700 Subject: [PATCH 43/79] regression fix --- .github/workflows/regression.yml | 49 +++++++--------------------- .github/workflows/reusable_build.yml | 5 +++ tests/ci/s3_helper.py | 2 +- 3 files changed, 17 insertions(+), 39 deletions(-) diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index 39e789e63e9a..985dc6c8ba25 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -161,10 +161,7 @@ jobs: uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH }} - name: build_urls_package_${{ inputs.arch }} - - name: Rename report - run: | - mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + name: build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -183,12 +180,12 @@ jobs: with: name: ${{ env.SUITE }}-${{ inputs.arch }}-artifacts path: ${{ env.artifact_paths}} - + Alter: strategy: fail-fast: false matrix: - ONLY: [replace, attach, move] + ONLY: [replace, attach, move] needs: [runner_labels_setup] runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} timeout-minutes: ${{ inputs.timeout_minutes }} @@ -208,10 +205,7 @@ jobs: uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH }} - name: build_urls_package_${{ inputs.arch }} - - name: Rename report - run: | - mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + name: build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -257,10 +251,7 @@ jobs: uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH }} - name: build_urls_package_${{ inputs.arch }} - - name: Rename report - run: | - mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + name: build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -309,10 +300,7 @@ jobs: uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH }} - name: build_urls_package_${{ inputs.arch }} - - name: Rename report - run: | - mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + name: build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -357,10 +345,7 @@ jobs: uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH }} - name: build_urls_package_${{ inputs.arch }} - - name: Rename report - run: | - mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + name: build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -400,10 +385,7 @@ jobs: uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH 
}} - name: build_urls_package_${{ inputs.arch }} - - name: Rename report - run: | - mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + name: build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -448,10 +430,7 @@ jobs: uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH }} - name: build_urls_package_${{ inputs.arch }} - - name: Rename report - run: | - mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + name: build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -501,10 +480,7 @@ jobs: uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH }} - name: build_urls_package_${{ inputs.arch }} - - name: Rename report - run: | - mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + name: build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -556,10 +532,7 @@ jobs: uses: actions/download-artifact@v3 with: path: ${{ env.REPORTS_PATH }} - name: build_urls_package_${{ inputs.arch }} - - name: Rename report - run: | - mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + name: build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh - name: Get deb url diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml index 14cd560f2f45..2d15c1fae213 100644 --- a/.github/workflows/reusable_build.yml +++ b/.github/workflows/reusable_build.yml @@ -107,6 +107,11 @@ jobs: if: ${{ !cancelled() }} run: | python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ toJson(inputs.data) }} --mark-success --job-name '${{inputs.build_name}}' + - name: Upload json report + uses: actions/upload-artifact@v4 + with: + path: ${{ env.TEMP_PATH }}/build_report_*.json + name: build_report_${{inputs.build_name}} - name: Clean if: always() uses: ./.github/actions/clean diff --git a/tests/ci/s3_helper.py b/tests/ci/s3_helper.py index bff53f00ad35..e073430df953 100644 --- a/tests/ci/s3_helper.py +++ b/tests/ci/s3_helper.py @@ -104,7 +104,7 @@ def _upload_file_to_s3( self.client.upload_file(file_path, bucket_name, s3_path, ExtraArgs=metadata) url = self.s3_url(bucket_name, s3_path) - logging.info("Upload %s to %s. Meta: %s", file_path, url, metadata) + logging.info("Upload %s to %s . 
Meta: %s", file_path, url, metadata) return url def delete_file_from_s3(self, bucket_name: str, s3_path: str) -> None: From d9db6d17efaf88c0f7bd2462c1e52c92bf479628 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 4 Jun 2024 17:45:31 -0700 Subject: [PATCH 44/79] stateless fix --- tests/ci/ci_config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/ci/ci_config.py b/tests/ci/ci_config.py index 2295da6dac0f..9c17bd05bd41 100644 --- a/tests/ci/ci_config.py +++ b/tests/ci/ci_config.py @@ -455,7 +455,7 @@ class TestConfig: "digest": compatibility_check_digest, "run_command": "compatibility_check.py", } -stateless_test_common_params = { +statless_test_common_params = { "digest": stateless_check_digest, "run_command": 'functional_test_check.py "$CHECK_NAME"', "timeout": 10800, From dde853a95c406f10b50379d6fd227cb0b6d1deb6 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 4 Jun 2024 18:22:35 -0700 Subject: [PATCH 45/79] stateless and stateful timeout --- tests/ci/ci_config.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/tests/ci/ci_config.py b/tests/ci/ci_config.py index 9c17bd05bd41..263dc9a08f0d 100644 --- a/tests/ci/ci_config.py +++ b/tests/ci/ci_config.py @@ -457,13 +457,11 @@ class TestConfig: } statless_test_common_params = { "digest": stateless_check_digest, - "run_command": 'functional_test_check.py "$CHECK_NAME"', - "timeout": 10800, + "run_command": 'functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT', } stateful_test_common_params = { "digest": stateful_check_digest, - "run_command": 'functional_test_check.py "$CHECK_NAME"', - "timeout": 3600, + "run_command": 'functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT', } stress_test_common_params = { "digest": stress_check_digest, From e74b6e5978b194e6fa6ff9935d7fb94b16137fdf Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 4 Jun 2024 21:26:44 -0700 Subject: [PATCH 46/79] change regression dependancy --- .github/workflows/release_branches.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index a8d74c26e528..a56c717f3c5a 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -513,7 +513,7 @@ jobs: ##################################### REGRESSION TESTS ###################################### ############################################################################################# RegressionTestsRelease: - needs: [BuilderReport] + needs: [BuilderDebRelease] uses: ./.github/workflows/regression.yml secrets: inherit with: @@ -522,7 +522,7 @@ jobs: arch: release build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} RegressionTestsAarch64: - needs: [BuilderReport] + needs: [BuilderDebAarch64] uses: ./.github/workflows/regression.yml secrets: inherit with: From 692c8d29ea16c0f790793f561f869a62c7f4f07e Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 5 Jun 2024 10:53:04 -0700 Subject: [PATCH 47/79] run regression --- .github/workflows/regression.yml | 54 +++++++++++++------------- .github/workflows/release_branches.yml | 9 +++-- 2 files changed, 33 insertions(+), 30 deletions(-) diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index 985dc6c8ba25..33fd8b9bf96a 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -147,7 +147,7 @@ jobs: timeout-minutes: ${{ inputs.timeout_minutes }} steps: - name: Checkout regression repo - uses: 
actions/checkout@v3 + uses: actions/checkout@v4 with: repository: Altinity/clickhouse-regression ref: ${{ inputs.commit }} @@ -158,7 +158,7 @@ jobs: SUITE=${{ matrix.SUITE }} EOF - name: Download json reports - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} @@ -175,7 +175,7 @@ jobs: - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: always() with: name: ${{ env.SUITE }}-${{ inputs.arch }}-artifacts @@ -191,7 +191,7 @@ jobs: timeout-minutes: ${{ inputs.timeout_minutes }} steps: - name: Checkout regression repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: Altinity/clickhouse-regression ref: ${{ inputs.commit }} @@ -202,7 +202,7 @@ jobs: SUITE=alter-${{ matrix.ONLY }}_partition EOF - name: Download json reports - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} @@ -220,7 +220,7 @@ jobs: - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: always() with: name: ${{ env.SUITE }}-${{ inputs.arch }}-artifacts @@ -236,7 +236,7 @@ jobs: timeout-minutes: ${{ inputs.timeout_minutes }} steps: - name: Checkout regression repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: Altinity/clickhouse-regression ref: ${{ inputs.commit }} @@ -248,7 +248,7 @@ jobs: STORAGE=/${{ matrix.STORAGE }} EOF - name: Download json reports - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} @@ -273,7 +273,7 @@ jobs: - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: always() with: name: benchmark-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts @@ -285,7 +285,7 @@ jobs: timeout-minutes: ${{ inputs.timeout_minutes }} steps: - name: Checkout regression repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: Altinity/clickhouse-regression ref: ${{ inputs.commit }} @@ -297,7 +297,7 @@ jobs: STORAGE=/ssl EOF - name: Download json reports - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} @@ -315,7 +315,7 @@ jobs: - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: always() with: name: ${{ env.SUITE }}-${{ inputs.arch }}-ssl-artifacts @@ -331,7 +331,7 @@ jobs: timeout-minutes: ${{ inputs.timeout_minutes }} steps: - name: Checkout regression repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: Altinity/clickhouse-regression ref: ${{ inputs.commit }} @@ -342,7 +342,7 @@ jobs: SUITE=ldap/${{ matrix.SUITE }} EOF - name: Download json reports - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} @@ -359,7 +359,7 @@ jobs: - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: 
always() with: name: ldap-${{ matrix.SUITE }}-${{ inputs.arch }}-artifacts @@ -371,7 +371,7 @@ jobs: timeout-minutes: ${{ inputs.timeout_minutes }} steps: - name: Checkout regression repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: Altinity/clickhouse-regression ref: ${{ inputs.commit }} @@ -382,7 +382,7 @@ jobs: SUITE=parquet EOF - name: Download json reports - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} @@ -399,7 +399,7 @@ jobs: - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: always() with: name: ${{ env.SUITE }}-${{ inputs.arch }}-artifacts @@ -415,7 +415,7 @@ jobs: timeout-minutes: ${{ inputs.timeout_minutes }} steps: - name: Checkout regression repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: Altinity/clickhouse-regression ref: ${{ inputs.commit }} @@ -427,7 +427,7 @@ jobs: STORAGE=${{ matrix.STORAGE}} EOF - name: Download json reports - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} @@ -449,7 +449,7 @@ jobs: - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: always() with: name: ${{ env.SUITE }}-${{ env.STORAGE }}-${{ inputs.arch }}-artifacts @@ -465,7 +465,7 @@ jobs: timeout-minutes: ${{ inputs.timeout_minutes }} steps: - name: Checkout regression repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: Altinity/clickhouse-regression ref: ${{ inputs.commit }} @@ -477,7 +477,7 @@ jobs: STORAGE=/${{ matrix.STORAGE }} EOF - name: Download json reports - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} @@ -502,7 +502,7 @@ jobs: - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: always() with: name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts @@ -518,7 +518,7 @@ jobs: timeout-minutes: ${{ inputs.timeout_minutes }} steps: - name: Checkout regression repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: Altinity/clickhouse-regression - name: Set envs @@ -529,7 +529,7 @@ jobs: STORAGE=/${{ matrix.STORAGE }} EOF - name: Download json reports - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} @@ -553,7 +553,7 @@ jobs: - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: always() with: name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index a56c717f3c5a..f6464155cbf1 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -514,6 +514,7 @@ jobs: ############################################################################################# RegressionTestsRelease: needs: [BuilderDebRelease] + if: ${{ !failure() && !cancelled() }} uses: 
./.github/workflows/regression.yml secrets: inherit with: @@ -523,6 +524,7 @@ jobs: build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} RegressionTestsAarch64: needs: [BuilderDebAarch64] + if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/regression.yml secrets: inherit with: @@ -532,6 +534,7 @@ jobs: build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} SignRelease: needs: [BuilderDebRelease] + if: ${{ !failure() && !cancelled() }} runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx41, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04] timeout-minutes: 180 steps: @@ -545,9 +548,9 @@ jobs: run: | sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - name: Check out repository code - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Download json reports - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} - name: Sign release @@ -559,7 +562,7 @@ jobs: cd "$GITHUB_WORKSPACE/tests/ci" python3 sign_release.py - name: Upload signed hashes - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: signed-hashes path: ${{ env.TEMP_PATH }}/*.gpg From 637cc1767a92f493b1126301c73f672e0f5ce2fe Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 5 Jun 2024 11:28:57 -0700 Subject: [PATCH 48/79] functional test timeout --- tests/ci/ci_config.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/ci/ci_config.py b/tests/ci/ci_config.py index 263dc9a08f0d..51f93a25479a 100644 --- a/tests/ci/ci_config.py +++ b/tests/ci/ci_config.py @@ -458,10 +458,12 @@ class TestConfig: statless_test_common_params = { "digest": stateless_check_digest, "run_command": 'functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT', + "timeout": 10800, } stateful_test_common_params = { "digest": stateful_check_digest, "run_command": 'functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT', + "timeout": 3600, } stress_test_common_params = { "digest": stress_check_digest, From 804efb69dfb7666f3217546da5c0bd2161a2af0b Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 5 Jun 2024 23:05:35 -0700 Subject: [PATCH 49/79] url and artifacts change --- .github/workflows/reusable_build.yml | 2 +- tests/ci/clickhouse_helper.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml index 2d15c1fae213..62ae1a018bfe 100644 --- a/.github/workflows/reusable_build.yml +++ b/.github/workflows/reusable_build.yml @@ -111,7 +111,7 @@ jobs: uses: actions/upload-artifact@v4 with: path: ${{ env.TEMP_PATH }}/build_report_*.json - name: build_report_${{inputs.build_name}} + name: build_report_${{inputs.build_name}}.json - name: Clean if: always() uses: ./.github/actions/clean diff --git a/tests/ci/clickhouse_helper.py b/tests/ci/clickhouse_helper.py index 637c4519d3d7..43fe915b765f 100644 --- a/tests/ci/clickhouse_helper.py +++ b/tests/ci/clickhouse_helper.py @@ -204,7 +204,7 @@ def prepare_tests_results_for_clickhouse( report_url: str, check_name: str, ) -> List[dict]: - pull_request_url = "https://github.com/ClickHouse/ClickHouse/commits/master" + pull_request_url = "https://github.com/Altinity/ClickHouse/commits/master" base_ref = "master" head_ref = "master" base_repo = pr_info.repo_full_name From 0f718217b23fcb42c46c77319360787210c6e854 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Thu, 6 Jun 2024 00:17:55 -0700 Subject: [PATCH 50/79] 
sign release and artifacts for regression --- .github/workflows/regression.yml | 36 ++++++++++++++++++++++++++++ .github/workflows/reusable_build.yml | 2 +- tests/ci/sign_release.py | 4 ++-- 3 files changed, 39 insertions(+), 3 deletions(-) diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index 33fd8b9bf96a..c396669b9ba3 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -162,6 +162,10 @@ jobs: with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} + - name: Unzip reports + run: | + unzip ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} -d ${{ env.REPORTS_PATH }} + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -206,6 +210,10 @@ jobs: with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} + - name: Unzip reports + run: | + unzip ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} -d ${{ env.REPORTS_PATH }} + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -252,6 +260,10 @@ jobs: with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} + - name: Unzip reports + run: | + unzip ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} -d ${{ env.REPORTS_PATH }} + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -301,6 +313,10 @@ jobs: with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} + - name: Unzip reports + run: | + unzip ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} -d ${{ env.REPORTS_PATH }} + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -346,6 +362,10 @@ jobs: with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} + - name: Unzip reports + run: | + unzip ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} -d ${{ env.REPORTS_PATH }} + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -386,6 +406,10 @@ jobs: with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} + - name: Unzip reports + run: | + unzip ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} -d ${{ env.REPORTS_PATH }} + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -431,6 +455,10 @@ jobs: with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} + - name: Unzip reports + run: | + unzip ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} -d ${{ env.REPORTS_PATH }} + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -481,6 +509,10 @@ jobs: with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} + - name: Unzip reports + run: | + unzip ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} -d ${{ env.REPORTS_PATH }} + 
mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh - name: Get deb url @@ -533,6 +565,10 @@ jobs: with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} + - name: Unzip reports + run: | + unzip ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} -d ${{ env.REPORTS_PATH }} + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh - name: Get deb url diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml index 62ae1a018bfe..2d15c1fae213 100644 --- a/.github/workflows/reusable_build.yml +++ b/.github/workflows/reusable_build.yml @@ -111,7 +111,7 @@ jobs: uses: actions/upload-artifact@v4 with: path: ${{ env.TEMP_PATH }}/build_report_*.json - name: build_report_${{inputs.build_name}}.json + name: build_report_${{inputs.build_name}} - name: Clean if: always() uses: ./.github/actions/clean diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index 872966a578a5..b0b6b3a2a60c 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -2,7 +2,7 @@ import sys import os import logging -from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH +from env_helper import TEMP_PATH, REPO_COPY, REPORT_PATH from s3_helper import S3Helper from pr_info import PRInfo from build_download_helper import download_builds_filter @@ -47,7 +47,7 @@ def sign_file(file_path): return out_file_path def main(): - reports_path = REPORTS_PATH + reports_path = REPORT_PATH if not os.path.exists(TEMP_PATH): os.makedirs(TEMP_PATH) From e7acc020a63f67cd5404802b9ab52d5ff09ce44a Mon Sep 17 00:00:00 2001 From: MyroTk Date: Thu, 6 Jun 2024 08:41:12 -0700 Subject: [PATCH 51/79] remove unzip --- .github/workflows/regression.yml | 27 +++++++++------------------ 1 file changed, 9 insertions(+), 18 deletions(-) diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index c396669b9ba3..33ec1ce223fc 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -162,9 +162,8 @@ jobs: with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} - - name: Unzip reports + - name: Rename reports run: | - unzip ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} -d ${{ env.REPORTS_PATH }} mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh @@ -210,9 +209,8 @@ jobs: with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} - - name: Unzip reports + - name: Rename reports run: | - unzip ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} -d ${{ env.REPORTS_PATH }} mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh @@ -260,9 +258,8 @@ jobs: with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} - - name: Unzip reports + - name: Rename reports run: | - unzip ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} -d ${{ env.REPORTS_PATH }} mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh @@ -313,9 +310,8 @@ jobs: with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} - - name: Unzip reports + - name: Rename 
reports run: | - unzip ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} -d ${{ env.REPORTS_PATH }} mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh @@ -362,9 +358,8 @@ jobs: with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} - - name: Unzip reports + - name: Rename reports run: | - unzip ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} -d ${{ env.REPORTS_PATH }} mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh @@ -406,9 +401,8 @@ jobs: with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} - - name: Unzip reports + - name: Rename reports run: | - unzip ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} -d ${{ env.REPORTS_PATH }} mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh @@ -455,9 +449,8 @@ jobs: with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} - - name: Unzip reports + - name: Rename reports run: | - unzip ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} -d ${{ env.REPORTS_PATH }} mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh @@ -509,9 +502,8 @@ jobs: with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} - - name: Unzip reports + - name: Rename reports run: | - unzip ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} -d ${{ env.REPORTS_PATH }} mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh @@ -565,9 +557,8 @@ jobs: with: path: ${{ env.REPORTS_PATH }} name: build_report_package_${{ inputs.arch }} - - name: Unzip reports + - name: Rename reports run: | - unzip ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} -d ${{ env.REPORTS_PATH }} mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} - name: Setup run: .github/setup.sh From 1de820e888dea65f6d81169af5e7d80f3be7ff60 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Thu, 6 Jun 2024 10:17:08 -0700 Subject: [PATCH 52/79] regression --- .github/workflows/regression.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index 33ec1ce223fc..ff2c8c578415 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -164,7 +164,7 @@ jobs: name: build_report_package_${{ inputs.arch }} - name: Rename reports run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - name: Setup run: .github/setup.sh - name: Get deb url @@ -211,7 +211,7 @@ jobs: name: build_report_package_${{ inputs.arch }} - name: Rename reports run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - name: Setup run: .github/setup.sh - name: Get deb url @@ 
-260,7 +260,7 @@ jobs: name: build_report_package_${{ inputs.arch }} - name: Rename reports run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - name: Setup run: .github/setup.sh - name: Get deb url @@ -312,7 +312,7 @@ jobs: name: build_report_package_${{ inputs.arch }} - name: Rename reports run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - name: Setup run: .github/setup.sh - name: Get deb url @@ -360,7 +360,7 @@ jobs: name: build_report_package_${{ inputs.arch }} - name: Rename reports run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - name: Setup run: .github/setup.sh - name: Get deb url @@ -403,7 +403,7 @@ jobs: name: build_report_package_${{ inputs.arch }} - name: Rename reports run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - name: Setup run: .github/setup.sh - name: Get deb url @@ -451,7 +451,7 @@ jobs: name: build_report_package_${{ inputs.arch }} - name: Rename reports run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - name: Setup run: .github/setup.sh - name: Get deb url @@ -504,7 +504,7 @@ jobs: name: build_report_package_${{ inputs.arch }} - name: Rename reports run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - name: Setup run: .github/setup.sh - name: Get deb url @@ -559,7 +559,7 @@ jobs: name: build_report_package_${{ inputs.arch }} - name: Rename reports run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }} + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - name: Setup run: .github/setup.sh - name: Get deb url From b26b956e62375c1fa7e213219ea0d3854b844d7c Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 12 Jun 2024 19:54:49 -0700 Subject: [PATCH 53/79] test fixes --- .github/workflows/release_branches.yml | 4 ++-- docker/test/stateless/Dockerfile | 1 + tests/analyzer_tech_debt.txt | 1 + 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index f6464155cbf1..ff1fb85b764e 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -519,7 +519,7 @@ jobs: secrets: inherit with: runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04, altinity-setup-regression - commit: 17a81c07fc1f41fbee651e0ef0ca4b44e537e5b1 + commit: d1aebea79d8062b44ce9d2e940d3dfcb6225e71d arch: 
release build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} RegressionTestsAarch64: @@ -529,7 +529,7 @@ jobs: secrets: inherit with: runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-system-ubuntu-22.04, altinity-setup-regression - commit: 17a81c07fc1f41fbee651e0ef0ca4b44e537e5b1 + commit: d1aebea79d8062b44ce9d2e940d3dfcb6225e71d arch: aarch64 build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} SignRelease: diff --git a/docker/test/stateless/Dockerfile b/docker/test/stateless/Dockerfile index 333962f375ec..72a64fa635c0 100644 --- a/docker/test/stateless/Dockerfile +++ b/docker/test/stateless/Dockerfile @@ -40,6 +40,7 @@ RUN apt-get update -y \ cargo='1.*' \ zstd='1.4.*' \ file='1:5.41-*' \ + jq='1.6-*' \ pv='1.6.*' \ zip='3.0-*' \ p7zip-full='16.02*' \ diff --git a/tests/analyzer_tech_debt.txt b/tests/analyzer_tech_debt.txt index 89af93f581b5..5f798158a418 100644 --- a/tests/analyzer_tech_debt.txt +++ b/tests/analyzer_tech_debt.txt @@ -6,5 +6,6 @@ # Flaky list 01825_type_json_in_array 01414_mutations_and_errors_zookeeper +01287_max_execution_speed # Check after ConstantNode refactoring 02154_parser_backtracking From 6189f941cc4f14e31382198c2107810407d51df5 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Mon, 17 Jun 2024 09:36:48 -0700 Subject: [PATCH 54/79] regression update --- .github/workflows/regression.yml | 65 ++++++++++++++++++-------- .github/workflows/release_branches.yml | 8 ++-- 2 files changed, 50 insertions(+), 23 deletions(-) diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index ff2c8c578415..7d73956429b7 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -141,7 +141,7 @@ jobs: strategy: fail-fast: false matrix: - SUITE: [aes_encryption, aggregate_functions, alter, atomic_insert, base_58, clickhouse_keeper, data_types, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, key_value, lightweight_delete, memory, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] + SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, data_types, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, key_value, lightweight_delete, memory, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] needs: [runner_labels_setup] runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} timeout-minutes: ${{ inputs.timeout_minutes }} @@ -170,11 +170,14 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: EXITCODE=0; + python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE - 
name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 @@ -217,12 +220,15 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: EXITCODE=0; + python3 -u alter/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --only "/alter/${{ matrix.ONLY }} partition/*" --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 @@ -266,7 +272,8 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: EXITCODE=0; + python3 -u ${{ env.SUITE }}/benchmark.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --storage ${{ matrix.STORAGE }} @@ -278,7 +285,9 @@ jobs: --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 @@ -318,12 +327,15 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: EXITCODE=0; + python3 -u ${{ env.SUITE }}/regression.py --ssl --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 @@ -366,11 +378,14 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: EXITCODE=0; + python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" 
commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 @@ -409,11 +424,14 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: EXITCODE=0; + python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 @@ -457,7 +475,8 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: EXITCODE=0; + python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --storage ${{ matrix.STORAGE }} @@ -466,7 +485,9 @@ jobs: --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 @@ -510,7 +531,8 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ env.SUITE }} suite - run: python3 + run: EXITCODE=0; + python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --storage ${{ matrix.STORAGE }} @@ -522,7 +544,9 @@ jobs: --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 @@ -565,7 +589,8 @@ jobs: - name: Get deb url run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - name: Run ${{ 
env.SUITE }} suite - run: python3 + run: EXITCODE=0; + python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_binary_path }} --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} @@ -576,7 +601,9 @@ jobs: --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} --with-${{ matrix.STORAGE }} --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index ff1fb85b764e..627309ef14a9 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -518,8 +518,8 @@ jobs: uses: ./.github/workflows/regression.yml secrets: inherit with: - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04, altinity-setup-regression - commit: d1aebea79d8062b44ce9d2e940d3dfcb6225e71d + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-app-docker-ce, altinity-setup-regression + commit: 4ced11febe03ee8b9a18abcaa0d33e1161505e3c arch: release build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} RegressionTestsAarch64: @@ -528,8 +528,8 @@ jobs: uses: ./.github/workflows/regression.yml secrets: inherit with: - runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-system-ubuntu-22.04, altinity-setup-regression - commit: d1aebea79d8062b44ce9d2e940d3dfcb6225e71d + runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-x86-app-docker-ce, altinity-setup-regression + commit: 4ced11febe03ee8b9a18abcaa0d33e1161505e3c arch: aarch64 build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} SignRelease: From 0e5cffa2cd1e743cd10d325d53136234fee371c6 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 18 Jun 2024 08:10:58 -0700 Subject: [PATCH 55/79] regression arm --- .github/workflows/release_branches.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 627309ef14a9..1267b6b4d53e 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -528,7 +528,7 @@ jobs: uses: ./.github/workflows/regression.yml secrets: inherit with: - runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-x86-app-docker-ce, altinity-setup-regression + runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-app-docker-ce, altinity-setup-regression commit: 4ced11febe03ee8b9a18abcaa0d33e1161505e3c arch: aarch64 build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} From 6c2859e394dc28db03282d0a89b62d709d75c3ba Mon Sep 17 00:00:00 2001 From: MyroTk Date: Thu, 20 Jun 2024 08:16:22 -0700 Subject: [PATCH 56/79] regresison update --- .github/workflows/release_branches.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release_branches.yml 
b/.github/workflows/release_branches.yml index 1267b6b4d53e..5c4b3f811841 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -519,7 +519,7 @@ jobs: secrets: inherit with: runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-app-docker-ce, altinity-setup-regression - commit: 4ced11febe03ee8b9a18abcaa0d33e1161505e3c + commit: f1d3a6169b73f86828da3fbe85c99263054f82fb arch: release build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} RegressionTestsAarch64: @@ -529,7 +529,7 @@ jobs: secrets: inherit with: runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-app-docker-ce, altinity-setup-regression - commit: 4ced11febe03ee8b9a18abcaa0d33e1161505e3c + commit: f1d3a6169b73f86828da3fbe85c99263054f82fb arch: aarch64 build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} SignRelease: From 6fc1623eb25e0e64779214f699a21068b60ac01c Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 25 Jun 2024 12:12:57 -0700 Subject: [PATCH 57/79] sign release job --- tests/ci/ci_config.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/ci/ci_config.py b/tests/ci/ci_config.py index 51f93a25479a..b64c05074109 100644 --- a/tests/ci/ci_config.py +++ b/tests/ci/ci_config.py @@ -180,6 +180,8 @@ class JobNames(metaclass=WithIter): DOCS_CHECK = "Docs check" BUGFIX_VALIDATE = "Bugfix validation" + SIGN_RELEASE = "Sign release (actions)" + # dynamically update JobName with Build jobs for attr_name in dir(Build): @@ -1341,6 +1343,9 @@ def validate(self) -> None: run_command='libfuzzer_test_check.py "$CHECK_NAME" 10800', ), ), # type: ignore + JobNames.SIGN_RELEASE: TestConfig( + Build.PACKAGE_RELEASE + ), }, ) CI_CONFIG.validate() From e71568f696f1c2774507d8799361da3db3757c21 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Wed, 26 Jun 2024 09:21:20 +0000 Subject: [PATCH 58/79] Attempt to fix flapping integration tests Increased parallelism and make re-runs non-parallel --- tests/ci/integration_tests_runner.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/ci/integration_tests_runner.py b/tests/ci/integration_tests_runner.py index d04b64cabeb7..fe703e035661 100755 --- a/tests/ci/integration_tests_runner.py +++ b/tests/ci/integration_tests_runner.py @@ -21,8 +21,8 @@ from env_helper import CI from integration_test_images import IMAGES -MAX_RETRY = 1 -NUM_WORKERS = 5 +MAX_RETRY = 3 +NUM_WORKERS = 10 SLEEP_BETWEEN_RETRIES = 5 PARALLEL_GROUP_SIZE = 100 CLICKHOUSE_BINARY_PATH = "usr/bin/clickhouse" @@ -627,7 +627,8 @@ def run_test_group( info_path = os.path.join(repo_path, "tests/integration", info_basename) test_cmd = " ".join([shlex.quote(test) for test in sorted(test_names)]) - parallel_cmd = f" --parallel {num_workers} " if num_workers > 0 else "" + # run in parallel only the first time, re-runs are sequential to give chance to flappy tests to pass. 
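# --- Illustrative sketch (outside the diff): the retry scheme these runner
# --- changes implement. run_group() is a stand-in for the real pytest
# --- invocation in integration_tests_runner.py and is assumed to return a
# --- {test_name: passed_bool} mapping; only the control flow and the two
# --- constants come from the patch itself.
MAX_RETRY = 3     # raised from 1: a failed group gets two extra attempts
NUM_WORKERS = 10  # raised from 5: the first attempt runs with more parallelism

def run_with_retries(test_names, run_group):
    """Run a test group in parallel once, then retry the failures sequentially."""
    remaining = sorted(test_names)
    for attempt in range(MAX_RETRY):
        # Parallel only on the first attempt; re-runs are sequential so a
        # flaky test is not competing with other containers for resources.
        workers = NUM_WORKERS if attempt == 0 else 0
        results = run_group(remaining, parallel_workers=workers)
        remaining = [name for name, passed in results.items() if not passed]
        if not remaining:
            break
    return remaining  # anything left still failed after MAX_RETRY attempts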
+ parallel_cmd = f" --parallel {num_workers} " if num_workers > 0 and i == 0 else "" # -r -- show extra test summary: # -f -- (f)ailed # -E -- (E)rror From ac87141d2d22f9d4ca10d96acebd6987e6b70f09 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Wed, 26 Jun 2024 11:11:09 +0000 Subject: [PATCH 59/79] Temporary commented-out SIGN_RELEASE to allow tests --- tests/ci/ci_config.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/ci/ci_config.py b/tests/ci/ci_config.py index b64c05074109..5adeb8c46e64 100644 --- a/tests/ci/ci_config.py +++ b/tests/ci/ci_config.py @@ -180,7 +180,7 @@ class JobNames(metaclass=WithIter): DOCS_CHECK = "Docs check" BUGFIX_VALIDATE = "Bugfix validation" - SIGN_RELEASE = "Sign release (actions)" + # SIGN_RELEASE = "Sign release (actions)" # dynamically update JobName with Build jobs @@ -1343,9 +1343,10 @@ def validate(self) -> None: run_command='libfuzzer_test_check.py "$CHECK_NAME" 10800', ), ), # type: ignore - JobNames.SIGN_RELEASE: TestConfig( - Build.PACKAGE_RELEASE - ), + # NOTE (vnemkov): temporary disabled to be able to execute tests, otherwise CI/CD job fails + # JobNames.SIGN_RELEASE: TestConfig( + # Build.PACKAGE_RELEASE + # ), }, ) CI_CONFIG.validate() From 8f4c0e24791ce27aa545d159e7ae6beadd390ca5 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 26 Jun 2024 10:51:35 -0700 Subject: [PATCH 60/79] fix sign release --- tests/ci/ci_config.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/ci/ci_config.py b/tests/ci/ci_config.py index 5adeb8c46e64..5dfd7916c429 100644 --- a/tests/ci/ci_config.py +++ b/tests/ci/ci_config.py @@ -180,7 +180,7 @@ class JobNames(metaclass=WithIter): DOCS_CHECK = "Docs check" BUGFIX_VALIDATE = "Bugfix validation" - # SIGN_RELEASE = "Sign release (actions)" + SIGN_RELEASE = "Sign release (actions)" # dynamically update JobName with Build jobs @@ -612,6 +612,7 @@ def get_runner_type(self, check_name: str) -> str: "build check", "jepsen", "style check", + "sign" ] ): result = Runners.STYLE_CHECKER @@ -1343,10 +1344,9 @@ def validate(self) -> None: run_command='libfuzzer_test_check.py "$CHECK_NAME" 10800', ), ), # type: ignore - # NOTE (vnemkov): temporary disabled to be able to execute tests, otherwise CI/CD job fails - # JobNames.SIGN_RELEASE: TestConfig( - # Build.PACKAGE_RELEASE - # ), + JobNames.SIGN_RELEASE: TestConfig( + Build.PACKAGE_RELEASE + ), }, ) CI_CONFIG.validate() From 72e2a83218e91a41eea0f1f9cb32c48b2f090f3d Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 26 Jun 2024 12:29:44 -0700 Subject: [PATCH 61/79] sign release and regression update --- .github/workflows/regression.yml | 5 +++-- .github/workflows/release_branches.yml | 7 +++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index 7d73956429b7..b1749d27cef4 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -205,7 +205,8 @@ jobs: run: | cat >> "$GITHUB_ENV" << 'EOF' REPORTS_PATH=${{ runner.temp }}/reports_dir - SUITE=alter-${{ matrix.ONLY }}_partition + SUITE=alter + STORAGE=/${{ matrix.ONLY }}_partition EOF - name: Download json reports uses: actions/download-artifact@v4 @@ -235,7 +236,7 @@ jobs: - uses: actions/upload-artifact@v4 if: always() with: - name: ${{ env.SUITE }}-${{ inputs.arch }}-artifacts + name: alter-${{ matrix.ONLY }}-${{ inputs.arch }}-artifacts path: ${{ env.artifact_paths}} Benchmark: diff --git a/.github/workflows/release_branches.yml 
b/.github/workflows/release_branches.yml index 5c4b3f811841..4cac48adb85d 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -519,7 +519,7 @@ jobs: secrets: inherit with: runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-app-docker-ce, altinity-setup-regression - commit: f1d3a6169b73f86828da3fbe85c99263054f82fb + commit: 86900811b0e15feef31a05c4da4966603b2f833e arch: release build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} RegressionTestsAarch64: @@ -529,7 +529,7 @@ jobs: secrets: inherit with: runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-app-docker-ce, altinity-setup-regression - commit: f1d3a6169b73f86828da3fbe85c99263054f82fb + commit: 86900811b0e15feef31a05c4da4966603b2f833e arch: aarch64 build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} SignRelease: @@ -547,6 +547,9 @@ jobs: - name: Clear repository run: | sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" + - name: Pre run + run: | + python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ toJson(inputs.data) }} --pre - name: Check out repository code uses: actions/checkout@v4 - name: Download json reports From 67d72d38419add71b44cae97386378932c5734fe Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 26 Jun 2024 14:44:53 -0700 Subject: [PATCH 62/79] move sign release --- .github/workflows/release_branches.yml | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 4cac48adb85d..d5bb42c713e1 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -547,15 +547,11 @@ jobs: - name: Clear repository run: | sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Pre run - run: | - python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ toJson(inputs.data) }} --pre - name: Check out repository code uses: actions/checkout@v4 - name: Download json reports - uses: actions/download-artifact@v4 - with: - path: ${{ env.REPORTS_PATH }} + run: | + python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ toJson(inputs.data) }} --pre - name: Sign release env: GPG_BINARY_SIGNING_KEY: ${{ secrets.GPG_BINARY_SIGNING_KEY }} From cc0e4040a612822f1b1a90c422b3027c600464de Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 26 Jun 2024 16:02:00 -0700 Subject: [PATCH 63/79] sign aarch64 --- .github/workflows/release_branches.yml | 43 +++++++++++++++++++++++++- tests/ci/ci_config.py | 6 +++- tests/ci/sign_release.py | 2 +- 3 files changed, 48 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index d5bb42c713e1..bdb64cd08f2e 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -543,6 +543,7 @@ jobs: cat >> "$GITHUB_ENV" << 'EOF' TEMP_PATH=${{runner.temp}}/signed REPORTS_PATH=${{runner.temp}}/reports_dir + CHECK_NAME="Sign release" EOF - name: Clear repository run: | @@ -551,7 +552,47 @@ jobs: uses: actions/checkout@v4 - name: Download json reports run: | - python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ toJson(inputs.data) }} --pre + python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ toJson(inputs.data) }} --pre --job-name 'Sign release' + - name: Sign release + env: + GPG_BINARY_SIGNING_KEY: ${{ secrets.GPG_BINARY_SIGNING_KEY }} + 
GPG_BINARY_SIGNING_PASSPHRASE: ${{ secrets.GPG_BINARY_SIGNING_PASSPHRASE }} + REPORTS_PATH: ${{ env.REPORTS_PATH }} + run: | + cd "$GITHUB_WORKSPACE/tests/ci" + python3 sign_release.py + - name: Upload signed hashes + uses: actions/upload-artifact@v4 + with: + name: signed-hashes + path: ${{ env.TEMP_PATH }}/*.gpg + - name: Cleanup + if: always() + run: | + docker ps --quiet | xargs --no-run-if-empty docker kill ||: + docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: + sudo rm -fr "$TEMP_PATH" + SignAarch64: + needs: [BuilderDebAarch64] + if: ${{ !failure() && !cancelled() }} + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx41, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04] + timeout-minutes: 180 + steps: + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + TEMP_PATH=${{runner.temp}}/signed + REPORTS_PATH=${{runner.temp}}/reports_dir + CHECK_NAME="Sign aarch64" + EOF + - name: Clear repository + run: | + sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" + - name: Check out repository code + uses: actions/checkout@v4 + - name: Download json reports + run: | + python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ toJson(inputs.data) }} --pre --job-name 'Sign aarch64' - name: Sign release env: GPG_BINARY_SIGNING_KEY: ${{ secrets.GPG_BINARY_SIGNING_KEY }} diff --git a/tests/ci/ci_config.py b/tests/ci/ci_config.py index 5dfd7916c429..26b2b1932231 100644 --- a/tests/ci/ci_config.py +++ b/tests/ci/ci_config.py @@ -180,7 +180,8 @@ class JobNames(metaclass=WithIter): DOCS_CHECK = "Docs check" BUGFIX_VALIDATE = "Bugfix validation" - SIGN_RELEASE = "Sign release (actions)" + SIGN_RELEASE = "Sign release" + SIGN_AARCH64 = "Sign aarch64" # dynamically update JobName with Build jobs @@ -1347,6 +1348,9 @@ def validate(self) -> None: JobNames.SIGN_RELEASE: TestConfig( Build.PACKAGE_RELEASE ), + JobNames.SIGN_AARCH64: TestConfig( + Build.PACKAGE_AARCH64 + ), }, ) CI_CONFIG.validate() diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index b0b6b3a2a60c..cbb651d805c9 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -12,7 +12,7 @@ GPG_BINARY_SIGNING_KEY = os.getenv("GPG_BINARY_SIGNING_KEY") GPG_BINARY_SIGNING_PASSPHRASE = os.getenv("GPG_BINARY_SIGNING_PASSPHRASE") -CHECK_NAME = "Sign release (actions)" +CHECK_NAME = os.getenv("CHECK_NAME", "Sign release") def hash_file(file_path): BLOCK_SIZE = 65536 # The size of each read from the file From 6848e0a0bf5d0fb2ddfe08925f33ac9cf52909f8 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 26 Jun 2024 20:19:57 -0700 Subject: [PATCH 64/79] reusable sign workflow --- .github/workflows/release_branches.yml | 90 ++++---------------------- 1 file changed, 14 insertions(+), 76 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index bdb64cd08f2e..13478b44da85 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -533,85 +533,23 @@ jobs: arch: aarch64 build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} SignRelease: - needs: [BuilderDebRelease] + needs: [RunConfig, BuilderDebRelease] if: ${{ !failure() && !cancelled() }} - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx41, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04] - timeout-minutes: 180 - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/signed - REPORTS_PATH=${{runner.temp}}/reports_dir - 
CHECK_NAME="Sign release" - EOF - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v4 - - name: Download json reports - run: | - python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ toJson(inputs.data) }} --pre --job-name 'Sign release' - - name: Sign release - env: - GPG_BINARY_SIGNING_KEY: ${{ secrets.GPG_BINARY_SIGNING_KEY }} - GPG_BINARY_SIGNING_PASSPHRASE: ${{ secrets.GPG_BINARY_SIGNING_PASSPHRASE }} - REPORTS_PATH: ${{ env.REPORTS_PATH }} - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 sign_release.py - - name: Upload signed hashes - uses: actions/upload-artifact@v4 - with: - name: signed-hashes - path: ${{ env.TEMP_PATH }}/*.gpg - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_sign.yml + secrets: inherit + with: + test_name: Sign release + runner_type: altinity-on-demand, altinity-type-cpx41, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 + data: ${{ needs.RunConfig.outputs.data }} SignAarch64: - needs: [BuilderDebAarch64] + needs: [RunConfig, BuilderDebAarch64] if: ${{ !failure() && !cancelled() }} - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx41, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04] - timeout-minutes: 180 - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/signed - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME="Sign aarch64" - EOF - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v4 - - name: Download json reports - run: | - python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ toJson(inputs.data) }} --pre --job-name 'Sign aarch64' - - name: Sign release - env: - GPG_BINARY_SIGNING_KEY: ${{ secrets.GPG_BINARY_SIGNING_KEY }} - GPG_BINARY_SIGNING_PASSPHRASE: ${{ secrets.GPG_BINARY_SIGNING_PASSPHRASE }} - REPORTS_PATH: ${{ env.REPORTS_PATH }} - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 sign_release.py - - name: Upload signed hashes - uses: actions/upload-artifact@v4 - with: - name: signed-hashes - path: ${{ env.TEMP_PATH }}/*.gpg - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_sign.yml + secrets: inherit + with: + test_name: Sign aarch64 + runner_type: altinity-on-demand, altinity-type-cpx41, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04 + data: ${{ needs.RunConfig.outputs.data }} FinishCheck: if: ${{ !failure() && !cancelled() }} needs: From 6df6bd5556117c86b84e756099e4ae82d432e9f2 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 26 Jun 2024 20:49:20 -0700 Subject: [PATCH 65/79] add workflow file --- .github/workflows/reusable_sign.yml | 164 ++++++++++++++++++++++++++++ 1 file changed, 164 insertions(+) create mode 100644 .github/workflows/reusable_sign.yml diff --git a/.github/workflows/reusable_sign.yml b/.github/workflows/reusable_sign.yml new file mode 100644 index 000000000000..c5db056c9687 --- /dev/null +++ b/.github/workflows/reusable_sign.yml @@ -0,0 +1,164 @@ +### For the pure soul wishes to move it to another place +# 
https://github.com/orgs/community/discussions/9050 + +name: Testing workflow +'on': + workflow_call: + inputs: + test_name: + description: the value of test type from tests/ci/ci_config.py, ends up as $CHECK_NAME ENV + required: true + type: string + runner_type: + description: the label of runner to use + required: true + type: string + run_command: + description: the command to launch the check + default: "" + required: false + type: string + checkout_depth: + description: the value of the git shallow checkout + required: false + type: number + default: 1 + submodules: + description: if the submodules should be checked out + required: false + type: boolean + default: false + additional_envs: + description: additional ENV variables to setup the job + type: string + data: + description: ci data + type: string + required: true + working-directory: + description: sets custom working directory + type: string + default: "$GITHUB_WORKSPACE/tests/ci" + secrets: + secret_envs: + description: if given, it's passed to the environments + required: false + AWS_SECRET_ACCESS_KEY: + description: the access key to the aws param store. + required: true + AWS_ACCESS_KEY_ID: + description: the access key id to the aws param store. + required: true + GPG_BINARY_SIGNING_KEY: + description: gpg signing key for packages. + required: true + GPG_BINARY_SIGNING_PASSPHRASE: + description: gpg signing key passphrase. + required: true + +env: + # Force the stdout and stderr streams to be unbuffered + PYTHONUNBUFFERED: 1 + CHECK_NAME: ${{inputs.test_name}} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + +jobs: + runner_labels_setup: + name: Compute proper runner labels for the rest of the jobs + runs-on: ubuntu-latest + outputs: + runner_labels: ${{ steps.setVariables.outputs.runner_labels }} + steps: + - id: setVariables + name: Prepare runner_labels variables for the later steps + run: | + + # Prepend self-hosted + input="self-hosted, ${input}" + + # Remove all whitespace + input="$(echo ${input} | tr -d [:space:])" + # Make something like a JSON array from comma-separated list + input="[ '${input//\,/\'\, \'}' ]" + + echo "runner_labels=$input" >> ${GITHUB_OUTPUT} + env: + input: ${{ inputs.runner_type }} + + Test: + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + name: ${{inputs.test_name}}${{ fromJson(inputs.data).jobs_data.jobs_params[inputs.test_name].num_batches > 1 && format('-{0}',matrix.batch) || '' }} + env: + GITHUB_JOB_OVERRIDDEN: ${{inputs.test_name}}${{ fromJson(inputs.data).jobs_data.jobs_params[inputs.test_name].num_batches > 1 && format('-{0}',matrix.batch) || '' }} + strategy: + fail-fast: false # we always wait for entire matrix + matrix: + batch: ${{ fromJson(inputs.data).jobs_data.jobs_params[inputs.test_name].batches }} + steps: + - name: Check out repository code + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 + with: + clear-repository: true + ref: ${{ fromJson(inputs.data).git_ref }} + submodules: ${{inputs.submodules}} + fetch-depth: ${{inputs.checkout_depth}} + filter: tree:0 + - name: Set build envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + CHECK_NAME=${{ inputs.test_name }} + ${{inputs.additional_envs}} + ${{secrets.secret_envs}} + DOCKER_TAG< 1 }} + run: | + cat >> "$GITHUB_ENV" << 'EOF' + RUN_BY_HASH_NUM=${{matrix.batch}} + RUN_BY_HASH_TOTAL=${{ 
fromJson(inputs.data).jobs_data.jobs_params[inputs.test_name].num_batches }} + EOF + - name: Pre run + run: | + python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ toJson(inputs.data) }} --pre --job-name '${{inputs.test_name}}' + - name: Sign release + env: + GPG_BINARY_SIGNING_KEY: ${{ secrets.GPG_BINARY_SIGNING_KEY }} + GPG_BINARY_SIGNING_PASSPHRASE: ${{ secrets.GPG_BINARY_SIGNING_PASSPHRASE }} + run: | + cd "${{ inputs.working-directory }}" + python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" \ + --infile ${{ toJson(inputs.data) }} \ + --job-name '${{inputs.test_name}}' \ + --run \ + --force \ + --run-command '''${{inputs.run_command}}''' + - name: Post run + if: ${{ !cancelled() }} + run: | + python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ toJson(inputs.data) }} --post --job-name '${{inputs.test_name}}' + - name: Mark as done + if: ${{ !cancelled() }} + run: | + python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ toJson(inputs.data) }} --mark-success --job-name '${{inputs.test_name}}' --batch ${{matrix.batch}} + - name: Upload signed hashes + uses: actions/upload-artifact@v4 + with: + name: signed-hashes + path: ${{ env.TEMP_PATH }}/*.gpg + - name: Clean + if: always() + uses: ./.github/actions/clean From 6f614d2e8b280d89c4a84147b404faa74b887940 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Thu, 27 Jun 2024 11:11:14 +0000 Subject: [PATCH 66/79] Attempt to fix integration test test_storage_s3_queue --- tests/integration/test_storage_s3_queue/configs/users.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/integration/test_storage_s3_queue/configs/users.xml b/tests/integration/test_storage_s3_queue/configs/users.xml index e47df8e4b945..c1b6eb67eade 100644 --- a/tests/integration/test_storage_s3_queue/configs/users.xml +++ b/tests/integration/test_storage_s3_queue/configs/users.xml @@ -4,6 +4,7 @@ 1 1 1 + 1 From f630be491972b0b72eef9ccbc0f3e0a64f63795f Mon Sep 17 00:00:00 2001 From: MyroTk Date: Thu, 27 Jun 2024 08:13:30 -0700 Subject: [PATCH 67/79] sign run command --- .github/workflows/reusable_sign.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/reusable_sign.yml b/.github/workflows/reusable_sign.yml index c5db056c9687..e58f90817dd7 100644 --- a/.github/workflows/reusable_sign.yml +++ b/.github/workflows/reusable_sign.yml @@ -145,7 +145,7 @@ jobs: --job-name '${{inputs.test_name}}' \ --run \ --force \ - --run-command '''${{inputs.run_command}}''' + --run-command '''python3 sign_release.py''' - name: Post run if: ${{ !cancelled() }} run: | From 63a02a39a4890fb4252c173543295ea2b16437d5 Mon Sep 17 00:00:00 2001 From: Yarik Briukhovetskyi <114298166+yariks5s@users.noreply.github.com> Date: Wed, 17 Apr 2024 12:20:17 +0000 Subject: [PATCH 68/79] Merge pull request #62524 from helifu/master Fix a bug moving one partition from one to itself --- src/Storages/MergeTree/MergeTreeData.cpp | 35 +++++++++++-------- src/Storages/MergeTree/MergeTreeData.h | 3 ++ ...ve_partition_to_oneself_deadlock.reference | 2 ++ ...038_move_partition_to_oneself_deadlock.sql | 6 ++++ 4 files changed, 32 insertions(+), 14 deletions(-) create mode 100644 tests/queries/0_stateless/03038_move_partition_to_oneself_deadlock.reference create mode 100644 tests/queries/0_stateless/03038_move_partition_to_oneself_deadlock.sql diff --git a/src/Storages/MergeTree/MergeTreeData.cpp b/src/Storages/MergeTree/MergeTreeData.cpp index 1636807c58c8..d428dd583824 100644 --- a/src/Storages/MergeTree/MergeTreeData.cpp +++ b/src/Storages/MergeTree/MergeTreeData.cpp @@ -5095,6 
+5095,25 @@ void MergeTreeData::movePartitionToVolume(const ASTPtr & partition, const String } } +void MergeTreeData::movePartitionToTable(const PartitionCommand & command, ContextPtr query_context) +{ + String dest_database = query_context->resolveDatabase(command.to_database); + auto dest_storage = DatabaseCatalog::instance().getTable({dest_database, command.to_table}, query_context); + + /// The target table and the source table are the same. + if (dest_storage->getStorageID() == this->getStorageID()) + return; + + auto * dest_storage_merge_tree = dynamic_cast(dest_storage.get()); + if (!dest_storage_merge_tree) + throw Exception(ErrorCodes::NOT_IMPLEMENTED, + "Cannot move partition from table {} to table {} with storage {}", + getStorageID().getNameForLogs(), dest_storage->getStorageID().getNameForLogs(), dest_storage->getName()); + + dest_storage_merge_tree->waitForOutdatedPartsToBeLoaded(); + movePartitionToTable(dest_storage, command.partition, query_context); +} + void MergeTreeData::movePartitionToShard(const ASTPtr & /*partition*/, bool /*move_part*/, const String & /*to*/, ContextPtr /*query_context*/) { throw Exception(ErrorCodes::NOT_IMPLEMENTED, "MOVE PARTITION TO SHARD is not supported by storage {}", getName()); @@ -5171,20 +5190,8 @@ Pipe MergeTreeData::alterPartition( break; case PartitionCommand::MoveDestinationType::TABLE: - { - String dest_database = query_context->resolveDatabase(command.to_database); - auto dest_storage = DatabaseCatalog::instance().getTable({dest_database, command.to_table}, query_context); - - auto * dest_storage_merge_tree = dynamic_cast(dest_storage.get()); - if (!dest_storage_merge_tree) - throw Exception(ErrorCodes::NOT_IMPLEMENTED, - "Cannot move partition from table {} to table {} with storage {}", - getStorageID().getNameForLogs(), dest_storage->getStorageID().getNameForLogs(), dest_storage->getName()); - - dest_storage_merge_tree->waitForOutdatedPartsToBeLoaded(); - movePartitionToTable(dest_storage, command.partition, query_context); - } - break; + movePartitionToTable(command, query_context); + break; case PartitionCommand::MoveDestinationType::SHARD: { diff --git a/src/Storages/MergeTree/MergeTreeData.h b/src/Storages/MergeTree/MergeTreeData.h index 2a4953d89a72..32da90337a65 100644 --- a/src/Storages/MergeTree/MergeTreeData.h +++ b/src/Storages/MergeTree/MergeTreeData.h @@ -790,6 +790,9 @@ class MergeTreeData : public IStorage, public WithMutableContext /// Moves partition to specified Volume void movePartitionToVolume(const ASTPtr & partition, const String & name, bool moving_part, ContextPtr context); + /// Moves partition to specified Table + void movePartitionToTable(const PartitionCommand & command, ContextPtr query_context); + /// Checks that Partition could be dropped right now /// Otherwise - throws an exception with detailed information. /// We do not use mutex because it is not very important that the size could change during the operation. 
diff --git a/tests/queries/0_stateless/03038_move_partition_to_oneself_deadlock.reference b/tests/queries/0_stateless/03038_move_partition_to_oneself_deadlock.reference new file mode 100644 index 000000000000..684861562a8f --- /dev/null +++ b/tests/queries/0_stateless/03038_move_partition_to_oneself_deadlock.reference @@ -0,0 +1,2 @@ +tuple() 1000000 +tuple() 1000000 diff --git a/tests/queries/0_stateless/03038_move_partition_to_oneself_deadlock.sql b/tests/queries/0_stateless/03038_move_partition_to_oneself_deadlock.sql new file mode 100644 index 000000000000..6eefa5270c54 --- /dev/null +++ b/tests/queries/0_stateless/03038_move_partition_to_oneself_deadlock.sql @@ -0,0 +1,6 @@ +DROP TABLE IF EXISTS move_partition_to_oneself; +CREATE TABLE move_partition_to_oneself (key UInt64 CODEC(NONE)) ENGINE = MergeTree ORDER BY tuple(); +INSERT INTO move_partition_to_oneself SELECT number FROM numbers(1e6); +SELECT partition, rows FROM system.parts WHERE database = currentDatabase() AND table = 'move_partition_to_oneself' and active; +ALTER TABLE move_partition_to_oneself MOVE PARTITION tuple() TO TABLE move_partition_to_oneself; +SELECT partition, rows FROM system.parts WHERE database = currentDatabase() AND table = 'move_partition_to_oneself' and active; From fa3488518aaecc326592c02f43bd83af63dda2ea Mon Sep 17 00:00:00 2001 From: MyroTk Date: Mon, 1 Jul 2024 11:53:58 -0700 Subject: [PATCH 69/79] sign release report_path type --- tests/ci/sign_release.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index cbb651d805c9..0dfe036a1de2 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -47,7 +47,7 @@ def sign_file(file_path): return out_file_path def main(): - reports_path = REPORT_PATH + reports_path = Path(REPORT_PATH) if not os.path.exists(TEMP_PATH): os.makedirs(TEMP_PATH) From 3b5a7dd33cdd91749e373ddad4a5b7465734b62c Mon Sep 17 00:00:00 2001 From: MyroTk Date: Mon, 1 Jul 2024 19:33:13 -0700 Subject: [PATCH 70/79] sign release result_path --- tests/ci/build_download_helper.py | 1 + tests/ci/sign_release.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/ci/build_download_helper.py b/tests/ci/build_download_helper.py index 0f6c8e5aa8ab..2c6e839a35c1 100644 --- a/tests/ci/build_download_helper.py +++ b/tests/ci/build_download_helper.py @@ -203,6 +203,7 @@ def download_builds_filter( ) -> None: build_name = get_build_name_for_check(check_name) urls = read_build_urls(build_name, reports_path) + print(type(reports_path)) logging.info("The build report for %s contains the next URLs: %s", build_name, urls) if not urls: diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index 0dfe036a1de2..b7d4228090a0 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -63,7 +63,7 @@ def main(): ).replace("(", "_").replace(")", "_").replace(",", "_")) # downloads `package_release` artifacts generated - download_builds_filter(CHECK_NAME, reports_path, TEMP_PATH) + download_builds_filter(CHECK_NAME, reports_path, Path(TEMP_PATH)) for f in os.listdir(TEMP_PATH): full_path = os.path.join(TEMP_PATH, f) From 57993f4ed97ced5fbaaa593cbeb335c636dbfae2 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Mon, 1 Jul 2024 23:47:57 -0700 Subject: [PATCH 71/79] skip directories when signing --- tests/ci/sign_release.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index b7d4228090a0..8a5827097c8b 100644 --- a/tests/ci/sign_release.py +++ 
b/tests/ci/sign_release.py @@ -67,6 +67,8 @@ def main(): for f in os.listdir(TEMP_PATH): full_path = os.path.join(TEMP_PATH, f) + if os.path.isdir(full_path): + continue hashed_file_path = hash_file(full_path) signed_file_path = sign_file(hashed_file_path) s3_path = s3_path_prefix / os.path.basename(signed_file_path) From a9d9dcfcc2c847c5280f105648e139a07e950f5d Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 2 Jul 2024 06:58:49 -0700 Subject: [PATCH 72/79] sign artifact name --- .github/workflows/reusable_sign.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/reusable_sign.yml b/.github/workflows/reusable_sign.yml index e58f90817dd7..2bd8ae430fa5 100644 --- a/.github/workflows/reusable_sign.yml +++ b/.github/workflows/reusable_sign.yml @@ -157,7 +157,7 @@ jobs: - name: Upload signed hashes uses: actions/upload-artifact@v4 with: - name: signed-hashes + name: ${{inputs.test_name}} signed-hashes path: ${{ env.TEMP_PATH }}/*.gpg - name: Clean if: always() From 717cf5f987b512d31f000edaf21e3c73491c7800 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Wed, 3 Jul 2024 13:08:55 +0000 Subject: [PATCH 73/79] Fix RunConfig failure test_get_version_from_repo (test_version.TestFunctions) --- tests/ci/test_version.py | 25 ++++++++++++----------- tests/ci/tests/autogenerated_versions.txt | 1 + tests/ci/version_helper.py | 4 +++- 3 files changed, 17 insertions(+), 13 deletions(-) diff --git a/tests/ci/test_version.py b/tests/ci/test_version.py index db5c2cb358e8..37ab00996429 100644 --- a/tests/ci/test_version.py +++ b/tests/ci/test_version.py @@ -60,7 +60,7 @@ class TestCase: 15, "v24.4.1.2088-stable", 415, - CHV(24, 5, 1, 54487, None, 16), + CHV(24, 5, 1, 54487, None, 15), ), TestCase( "v24.6.1.1-stable", @@ -78,14 +78,15 @@ class TestCase: ), ) git = Git(True) - for tc in cases: - git.latest_tag = tc.latest_tag - git.commits_since_latest = tc.commits_since_latest - git.new_tag = tc.new_tag - git.commits_since_new = tc.commits_since_new - self.assertEqual( - vh.get_version_from_repo( - Path("tests/ci/tests/autogenerated_versions.txt"), git - ), - tc.expected, - ) + for i, tc in enumerate(cases): + with self.subTest(tc, i=i): + git.latest_tag = tc.latest_tag + git.commits_since_latest = tc.commits_since_latest + git.new_tag = tc.new_tag + git.commits_since_new = tc.commits_since_new + self.assertEqual( + vh.get_version_from_repo( + Path("tests/ci/tests/autogenerated_versions.txt"), git + ), + tc.expected, + ) diff --git a/tests/ci/tests/autogenerated_versions.txt b/tests/ci/tests/autogenerated_versions.txt index 10028bf50c81..675fc161a2fc 100644 --- a/tests/ci/tests/autogenerated_versions.txt +++ b/tests/ci/tests/autogenerated_versions.txt @@ -6,6 +6,7 @@ SET(VERSION_REVISION 54487) SET(VERSION_MAJOR 24) SET(VERSION_MINOR 5) SET(VERSION_PATCH 1) +SET(VERSION_TWEAK 15) SET(VERSION_GITHASH 70a1d3a63d47f0be077d67b8deb907230fc7cfb0) SET(VERSION_DESCRIBE v24.5.1.1-testing) SET(VERSION_STRING 24.5.1.1) diff --git a/tests/ci/version_helper.py b/tests/ci/version_helper.py index 4cff133e36ef..4f3782ada5cb 100755 --- a/tests/ci/version_helper.py +++ b/tests/ci/version_helper.py @@ -238,7 +238,9 @@ class VersionType: PRESTABLE = "prestable" STABLE = "altinitystable" TESTING = "testing" - VALID = (NEW, TESTING, PRESTABLE, STABLE, LTS) + VALID = (NEW, TESTING, PRESTABLE, STABLE, LTS, + "stable" # NOTE (vnemkov): we don't use that directly, but it is used in unit-tests + ) def validate_version(version: str) -> None: From 88b624c6ad5cede62ec9b27ab2eedbc022db1661 Mon Sep 17 
00:00:00 2001 From: Vasily Nemkov Date: Wed, 3 Jul 2024 13:28:11 +0000 Subject: [PATCH 74/79] Attempt to fix DockerBuildAmd64 issue --- docker/test/integration/runner/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/test/integration/runner/Dockerfile b/docker/test/integration/runner/Dockerfile index bc40a27dd57e..7036b68c9594 100644 --- a/docker/test/integration/runner/Dockerfile +++ b/docker/test/integration/runner/Dockerfile @@ -103,7 +103,7 @@ RUN python3 -m pip install --no-cache-dir \ bs4~=0.0.2 \ lxml~=5.1.0 \ urllib3~=2.1.0 \ - jwcrypto~=1.5.6 \ + jwcrypto~=1.5.6 # bs4, lxml are for cloud tests, do not delete # Hudi supports only spark 3.3.*, not 3.4 From 007c0ae2c47d9b9c7f04824e8b36a9c74623cac7 Mon Sep 17 00:00:00 2001 From: pufit Date: Mon, 17 Jun 2024 02:56:27 +0000 Subject: [PATCH 75/79] Merge pull request #64595 from ClickHouse/pufit/fix-definers-restore Fix restore from backup for definers --- src/Interpreters/InterpreterCreateQuery.cpp | 11 +++++--- src/Interpreters/InterpreterCreateQuery.h | 2 +- .../test_backup_restore_new/test.py | 26 +++++++++++++++++++ 3 files changed, 34 insertions(+), 5 deletions(-) diff --git a/src/Interpreters/InterpreterCreateQuery.cpp b/src/Interpreters/InterpreterCreateQuery.cpp index 08370a57e4be..e39037aaa7ba 100644 --- a/src/Interpreters/InterpreterCreateQuery.cpp +++ b/src/Interpreters/InterpreterCreateQuery.cpp @@ -1083,11 +1083,14 @@ BlockIO InterpreterCreateQuery::createTable(ASTCreateQuery & create) String current_database = getContext()->getCurrentDatabase(); auto database_name = create.database ? create.getDatabase() : current_database; + bool is_secondary_query = getContext()->getZooKeeperMetadataTransaction() && !getContext()->getZooKeeperMetadataTransaction()->isInitialQuery(); + auto mode = getLoadingStrictnessLevel(create.attach, /*force_attach*/ false, /*has_force_restore_data_flag*/ false, is_secondary_query || is_restore_from_backup); + if (!create.sql_security && create.supportSQLSecurity() && !getContext()->getServerSettings().ignore_empty_sql_security_in_create_view_query) create.sql_security = std::make_shared(); if (create.sql_security) - processSQLSecurityOption(getContext(), create.sql_security->as(), create.attach, create.is_materialized_view); + processSQLSecurityOption(getContext(), create.sql_security->as(), create.is_materialized_view, /* skip_check_permissions= */ mode >= LoadingStrictnessLevel::SECONDARY_CREATE); DDLGuardPtr ddl_guard; @@ -1885,7 +1888,7 @@ void InterpreterCreateQuery::addColumnsDescriptionToCreateQueryIfNecessary(ASTCr } } -void InterpreterCreateQuery::processSQLSecurityOption(ContextPtr context_, ASTSQLSecurity & sql_security, bool is_attach, bool is_materialized_view) +void InterpreterCreateQuery::processSQLSecurityOption(ContextPtr context_, ASTSQLSecurity & sql_security, bool is_materialized_view, bool skip_check_permissions) { /// If no SQL security is specified, apply default from default_*_view_sql_security setting. if (!sql_security.type) @@ -1926,7 +1929,7 @@ void InterpreterCreateQuery::processSQLSecurityOption(ContextPtr context_, ASTSQ } /// Checks the permissions for the specified definer user. 
- if (sql_security.definer && !sql_security.is_definer_current_user && !is_attach) + if (sql_security.definer && !sql_security.is_definer_current_user && !skip_check_permissions) { const auto definer_name = sql_security.definer->toString(); @@ -1936,7 +1939,7 @@ void InterpreterCreateQuery::processSQLSecurityOption(ContextPtr context_, ASTSQ context_->checkAccess(AccessType::SET_DEFINER, definer_name); } - if (sql_security.type == SQLSecurityType::NONE && !is_attach) + if (sql_security.type == SQLSecurityType::NONE && !skip_check_permissions) context_->checkAccess(AccessType::ALLOW_SQL_SECURITY_NONE); } diff --git a/src/Interpreters/InterpreterCreateQuery.h b/src/Interpreters/InterpreterCreateQuery.h index 865f27367842..91a6b005d451 100644 --- a/src/Interpreters/InterpreterCreateQuery.h +++ b/src/Interpreters/InterpreterCreateQuery.h @@ -81,7 +81,7 @@ class InterpreterCreateQuery : public IInterpreter, WithMutableContext void extendQueryLogElemImpl(QueryLogElement & elem, const ASTPtr & ast, ContextPtr) const override; /// Check access right, validate definer statement and replace `CURRENT USER` with actual name. - static void processSQLSecurityOption(ContextPtr context_, ASTSQLSecurity & sql_security, bool is_attach = false, bool is_materialized_view = false); + static void processSQLSecurityOption(ContextPtr context_, ASTSQLSecurity & sql_security, bool is_materialized_view = false, bool skip_check_permissions = false); private: struct TableProperties diff --git a/tests/integration/test_backup_restore_new/test.py b/tests/integration/test_backup_restore_new/test.py index 34ffdf7a8df7..e91699755831 100644 --- a/tests/integration/test_backup_restore_new/test.py +++ b/tests/integration/test_backup_restore_new/test.py @@ -168,6 +168,32 @@ def test_restore_table(engine): assert instance.query("SELECT count(), sum(x) FROM test.table") == "100\t4950\n" +def test_restore_materialized_view_with_definer(): + instance.query("CREATE DATABASE test") + instance.query( + "CREATE TABLE test.test_table (s String) ENGINE = MergeTree ORDER BY s" + ) + instance.query("CREATE USER u1") + instance.query("GRANT SELECT ON *.* TO u1") + instance.query("GRANT INSERT ON *.* TO u1") + + instance.query( + """ + CREATE MATERIALIZED VIEW test.test_mv_1 (s String) + ENGINE = MergeTree ORDER BY s + DEFINER = u1 SQL SECURITY DEFINER + AS SELECT * FROM test.test_table + """ + ) + + backup_name = new_backup_name() + instance.query(f"BACKUP DATABASE test TO {backup_name}") + instance.query("DROP DATABASE test") + instance.query("DROP USER u1") + + instance.query(f"RESTORE DATABASE test FROM {backup_name}") + + @pytest.mark.parametrize( "engine", ["MergeTree", "Log", "TinyLog", "StripeLog", "Memory"] ) From 7a1d00b90d1ea7249e34788ca6a8f86ec2972d1a Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Fri, 5 Jul 2024 09:35:02 -0700 Subject: [PATCH 76/79] Update regression commit --- .github/workflows/release_branches.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 13478b44da85..751a16cbe861 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -519,7 +519,7 @@ jobs: secrets: inherit with: runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-app-docker-ce, altinity-setup-regression - commit: 86900811b0e15feef31a05c4da4966603b2f833e + commit: 4605d0c4d69b03750d491f6f8fa529742b490a27 arch: release build_sha: ${{ 
github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} RegressionTestsAarch64: @@ -529,7 +529,7 @@ jobs: secrets: inherit with: runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-app-docker-ce, altinity-setup-regression - commit: 86900811b0e15feef31a05c4da4966603b2f833e + commit: 4605d0c4d69b03750d491f6f8fa529742b490a27 arch: aarch64 build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} SignRelease: From eb3835081621a73978c5db167b4230ccc76e574a Mon Sep 17 00:00:00 2001 From: Mikhail Filimonov Date: Mon, 8 Jul 2024 17:48:49 +0200 Subject: [PATCH 77/79] Fix detection of number of CPUs in containers In the case when the 'parent' cgroup was used (i.e. name of cgroup was empty, which is common for containers) ClickHouse was ignoring the CPU limits set for the container. --- base/base/cgroupsv2.cpp | 30 ++++++++++++---------- base/base/cgroupsv2.h | 6 ++--- base/base/getMemoryAmount.cpp | 6 +++-- src/Common/CgroupsMemoryUsageObserver.cpp | 5 ++-- src/Common/getNumberOfPhysicalCPUCores.cpp | 10 ++++---- 5 files changed, 32 insertions(+), 25 deletions(-) diff --git a/base/base/cgroupsv2.cpp b/base/base/cgroupsv2.cpp index 1686c6bd88cb..d3a1d972db70 100644 --- a/base/base/cgroupsv2.cpp +++ b/base/base/cgroupsv2.cpp @@ -1,4 +1,4 @@ -#include + #include #include @@ -23,15 +23,17 @@ bool cgroupsV2MemoryControllerEnabled() { #if defined(OS_LINUX) chassert(cgroupsV2Enabled()); - /// According to https://docs.kernel.org/admin-guide/cgroup-v2.html: - /// - file 'cgroup.controllers' defines which controllers *can* be enabled - /// - file 'cgroup.subtree_control' defines which controllers *are* enabled - /// Caveat: nested groups may disable controllers. For simplicity, check only the top-level group. - std::ifstream subtree_control_file(default_cgroups_mount / "cgroup.subtree_control"); - if (!subtree_control_file.is_open()) + /// According to https://docs.kernel.org/admin-guide/cgroup-v2.html, file "cgroup.controllers" defines which controllers are available + /// for the current + child cgroups. The set of available controllers can be restricted from level to level using file + /// "cgroups.subtree_control". It is therefore sufficient to check the bottom-most nested "cgroup.controllers" file. 
+ auto cgroup_dir = currentCGroupV2Path(); + if (cgroup_dir.empty()) + return false; + std::ifstream controllers_file(cgroup_dir / "cgroup.controllers"); + if (!controllers_file.is_open()) return false; std::string controllers; - std::getline(subtree_control_file, controllers); + std::getline(controllers_file, controllers); if (controllers.find("memory") == std::string::npos) return false; return true; @@ -40,7 +42,7 @@ bool cgroupsV2MemoryControllerEnabled() #endif } -std::string cgroupV2OfProcess() +std::filesystem::path currentCGroupV2Path() { #if defined(OS_LINUX) chassert(cgroupsV2Enabled()); @@ -48,17 +50,19 @@ std::string cgroupV2OfProcess() /// A simpler way to get the membership is: std::ifstream cgroup_name_file("/proc/self/cgroup"); if (!cgroup_name_file.is_open()) - return ""; + return {}; /// With cgroups v2, there will be a *single* line with prefix "0::/" /// (see https://docs.kernel.org/admin-guide/cgroup-v2.html) std::string cgroup; std::getline(cgroup_name_file, cgroup); static const std::string v2_prefix = "0::/"; if (!cgroup.starts_with(v2_prefix)) - return ""; + return {}; + + // the 'root' cgroup can have empty path, which is valid cgroup = cgroup.substr(v2_prefix.length()); - return cgroup; + return default_cgroups_mount / cgroup; #else - return ""; + return {}; #endif } diff --git a/base/base/cgroupsv2.h b/base/base/cgroupsv2.h index 70219d87cd1d..2c58682ce31c 100644 --- a/base/base/cgroupsv2.h +++ b/base/base/cgroupsv2.h @@ -16,7 +16,7 @@ bool cgroupsV2Enabled(); /// Assumes that cgroupsV2Enabled() is enabled. bool cgroupsV2MemoryControllerEnabled(); -/// Which cgroup does the process belong to? -/// Returns an empty string if the cgroup cannot be determined. +/// Detects which cgroup the process belong and returns the path to it in sysfs (for cgroups v2). +/// Returns an empty path if the cgroup cannot be determined. /// Assumes that cgroupsV2Enabled() is enabled. -std::string cgroupV2OfProcess(); +std::filesystem::path currentCGroupV2Path(); diff --git a/base/base/getMemoryAmount.cpp b/base/base/getMemoryAmount.cpp index 3d01e301f45c..552f0ce99db1 100644 --- a/base/base/getMemoryAmount.cpp +++ b/base/base/getMemoryAmount.cpp @@ -23,8 +23,10 @@ std::optional getCgroupsV2MemoryLimit() if (!cgroupsV2MemoryControllerEnabled()) return {}; - std::string cgroup = cgroupV2OfProcess(); - auto current_cgroup = cgroup.empty() ? default_cgroups_mount : (default_cgroups_mount / cgroup); + auto current_cgroup = currentCGroupV2Path(); + + if (current_cgroup.empty()) + return {}; /// Open the bottom-most nested memory limit setting file. If there is no such file at the current /// level, try again at the parent level as memory settings are inherited. diff --git a/src/Common/CgroupsMemoryUsageObserver.cpp b/src/Common/CgroupsMemoryUsageObserver.cpp index dd68bd0da643..6a993d3bd14d 100644 --- a/src/Common/CgroupsMemoryUsageObserver.cpp +++ b/src/Common/CgroupsMemoryUsageObserver.cpp @@ -125,8 +125,9 @@ std::optional getCgroupsV2FileName() if (!cgroupsV2MemoryControllerEnabled()) return {}; - String cgroup = cgroupV2OfProcess(); - auto current_cgroup = cgroup.empty() ? default_cgroups_mount : (default_cgroups_mount / cgroup); + auto current_cgroup = currentCGroupV2Path(); + if (current_cgroup.empty()) + return {}; /// Return the bottom-most nested current memory file. If there is no such file at the current /// level, try again at the parent level as memory settings are inherited. 
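
Note (illustrative, not part of the patch): the change above makes the CPU-limit probe start from the process's own cgroup v2 directory, which containers typically report as the empty "root" path in /proc/self/cgroup, instead of treating an empty name as "no cgroup" and silently falling back to the limits at the cgroup mount root. Assuming cgroups v2 mounted at /sys/fs/cgroup, the lookup is roughly equivalent to the following Python sketch; the names are illustrative, rounding details differ, and the cpuset.cpus.effective fallback visible in the diff that follows is omitted here.

import os

CGROUP_MOUNT = "/sys/fs/cgroup"

def current_cgroup_v2_path():
    # Under cgroups v2, /proc/self/cgroup holds a single "0::/<path>" line;
    # an empty <path> (the root cgroup, common inside containers) is still valid.
    with open("/proc/self/cgroup") as f:
        for line in f:
            if line.startswith("0::/"):
                return os.path.normpath(os.path.join(CGROUP_MOUNT, line[4:].strip()))
    return None

def cgroup_cpu_limit(default_cpus):
    path = current_cgroup_v2_path()
    if path is None:
        return default_cpus
    cpus = default_cpus
    # Walk from the process's cgroup up to the mount point; a child may carry a
    # different cpu.max than its parent, so every level is inspected.
    while path != os.path.dirname(CGROUP_MOUNT):
        cpu_max = os.path.join(path, "cpu.max")
        if os.path.isfile(cpu_max):
            with open(cpu_max) as limits:
                quota, period = limits.read().split()  # "<quota> <period>" or "max <period>"
            if quota != "max":
                cpus = min(cpus, max(1, int(quota) // int(period)))
        path = os.path.dirname(path)
    return cpus
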
diff --git a/src/Common/getNumberOfPhysicalCPUCores.cpp b/src/Common/getNumberOfPhysicalCPUCores.cpp index 7e18a93e6edf..b16c635f23e9 100644 --- a/src/Common/getNumberOfPhysicalCPUCores.cpp +++ b/src/Common/getNumberOfPhysicalCPUCores.cpp @@ -37,12 +37,12 @@ uint32_t getCGroupLimitedCPUCores(unsigned default_cpu_count) /// cgroupsv2 if (cgroupsV2Enabled()) { - /// First, we identify the cgroup the process belongs - std::string cgroup = cgroupV2OfProcess(); - if (cgroup.empty()) + /// First, we identify the path of the cgroup the process belongs + auto cgroup_path = currentCGroupV2Path(); + if (cgroup_path.empty()) return default_cpu_count; - auto current_cgroup = cgroup.empty() ? default_cgroups_mount : (default_cgroups_mount / cgroup); + auto current_cgroup = cgroup_path; // Looking for cpu.max in directories from the current cgroup to the top level // It does not stop on the first time since the child could have a greater value than parent @@ -62,7 +62,7 @@ uint32_t getCGroupLimitedCPUCores(unsigned default_cpu_count) } current_cgroup = current_cgroup.parent_path(); } - current_cgroup = default_cgroups_mount / cgroup; + current_cgroup = cgroup_path; // Looking for cpuset.cpus.effective in directories from the current cgroup to the top level while (current_cgroup != default_cgroups_mount.parent_path()) { From 175862e6f2defd2123ac48ab383095e783b978b4 Mon Sep 17 00:00:00 2001 From: MyroTk <44327070+MyroTk@users.noreply.github.com> Date: Tue, 16 Jul 2024 09:44:42 -0700 Subject: [PATCH 78/79] Update regression hash --- .github/workflows/release_branches.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 751a16cbe861..6914adb0731a 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -519,7 +519,7 @@ jobs: secrets: inherit with: runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-app-docker-ce, altinity-setup-regression - commit: 4605d0c4d69b03750d491f6f8fa529742b490a27 + commit: c5e1513a2214ee33696c29717935e0a94989ac2a arch: release build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} RegressionTestsAarch64: @@ -529,7 +529,7 @@ jobs: secrets: inherit with: runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-app-docker-ce, altinity-setup-regression - commit: 4605d0c4d69b03750d491f6f8fa529742b490a27 + commit: c5e1513a2214ee33696c29717935e0a94989ac2a arch: aarch64 build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} SignRelease: From 4a4f115cdf82c39872c7c590048b05274a773a58 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Wed, 17 Jul 2024 13:15:48 +0000 Subject: [PATCH 79/79] Set keeper base image base version to alpine:3.16.9 All versions starting from 3.17, there is a critical CVE-2024-5535 https://security.alpinelinux.org/vuln/CVE-2024-5535 As of now, alpine:3.16.9 had only 1 medium --- docker/keeper/Dockerfile | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docker/keeper/Dockerfile b/docker/keeper/Dockerfile index 50d3c5bcb331..18fde25d7488 100644 --- a/docker/keeper/Dockerfile +++ b/docker/keeper/Dockerfile @@ -12,7 +12,10 @@ RUN arch=${TARGETARCH:-amd64} \ && ln -s "${rarch}-linux-gnu" /lib/linux-gnu -FROM alpine +# All versions starting from 3.17, there is a critical CVE-2024-5535 +# https://security.alpinelinux.org/vuln/CVE-2024-5535 +# on 17th of July 2024, 
alpine:3.16.9 had only 1 medium +FROM alpine:3.16.9 ENV LANG=en_US.UTF-8 \ LANGUAGE=en_US:en \
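
Note (illustrative, not part of the patches): to tie the signing pieces in this series together, tests/ci/sign_release.py downloads the package_release (or package_aarch64) build artifacts, computes a hash of each file, GPG-signs the hash file, and the resulting *.gpg files are what the "Upload signed hashes" workflow step publishes. A minimal sketch of that hash-and-sign step is below. It assumes gpg is on PATH and that the signing key from GPG_BINARY_SIGNING_KEY has already been imported; the digest algorithm, output file naming, and exact gpg flags are illustrative rather than copied from the real script, which takes the passphrase from GPG_BINARY_SIGNING_PASSPHRASE as the workflow provides it.

import hashlib
import os
import subprocess

def hash_file(path, block_size=65536):
    # Hash the artifact in fixed-size blocks and write a digest file next to it.
    digest = hashlib.sha512()
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(block_size), b""):
            digest.update(block)
    out_path = path + ".sha512"
    with open(out_path, "w") as f:
        f.write(f"{digest.hexdigest()}  {os.path.basename(path)}\n")
    return out_path

def sign_file(path):
    # Detached, ASCII-armored signature over the digest file (flags are illustrative).
    out_path = path + ".gpg"
    subprocess.run(
        [
            "gpg", "--batch", "--yes", "--pinentry-mode", "loopback",
            "--passphrase", os.environ["GPG_BINARY_SIGNING_PASSPHRASE"],
            "--armor", "--output", out_path, "--detach-sign", path,
        ],
        check=True,
    )
    return out_path

Signing the small digest file rather than the package itself presumably keeps the step cheap; consumers verify the signature on the digest and then check the package against it.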