diff --git a/azure-pipelines.yml b/azure-pipelines.yml index ebc7e5289d547..8963f07b03f68 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -73,16 +73,16 @@ stages: parameters: # see template file for a definition of the parameters. stage_name: ci_build test_pool_definition: - vmImage: 'ubuntu-22.04' + vmImage: 'ubuntu-24.04' e2e_pool_definition: - vmImage: 'ubuntu-22.04' + vmImage: 'ubuntu-24.04' environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk17 -Pjava17-target" run_end_to_end: false container: flink-build-container jdk: 17 - job: docs_404_check # run on a MSFT provided machine pool: - vmImage: 'ubuntu-22.04' + vmImage: 'ubuntu-24.04' steps: - task: GoTool@0 inputs: diff --git a/tools/azure-pipelines/build-apache-repo.yml b/tools/azure-pipelines/build-apache-repo.yml index 8c2eac1a0fa87..5911d16a936db 100644 --- a/tools/azure-pipelines/build-apache-repo.yml +++ b/tools/azure-pipelines/build-apache-repo.yml @@ -68,14 +68,14 @@ stages: test_pool_definition: name: Default e2e_pool_definition: - vmImage: 'ubuntu-22.04' + vmImage: 'ubuntu-24.04' environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk17 -Pjava17-target" run_end_to_end: false container: flink-build-container jdk: 17 - job: docs_404_check # run on a MSFT provided machine pool: - vmImage: 'ubuntu-22.04' + vmImage: 'ubuntu-24.04' steps: # Skip docs check if this is a pull request that doesn't contain a documentation change - task: GoTool@0 @@ -112,9 +112,9 @@ stages: parameters: stage_name: cron_azure test_pool_definition: - vmImage: 'ubuntu-22.04' + vmImage: 'ubuntu-24.04' e2e_pool_definition: - vmImage: 'ubuntu-22.04' + vmImage: 'ubuntu-24.04' environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk17 -Pjava17-target" run_end_to_end: true container: flink-build-container @@ -125,7 +125,7 @@ stages: test_pool_definition: name: Default e2e_pool_definition: - vmImage: 'ubuntu-22.04' + vmImage: 'ubuntu-24.04' environment: PROFILE="-Dflink.hadoop.version=3.2.3 -Phadoop3-tests,hive3 
-Djdk17 -Pjava17-target" run_end_to_end: true container: flink-build-container @@ -136,7 +136,7 @@ stages: test_pool_definition: name: Default e2e_pool_definition: - vmImage: 'ubuntu-22.04' + vmImage: 'ubuntu-24.04' environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk11 -Pjava11-target" run_end_to_end: true container: flink-build-container @@ -147,7 +147,7 @@ stages: test_pool_definition: name: Default e2e_pool_definition: - vmImage: 'ubuntu-22.04' + vmImage: 'ubuntu-24.04' environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk21 -Pjava21-target" run_end_to_end: true container: flink-build-container @@ -158,14 +158,14 @@ stages: test_pool_definition: name: Default e2e_pool_definition: - vmImage: 'ubuntu-22.04' + vmImage: 'ubuntu-24.04' environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Penable-adaptive-scheduler -Djdk17 -Pjava17-target" run_end_to_end: true container: flink-build-container jdk: 17 - job: docs_404_check # run on a MSFT provided machine pool: - vmImage: 'ubuntu-22.04' + vmImage: 'ubuntu-24.04' steps: - task: GoTool@0 inputs: diff --git a/tools/azure-pipelines/build-nightly-dist.yml b/tools/azure-pipelines/build-nightly-dist.yml index 61a5bb46b89df..2142036110606 100644 --- a/tools/azure-pipelines/build-nightly-dist.yml +++ b/tools/azure-pipelines/build-nightly-dist.yml @@ -19,7 +19,7 @@ parameters: jobs: - job: ${{parameters.stage_name}}_binary pool: - vmImage: 'ubuntu-22.04' + vmImage: 'ubuntu-24.04' container: flink-build-container workspace: clean: all @@ -69,7 +69,7 @@ jobs: # artifact: nightly-release - job: ${{parameters.stage_name}}_maven pool: - vmImage: 'ubuntu-22.04' + vmImage: 'ubuntu-24.04' container: flink-build-container timeoutInMinutes: 240 workspace: diff --git a/tools/azure-pipelines/build-python-wheels.yml b/tools/azure-pipelines/build-python-wheels.yml index d68069c1632fb..251a5b7844b4c 100644 --- a/tools/azure-pipelines/build-python-wheels.yml +++ b/tools/azure-pipelines/build-python-wheels.yml @@ -16,7 +16,7 @@ jobs: - 
job: build_wheels_on_Linux pool: - vmImage: 'ubuntu-22.04' + vmImage: 'ubuntu-24.04' steps: - script: | cd flink-python diff --git a/tools/azure-pipelines/e2e-template.yml b/tools/azure-pipelines/e2e-template.yml index ca0d6c8911116..2806e11926c30 100644 --- a/tools/azure-pipelines/e2e-template.yml +++ b/tools/azure-pipelines/e2e-template.yml @@ -48,6 +48,17 @@ jobs: - script: ./tools/azure-pipelines/free_disk_space.sh target: host displayName: Free up disk space + # Install upstream kubectl so we don't use the agent's FIPS-patched build. + - bash: | + set -euo pipefail + echo ">>> Installing upstream kubectl" + curl -fsSL -o kubectl "https://dl.k8s.io/release/$(curl -fsSL https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl" + sudo install -m 0755 kubectl /usr/local/bin/kubectl + echo ">>> kubectl on PATH:" + which kubectl + kubectl version --client=true -o=yaml + displayName: "Use upstream kubectl (avoid host FIPS build)" + condition: and(succeeded(), ne(variables['SKIP'], '1')) # the cache task does not create directories on a cache miss, and can later fail when trying to tar the directory if the test haven't created it # this may for example happen if a given directory is only used by a subset of tests, which are run in a different 'group' - bash: |