6 changes: 3 additions & 3 deletions azure-pipelines.yml
@@ -73,16 +73,16 @@ stages:
parameters: # see template file for a definition of the parameters.
stage_name: ci_build
test_pool_definition:
-  vmImage: 'ubuntu-22.04'
+  vmImage: 'ubuntu-24.04'
e2e_pool_definition:
-  vmImage: 'ubuntu-22.04'
+  vmImage: 'ubuntu-24.04'
environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk17 -Pjava17-target"
run_end_to_end: false
container: flink-build-container
jdk: 17
- job: docs_404_check # run on a MSFT provided machine
pool:
-  vmImage: 'ubuntu-22.04'
+  vmImage: 'ubuntu-24.04'
steps:
- task: GoTool@0
inputs:
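The same image bump recurs in every pool definition in this and the following files. A minimal sketch of centralizing the value so a future upgrade touches one line — the variable name is hypothetical, and a top-level variables block is assumed to fit these pipelines; this is not part of the change itself:

# Hypothetical refactor sketch, for illustration only.
variables:
  agent_image: 'ubuntu-24.04'  # assumed variable name, not used by this PR

jobs:
- job: docs_404_check
  pool:
    vmImage: $(agent_image)  # macro syntax, resolved at queue time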
18 changes: 9 additions & 9 deletions tools/azure-pipelines/build-apache-repo.yml
@@ -68,14 +68,14 @@ stages:
test_pool_definition:
name: Default
e2e_pool_definition:
-  vmImage: 'ubuntu-22.04'
+  vmImage: 'ubuntu-24.04'
environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk17 -Pjava17-target"
run_end_to_end: false
container: flink-build-container
jdk: 17
- job: docs_404_check # run on a MSFT provided machine
pool:
-  vmImage: 'ubuntu-22.04'
+  vmImage: 'ubuntu-24.04'
steps:
# Skip docs check if this is a pull request that doesn't contain a documentation change
- task: GoTool@0
@@ -112,9 +112,9 @@ stages:
parameters:
stage_name: cron_azure
test_pool_definition:
-  vmImage: 'ubuntu-22.04'
+  vmImage: 'ubuntu-24.04'
e2e_pool_definition:
-  vmImage: 'ubuntu-22.04'
+  vmImage: 'ubuntu-24.04'
environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk17 -Pjava17-target"
run_end_to_end: true
container: flink-build-container
@@ -125,7 +125,7 @@
test_pool_definition:
name: Default
e2e_pool_definition:
-  vmImage: 'ubuntu-22.04'
+  vmImage: 'ubuntu-24.04'
environment: PROFILE="-Dflink.hadoop.version=3.2.3 -Phadoop3-tests,hive3 -Djdk17 -Pjava17-target"
run_end_to_end: true
container: flink-build-container
@@ -136,7 +136,7 @@
test_pool_definition:
name: Default
e2e_pool_definition:
-  vmImage: 'ubuntu-22.04'
+  vmImage: 'ubuntu-24.04'
environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk11 -Pjava11-target"
run_end_to_end: true
container: flink-build-container
@@ -147,7 +147,7 @@
test_pool_definition:
name: Default
e2e_pool_definition:
-  vmImage: 'ubuntu-22.04'
+  vmImage: 'ubuntu-24.04'
environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk21 -Pjava21-target"
run_end_to_end: true
container: flink-build-container
@@ -158,14 +158,14 @@
test_pool_definition:
name: Default
e2e_pool_definition:
-  vmImage: 'ubuntu-22.04'
+  vmImage: 'ubuntu-24.04'
environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Penable-adaptive-scheduler -Djdk17 -Pjava17-target"
run_end_to_end: true
container: flink-build-container
jdk: 17
- job: docs_404_check # run on a MSFT provided machine
pool:
-  vmImage: 'ubuntu-22.04'
+  vmImage: 'ubuntu-24.04'
steps:
- task: GoTool@0
inputs:
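Each cron job above selects its toolchain through the PROFILE string of Maven flags set in `environment`. The script that consumes it is outside this diff; a sketch of how such a variable is typically fed to Maven — the step name and mvn goals here are assumptions, not the project's actual invocation:

# Hypothetical consumer of the PROFILE variable, for illustration only.
- bash: |
    set -euo pipefail
    # Deliberately unquoted: PROFILE is a whitespace-separated list of Maven flags.
    mvn clean install -DskipTests ${PROFILE}
  displayName: 'Compile with selected profile (sketch)'
  env:
    PROFILE: '-Dflink.hadoop.version=2.10.2 -Djdk17 -Pjava17-target'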
4 changes: 2 additions & 2 deletions tools/azure-pipelines/build-nightly-dist.yml
@@ -19,7 +19,7 @@ parameters:
jobs:
- job: ${{parameters.stage_name}}_binary
pool:
-  vmImage: 'ubuntu-22.04'
+  vmImage: 'ubuntu-24.04'
container: flink-build-container
workspace:
clean: all
@@ -69,7 +69,7 @@ jobs:
# artifact: nightly-release
- job: ${{parameters.stage_name}}_maven
pool:
-  vmImage: 'ubuntu-22.04'
+  vmImage: 'ubuntu-24.04'
container: flink-build-container
timeoutInMinutes: 240
workspace:
2 changes: 1 addition & 1 deletion tools/azure-pipelines/build-python-wheels.yml
@@ -16,7 +16,7 @@
jobs:
- job: build_wheels_on_Linux
pool:
-  vmImage: 'ubuntu-22.04'
+  vmImage: 'ubuntu-24.04'
steps:
- script: |
cd flink-python
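The remainder of the wheel-build script is collapsed out of this diff. As a hypothetical sketch of the kind of commands such a job runs on the new image — the pip invocation is an assumption, not the project's actual script:

# Hypothetical sketch; the real build commands are not shown in this diff.
- script: |
    cd flink-python
    python -m pip install --upgrade pip wheel
    python -m pip wheel . --no-deps -w dist/
  displayName: 'Build PyFlink wheel (sketch)'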
11 changes: 11 additions & 0 deletions tools/azure-pipelines/e2e-template.yml
@@ -48,6 +48,17 @@ jobs:
- script: ./tools/azure-pipelines/free_disk_space.sh
target: host
displayName: Free up disk space
+  # Install upstream kubectl so we don't use the agent's FIPS-patched build.
+  - bash: |
+      set -euo pipefail
+      echo ">>> Installing upstream kubectl"
+      curl -sSL -o kubectl "https://dl.k8s.io/release/$(curl -sSL https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl"
+      sudo install -m 0755 kubectl /usr/local/bin/kubectl
+      echo ">>> kubectl on PATH:"
+      which kubectl
+      kubectl version --client=true -o=yaml
+    displayName: "Use upstream kubectl (avoid host FIPS build)"
+    condition: not(eq(variables['SKIP'], '1'))
# the cache task does not create directories on a cache miss, and can later fail when trying to tar the directory if the tests haven't created it
# this may for example happen if a given directory is only used by a subset of tests, which are run in a different 'group'
- bash: |
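The body of the step opened above is collapsed out of this diff. A minimal sketch of the workaround the comment describes — pre-creating the cached directory so the post-job tar step survives a cache miss — where the directory path is hypothetical:

# Hypothetical sketch of a pre-cache step; the path is illustrative only.
- bash: |
    set -euo pipefail
    # Ensure the directory exists even if no test in this group writes to it,
    # so the cache task can still tar it after the job.
    mkdir -p "$(Pipeline.Workspace)/e2e_artifact_cache"
  displayName: 'Create cache directory on cache miss (sketch)'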