From cb8f63f4991c09ee1c13fd1a2c25ee081191352d Mon Sep 17 00:00:00 2001 From: Hyunsu Cho Date: Mon, 18 Nov 2024 17:52:21 -0800 Subject: [PATCH] Consolidate workflow defs --- .github/workflows/i386.yml | 6 +- .github/workflows/jvm_tests.yml | 238 ++++++++-- .github/workflows/jvm_tests_runs_on.yml | 161 ------- .github/workflows/lint.yml | 144 ++++++ .github/workflows/macos.yml | 24 - .github/workflows/main.yml | 431 +++++++++++------- .github/workflows/main_runs_on.yml | 308 ------------- .github/workflows/misc.yml | 133 ++++++ .github/workflows/python_tests.yml | 204 +-------- ...hon_wheels.yml => python_wheels_macos.yml} | 28 +- .github/workflows/r_tests.yml | 84 ++-- .github/workflows/scorecards.yml | 2 +- .github/workflows/sycl_tests.yml | 94 ++++ .github/workflows/update_rapids.yml | 2 +- .../{windows_runs_on.yml => windows.yml} | 7 +- ops/{docker => }/conda_env/aarch64_test.yml | 0 ops/{docker => }/conda_env/cpp_test.yml | 0 ops/{docker => }/conda_env/jvm_tests.yml | 0 ops/{docker => }/conda_env/linux_cpu_test.yml | 0 .../conda_env/linux_sycl_test.yml | 0 ops/{docker => }/conda_env/macos_cpu_test.yml | 0 ops/{docker => }/conda_env/python_lint.yml | 0 ops/{docker => }/conda_env/sdist_test.yml | 0 ops/{docker => }/conda_env/win64_test.yml | 0 ops/docker/dockerfile/Dockerfile.aarch64 | 2 +- ops/docker/dockerfile/Dockerfile.clang_tidy | 2 +- ops/docker/dockerfile/Dockerfile.cpu | 2 +- ops/docker/dockerfile/Dockerfile.gpu | 2 +- .../Dockerfile.gpu_build_r_rockylinux8 | 2 +- ops/docker/dockerfile/Dockerfile.jvm | 2 +- .../dockerfile/Dockerfile.jvm_gpu_build | 2 +- .../Dockerfile.manylinux2014_aarch64 | 2 +- .../Dockerfile.manylinux2014_x86_64 | 2 +- .../Dockerfile.manylinux_2_28_x86_64 | 2 +- ops/docker_build.py | 6 +- ops/docker_run.py | 4 +- ...m1.sh => build-jvm-macos-apple-silicon.sh} | 8 +- ops/pipeline/build-jvm-macos-intel.sh | 44 ++ ops/pipeline/build-python-wheels-macos.sh | 1 - ops/pipeline/test-win64-gpu.ps1 | 2 +- ops/script/build_via_cmake.sh | 11 +- ops/script/lint_cmake.sh | 2 +- ops/script/run_clang_tidy.py | 4 +- .../test_gpu_with_dask/test_gpu_with_dask.py | 2 +- 44 files changed, 970 insertions(+), 1000 deletions(-) delete mode 100644 .github/workflows/jvm_tests_runs_on.yml create mode 100644 .github/workflows/lint.yml delete mode 100644 .github/workflows/macos.yml delete mode 100644 .github/workflows/main_runs_on.yml create mode 100644 .github/workflows/misc.yml rename .github/workflows/{python_wheels.yml => python_wheels_macos.yml} (55%) create mode 100644 .github/workflows/sycl_tests.yml rename .github/workflows/{windows_runs_on.yml => windows.yml} (93%) rename ops/{docker => }/conda_env/aarch64_test.yml (100%) rename ops/{docker => }/conda_env/cpp_test.yml (100%) rename ops/{docker => }/conda_env/jvm_tests.yml (100%) rename ops/{docker => }/conda_env/linux_cpu_test.yml (100%) rename ops/{docker => }/conda_env/linux_sycl_test.yml (100%) rename ops/{docker => }/conda_env/macos_cpu_test.yml (100%) rename ops/{docker => }/conda_env/python_lint.yml (100%) rename ops/{docker => }/conda_env/sdist_test.yml (100%) rename ops/{docker => }/conda_env/win64_test.yml (100%) rename ops/pipeline/{build-jvm-macos-m1.sh => build-jvm-macos-apple-silicon.sh} (85%) create mode 100755 ops/pipeline/build-jvm-macos-intel.sh mode change 100644 => 100755 ops/pipeline/build-python-wheels-macos.sh mode change 100644 => 100755 ops/script/lint_cmake.sh diff --git a/.github/workflows/i386.yml b/.github/workflows/i386.yml index aec7e9d31087..aa71147e2581 100644 --- a/.github/workflows/i386.yml +++ 
b/.github/workflows/i386.yml @@ -19,7 +19,7 @@ jobs: ports: - 5000:5000 steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 + - uses: actions/checkout@v4.2.2 with: submodules: 'true' - name: Set up Docker Buildx @@ -30,7 +30,7 @@ jobs: uses: docker/build-push-action@v6 with: context: . - file: tests/ci_build/Dockerfile.i386 + file: ops/docker/dockerfile/Dockerfile.i386 push: true tags: localhost:5000/xgboost/build-32bit:latest cache-from: type=gha @@ -40,4 +40,4 @@ jobs: docker run --rm -v $PWD:/workspace -w /workspace \ -e CXXFLAGS='-Wno-error=overloaded-virtual -Wno-error=maybe-uninitialized -Wno-error=redundant-move' \ localhost:5000/xgboost/build-32bit:latest \ - tests/ci_build/build_via_cmake.sh + bash ops/script/build_via_cmake.sh diff --git a/.github/workflows/jvm_tests.yml b/.github/workflows/jvm_tests.yml index dcbd9de55b50..f9385fa4acaf 100644 --- a/.github/workflows/jvm_tests.yml +++ b/.github/workflows/jvm_tests.yml @@ -1,44 +1,193 @@ -name: XGBoost-JVM-Tests +name: XGBoost CI (JVM packages) on: [push, pull_request] permissions: - contents: read # to fetch code (actions/checkout) + contents: read # to fetch code (actions/checkout) concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true +env: + BRANCH_NAME: >- + ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }} + USE_DOCKER_CACHE: 1 + jobs: - test-with-jvm: - name: Test JVM on OS ${{ matrix.os }} + build-containers: + name: Build CI containers (${{ matrix.container_id }}) + runs-on: + - runs-on=${{ github.run_id }} + - runner=${{ matrix.runner }} + strategy: + matrix: + container_id: + - xgb-ci.manylinux2014_x86_64 + - xgb-ci.jvm + - xgb-ci.jvm_gpu_build + runner: [linux-amd64-cpu] + include: + - container_id: xgb-ci.manylinux2014_aarch64 + runner: linux-arm64-cpu + steps: + # Restart Docker daemon so that it recognizes the ephemeral disks + - run: sudo systemctl restart docker + - uses: actions/checkout@v4.2.2 + with: + submodules: "true" + - name: Build ${{ matrix.container_id }} + run: bash ops/docker_build.sh + env: + CONTAINER_ID: ${{ matrix.container_id }} + + build-jvm-manylinux2014: + name: >- + Build libxgboost4j.so targeting glibc 2.17 + (arch ${{ matrix.arch }}, runner ${{ matrix.runner }}) + needs: build-containers + runs-on: + - runs-on=${{ github.run_id }} + - runner=${{ matrix.runner }} + strategy: + fail-fast: false + matrix: + include: + - arch: aarch64 + runner: linux-arm64-cpu + - arch: x86_64 + runner: linux-amd64-cpu + steps: + # Restart Docker daemon so that it recognizes the ephemeral disks + - run: sudo systemctl restart docker + - uses: actions/checkout@v4.2.2 + with: + submodules: "true" + - name: Fetch container from cache + run: bash ops/docker_build.sh + env: + CONTAINER_ID: xgb-ci.manylinux2014_${{ matrix.arch }} + - run: bash ops/pipeline/build-jvm-manylinux2014.sh ${{ matrix.arch }} + + build-jvm-gpu: + name: Build libxgboost4j.so with CUDA + needs: build-containers + runs-on: + - runs-on=${{ github.run_id }} + - runner=linux-amd64-cpu + steps: + # Restart Docker daemon so that it recognizes the ephemeral disks + - run: sudo systemctl restart docker + - uses: actions/checkout@v4.2.2 + with: + submodules: "true" + - name: Fetch container from cache + run: bash ops/docker_build.sh + env: + CONTAINER_ID: xgb-ci.jvm_gpu_build + - run: bash ops/pipeline/build-jvm-gpu.sh + - name: Stash files + run: bash ops/stash_artifacts.sh lib/libxgboost4j.so + 
env: + COMMAND: upload + KEY: build-jvm-gpu + + build-jvm-mac: + name: "Build libxgboost4j.dylib for ${{ matrix.description }}" + runs-on: ${{ matrix.runner }} + strategy: + fail-fast: false + matrix: + include: + - description: "MacOS (Apple Silicon)" + script: ops/pipeline/build-jvm-macos-apple-silicon.sh + runner: macos-14 + - description: "MacOS (Intel)" + script: ops/pipeline/build-jvm-macos-intel.sh + runner: macos-13 + steps: + - uses: actions/checkout@v4.2.2 + with: + submodules: "true" + - run: bash ${{ matrix.script }} + + build-jvm-docs: + name: Build docs for JVM packages + needs: [build-jvm-gpu] + runs-on: + - runs-on=${{ github.run_id }} + - runner=linux-amd64-cpu + steps: + # Restart Docker daemon so that it recognizes the ephemeral disks + - run: sudo systemctl restart docker + - uses: actions/checkout@v4.2.2 + with: + submodules: "true" + - name: Fetch container from cache + run: bash ops/docker_build.sh + env: + CONTAINER_ID: xgb-ci.jvm_gpu_build + - name: Unstash files + run: bash ops/stash_artifacts.sh lib/libxgboost4j.so + env: + COMMAND: download + KEY: build-jvm-gpu + - run: bash ops/pipeline/build-jvm-doc.sh + + build-test-jvm-packages: + name: Build and test JVM packages (Linux) + needs: build-containers + runs-on: + - runs-on=${{ github.run_id }} + - runner=linux-amd64-cpu + steps: + # Restart Docker daemon so that it recognizes the ephemeral disks + - run: sudo systemctl restart docker + - uses: actions/checkout@v4.2.2 + with: + submodules: "true" + - name: Fetch container from cache + run: bash ops/docker_build.sh + env: + CONTAINER_ID: xgb-ci.jvm + - name: Build and test JVM packages (Scala 2.12) + run: bash ops/pipeline/build-test-jvm-packages.sh + env: + SCALA_VERSION: 2.12 + - name: Build and test JVM packages (Scala 2.13) + run: bash ops/pipeline/build-test-jvm-packages.sh + env: + SCALA_VERSION: 2.13 + + build-test-jvm-packages-other-os: + name: Build and test JVM packages (${{ matrix.os }}) timeout-minutes: 30 runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: - os: [windows-latest, ubuntu-latest, macos-13] + os: [windows-latest, macos-13] steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 + - uses: actions/checkout@v4.2.2 with: submodules: 'true' - - uses: actions/setup-java@b36c23c0d998641eff861008f374ee103c25ac73 # v4.4.0 + - uses: actions/setup-java@v4.5.0 with: distribution: 'temurin' java-version: '8' - - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4 + - uses: conda-incubator/setup-miniconda@v3.1.0 with: miniforge-variant: Miniforge3 miniforge-version: latest activate-environment: jvm_tests - environment-file: tests/ci_build/conda_env/jvm_tests.yml + environment-file: ops/conda_env/jvm_tests.yml use-mamba: true - name: Cache Maven packages - uses: actions/cache@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2 + uses: actions/cache@v4.1.2 with: path: ~/.m2 key: ${{ runner.os }}-m2-${{ hashFiles('./jvm-packages/pom.xml') }} @@ -49,52 +198,41 @@ jobs: cd jvm-packages mvn test -B -pl :xgboost4j_2.12 - - name: Test XGBoost4J (Core, Spark, Examples) - run: | - rm -rfv build/ - cd jvm-packages - mvn -B test - if: matrix.os == 'ubuntu-latest' # Distributed training doesn't work on Windows - - - name: Extract branch name - shell: bash - run: | - echo "branch=${GITHUB_REF#refs/heads/}" >> "$GITHUB_OUTPUT" - id: extract_branch - if: | - (github.ref == 'refs/heads/master' || contains(github.ref, 'refs/heads/release_')) && - (matrix.os == 'windows-latest' || matrix.os == 
'macos-13') - - name: Publish artifact xgboost4j.dll to S3 run: | cd lib/ Rename-Item -Path xgboost4j.dll -NewName xgboost4j_${{ github.sha }}.dll dir - python -m awscli s3 cp xgboost4j_${{ github.sha }}.dll s3://xgboost-nightly-builds/${{ steps.extract_branch.outputs.branch }}/libxgboost4j/ --acl public-read --region us-west-2 - if: | - (github.ref == 'refs/heads/master' || contains(github.ref, 'refs/heads/release_')) && - matrix.os == 'windows-latest' + python -m awscli s3 cp xgboost4j_${{ github.sha }}.dll ` + s3://xgboost-nightly-builds/${{ env.BRANCH_NAME }}/libxgboost4j/ ` + --acl public-read --region us-west-2 + if: matrix.os == 'windows-latest' + # if: | + # (github.ref == 'refs/heads/master' || contains(github.ref, 'refs/heads/release_')) && + # matrix.os == 'windows-latest' env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_IAM_S3_UPLOADER }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_IAM_S3_UPLOADER }} - - name: Publish artifact libxgboost4j.dylib to S3 - shell: bash -l {0} - run: | - cd lib/ - mv -v libxgboost4j.dylib libxgboost4j_${{ github.sha }}.dylib - ls - python -m awscli s3 cp libxgboost4j_${{ github.sha }}.dylib s3://xgboost-nightly-builds/${{ steps.extract_branch.outputs.branch }}/libxgboost4j/ --acl public-read --region us-west-2 - if: | - (github.ref == 'refs/heads/master' || contains(github.ref, 'refs/heads/release_')) && - matrix.os == 'macos-13' - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_IAM_S3_UPLOADER }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_IAM_S3_UPLOADER }} - - - name: Build and Test XGBoost4J with scala 2.13 - run: | - rm -rfv build/ - cd jvm-packages - mvn -B clean install test -Pdefault,scala-2.13 - if: matrix.os == 'ubuntu-latest' # Distributed training doesn't work on Windows + test-jvm-packages-gpu: + name: Test JVM packages with CUDA + needs: [build-jvm-gpu] + runs-on: + - runs-on=${{ github.run_id }} + - runner=linux-amd64-mgpu + steps: + # Restart Docker daemon so that it recognizes the ephemeral disks + - run: sudo systemctl restart docker + - uses: actions/checkout@v4.2.2 + with: + submodules: "true" + - name: Fetch container from cache + run: bash ops/docker_build.sh + env: + CONTAINER_ID: xgb-ci.jvm_gpu_build + - name: Unstash files + run: bash ops/stash_artifacts.sh lib/libxgboost4j.so + env: + COMMAND: download + KEY: build-jvm-gpu + - run: bash ops/pipeline/test-jvm-gpu.sh diff --git a/.github/workflows/jvm_tests_runs_on.yml b/.github/workflows/jvm_tests_runs_on.yml deleted file mode 100644 index 5894a4bead7d..000000000000 --- a/.github/workflows/jvm_tests_runs_on.yml +++ /dev/null @@ -1,161 +0,0 @@ -name: XGBoost CI (JVM packages) - -on: [push, pull_request] - -permissions: - contents: read # to fetch code (actions/checkout) - -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} - cancel-in-progress: true - -env: - BRANCH_NAME: >- - ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }} - USE_DOCKER_CACHE: 1 - -jobs: - build-containers: - name: Build CI containers - runs-on: - - runs-on=${{ github.run_id }} - - runner=${{ matrix.runner }} - strategy: - matrix: - container_id: - - xgb-ci.manylinux2014_x86_64 - - xgb-ci.jvm - - xgb-ci.jvm_gpu_build - runner: [linux-amd64-cpu] - include: - - container_id: xgb-ci.manylinux2014_aarch64 - runner: linux-arm64-cpu - steps: - # Restart Docker daemon so that it recognizes the ephemeral disks - - run: sudo systemctl restart docker - - uses: 
actions/checkout@v4 - with: - submodules: "true" - - name: Build ${{ matrix.container_id }} - run: bash ops/docker_build.sh - env: - CONTAINER_ID: ${{ matrix.container_id }} - - build-jvm-manylinux2014: - name: Build libxgboost4j.so targeting glibc 2.17 - needs: build-containers - runs-on: - - runs-on=${{ github.run_id }} - - runner=${{ matrix.runner }} - strategy: - matrix: - include: - - arch: aarch64 - runner: linux-arm64-cpu - - arch: x86_64 - runner: linux-amd64-cpu - steps: - # Restart Docker daemon so that it recognizes the ephemeral disks - - run: sudo systemctl restart docker - - uses: actions/checkout@v4 - with: - submodules: "true" - - name: Fetch container from cache - run: bash ops/docker_build.sh - env: - CONTAINER_ID: xgb-ci.manylinux2014_${{ matrix.arch }} - - run: bash ops/pipeline/build-jvm-manylinux2014.sh ${{ matrix.arch }} - - build-jvm-gpu: - name: Build libxgboost4j.so with CUDA - needs: build-containers - runs-on: - - runs-on=${{ github.run_id }} - - runner=linux-amd64-cpu - steps: - # Restart Docker daemon so that it recognizes the ephemeral disks - - run: sudo systemctl restart docker - - uses: actions/checkout@v4 - with: - submodules: "true" - - name: Fetch container from cache - run: bash ops/docker_build.sh - env: - CONTAINER_ID: xgb-ci.jvm_gpu_build - - run: bash ops/pipeline/build-jvm-gpu.sh - - name: Stash files - run: bash ops/stash_artifacts.sh lib/libxgboost4j.so - env: - COMMAND: upload - KEY: build-jvm-gpu - - build-jvm-docs: - name: Build docs for JVM packages - needs: [build-jvm-gpu] - runs-on: - - runs-on=${{ github.run_id }} - - runner=linux-amd64-cpu - steps: - # Restart Docker daemon so that it recognizes the ephemeral disks - - run: sudo systemctl restart docker - - uses: actions/checkout@v4 - with: - submodules: "true" - - name: Fetch container from cache - run: bash ops/docker_build.sh - env: - CONTAINER_ID: xgb-ci.jvm_gpu_build - - name: Unstash files - run: bash ops/stash_artifacts.sh lib/libxgboost4j.so - env: - COMMAND: download - KEY: build-jvm-gpu - - run: bash ops/pipeline/build-jvm-doc.sh - - build-test-jvm-packages: - name: Build and test JVM packages - needs: build-containers - runs-on: - - runs-on=${{ github.run_id }} - - runner=linux-amd64-cpu - steps: - # Restart Docker daemon so that it recognizes the ephemeral disks - - run: sudo systemctl restart docker - - uses: actions/checkout@v4 - with: - submodules: "true" - - name: Fetch container from cache - run: bash ops/docker_build.sh - env: - CONTAINER_ID: xgb-ci.jvm - - name: Build and test JVM packages (Scala 2.12) - run: bash ops/pipeline/build-test-jvm-packages.sh - env: - SCALA_VERSION: 2.12 - - name: Build and test JVM packages (Scala 2.13) - run: bash ops/pipeline/build-test-jvm-packages.sh - env: - SCALA_VERSION: 2.13 - - test-jvm-packages-gpu: - name: Test JVM packages with CUDA - needs: [build-jvm-gpu] - runs-on: - - runs-on=${{ github.run_id }} - - runner=linux-amd64-mgpu - steps: - # Restart Docker daemon so that it recognizes the ephemeral disks - - run: sudo systemctl restart docker - - uses: actions/checkout@v4 - with: - submodules: "true" - - name: Fetch container from cache - run: bash ops/docker_build.sh - env: - CONTAINER_ID: xgb-ci.jvm_gpu_build - - name: Unstash files - run: bash ops/stash_artifacts.sh lib/libxgboost4j.so - env: - COMMAND: download - KEY: build-jvm-gpu - - run: bash ops/pipeline/test-jvm-gpu.sh diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 000000000000..caceb3e3893b --- /dev/null +++ 
b/.github/workflows/lint.yml @@ -0,0 +1,144 @@ +name: XGBoost CI (Lint) + +on: [push, pull_request] + +permissions: + contents: read # to fetch code (actions/checkout) + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +env: + BRANCH_NAME: >- + ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }} + +jobs: + build-containers: + name: Build CI containers (${{ matrix.container_id }}) + runs-on: + - runs-on=${{ github.run_id }} + - runner=${{ matrix.runner }} + strategy: + fail-fast: false + matrix: + include: + - container_id: xgb-ci.clang_tidy + runner: linux-amd64-cpu + steps: + # Restart Docker daemon so that it recognizes the ephemeral disks + - run: sudo systemctl restart docker + - uses: actions/checkout@v4.2.2 + with: + submodules: "true" + - name: Build ${{ matrix.container_id }} + run: bash ops/docker_build.sh + env: + CONTAINER_ID: ${{ matrix.container_id }} + + clang-tidy: + name: Run clang-tidy + needs: build-containers + runs-on: + - runs-on=${{ github.run_id }} + - runner=linux-amd64-cpu + steps: + # Restart Docker daemon so that it recognizes the ephemeral disks + - run: sudo systemctl restart docker + - uses: actions/checkout@v4.2.2 + with: + submodules: "true" + - name: Fetch container from cache + run: bash ops/docker_build.sh + env: + CONTAINER_ID: xgb-ci.clang_tidy + - run: bash ops/pipeline/run-clang-tidy.sh + + python-mypy-lint: + runs-on: ubuntu-latest + name: Type and format checks for the Python package + steps: + - uses: actions/checkout@v4.2.2 + with: + submodules: 'true' + - uses: conda-incubator/setup-miniconda@v3.1.0 + with: + miniforge-variant: Miniforge3 + miniforge-version: latest + activate-environment: python_lint + environment-file: ops/conda_env/python_lint.yml + use-mamba: true + - name: Display Conda env + shell: bash -el {0} + run: | + conda info + conda list + - name: Run mypy + shell: bash -el {0} + run: | + python ops/script/lint_python.py --format=0 --type-check=1 --pylint=0 + - name: Run formatter + shell: bash -el {0} + run: | + python ops/script/lint_python.py --format=1 --type-check=0 --pylint=0 + - name: Run pylint + shell: bash -el {0} + run: | + python ops/script/lint_python.py --format=0 --type-check=0 --pylint=1 + + cpp-lint: + runs-on: ubuntu-latest + name: Code linting for C++ + steps: + - uses: actions/checkout@v4.2.2 + with: + submodules: 'true' + - uses: actions/setup-python@v5.3.0 + with: + python-version: "3.10" + architecture: 'x64' + - name: Install Python packages + run: | + python -m pip install wheel setuptools cmakelint cpplint==1.6.1 pylint + - name: Run lint + run: | + python3 ops/script/lint_cpp.py + bash ops/script/lint_cmake.sh + + lintr: + runs-on: ${{ matrix.os }} + name: Run R linters on OS ${{ matrix.os }}, R ${{ matrix.r }} + strategy: + fail-fast: false + matrix: + include: + - os: ubuntu-latest + r: "release" + env: + R_REMOTES_NO_ERRORS_FROM_WARNINGS: true + + steps: + - uses: actions/checkout@v4.2.2 + with: + submodules: 'true' + + - uses: r-lib/actions/setup-r@v2.11.0 + with: + r-version: ${{ matrix.r }} + + - name: Cache R packages + uses: actions/cache@v4.1.2 + with: + path: ${{ env.R_LIBS_USER }} + key: ${{ runner.os }}-r-${{ matrix.r }}-7-${{ hashFiles('R-package/DESCRIPTION') }} + restore-keys: ${{ runner.os }}-r-${{ matrix.r }}-7-${{ hashFiles('R-package/DESCRIPTION') }} + + - name: Install dependencies + shell: Rscript {0} + run: | + 
source("./R-package/tests/helper_scripts/install_deps.R") + + - name: Run lintr + run: | + MAKEFLAGS="-j$(nproc)" R CMD INSTALL R-package/ + Rscript ops/script/lint_r.R $(pwd) diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml deleted file mode 100644 index 2bb3e1aba46c..000000000000 --- a/.github/workflows/macos.yml +++ /dev/null @@ -1,24 +0,0 @@ -name: Nextgen XGBoost CI, MacOS - -on: [push, pull_request] - -permissions: - contents: read # to fetch code (actions/checkout) - -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} - cancel-in-progress: true - -env: - BRANCH_NAME: >- - ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }} - -jobs: - mac-m1-jvm: - name: "Build libxgboost4j.dylib for MacOS M1" - runs-on: macos-14 - steps: - - uses: actions/checkout@v4 - with: - submodules: "true" - - run: bash ops/pipeline/build-jvm-macos-m1.sh diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 3c0a67b4f463..77208a146443 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -1,193 +1,294 @@ -# This is a basic workflow to help you get started with Actions +name: XGBoost CI -name: XGBoost-CI - -# Controls when the action will run. Triggers the workflow on push or pull request -# events but only for the master branch on: [push, pull_request] permissions: - contents: read # to fetch code (actions/checkout) + contents: read # to fetch code (actions/checkout) concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true -# A workflow run is made up of one or more jobs that can run sequentially or in parallel +env: + BRANCH_NAME: >- + ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }} + USE_DOCKER_CACHE: 1 + jobs: - gtest-cpu: - name: Test Google C++ test (CPU) - runs-on: ${{ matrix.os }} + build-containers: + name: Build CI containers (${{ matrix.container_id }}) + runs-on: + - runs-on=${{ github.run_id }} + - runner=${{ matrix.runner }} strategy: - fail-fast: false matrix: - os: [macos-12] + container_id: + - xgb-ci.gpu_build_rockylinux8 + - xgb-ci.gpu_build_r_rockylinux8 + - xgb-ci.gpu + - xgb-ci.gpu_dev_ver + - xgb-ci.cpu + - xgb-ci.manylinux_2_28_x86_64 + - xgb-ci.manylinux2014_x86_64 + runner: [linux-amd64-cpu] + include: + - container_id: xgb-ci.manylinux2014_aarch64 + runner: linux-arm64-cpu + - container_id: xgb-ci.aarch64 + runner: linux-arm64-cpu + steps: + # Restart Docker daemon so that it recognizes the ephemeral disks + - run: sudo systemctl restart docker + - uses: actions/checkout@v4.2.2 + with: + submodules: "true" + - name: Build ${{ matrix.container_id }} + run: bash ops/docker_build.sh + env: + CONTAINER_ID: ${{ matrix.container_id }} + + build-cpu: + name: Build CPU + needs: build-containers + runs-on: + - runs-on=${{ github.run_id }} + - runner=linux-amd64-cpu + steps: + # Restart Docker daemon so that it recognizes the ephemeral disks + - run: sudo systemctl restart docker + - uses: actions/checkout@v4.2.2 + with: + submodules: "true" + - name: Fetch container from cache + run: bash ops/docker_build.sh + env: + CONTAINER_ID: xgb-ci.cpu + - run: bash ops/pipeline/build-cpu.sh + - name: Stash CLI executable + run: bash ops/stash_artifacts.sh ./xgboost + env: + COMMAND: upload + KEY: build-cpu + + build-cpu-arm64: + name: Build CPU ARM64 + manylinux_2_28_aarch64 wheel + needs: build-containers + runs-on: 
+ - runs-on=${{ github.run_id }} + - runner=linux-arm64-cpu + steps: + # Restart Docker daemon so that it recognizes the ephemeral disks + - run: sudo systemctl restart docker + - uses: actions/checkout@v4.2.2 + with: + submodules: "true" + - name: Fetch container from cache + run: bash ops/docker_build.sh + env: + CONTAINER_ID: xgb-ci.aarch64 + - run: bash ops/pipeline/build-cpu-arm64.sh + - name: Stash files + run: bash ops/stash_artifacts.sh ./xgboost python-package/dist/*.whl + env: + COMMAND: upload + KEY: build-cpu-arm64 + + build-cuda: + name: Build CUDA + manylinux_2_28_x86_64 wheel + needs: build-containers + runs-on: + - runs-on=${{ github.run_id }} + - runner=linux-amd64-cpu steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - with: - submodules: 'true' - - name: Install system packages - run: | - brew install ninja libomp - - name: Build gtest binary - run: | - mkdir build - cd build - cmake .. -DGOOGLE_TEST=ON -DUSE_OPENMP=ON -DUSE_DMLC_GTEST=ON -GNinja -DBUILD_DEPRECATED_CLI=ON -DUSE_SANITIZER=ON -DENABLED_SANITIZERS=address -DCMAKE_BUILD_TYPE=RelWithDebInfo - ninja -v - - name: Run gtest binary - run: | - cd build - ./testxgboost - ctest -R TestXGBoostCLI --extra-verbose + # Restart Docker daemon so that it recognizes the ephemeral disks + - run: sudo systemctl restart docker + - uses: actions/checkout@v4.2.2 + with: + submodules: "true" + - name: Fetch container from cache + run: bash ops/docker_build.sh + env: + CONTAINER_ID: xgb-ci.gpu_build_rockylinux8 + - name: Fetch container from cache + run: bash ops/docker_build.sh + env: + CONTAINER_ID: xgb-ci.manylinux_2_28_x86_64 + - run: bash ops/pipeline/build-cuda.sh + - name: Stash files + run: | + bash ops/stash_artifacts.sh \ + build/testxgboost ./xgboost python-package/dist/*.whl + env: + COMMAND: upload + KEY: build-cuda - gtest-cpu-nonomp: - name: Test Google C++ unittest (CPU Non-OMP) - runs-on: ${{ matrix.os }} + build-cuda-with-rmm: + name: Build CUDA with RMM + needs: build-containers + runs-on: + - runs-on=${{ github.run_id }} + - runner=linux-amd64-cpu + steps: + # Restart Docker daemon so that it recognizes the ephemeral disks + - run: sudo systemctl restart docker + - uses: actions/checkout@v4.2.2 + with: + submodules: "true" + - name: Fetch container from cache + run: bash ops/docker_build.sh + env: + CONTAINER_ID: xgb-ci.gpu_build_rockylinux8 + - name: Fetch container from cache + run: bash ops/docker_build.sh + env: + CONTAINER_ID: xgb-ci.manylinux_2_28_x86_64 + - run: bash ops/pipeline/build-cuda-with-rmm.sh + - name: Stash files + run: bash ops/stash_artifacts.sh build/testxgboost + env: + COMMAND: upload + KEY: build-cuda-with-rmm + + build-manylinux2014: + name: Build manylinux2014_${{ matrix.arch }} wheel + needs: build-containers + runs-on: + - runs-on=${{ github.run_id }} + - runner=${{ matrix.runner }} strategy: fail-fast: false matrix: - os: [ubuntu-latest] + include: + - arch: aarch64 + runner: linux-arm64-cpu + - arch: x86_64 + runner: linux-amd64-cpu + steps: + # Restart Docker daemon so that it recognizes the ephemeral disks + - run: sudo systemctl restart docker + - uses: actions/checkout@v4.2.2 + with: + submodules: "true" + - name: Fetch container from cache + run: bash ops/docker_build.sh + env: + CONTAINER_ID: xgb-ci.manylinux2014_${{ matrix.arch }} + - run: bash ops/pipeline/build-manylinux2014.sh ${{ matrix.arch }} + + build-gpu-rpkg: + name: Build GPU-enabled R package + needs: build-containers + runs-on: + - runs-on=${{ github.run_id }} + - 
runner=linux-amd64-cpu steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - with: - submodules: 'true' - - name: Install system packages - run: | - sudo apt-get install -y --no-install-recommends ninja-build - - name: Build and install XGBoost - shell: bash -l {0} - run: | - mkdir build - cd build - cmake .. -GNinja -DGOOGLE_TEST=ON -DUSE_DMLC_GTEST=ON -DUSE_OPENMP=OFF -DBUILD_DEPRECATED_CLI=ON - ninja -v - - name: Run gtest binary - run: | - cd build - ctest --extra-verbose + # Restart Docker daemon so that it recognizes the ephemeral disks + - run: sudo systemctl restart docker + - uses: actions/checkout@v4.2.2 + with: + submodules: "true" + - name: Fetch container from cache + run: bash ops/docker_build.sh + env: + CONTAINER_ID: xgb-ci.gpu_build_r_rockylinux8 + - run: bash ops/pipeline/build-gpu-rpkg.sh - gtest-cpu-sycl: - name: Test Google C++ unittest (CPU SYCL) - runs-on: ${{ matrix.os }} + test-cpp-gpu: + name: >- + Run Google Tests with GPUs + (Suite ${{ matrix.suite }}, Runner ${{ matrix.runner }}) + needs: [build-cuda, build-cuda-with-rmm] + runs-on: + - runs-on=${{ github.run_id }} + - runner=${{ matrix.runner }} strategy: fail-fast: false matrix: - os: [ubuntu-latest] - python-version: ["3.10"] + include: + - suite: gpu + runner: linux-amd64-gpu + artifact_from: build-cuda + - suite: gpu-rmm + runner: linux-amd64-gpu + artifact_from: build-cuda-with-rmm + - suite: mgpu + runner: linux-amd64-mgpu + artifact_from: build-cuda steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - with: - submodules: 'true' - - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4 - with: - miniforge-variant: Miniforge3 - miniforge-version: latest - activate-environment: linux_sycl_test - environment-file: tests/ci_build/conda_env/linux_sycl_test.yml - use-mamba: true - - name: Display Conda env - run: | - conda info - conda list - - name: Build and install XGBoost - shell: bash -l {0} - run: | - mkdir build - cd build - cmake .. 
-DGOOGLE_TEST=ON -DUSE_DMLC_GTEST=ON -DPLUGIN_SYCL=ON -DCMAKE_CXX_COMPILER=g++ -DCMAKE_C_COMPILER=gcc -DCMAKE_INSTALL_PREFIX=$CONDA_PREFIX - make -j$(nproc) - - name: Run gtest binary for SYCL - run: | - cd build - ./testxgboost --gtest_filter=Sycl* - - name: Run gtest binary for non SYCL - run: | - cd build - ./testxgboost --gtest_filter=-Sycl* + # Restart Docker daemon so that it recognizes the ephemeral disks + - run: sudo systemctl restart docker + - uses: actions/checkout@v4.2.2 + with: + submodules: "true" + - name: Fetch container from cache + run: bash ops/docker_build.sh + env: + CONTAINER_ID: xgb-ci.gpu + - name: Unstash gtest + run: | + bash ops/stash_artifacts.sh build/testxgboost + chmod +x build/testxgboost + env: + COMMAND: download + KEY: ${{ matrix.artifact_from }} + - run: bash ops/pipeline/test-cpp-gpu.sh ${{ matrix.suite }} - c-api-demo: - name: Test installing XGBoost lib + building the C API demo - runs-on: ${{ matrix.os }} - defaults: - run: - shell: bash -l {0} + test-python: + name: Run Python tests (${{ matrix.description }}) + needs: [build-cuda, build-cpu-arm64] + runs-on: + - runs-on=${{ github.run_id }} + - runner=${{ matrix.runner }} strategy: fail-fast: false matrix: - os: ["ubuntu-latest"] - python-version: ["3.10"] - steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - with: - submodules: 'true' - - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4 - with: - miniforge-variant: Miniforge3 - miniforge-version: latest - activate-environment: cpp_test - environment-file: tests/ci_build/conda_env/cpp_test.yml - use-mamba: true - - name: Display Conda env - run: | - conda info - conda list - - - name: Build and install XGBoost static library - run: | - mkdir build - cd build - cmake .. -DBUILD_STATIC_LIB=ON -DCMAKE_INSTALL_PREFIX=$CONDA_PREFIX -GNinja - ninja -v install - cd - - - name: Build and run C API demo with static - run: | - pushd . - cd demo/c-api/ - mkdir build - cd build - cmake .. -GNinja -DCMAKE_PREFIX_PATH=$CONDA_PREFIX - ninja -v - ctest - cd .. - rm -rf ./build - popd - - - name: Build and install XGBoost shared library - run: | - cd build - cmake .. -DBUILD_STATIC_LIB=OFF -DCMAKE_INSTALL_PREFIX=$CONDA_PREFIX -GNinja -DPLUGIN_FEDERATED=ON -DGOOGLE_TEST=ON - ninja -v install - ./testxgboost - cd - - - name: Build and run C API demo with shared - run: | - pushd . - cd demo/c-api/ - mkdir build - cd build - cmake .. 
-GNinja -DCMAKE_PREFIX_PATH=$CONDA_PREFIX - ninja -v - ctest - popd - ./tests/ci_build/verify_link.sh ./demo/c-api/build/basic/api-demo - ./tests/ci_build/verify_link.sh ./demo/c-api/build/external-memory/external-memory-demo - - cpp-lint: - runs-on: ubuntu-latest - name: Code linting for C++ + include: + - description: "single GPU" + container: xgb-ci.gpu + suite: gpu + runner: linux-amd64-gpu + artifact_from: build-cuda + - description: "single GPU, nightly deps" + container: xgb-ci.gpu_dev_ver + suite: gpu + runner: linux-amd64-gpu + artifact_from: build-cuda + - description: "multiple GPUs" + container: xgb-ci.gpu + suite: mgpu + runner: linux-amd64-mgpu + artifact_from: build-cuda + - description: "multiple GPUs, nightly deps" + container: xgb-ci.gpu_dev_ver + suite: mgpu + runner: linux-amd64-mgpu + artifact_from: build-cuda + - description: "CPU" + container: xgb-ci.cpu + suite: cpu + runner: linux-amd64-cpu + artifact_from: build-cuda + - description: "CPU ARM64" + container: xgb-ci.aarch64 + suite: cpu-arm64 + runner: linux-arm64-cpu + artifact_from: build-cpu-arm64 steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - with: - submodules: 'true' - - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0 - with: - python-version: "3.10" - architecture: 'x64' - - name: Install Python packages - run: | - python -m pip install wheel setuptools cmakelint cpplint==1.6.1 pylint - - name: Run lint - run: | - python3 tests/ci_build/lint_cpp.py - sh ./tests/ci_build/lint_cmake.sh + # Restart Docker daemon so that it recognizes the ephemeral disks + - run: sudo systemctl restart docker + - uses: actions/checkout@v4.2.2 + with: + submodules: "true" + - name: Fetch container from cache + run: bash ops/docker_build.sh + env: + CONTAINER_ID: ${{ matrix.container }} + - name: Unstash Python wheel + run: | + bash ops/stash_artifacts.sh python-package/dist/*.whl ./xgboost + chmod +x ./xgboost + env: + COMMAND: download + KEY: ${{ matrix.artifact_from }} + - name: Run Python tests, ${{ matrix.description }} + run: bash ops/pipeline/test-python.sh ${{ matrix.suite }} ${{ matrix.container }} diff --git a/.github/workflows/main_runs_on.yml b/.github/workflows/main_runs_on.yml deleted file mode 100644 index 80e6db40cfb6..000000000000 --- a/.github/workflows/main_runs_on.yml +++ /dev/null @@ -1,308 +0,0 @@ -name: Nextgen XGBoost CI - -on: [push, pull_request] - -permissions: - contents: read # to fetch code (actions/checkout) - -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} - cancel-in-progress: true - -env: - BRANCH_NAME: >- - ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }} - USE_DOCKER_CACHE: 1 - -jobs: - build-containers: - name: Build CI containers - runs-on: - - runs-on=${{ github.run_id }} - - runner=${{ matrix.runner }} - strategy: - matrix: - container_id: - - xgb-ci.gpu_build_rockylinux8 - - xgb-ci.gpu_build_r_rockylinux8 - - xgb-ci.gpu - - xgb-ci.gpu_dev_ver - - xgb-ci.cpu - - xgb-ci.clang_tidy - - xgb-ci.manylinux_2_28_x86_64 - - xgb-ci.manylinux2014_x86_64 - runner: [linux-amd64-cpu] - include: - - container_id: xgb-ci.manylinux2014_aarch64 - runner: linux-arm64-cpu - - container_id: xgb-ci.aarch64 - runner: linux-arm64-cpu - steps: - # Restart Docker daemon so that it recognizes the ephemeral disks - - run: sudo systemctl restart docker - - uses: actions/checkout@v4 - with: - submodules: "true" - - name: Build ${{ 
matrix.container_id }} - run: bash ops/docker_build.sh - env: - CONTAINER_ID: ${{ matrix.container_id }} - - clang-tidy: - name: Run clang-tidy - needs: build-containers - runs-on: - - runs-on=${{ github.run_id }} - - runner=linux-amd64-cpu - steps: - # Restart Docker daemon so that it recognizes the ephemeral disks - - run: sudo systemctl restart docker - - uses: actions/checkout@v4 - with: - submodules: "true" - - name: Fetch container from cache - run: bash ops/docker_build.sh - env: - CONTAINER_ID: xgb-ci.clang_tidy - - run: bash ops/pipeline/run-clang-tidy.sh - - build-cpu: - name: Build CPU - needs: build-containers - runs-on: - - runs-on=${{ github.run_id }} - - runner=linux-amd64-cpu - steps: - # Restart Docker daemon so that it recognizes the ephemeral disks - - run: sudo systemctl restart docker - - uses: actions/checkout@v4 - with: - submodules: "true" - - name: Fetch container from cache - run: bash ops/docker_build.sh - env: - CONTAINER_ID: xgb-ci.cpu - - run: bash ops/pipeline/build-cpu.sh - - name: Stash CLI executable - run: bash ops/stash_artifacts.sh ./xgboost - env: - COMMAND: upload - KEY: build-cpu - - build-cpu-arm64: - name: Build CPU ARM64 + manylinux_2_28_aarch64 wheel - needs: build-containers - runs-on: - - runs-on=${{ github.run_id }} - - runner=linux-arm64-cpu - steps: - # Restart Docker daemon so that it recognizes the ephemeral disks - - run: sudo systemctl restart docker - - uses: actions/checkout@v4 - with: - submodules: "true" - - name: Fetch container from cache - run: bash ops/docker_build.sh - env: - CONTAINER_ID: xgb-ci.aarch64 - - run: bash ops/pipeline/build-cpu-arm64.sh - - name: Stash files - run: bash ops/stash_artifacts.sh ./xgboost python-package/dist/*.whl - env: - COMMAND: upload - KEY: build-cpu-arm64 - - build-cuda: - name: Build CUDA + manylinux_2_28_x86_64 wheel - needs: build-containers - runs-on: - - runs-on=${{ github.run_id }} - - runner=linux-amd64-cpu - steps: - # Restart Docker daemon so that it recognizes the ephemeral disks - - run: sudo systemctl restart docker - - uses: actions/checkout@v4 - with: - submodules: "true" - - name: Fetch container from cache - run: bash ops/docker_build.sh - env: - CONTAINER_ID: xgb-ci.gpu_build_rockylinux8 - - name: Fetch container from cache - run: bash ops/docker_build.sh - env: - CONTAINER_ID: xgb-ci.manylinux_2_28_x86_64 - - run: bash ops/pipeline/build-cuda.sh - - name: Stash files - run: | - bash ops/stash_artifacts.sh \ - build/testxgboost ./xgboost python-package/dist/*.whl - env: - COMMAND: upload - KEY: build-cuda - - build-cuda-with-rmm: - name: Build CUDA with RMM - needs: build-containers - runs-on: - - runs-on=${{ github.run_id }} - - runner=linux-amd64-cpu - steps: - # Restart Docker daemon so that it recognizes the ephemeral disks - - run: sudo systemctl restart docker - - uses: actions/checkout@v4 - with: - submodules: "true" - - name: Fetch container from cache - run: bash ops/docker_build.sh - env: - CONTAINER_ID: xgb-ci.gpu_build_rockylinux8 - - name: Fetch container from cache - run: bash ops/docker_build.sh - env: - CONTAINER_ID: xgb-ci.manylinux_2_28_x86_64 - - run: bash ops/pipeline/build-cuda-with-rmm.sh - - name: Stash files - run: bash ops/stash_artifacts.sh build/testxgboost - env: - COMMAND: upload - KEY: build-cuda-with-rmm - - build-manylinux2014: - name: Build manylinux2014_${{ matrix.arch }} wheel - needs: build-containers - runs-on: - - runs-on=${{ github.run_id }} - - runner=${{ matrix.runner }} - strategy: - matrix: - include: - - arch: aarch64 - runner: 
linux-arm64-cpu - - arch: x86_64 - runner: linux-amd64-cpu - steps: - # Restart Docker daemon so that it recognizes the ephemeral disks - - run: sudo systemctl restart docker - - uses: actions/checkout@v4 - with: - submodules: "true" - - name: Fetch container from cache - run: bash ops/docker_build.sh - env: - CONTAINER_ID: xgb-ci.manylinux2014_${{ matrix.arch }} - - run: bash ops/pipeline/build-manylinux2014.sh ${{ matrix.arch }} - - build-gpu-rpkg: - name: Build GPU-enabled R package - needs: build-containers - runs-on: - - runs-on=${{ github.run_id }} - - runner=linux-amd64-cpu - steps: - # Restart Docker daemon so that it recognizes the ephemeral disks - - run: sudo systemctl restart docker - - uses: actions/checkout@v4 - with: - submodules: "true" - - name: Fetch container from cache - run: bash ops/docker_build.sh - env: - CONTAINER_ID: xgb-ci.gpu_build_r_rockylinux8 - - run: bash ops/pipeline/build-gpu-rpkg.sh - - test-cpp-gpu: - name: Run Google Tests with GPU(s) - needs: [build-cuda, build-cuda-with-rmm] - runs-on: - - runs-on=${{ github.run_id }} - - runner=${{ matrix.runner }} - strategy: - matrix: - include: - - suite: gpu - runner: linux-amd64-gpu - artifact_from: build-cuda - - suite: gpu-rmm - runner: linux-amd64-gpu - artifact_from: build-cuda-with-rmm - - suite: mgpu - runner: linux-amd64-mgpu - artifact_from: build-cuda - steps: - # Restart Docker daemon so that it recognizes the ephemeral disks - - run: sudo systemctl restart docker - - uses: actions/checkout@v4 - with: - submodules: "true" - - name: Fetch container from cache - run: bash ops/docker_build.sh - env: - CONTAINER_ID: xgb-ci.gpu - - name: Unstash gtest - run: | - bash ops/stash_artifacts.sh build/testxgboost - chmod +x build/testxgboost - env: - COMMAND: download - KEY: ${{ matrix.artifact_from }} - - run: bash ops/pipeline/test-cpp-gpu.sh ${{ matrix.suite }} - - test-python: - name: Run Python tests - needs: [build-cuda, build-cpu-arm64] - runs-on: - - runs-on=${{ github.run_id }} - - runner=${{ matrix.runner }} - strategy: - matrix: - include: - - description: "single GPU" - container: xgb-ci.gpu - suite: gpu - runner: linux-amd64-gpu - artifact_from: build-cuda - - description: "single GPU, nightly deps" - container: xgb-ci.gpu_dev_ver - suite: gpu - runner: linux-amd64-gpu - artifact_from: build-cuda - - description: "multiple GPUs" - container: xgb-ci.gpu - suite: mgpu - runner: linux-amd64-mgpu - artifact_from: build-cuda - - description: "multiple GPUs, nightly deps" - container: xgb-ci.gpu_dev_ver - suite: mgpu - runner: linux-amd64-mgpu - artifact_from: build-cuda - - description: "CPU" - container: xgb-ci.cpu - suite: cpu - runner: linux-amd64-cpu - artifact_from: build-cuda - - description: "CPU ARM64" - container: xgb-ci.aarch64 - suite: cpu-arm64 - runner: linux-arm64-cpu - artifact_from: build-cpu-arm64 - steps: - # Restart Docker daemon so that it recognizes the ephemeral disks - - run: sudo systemctl restart docker - - uses: actions/checkout@v4 - with: - submodules: "true" - - name: Fetch container from cache - run: bash ops/docker_build.sh - env: - CONTAINER_ID: ${{ matrix.container }} - - name: Unstash Python wheel - run: | - bash ops/stash_artifacts.sh python-package/dist/*.whl ./xgboost - chmod +x ./xgboost - env: - COMMAND: download - KEY: ${{ matrix.artifact_from }} - - name: Run Python tests, ${{ matrix.description }} - run: bash ops/pipeline/test-python.sh ${{ matrix.suite }} ${{ matrix.container }} diff --git a/.github/workflows/misc.yml b/.github/workflows/misc.yml new file mode 
100644 index 000000000000..7294faa0d93b --- /dev/null +++ b/.github/workflows/misc.yml @@ -0,0 +1,133 @@ +name: XGBoost CI (misc) + +on: [push, pull_request] + +permissions: + contents: read # to fetch code (actions/checkout) + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +env: + BRANCH_NAME: >- + ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }} + +jobs: + gtest-cpu: + name: Test Google C++ test (CPU) + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [macos-13] + steps: + - uses: actions/checkout@v4.2.2 + with: + submodules: 'true' + - name: Install system packages + run: | + brew install ninja libomp + - name: Build gtest binary + run: | + mkdir build + cd build + cmake .. -DGOOGLE_TEST=ON -DUSE_OPENMP=ON -DUSE_DMLC_GTEST=ON -GNinja -DBUILD_DEPRECATED_CLI=ON -DUSE_SANITIZER=ON -DENABLED_SANITIZERS=address -DCMAKE_BUILD_TYPE=RelWithDebInfo + ninja -v + - name: Run gtest binary + run: | + cd build + ./testxgboost + ctest -R TestXGBoostCLI --extra-verbose + + gtest-cpu-nonomp: + name: Test Google C++ unittest (CPU Non-OMP) + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + steps: + - uses: actions/checkout@v4.2.2 + with: + submodules: 'true' + - name: Install system packages + run: | + sudo apt-get install -y --no-install-recommends ninja-build + - name: Build and install XGBoost + shell: bash -l {0} + run: | + mkdir build + cd build + cmake .. -GNinja -DGOOGLE_TEST=ON -DUSE_DMLC_GTEST=ON -DUSE_OPENMP=OFF -DBUILD_DEPRECATED_CLI=ON + ninja -v + - name: Run gtest binary + run: | + cd build + ctest --extra-verbose + + c-api-demo: + name: Test installing XGBoost lib + building the C API demo + runs-on: ${{ matrix.os }} + defaults: + run: + shell: bash -l {0} + strategy: + fail-fast: false + matrix: + os: ["ubuntu-latest"] + python-version: ["3.10"] + steps: + - uses: actions/checkout@v4.2.2 + with: + submodules: 'true' + - uses: conda-incubator/setup-miniconda@v3.1.0 + with: + miniforge-variant: Miniforge3 + miniforge-version: latest + activate-environment: cpp_test + environment-file: ops/conda_env/cpp_test.yml + use-mamba: true + - name: Display Conda env + run: | + conda info + conda list + - name: Build and install XGBoost static library + run: | + mkdir build + cd build + cmake .. -DBUILD_STATIC_LIB=ON -DCMAKE_INSTALL_PREFIX=$CONDA_PREFIX -GNinja + ninja -v install + cd - + - name: Build and run C API demo with static + run: | + pushd . + cd demo/c-api/ + mkdir build + cd build + cmake .. -GNinja -DCMAKE_PREFIX_PATH=$CONDA_PREFIX + ninja -v + ctest + cd .. + rm -rf ./build + popd + + - name: Build and install XGBoost shared library + run: | + cd build + cmake .. -DBUILD_STATIC_LIB=OFF -DCMAKE_INSTALL_PREFIX=$CONDA_PREFIX -GNinja -DPLUGIN_FEDERATED=ON -DGOOGLE_TEST=ON + ninja -v install + ./testxgboost + cd - + - name: Build and run C API demo with shared + run: | + pushd . + cd demo/c-api/ + mkdir build + cd build + cmake .. 
-GNinja -DCMAKE_PREFIX_PATH=$CONDA_PREFIX + ninja -v + ctest + popd + ./ops/script/verify_link.sh ./demo/c-api/build/basic/api-demo + ./ops/script/verify_link.sh ./demo/c-api/build/external-memory/external-memory-demo diff --git a/.github/workflows/python_tests.yml b/.github/workflows/python_tests.yml index 907cf98e1011..c43d8b056c8d 100644 --- a/.github/workflows/python_tests.yml +++ b/.github/workflows/python_tests.yml @@ -1,4 +1,4 @@ -name: XGBoost-Python-Tests +name: XGBoost CI (Python tests) on: [push, pull_request] @@ -14,54 +14,23 @@ concurrency: cancel-in-progress: true jobs: - python-mypy-lint: - runs-on: ubuntu-latest - name: Type and format checks for the Python package - strategy: - matrix: - os: [ubuntu-latest] - steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - with: - submodules: 'true' - - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4 - with: - miniforge-variant: Miniforge3 - miniforge-version: latest - activate-environment: python_lint - environment-file: tests/ci_build/conda_env/python_lint.yml - use-mamba: true - - name: Display Conda env - run: | - conda info - conda list - - name: Run mypy - run: | - python tests/ci_build/lint_python.py --format=0 --type-check=1 --pylint=0 - - name: Run formatter - run: | - python tests/ci_build/lint_python.py --format=1 --type-check=0 --pylint=0 - - name: Run pylint - run: | - python tests/ci_build/lint_python.py --format=0 --type-check=0 --pylint=1 - python-sdist-test-on-Linux: - # Mismatched glibcxx version between system and conda forge. runs-on: ${{ matrix.os }} name: Test installing XGBoost Python source package on ${{ matrix.os }} strategy: + fail-fast: false matrix: os: [ubuntu-latest] steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 + - uses: actions/checkout@v4.2.2 with: submodules: 'true' - - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4 + - uses: conda-incubator/setup-miniconda@v3.1.0 with: miniforge-variant: Miniforge3 miniforge-version: latest activate-environment: sdist_test - environment-file: tests/ci_build/conda_env/sdist_test.yml + environment-file: ops/conda_env/sdist_test.yml use-mamba: true - name: Display Conda env run: | @@ -82,18 +51,19 @@ jobs: runs-on: ${{ matrix.os }} name: Test installing XGBoost Python source package on ${{ matrix.os }} strategy: + fail-fast: false matrix: os: [macos-13, windows-latest] python-version: ["3.10"] steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 + - uses: actions/checkout@v4.2.2 with: submodules: 'true' - name: Install osx system dependencies if: matrix.os == 'macos-13' run: | brew install ninja libomp - - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4 + - uses: conda-incubator/setup-miniconda@v3.1.0 with: auto-update-conda: true python-version: ${{ matrix.python-version }} @@ -115,25 +85,25 @@ jobs: python -c 'import xgboost' python-tests-on-macos: - name: Test XGBoost Python package on ${{ matrix.config.os }} - runs-on: ${{ matrix.config.os }} + name: Test XGBoost Python package on ${{ matrix.os }} + runs-on: ${{ matrix.os }} timeout-minutes: 60 strategy: + fail-fast: false matrix: - config: - - {os: macos-13} + os: [macos-13] steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 + - uses: actions/checkout@v4.2.2 with: submodules: 'true' - - uses: 
conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4 + - uses: conda-incubator/setup-miniconda@v3.1.0 with: miniforge-variant: Miniforge3 miniforge-version: latest activate-environment: macos_cpu_test - environment-file: tests/ci_build/conda_env/macos_cpu_test.yml + environment-file: ops/conda_env/macos_cpu_test.yml use-mamba: true - name: Display Conda env @@ -167,159 +137,21 @@ jobs: run: | pytest -s -v -rxXs --durations=0 ./tests/test_distributed/test_with_dask - python-tests-on-win: - name: Test XGBoost Python package on ${{ matrix.config.os }} - runs-on: ${{ matrix.config.os }} - timeout-minutes: 60 - strategy: - matrix: - config: - - {os: windows-latest, python-version: '3.10'} - - steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - with: - submodules: 'true' - - - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4 - with: - auto-update-conda: true - python-version: ${{ matrix.config.python-version }} - activate-environment: win64_env - environment-file: tests/ci_build/conda_env/win64_cpu_test.yml - - - name: Display Conda env - run: | - conda info - conda list - - - name: Build XGBoost on Windows - run: | - mkdir build_msvc - cd build_msvc - cmake .. -G"Visual Studio 17 2022" -DCMAKE_CONFIGURATION_TYPES="Release" -A x64 -DBUILD_DEPRECATED_CLI=ON - cmake --build . --config Release --parallel $(nproc) - - - name: Install Python package - run: | - cd python-package - python --version - pip wheel -v . --wheel-dir dist/ - pip install ./dist/*.whl - - - name: Test Python package - run: | - pytest -s -v -rxXs --durations=0 ./tests/python - - python-tests-on-ubuntu: - name: Test XGBoost Python package on ${{ matrix.config.os }} - runs-on: ${{ matrix.config.os }} - timeout-minutes: 90 - strategy: - matrix: - config: - - {os: ubuntu-latest, python-version: "3.10"} - - steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - with: - submodules: 'true' - - - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4 - with: - miniforge-variant: Miniforge3 - miniforge-version: latest - activate-environment: linux_cpu_test - environment-file: tests/ci_build/conda_env/linux_cpu_test.yml - use-mamba: true - - - name: Display Conda env - run: | - conda info - conda list - - - name: Build XGBoost on Ubuntu - run: | - mkdir build - cd build - cmake .. -GNinja -DCMAKE_PREFIX_PATH=$CONDA_PREFIX -DBUILD_DEPRECATED_CLI=ON - ninja - - - name: Install Python package - run: | - cd python-package - python --version - pip install -v . 
- - - name: Test Python package - run: | - pytest -s -v -rxXs --durations=0 ./tests/python - - - name: Test Dask Interface - run: | - pytest -s -v -rxXs --durations=0 ./tests/test_distributed/test_with_dask - - - name: Test PySpark Interface - shell: bash -l {0} - run: | - pytest -s -v -rxXs --durations=0 ./tests/test_distributed/test_with_spark - - python-sycl-tests-on-ubuntu: - name: Test XGBoost Python package with SYCL on ${{ matrix.config.os }} - runs-on: ${{ matrix.config.os }} - timeout-minutes: 90 - strategy: - matrix: - config: - - {os: ubuntu-latest, python-version: "3.10"} - - steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - with: - submodules: 'true' - - - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4 - with: - miniforge-variant: Miniforge3 - miniforge-version: latest - activate-environment: linux_sycl_test - environment-file: tests/ci_build/conda_env/linux_sycl_test.yml - use-mamba: true - - - name: Display Conda env - run: | - conda info - conda list - - name: Build XGBoost on Ubuntu - run: | - mkdir build - cd build - cmake .. -DPLUGIN_SYCL=ON -DCMAKE_CXX_COMPILER=g++ -DCMAKE_C_COMPILER=gcc -DCMAKE_PREFIX_PATH=$CONDA_PREFIX - make -j$(nproc) - - name: Install Python package - run: | - cd python-package - python --version - pip install -v . - - name: Test Python package - run: | - pytest -s -v -rxXs --durations=0 ./tests/python-sycl/ - - python-system-installation-on-ubuntu: name: Test XGBoost Python package System Installation on ${{ matrix.os }} runs-on: ${{ matrix.os }} strategy: + fail-fast: false matrix: os: [ubuntu-latest] steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 + - uses: actions/checkout@v4.2.2 with: submodules: 'true' - name: Set up Python 3.10 - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0 + uses: actions/setup-python@v5.3.0 with: python-version: "3.10" diff --git a/.github/workflows/python_wheels.yml b/.github/workflows/python_wheels_macos.yml similarity index 55% rename from .github/workflows/python_wheels.yml rename to .github/workflows/python_wheels_macos.yml index 3b7a8072c109..a4cff8eb0e6f 100644 --- a/.github/workflows/python_wheels.yml +++ b/.github/workflows/python_wheels_macos.yml @@ -1,9 +1,9 @@ -name: XGBoost-Python-Wheels +name: Build Python wheels targeting MacOS on: [push, pull_request] permissions: - contents: read # to fetch code (actions/checkout) + contents: read # to fetch code (actions/checkout) defaults: run: @@ -13,11 +13,16 @@ concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true +env: + BRANCH_NAME: >- + ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }} + jobs: - python-wheels: + python-wheels-macos: name: Build wheel for ${{ matrix.platform_id }} runs-on: ${{ matrix.os }} strategy: + fail-fast: false matrix: include: - os: macos-13 @@ -25,31 +30,26 @@ jobs: - os: macos-14 platform_id: macosx_arm64 steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 + - uses: actions/checkout@v4.2.2 with: submodules: 'true' - name: Set up homebrew - uses: Homebrew/actions/setup-homebrew@68fa6aeb1ccb0596d311f2b34ec74ec21ee68e54 + uses: Homebrew/actions/setup-homebrew@13341b4d5e459a98bbe0b122b12c11bf90518cc8 - name: Install libomp run: brew install libomp - - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4 + - uses: 
conda-incubator/setup-miniconda@v3.1.0 with: miniforge-variant: Miniforge3 miniforge-version: latest python-version: "3.10" use-mamba: true - name: Build wheels - run: bash tests/ci_build/build_python_wheels.sh ${{ matrix.platform_id }} ${{ github.sha }} - - name: Extract branch name - run: | - echo "branch=${GITHUB_REF#refs/heads/}" >> "$GITHUB_OUTPUT" - id: extract_branch - if: github.ref == 'refs/heads/master' || contains(github.ref, 'refs/heads/release_') + run: bash ops/pipeline/build-python-wheels-macos.sh ${{ matrix.platform_id }} ${{ github.sha }} - name: Upload Python wheel - if: github.ref == 'refs/heads/master' || contains(github.ref, 'refs/heads/release_') + # if: github.ref == 'refs/heads/master' || contains(github.ref, 'refs/heads/release_') run: | python -m pip install awscli - python -m awscli s3 cp wheelhouse/*.whl s3://xgboost-nightly-builds/${{ steps.extract_branch.outputs.branch }}/ --acl public-read --region us-west-2 + python -m awscli s3 cp wheelhouse/*.whl s3://xgboost-nightly-builds/${{ env.BRANCH_NAME }}/ --acl public-read --region us-west-2 env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_IAM_S3_UPLOADER }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_IAM_S3_UPLOADER }} diff --git a/.github/workflows/r_tests.yml b/.github/workflows/r_tests.yml index c56d1f8ef943..3885c126f11e 100644 --- a/.github/workflows/r_tests.yml +++ b/.github/workflows/r_tests.yml @@ -13,78 +13,46 @@ concurrency: cancel-in-progress: true jobs: - lintr: - runs-on: ${{ matrix.config.os }} - name: Run R linters on OS ${{ matrix.config.os }}, R ${{ matrix.config.r }}, Compiler ${{ matrix.config.compiler }}, Build ${{ matrix.config.build }} - strategy: - matrix: - config: - - {os: ubuntu-latest, r: 'release'} - env: - R_REMOTES_NO_ERRORS_FROM_WARNINGS: true - RSPM: ${{ matrix.config.rspm }} - - steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - with: - submodules: 'true' - - - uses: r-lib/actions/setup-r@929c772977a3a13c8733b363bf5a2f685c25dd91 # v2.9.0 - with: - r-version: ${{ matrix.config.r }} - - - name: Cache R packages - uses: actions/cache@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2 - with: - path: ${{ env.R_LIBS_USER }} - key: ${{ runner.os }}-r-${{ matrix.config.r }}-7-${{ hashFiles('R-package/DESCRIPTION') }} - restore-keys: ${{ runner.os }}-r-${{ matrix.config.r }}-7-${{ hashFiles('R-package/DESCRIPTION') }} - - - name: Install dependencies - shell: Rscript {0} - run: | - source("./R-package/tests/helper_scripts/install_deps.R") - - - name: Run lintr - run: | - MAKEFLAGS="-j$(nproc)" R CMD INSTALL R-package/ - Rscript tests/ci_build/lint_r.R $(pwd) - test-Rpkg: - runs-on: ${{ matrix.config.os }} - name: Test R on OS ${{ matrix.config.os }}, R ${{ matrix.config.r }}, Compiler ${{ matrix.config.compiler }}, Build ${{ matrix.config.build }} + runs-on: ${{ matrix.os }} + name: Test R on OS ${{ matrix.os }}, R ${{ matrix.r }}, Compiler ${{ matrix.compiler }}, Build ${{ matrix.build }} strategy: fail-fast: false matrix: - config: - - {os: windows-latest, r: 'release', compiler: 'mingw', build: 'autotools'} - - {os: ubuntu-latest, r: 'release', compiler: 'none', build: 'cmake'} + include: + - os: windows-latest + r: release + compiler: mingw + build: autotools + - os: ubuntu-latest + r: release + compiler: none + build: cmake env: R_REMOTES_NO_ERRORS_FROM_WARNINGS: true - RSPM: ${{ matrix.config.rspm }} steps: - name: Install system dependencies run: | sudo apt update sudo apt install libcurl4-openssl-dev libssl-dev libssh2-1-dev 
-        if: matrix.config.os == 'ubuntu-latest'
-      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
+        if: matrix.os == 'ubuntu-latest'
+      - uses: actions/checkout@v4.2.2
         with:
           submodules: 'true'

-      - uses: r-lib/actions/setup-r@929c772977a3a13c8733b363bf5a2f685c25dd91 # v2.9.0
+      - uses: r-lib/actions/setup-r@v2.11.0
         with:
-          r-version: ${{ matrix.config.r }}
+          r-version: ${{ matrix.r }}

       - name: Cache R packages
-        uses: actions/cache@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
+        uses: actions/cache@v4.1.2
         with:
           path: ${{ env.R_LIBS_USER }}
-          key: ${{ runner.os }}-r-${{ matrix.config.r }}-7-${{ hashFiles('R-package/DESCRIPTION') }}
-          restore-keys: ${{ runner.os }}-r-${{ matrix.config.r }}-7-${{ hashFiles('R-package/DESCRIPTION') }}
+          key: ${{ runner.os }}-r-${{ matrix.r }}-7-${{ hashFiles('R-package/DESCRIPTION') }}
+          restore-keys: ${{ runner.os }}-r-${{ matrix.r }}-7-${{ hashFiles('R-package/DESCRIPTION') }}

-      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0
+      - uses: actions/setup-python@v5.3.0
         with:
           python-version: "3.10"
           architecture: 'x64'
@@ -98,13 +66,13 @@ jobs:

       - name: Test R
         run: |
-          python tests/ci_build/test_r_package.py --compiler='${{ matrix.config.compiler }}' --build-tool="${{ matrix.config.build }}" --task=check
-        if: matrix.config.compiler != 'none'
+          python ops/script/test_r_package.py --compiler='${{ matrix.compiler }}' --build-tool="${{ matrix.build }}" --task=check
+        if: matrix.compiler != 'none'

       - name: Test R
         run: |
-          python tests/ci_build/test_r_package.py --build-tool="${{ matrix.config.build }}" --task=check
-        if: matrix.config.compiler == 'none'
+          python ops/script/test_r_package.py --build-tool="${{ matrix.build }}" --task=check
+        if: matrix.compiler == 'none'

   test-R-on-Debian:
     name: Test R package on Debian
@@ -123,7 +91,7 @@ jobs:
         run: |
           git config --global --add safe.directory "${GITHUB_WORKSPACE}"

-      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
+      - uses: actions/checkout@v4.2.2
         with:
           submodules: 'true'

@@ -135,7 +103,7 @@ jobs:
       - name: Test R
         shell: bash -l {0}
         run: |
-          python3 tests/ci_build/test_r_package.py --r=/usr/bin/R --build-tool=autotools --task=check
+          python3 ops/script/test_r_package.py --r=/usr/bin/R --build-tool=autotools --task=check

       - uses: dorny/paths-filter@v3
         id: changes
@@ -147,4 +115,4 @@ jobs:
       - name: Run document check
         if: steps.changes.outputs.r_package == 'true'
         run: |
-          python3 tests/ci_build/test_r_package.py --r=/usr/bin/R --task=doc
+          python3 ops/script/test_r_package.py --r=/usr/bin/R --task=doc
diff --git a/.github/workflows/scorecards.yml b/.github/workflows/scorecards.yml
index 85a9abb57e1b..8ab77ec4c382 100644
--- a/.github/workflows/scorecards.yml
+++ b/.github/workflows/scorecards.yml
@@ -22,7 +22,7 @@ jobs:

     steps:
       - name: "Checkout code"
-        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
+        uses: actions/checkout@v4.2.2
         with:
           persist-credentials: false
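To make the r_tests.yml changes above concrete: with the matrix flattened from config objects to include entries, the two CI combinations reduce to the following invocations of the relocated helper. The paths and flags are exactly those shown in the hunks; running them locally assumes R, Python, and the R-package dependencies are already installed.

    # Windows runner: MinGW compiler, autotools build
    python ops/script/test_r_package.py --compiler='mingw' --build-tool="autotools" --task=check

    # Ubuntu runner: no separate compiler entry, CMake build
    python ops/script/test_r_package.py --build-tool="cmake" --task=check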
diff --git a/.github/workflows/sycl_tests.yml b/.github/workflows/sycl_tests.yml
new file mode 100644
index 000000000000..54ebcb5f9532
--- /dev/null
+++ b/.github/workflows/sycl_tests.yml
@@ -0,0 +1,94 @@
+name: XGBoost CI (oneAPI)
+
+on: [push, pull_request]
+
+permissions:
+  contents: read  # to fetch code (actions/checkout)
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+env:
+  BRANCH_NAME: >-
+    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}
+
+jobs:
+  gtest-cpu-sycl:
+    name: Test Google C++ unittest (CPU SYCL)
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [ubuntu-latest]
+    steps:
+      - uses: actions/checkout@v4.2.2
+        with:
+          submodules: 'true'
+      - uses: conda-incubator/setup-miniconda@v3.1.0
+        with:
+          miniforge-variant: Miniforge3
+          miniforge-version: latest
+          activate-environment: linux_sycl_test
+          environment-file: ops/conda_env/linux_sycl_test.yml
+          use-mamba: true
+      - name: Display Conda env
+        run: |
+          conda info
+          conda list
+      - name: Build and install XGBoost
+        shell: bash -l {0}
+        run: |
+          mkdir build
+          cd build
+          cmake .. -DGOOGLE_TEST=ON -DUSE_DMLC_GTEST=ON -DPLUGIN_SYCL=ON -DCMAKE_CXX_COMPILER=g++ -DCMAKE_C_COMPILER=gcc -DCMAKE_INSTALL_PREFIX=$CONDA_PREFIX
+          make -j$(nproc)
+      - name: Run gtest binary for SYCL
+        run: |
+          cd build
+          ./testxgboost --gtest_filter=Sycl*
+      - name: Run gtest binary for non SYCL
+        run: |
+          cd build
+          ./testxgboost --gtest_filter=-Sycl*
+
+  python-sycl-tests-on-ubuntu:
+    name: Test XGBoost Python package with SYCL on ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 90
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v4.2.2
+        with:
+          submodules: 'true'
+
+      - uses: conda-incubator/setup-miniconda@v3.1.0
+        with:
+          miniforge-variant: Miniforge3
+          miniforge-version: latest
+          activate-environment: linux_sycl_test
+          environment-file: ops/conda_env/linux_sycl_test.yml
+          use-mamba: true
+
+      - name: Display Conda env
+        run: |
+          conda info
+          conda list
+      - name: Build XGBoost on Ubuntu
+        run: |
+          mkdir build
+          cd build
+          cmake .. -DPLUGIN_SYCL=ON -DCMAKE_CXX_COMPILER=g++ -DCMAKE_C_COMPILER=gcc -DCMAKE_PREFIX_PATH=$CONDA_PREFIX
+          make -j$(nproc)
+      - name: Install Python package
+        run: |
+          cd python-package
+          python --version
+          pip install -v .
+      - name: Test Python package
+        run: |
+          pytest -s -v -rxXs --durations=0 ./tests/python-sycl/
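The gtest-cpu-sycl job above can be approximated outside GitHub Actions. In this sketch the conda environment file and CMake flags are copied from the workflow; the environment name xgb-sycl is an arbitrary local choice, and plain conda is used where the workflow goes through setup-miniconda with mamba.

    # Sketch of the gtest-cpu-sycl job, run from the repository root.
    conda env create -n xgb-sycl --file=ops/conda_env/linux_sycl_test.yml   # env name is arbitrary
    conda activate xgb-sycl
    mkdir build && cd build
    cmake .. -DGOOGLE_TEST=ON -DUSE_DMLC_GTEST=ON -DPLUGIN_SYCL=ON \
      -DCMAKE_CXX_COMPILER=g++ -DCMAKE_C_COMPILER=gcc -DCMAKE_INSTALL_PREFIX=$CONDA_PREFIX
    make -j$(nproc)
    ./testxgboost --gtest_filter=Sycl*    # SYCL-specific tests only
    ./testxgboost --gtest_filter=-Sycl*   # everything else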
diff --git a/.github/workflows/update_rapids.yml b/.github/workflows/update_rapids.yml
index 5e229db4c050..636661db46b8 100644
--- a/.github/workflows/update_rapids.yml
+++ b/.github/workflows/update_rapids.yml
@@ -25,7 +25,7 @@ jobs:
     name: Check latest RAPIDS
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
+      - uses: actions/checkout@v4.2.2
        with:
          submodules: 'true'
      - name: Check latest RAPIDS and update conftest.sh
diff --git a/.github/workflows/windows_runs_on.yml b/.github/workflows/windows.yml
similarity index 93%
rename from .github/workflows/windows_runs_on.yml
rename to .github/workflows/windows.yml
index 73a258158b12..3dc9c4962646 100644
--- a/.github/workflows/windows_runs_on.yml
+++ b/.github/workflows/windows.yml
@@ -1,4 +1,4 @@
-name: Nextgen XGBoost CI Windows
+name: XGBoost CI (Windows)

 on: [push, pull_request]

@@ -27,7 +27,7 @@ jobs:
       - runs-on=${{ github.run_id }}
       - runner=windows-cpu
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v4.2.2
        with:
          submodules: "true"
      - run: powershell ops/pipeline/build-win64-gpu.ps1
@@ -39,6 +39,7 @@ jobs:
        env:
          COMMAND: upload
          KEY: build-win64-gpu
+
  test-win64-gpu:
    name: Test XGBoost on Windows
    needs: build-win64-gpu
@@ -46,7 +47,7 @@ jobs:
      - runs-on=${{ github.run_id }}
      - runner=windows-gpu
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v4.2.2
        with:
          submodules: "true"
      - name: Unstash files
diff --git a/ops/docker/conda_env/aarch64_test.yml b/ops/conda_env/aarch64_test.yml
similarity index 100%
rename from ops/docker/conda_env/aarch64_test.yml
rename to ops/conda_env/aarch64_test.yml
diff --git a/ops/docker/conda_env/cpp_test.yml b/ops/conda_env/cpp_test.yml
similarity index 100%
rename from ops/docker/conda_env/cpp_test.yml
rename to ops/conda_env/cpp_test.yml
diff --git a/ops/docker/conda_env/jvm_tests.yml b/ops/conda_env/jvm_tests.yml
similarity index 100%
rename from ops/docker/conda_env/jvm_tests.yml
rename to ops/conda_env/jvm_tests.yml
diff --git a/ops/docker/conda_env/linux_cpu_test.yml b/ops/conda_env/linux_cpu_test.yml
similarity index 100%
rename from ops/docker/conda_env/linux_cpu_test.yml
rename to ops/conda_env/linux_cpu_test.yml
diff --git a/ops/docker/conda_env/linux_sycl_test.yml b/ops/conda_env/linux_sycl_test.yml
similarity index 100%
rename from ops/docker/conda_env/linux_sycl_test.yml
rename to ops/conda_env/linux_sycl_test.yml
diff --git a/ops/docker/conda_env/macos_cpu_test.yml b/ops/conda_env/macos_cpu_test.yml
similarity index 100%
rename from ops/docker/conda_env/macos_cpu_test.yml
rename to ops/conda_env/macos_cpu_test.yml
diff --git a/ops/docker/conda_env/python_lint.yml b/ops/conda_env/python_lint.yml
similarity index 100%
rename from ops/docker/conda_env/python_lint.yml
rename to ops/conda_env/python_lint.yml
diff --git a/ops/docker/conda_env/sdist_test.yml b/ops/conda_env/sdist_test.yml
similarity index 100%
rename from ops/docker/conda_env/sdist_test.yml
rename to ops/conda_env/sdist_test.yml
diff --git a/ops/docker/conda_env/win64_test.yml b/ops/conda_env/win64_test.yml
similarity index 100%
rename from ops/docker/conda_env/win64_test.yml
rename to ops/conda_env/win64_test.yml
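The nine renames above only move the environment definitions from ops/docker/conda_env/ to ops/conda_env/; any local command that referenced the old location simply drops the docker/ segment. For example (the environment name here is chosen purely for illustration):

    # Before this patch:
    #   conda env create -n xgb-lint --file=ops/docker/conda_env/python_lint.yml
    # After this patch:
    conda env create -n xgb-lint --file=ops/conda_env/python_lint.yml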
diff --git a/ops/docker/dockerfile/Dockerfile.aarch64 b/ops/docker/dockerfile/Dockerfile.aarch64
index 8d6cfaca39fa..9dff2a05230b 100644
--- a/ops/docker/dockerfile/Dockerfile.aarch64
+++ b/ops/docker/dockerfile/Dockerfile.aarch64
@@ -32,7 +32,7 @@ RUN set -ex; \

 # Default entry-point to use if running locally
 # It will preserve attributes of created files
-COPY entrypoint.sh /scripts/
+COPY docker/entrypoint.sh /scripts/

 WORKDIR /workspace
 ENTRYPOINT ["/scripts/entrypoint.sh"]
diff --git a/ops/docker/dockerfile/Dockerfile.clang_tidy b/ops/docker/dockerfile/Dockerfile.clang_tidy
index c9528015c17e..de7d9bd3f254 100644
--- a/ops/docker/dockerfile/Dockerfile.clang_tidy
+++ b/ops/docker/dockerfile/Dockerfile.clang_tidy
@@ -44,7 +44,7 @@ RUN set -ex; \

 # Default entry-point to use if running locally
 # It will preserve attributes of created files
-COPY entrypoint.sh /scripts/
+COPY docker/entrypoint.sh /scripts/

 WORKDIR /workspace
 ENTRYPOINT ["/scripts/entrypoint.sh"]
diff --git a/ops/docker/dockerfile/Dockerfile.cpu b/ops/docker/dockerfile/Dockerfile.cpu
index 64b28026a89c..a426ce5da30c 100644
--- a/ops/docker/dockerfile/Dockerfile.cpu
+++ b/ops/docker/dockerfile/Dockerfile.cpu
@@ -51,7 +51,7 @@ RUN set -ex; \

 # Default entry-point to use if running locally
 # It will preserve attributes of created files
-COPY entrypoint.sh /scripts/
+COPY docker/entrypoint.sh /scripts/

 WORKDIR /workspace
 ENTRYPOINT ["/scripts/entrypoint.sh"]
diff --git a/ops/docker/dockerfile/Dockerfile.gpu b/ops/docker/dockerfile/Dockerfile.gpu
index d8be4d3b07ef..96a532fc2ff1 100644
--- a/ops/docker/dockerfile/Dockerfile.gpu
+++ b/ops/docker/dockerfile/Dockerfile.gpu
@@ -48,7 +48,7 @@ RUN set -ex; \

 # Default entry-point to use if running locally
 # It will preserve attributes of created files
-COPY entrypoint.sh /scripts/
+COPY docker/entrypoint.sh /scripts/

 WORKDIR /workspace
 ENTRYPOINT ["/scripts/entrypoint.sh"]
diff --git a/ops/docker/dockerfile/Dockerfile.gpu_build_r_rockylinux8 b/ops/docker/dockerfile/Dockerfile.gpu_build_r_rockylinux8
index 7c1d4e8ef642..2d18b1eeb315 100644
--- a/ops/docker/dockerfile/Dockerfile.gpu_build_r_rockylinux8
+++ b/ops/docker/dockerfile/Dockerfile.gpu_build_r_rockylinux8
@@ -52,7 +52,7 @@ RUN set -ex; \

 # Default entry-point to use if running locally
 # It will preserve attributes of created files
-COPY entrypoint.sh /scripts/
+COPY docker/entrypoint.sh /scripts/

 WORKDIR /workspace
 ENTRYPOINT ["/scripts/entrypoint.sh"]
diff --git a/ops/docker/dockerfile/Dockerfile.jvm b/ops/docker/dockerfile/Dockerfile.jvm
index c4584747f5db..9fd62e52de93 100644
--- a/ops/docker/dockerfile/Dockerfile.jvm
+++ b/ops/docker/dockerfile/Dockerfile.jvm
@@ -37,7 +37,7 @@ RUN set -ex; \

 # Default entry-point to use if running locally
 # It will preserve attributes of created files
-COPY entrypoint.sh /scripts/
+COPY docker/entrypoint.sh /scripts/

 WORKDIR /workspace
 ENTRYPOINT ["/scripts/entrypoint.sh"]
diff --git a/ops/docker/dockerfile/Dockerfile.jvm_gpu_build b/ops/docker/dockerfile/Dockerfile.jvm_gpu_build
index 7f0168df467f..4983493a6878 100644
--- a/ops/docker/dockerfile/Dockerfile.jvm_gpu_build
+++ b/ops/docker/dockerfile/Dockerfile.jvm_gpu_build
@@ -48,7 +48,7 @@ RUN set -ex; \

 # Default entry-point to use if running locally
 # It will preserve attributes of created files
-COPY entrypoint.sh /scripts/
+COPY docker/entrypoint.sh /scripts/

 WORKDIR /workspace
 ENTRYPOINT ["/scripts/entrypoint.sh"]
diff --git a/ops/docker/dockerfile/Dockerfile.manylinux2014_aarch64 b/ops/docker/dockerfile/Dockerfile.manylinux2014_aarch64
index 52baff43bb6f..7800033f552d 100644
--- a/ops/docker/dockerfile/Dockerfile.manylinux2014_aarch64
+++ b/ops/docker/dockerfile/Dockerfile.manylinux2014_aarch64
@@ -11,7 +11,7 @@ RUN set -ex; \

 # Default entry-point to use if running locally
 # It will preserve attributes of created files
-COPY entrypoint.sh /scripts/
+COPY docker/entrypoint.sh /scripts/

 WORKDIR /workspace
 ENTRYPOINT ["/scripts/entrypoint.sh"]
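These Dockerfile hunks all track a single change: the Docker build context moves from ops/docker/ up to ops/ (see the docker_build.py hunk further below), so the entrypoint script is now addressed as docker/entrypoint.sh relative to the context. A hand-run build roughly equivalent to what the wrapper does might look like the sketch below; the image tag is illustrative only, and the supported entry point remains ops/docker_build.sh / docker_build.py.

    # Build context is ops/, so COPY docker/entrypoint.sh resolves correctly.
    docker build \
      -t xgb-ci.cpu \
      -f ops/docker/dockerfile/Dockerfile.cpu \
      ops/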
diff --git a/ops/docker/dockerfile/Dockerfile.manylinux2014_x86_64 b/ops/docker/dockerfile/Dockerfile.manylinux2014_x86_64
index fdfcbd277360..8214b598d8d4 100644
--- a/ops/docker/dockerfile/Dockerfile.manylinux2014_x86_64
+++ b/ops/docker/dockerfile/Dockerfile.manylinux2014_x86_64
@@ -11,7 +11,7 @@ RUN set -ex; \

 # Default entry-point to use if running locally
 # It will preserve attributes of created files
-COPY entrypoint.sh /scripts/
+COPY docker/entrypoint.sh /scripts/

 WORKDIR /workspace
 ENTRYPOINT ["/scripts/entrypoint.sh"]
diff --git a/ops/docker/dockerfile/Dockerfile.manylinux_2_28_x86_64 b/ops/docker/dockerfile/Dockerfile.manylinux_2_28_x86_64
index 5e264e2f16e6..f5dac54b9b8f 100644
--- a/ops/docker/dockerfile/Dockerfile.manylinux_2_28_x86_64
+++ b/ops/docker/dockerfile/Dockerfile.manylinux_2_28_x86_64
@@ -9,7 +9,7 @@ RUN set -ex; \

 # Default entry-point to use if running locally
 # It will preserve attributes of created files
-COPY entrypoint.sh /scripts/
+COPY docker/entrypoint.sh /scripts/

 WORKDIR /workspace
 ENTRYPOINT ["/scripts/entrypoint.sh"]
diff --git a/ops/docker_build.py b/ops/docker_build.py
index 922d528814a4..b096d9201d0f 100644
--- a/ops/docker_build.py
+++ b/ops/docker_build.py
@@ -9,7 +9,7 @@
 import sys
 from typing import Optional

-from docker_run import SCRIPT_DIR, fancy_print_cli_args
+from docker_run import OPS_DIR, fancy_print_cli_args


 def parse_build_args(raw_build_args: list[str]) -> list[dict[str, str]]:
@@ -71,9 +71,9 @@ def docker_build(
 def main(args: argparse.Namespace) -> None:
     # Dockerfile to be used in docker build
     dockerfile_path = (
-        SCRIPT_DIR / "docker" / "dockerfile" / f"Dockerfile.{args.container_def}"
+        OPS_DIR / "docker" / "dockerfile" / f"Dockerfile.{args.container_def}"
     )
-    docker_context_path = SCRIPT_DIR / "docker"
+    docker_context_path = OPS_DIR

     build_args = parse_build_args(args.build_arg)
diff --git a/ops/docker_run.py b/ops/docker_run.py
index 161c81b477b0..41ec9acb17c2 100644
--- a/ops/docker_run.py
+++ b/ops/docker_run.py
@@ -12,8 +12,8 @@
 import sys
 import textwrap

-SCRIPT_DIR = pathlib.Path(__file__).expanduser().resolve().parent
-PROJECT_ROOT_DIR = SCRIPT_DIR.parent
+OPS_DIR = pathlib.Path(__file__).expanduser().resolve().parent
+PROJECT_ROOT_DIR = OPS_DIR.parent
 LINEWIDTH = 88
 TEXT_WRAPPER = textwrap.TextWrapper(
     width=LINEWIDTH,
diff --git a/ops/pipeline/build-jvm-macos-m1.sh b/ops/pipeline/build-jvm-macos-apple-silicon.sh
similarity index 85%
rename from ops/pipeline/build-jvm-macos-m1.sh
rename to ops/pipeline/build-jvm-macos-apple-silicon.sh
index 75785aa03eba..0c0aa6300729 100755
--- a/ops/pipeline/build-jvm-macos-m1.sh
+++ b/ops/pipeline/build-jvm-macos-apple-silicon.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-## Build libxgboost4j.dylib targeting MacOS
+## Build libxgboost4j.dylib targeting MacOS (Apple Silicon)

 set -euox pipefail

@@ -34,11 +34,11 @@ pushd lib
 libname=libxgboost4j_m1_${GITHUB_SHA}.dylib
 mv -v libxgboost4j.dylib ${libname}

-if [[ ($is_pull_request == 0) && ($is_release_branch == 1) ]]
-then
+# if [[ ($is_pull_request == 0) && ($is_release_branch == 1) ]]
+# then
   aws s3 cp ${libname} \
     s3://xgboost-nightly-builds/${BRANCH_NAME}/libxgboost4j/ \
     --acl public-read --no-progress
-fi
+# fi
 popd
 set +x
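Both macOS JVM build scripts (the renamed Apple Silicon one above and the new Intel one below) expect GITHUB_SHA and BRANCH_NAME in the environment; in CI the workflow injects them, and the resulting dylib lands under s3://xgboost-nightly-builds/<branch>/libxgboost4j/. A hypothetical local dry run would have to provide the same variables, roughly as sketched here; note that ops/pipeline/enforce-ci.sh may refuse to run outside CI, so this is a sketch rather than a supported invocation.

    # Hypothetical local invocation; in CI these variables come from the workflow.
    export GITHUB_SHA=$(git rev-parse HEAD)
    export BRANCH_NAME=$(git rev-parse --abbrev-ref HEAD)
    bash ops/pipeline/build-jvm-macos-apple-silicon.sh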
diff --git a/ops/pipeline/build-jvm-macos-intel.sh b/ops/pipeline/build-jvm-macos-intel.sh
new file mode 100755
index 000000000000..ee71a8b13078
--- /dev/null
+++ b/ops/pipeline/build-jvm-macos-intel.sh
@@ -0,0 +1,44 @@
+#!/bin/bash
+## Build libxgboost4j.dylib targeting MacOS (Intel)
+
+set -euox pipefail
+
+source ops/pipeline/enforce-ci.sh
+
+# Display system info
+echo "--- Display system information"
+set -x
+system_profiler SPSoftwareDataType
+sysctl -n machdep.cpu.brand_string
+uname -m
+set +x
+
+brew install ninja libomp
+
+# Build XGBoost4J binary
+echo "--- Build libxgboost4j.dylib"
+set -x
+mkdir build
+pushd build
+export JAVA_HOME=$(/usr/libexec/java_home)
+cmake .. -GNinja -DJVM_BINDINGS=ON -DUSE_OPENMP=ON -DCMAKE_OSX_DEPLOYMENT_TARGET=10.15
+ninja -v
+popd
+rm -rf build
+otool -L lib/libxgboost.dylib
+set +x
+
+echo "--- Upload libxgboost4j.dylib"
+set -x
+pushd lib
+libname=libxgboost4j_intel_${GITHUB_SHA}.dylib
+mv -v libxgboost4j.dylib ${libname}
+
+# if [[ ($is_pull_request == 0) && ($is_release_branch == 1) ]]
+# then
+  aws s3 cp ${libname} \
+    s3://xgboost-nightly-builds/${BRANCH_NAME}/libxgboost4j/ \
+    --acl public-read --no-progress
+# fi
+popd
+set +x
diff --git a/ops/pipeline/build-python-wheels-macos.sh b/ops/pipeline/build-python-wheels-macos.sh
old mode 100644
new mode 100755
index 3715ec9e7e0f..697514c0c3ad
--- a/ops/pipeline/build-python-wheels-macos.sh
+++ b/ops/pipeline/build-python-wheels-macos.sh
@@ -30,7 +30,6 @@ if [[ "$platform_id" == macosx_* ]]; then
   # Set up environment variables to configure cibuildwheel
   export CIBW_BUILD=cp${cpython_ver}-${platform_id}
   export CIBW_ARCHS=${cibw_archs}
-  export CIBW_ENVIRONMENT=${setup_env_var}
   export CIBW_TEST_SKIP='*-macosx_arm64'
   export CIBW_BUILD_VERBOSITY=3
 else
diff --git a/ops/pipeline/test-win64-gpu.ps1 b/ops/pipeline/test-win64-gpu.ps1
index e4a55c77b2bd..2416d53b3f85 100644
--- a/ops/pipeline/test-win64-gpu.ps1
+++ b/ops/pipeline/test-win64-gpu.ps1
@@ -13,7 +13,7 @@ if ($LASTEXITCODE -ne 0) { throw "Last command failed" }

 Write-Host "--- Set up Python env"
 conda activate
 $env_name = -join("win64_", (New-Guid).ToString().replace("-", ""))
-mamba env create -n ${env_name} --file=ops/docker/conda_env/win64_test.yml
+mamba env create -n ${env_name} --file=ops/conda_env/win64_test.yml
 conda activate ${env_name}
 python -m pip install `
   (Get-ChildItem python-package/dist/*.whl | Select-Object -Expand FullName)
diff --git a/ops/script/build_via_cmake.sh b/ops/script/build_via_cmake.sh
index 857ebbbec0c2..86e3677f4392 100755
--- a/ops/script/build_via_cmake.sh
+++ b/ops/script/build_via_cmake.sh
@@ -2,9 +2,16 @@

 set -euo pipefail

-if [[ "$1" == --conda-env=* ]]
+if [[ "$#" -lt 1 ]]
 then
-  conda_env=$(echo "$1" | sed 's/^--conda-env=//g' -)
+  conda_env=""
+else
+  conda_env="$1"
+fi
+
+if [[ "${conda_env}" == --conda-env=* ]]
+then
+  conda_env=$(echo "${conda_env}" | sed 's/^--conda-env=//g' -)
   echo "Activating Conda environment ${conda_env}"
   shift 1
   cmake_args="$@"
diff --git a/ops/script/lint_cmake.sh b/ops/script/lint_cmake.sh
old mode 100644
new mode 100755
index d67ecd0844ed..55aeb20e8fb2
--- a/ops/script/lint_cmake.sh
+++ b/ops/script/lint_cmake.sh
@@ -1,6 +1,6 @@
 #!/bin/bash

-set -e
+set -euo pipefail

 cmake_files=$(
   find . -name CMakeLists.txt -o -path "./cmake/*.cmake" \
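The build_via_cmake.sh hunk above makes the --conda-env flag optional, so callers may now invoke the script with no arguments at all. Under that reading, both of the following forms are accepted; the environment name and extra CMake flag shown here are examples only, passed through because the script forwards its remaining arguments to CMake.

    # Without a Conda environment (script falls through to the no-env branch):
    bash ops/script/build_via_cmake.sh

    # With a Conda environment, as before; trailing arguments become CMake args:
    bash ops/script/build_via_cmake.sh --conda-env=linux_cpu_test -DGOOGLE_TEST=ON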
diff --git a/ops/script/run_clang_tidy.py b/ops/script/run_clang_tidy.py
index aaeccdaf3718..dca5d1069598 100755
--- a/ops/script/run_clang_tidy.py
+++ b/ops/script/run_clang_tidy.py
@@ -19,7 +19,9 @@ def call(args: list[str]) -> tuple[int, int, str, list[str]]:
     # `workspace` is a name used in the CI container. Normally we should keep the dir
     # as `xgboost`.
     matched = re.search(
-        "(workspace|xgboost)/.*(ops|src|tests|include)/.*warning:", error_msg, re.MULTILINE
+        "(workspace|xgboost)/.*(ops|src|tests|include)/.*warning:",
+        error_msg,
+        re.MULTILINE,
     )
     if matched is None:
diff --git a/tests/test_distributed/test_gpu_with_dask/test_gpu_with_dask.py b/tests/test_distributed/test_gpu_with_dask/test_gpu_with_dask.py
index b7be3c44c1df..5746f33044e9 100644
--- a/tests/test_distributed/test_gpu_with_dask/test_gpu_with_dask.py
+++ b/tests/test_distributed/test_gpu_with_dask/test_gpu_with_dask.py
@@ -101,7 +101,7 @@ def is_df(part: T) -> T:
             X.columns = X.columns.astype("object")
         # Make sure the output can be integrated back to original dataframe
         X.columns = X.columns.astype("object")
-        # Work around https://github.com/dmlc/xgboost/issues/10752
+        # Work around https://github.com/dmlc/xgboost/issues/10752
         X["predict"] = predictions
         X["inplace_predict"] = series_predictions