diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml
new file mode 100644
index 000000000000..9b70bd171b6f
--- /dev/null
+++ b/.github/FUNDING.yml
@@ -0,0 +1,2 @@
+github: [eli-schwartz]
+custom: ['https://mesonbuild.com/Donating.html']
diff --git a/.github/codecov.yml b/.github/codecov.yml
deleted file mode 100644
index fa7b82a052b8..000000000000
--- a/.github/codecov.yml
+++ /dev/null
@@ -1,11 +0,0 @@
-coverage:
- status:
- project:
- default:
- informational: true
- patch:
- default:
- informational: true
-comment: false
-github_checks:
- annotations: false
diff --git a/.github/codeql/codeql-config.yml b/.github/codeql/codeql-config.yml
deleted file mode 100644
index 9b144885f41f..000000000000
--- a/.github/codeql/codeql-config.yml
+++ /dev/null
@@ -1,5 +0,0 @@
-queries:
- - uses: ./.github/codeql/lgtm.qls
-
-paths-ignore:
- - 'test cases'
diff --git a/.github/codeql/lgtm.qls b/.github/codeql/lgtm.qls
deleted file mode 100644
index 9befc76ead95..000000000000
--- a/.github/codeql/lgtm.qls
+++ /dev/null
@@ -1,4 +0,0 @@
-# for some reason this doesn't work by default any way I can see
-
-- import: codeql-suites/python-lgtm.qls
- from: codeql/python-queries
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
deleted file mode 100644
index 6a78a36c62a2..000000000000
--- a/.github/workflows/codeql-analysis.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-name: "CodeQL"
-
-on:
- push:
- branches: [ "master" ]
- pull_request:
- branches: [ "master" ]
-
-jobs:
- analyze:
- # lgtm.com does not run in forks, for good reason
- if: github.repository == 'mesonbuild/meson'
- name: Analyze
- runs-on: ubuntu-latest
- permissions:
- security-events: write
-
- steps:
- - name: Checkout repository
- uses: actions/checkout@v4
-
- - name: Initialize CodeQL
- uses: github/codeql-action/init@v3
- with:
- # bypass cache: https://github.com/github/codeql-action/issues/1445
- tools: linked
- config-file: .github/codeql/codeql-config.yml
- languages: python
- # we have none
-
- - name: Perform CodeQL Analysis
- uses: github/codeql-action/analyze@v3
diff --git a/.github/workflows/cygwin.yml b/.github/workflows/cygwin.yml
index d819f802f1fe..2ba1ff2071ef 100644
--- a/.github/workflows/cygwin.yml
+++ b/.github/workflows/cygwin.yml
@@ -41,6 +41,10 @@ jobs:
MESON_CI_JOBNAME: cygwin-${{ matrix.NAME }}
steps:
+ # remove inheritable permissions since they break assumptions the
+ # testsuite makes about file modes
+ - run: icacls . /inheritance:r /T /C
+
- uses: actions/cache/restore@v4
id: restore-cache
with:
@@ -53,7 +57,7 @@ jobs:
- uses: actions/checkout@v4
- - uses: cygwin/cygwin-install-action@master
+ - uses: cygwin/cygwin-install-action@v5
with:
platform: ${{ matrix.ARCH }}
packages: |
@@ -83,7 +87,7 @@ jobs:
- name: Run pip
run: |
export PATH=/usr/bin:/usr/local/bin:$(cygpath ${SYSTEMROOT})/system32
- python3 -m pip --disable-pip-version-check install gcovr fastjsonschema pefile pytest pytest-subtests pytest-xdist coverage
+ python3 -m pip --disable-pip-version-check install gcovr fastjsonschema pefile pytest pytest-subtests pytest-xdist
shell: C:\cygwin\bin\bash.exe --noprofile --norc -o igncr -eo pipefail '{0}'
- uses: actions/cache/save@v4
@@ -95,7 +99,7 @@ jobs:
- name: Run tests
run: |
export PATH=/usr/bin:/usr/local/bin:$(cygpath ${SYSTEMROOT})/system32
- python3 ./tools/run_with_cov.py run_tests.py --backend=ninja
+ python3 ./run_tests.py --backend=ninja
env:
# Cygwin's static boost installation is broken (some static library
# variants such as boost_thread are not present)
@@ -108,17 +112,3 @@ jobs:
path: meson-test-run.*
# test log should be saved on failure
if: ${{ !cancelled() }}
-
- - name: Aggregate coverage reports
- run: |
- export PATH=/usr/bin:/usr/local/bin:$(cygpath ${SYSTEMROOT})/system32
- ./ci/combine_cov.sh
- shell: C:\cygwin\bin\bash.exe --noprofile --norc -o igncr -eo pipefail '{0}'
-
- - name: Upload coverage report
- uses: codecov/codecov-action@v3
- with:
- files: .coverage/coverage.xml
- name: "${{ matrix.NAME }}"
- fail_ci_if_error: false
- verbose: true
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 5588034723b8..ef5889579009 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -53,7 +53,7 @@ jobs:
with:
python-version: '3.x'
# Pin mypy to version 1.8, so we retain the ability to lint for Python 3.7
- - run: python -m pip install "mypy==1.8" coverage strictyaml types-PyYAML types-tqdm types-chevron
+ - run: python -m pip install "mypy==1.8" strictyaml types-PyYAML types-tqdm types-chevron
- run: python run_mypy.py --allver
env:
PYTHONUNBUFFERED: 1
diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml
index 88acbef90206..3afb4baca353 100644
--- a/.github/workflows/macos.yml
+++ b/.github/workflows/macos.yml
@@ -32,12 +32,10 @@ jobs:
runs-on: macos-latest
steps:
- uses: actions/checkout@v4
- - uses: actions/setup-python@v5
- with:
- python-version: '3.x'
- run: |
- python -m pip install --upgrade pip
- python -m pip install pytest pytest-xdist pytest-subtests fastjsonschema coverage
+ export PATH="$HOME/Library/Python/3.9/bin:$PATH"
+ /usr/bin/python3 -m pip install --upgrade pip
+ /usr/bin/python3 -m pip install pytest pytest-xdist pytest-subtests fastjsonschema
- run: brew install pkg-config ninja llvm qt@5
- env:
CPPFLAGS: "-I/opt/homebrew/include"
@@ -48,20 +46,9 @@ jobs:
# These cannot evaluate anything, so we cannot set PATH or SDKROOT here
run: |
export SDKROOT="$(xcodebuild -version -sdk macosx Path)"
- export PATH="$HOME/tools:/opt/homebrew/opt/qt@5/bin:/opt/homebrew/opt/llvm/bin:$PATH"
- export PKG_CONFIG_PATH="/opt/homebrew/opt/qt@5/lib/pkgconfig:$PKG_CONFIG_PATH"
- ./tools/run_with_cov.py ./run_unittests.py
-
- - name: Aggregate coverage reports
- run: ./ci/combine_cov.sh
-
- - name: Upload coverage report
- uses: codecov/codecov-action@v3
- with:
- files: .coverage/coverage.xml
- name: "appleclang [unit tests]"
- fail_ci_if_error: false
- verbose: true
+ export PATH="$HOME/Library/Python/3.9/bin:$HOME/tools:/opt/homebrew/opt/qt@5/bin:/opt/homebrew/opt/llvm/bin:$PATH"
+ export PKG_CONFIG_PATH="/Applications/Xcode.app/Contents/Developer/Library/Frameworks/Python3.framework/Versions/Current/lib/pkgconfig:/opt/homebrew/opt/qt@5/lib/pkgconfig:$PKG_CONFIG_PATH"
+ /usr/bin/python3 ./run_unittests.py
project-tests-appleclang:
@@ -98,9 +85,7 @@ jobs:
# https://github.com/actions/setup-python/issues/58
- run: brew install pkg-config ninja llvm qt@5 boost ldc hdf5 openmpi lapack scalapack sdl2 boost-python3 gtk-doc zstd ncurses objfw libomp
- run: |
- python3 -m pip install --upgrade setuptools
- python3 -m pip install --upgrade pip
- python3 -m pip install cython coverage
+ python3 -m pip install cython
- env:
CPPFLAGS: "-I/opt/homebrew/include"
LDFLAGS: "-L/opt/homebrew/lib"
@@ -110,20 +95,11 @@ jobs:
# These cannot evaluate anything, so we cannot set PATH or SDKROOT here
run: |
export SDKROOT="$(xcodebuild -version -sdk macosx Path)"
- export PATH="$HOME/tools:/opt/homebrew/opt/qt@5/bin:/opt/homebrew/opt/llvm/bin:/opt/homebrew/opt/ncurses/bin:$PATH"
+ # Append LLVM's bin directory to PATH to prioritise Apple Clang over Homebrew Clang.
+ # We need this to avoid objfw test failures.
+ export PATH="$HOME/tools:/opt/homebrew/opt/qt@5/bin:/opt/homebrew/opt/ncurses/bin:$PATH:/opt/homebrew/opt/llvm/bin"
export PKG_CONFIG_PATH="/opt/homebrew/opt/qt@5/lib/pkgconfig:/opt/homebrew/opt/lapack/lib/pkgconfig:/opt/homebrew/opt/ncurses/lib/pkgconfig:$PKG_CONFIG_PATH"
- ./tools/run_with_cov.py ./run_project_tests.py --backend=ninja
-
- - name: Aggregate coverage reports
- run: ./ci/combine_cov.sh
-
- - name: Upload coverage report
- uses: codecov/codecov-action@v3
- with:
- files: .coverage/coverage.xml
- name: "appleclang [project tests; unity=${{ matrix.unity }}]"
- fail_ci_if_error: false
- verbose: true
+ ./run_project_tests.py --backend=ninja
Qt4macos:
# This job only works on Intel Macs, because OpenSSL 1.0 doesn't build on
diff --git a/.github/workflows/msys2.yml b/.github/workflows/msys2.yml
index 3b518fee7607..b926d189847d 100644
--- a/.github/workflows/msys2.yml
+++ b/.github/workflows/msys2.yml
@@ -29,7 +29,7 @@ permissions:
jobs:
test:
- runs-on: windows-2019
+ runs-on: windows-2022
name: ${{ matrix.NAME }}
strategy:
fail-fast: false
@@ -79,18 +79,18 @@ jobs:
mingw-w64-${{ matrix.MSYS2_ARCH }}-libxml2
mingw-w64-${{ matrix.MSYS2_ARCH }}-ninja
mingw-w64-${{ matrix.MSYS2_ARCH }}-pkg-config
- mingw-w64-${{ matrix.MSYS2_ARCH }}-python2
mingw-w64-${{ matrix.MSYS2_ARCH }}-python
mingw-w64-${{ matrix.MSYS2_ARCH }}-python-lxml
mingw-w64-${{ matrix.MSYS2_ARCH }}-python-setuptools
mingw-w64-${{ matrix.MSYS2_ARCH }}-python-pip
mingw-w64-${{ matrix.MSYS2_ARCH }}-python-fastjsonschema
mingw-w64-${{ matrix.MSYS2_ARCH }}-objfw
+ mingw-w64-${{ matrix.MSYS2_ARCH }}-llvm
mingw-w64-${{ matrix.MSYS2_ARCH }}-${{ matrix.TOOLCHAIN }}
- name: Install dependencies
run: |
- python3 -m pip --disable-pip-version-check install gcovr pefile pytest pytest-subtests pytest-xdist coverage
+ python3 -m pip --disable-pip-version-check install gcovr pefile pytest pytest-subtests pytest-xdist
- name: Install pypy3 on x86_64
run: |
@@ -125,20 +125,9 @@ jobs:
pacman --noconfirm --needed -S mingw-w64-${{ matrix.MSYS2_ARCH }}-${{ matrix.MSYS2_CURSES }}
fi
- MSYSTEM= python3 ./tools/run_with_cov.py run_tests.py --backend=ninja
+ MSYSTEM= python3 ./run_tests.py --backend=ninja
- uses: actions/upload-artifact@v4
with:
name: ${{ matrix.NAME }}
path: meson-test-run.*
-
- - name: Aggregate coverage reports
- run: ./ci/combine_cov.sh
-
- - name: Upload coverage report
- uses: codecov/codecov-action@v3
- with:
- files: .coverage/coverage.xml
- name: "${{ matrix.NAME }}"
- fail_ci_if_error: false
- verbose: true
diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml
new file mode 100644
index 000000000000..79332196c173
--- /dev/null
+++ b/.github/workflows/nightly.yml
@@ -0,0 +1,32 @@
+name: Nightly Wheels
+
+concurrency:
+ group: wheels
+ cancel-in-progress: true
+
+on:
+ push:
+ branches:
+ - master
+ paths:
+ - "mesonbuild/**"
+
+permissions:
+ contents: read
+
+jobs:
+ wheel:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ - name: Build wheel
+ run: |
+ python -m pip install build setuptools_scm
+ python -m build -nwx
+ - name: Upload wheel
+ uses: scientific-python/upload-nightly-action@main
+ with:
+ artifacts_path: dist
+ anaconda_nightly_upload_token: ${{secrets.ANACONDA_ORG_UPLOAD_TOKEN}}
diff --git a/.github/workflows/nonnative.yml b/.github/workflows/nonnative.yml
index 2712d1032935..1714edda463c 100644
--- a/.github/workflows/nonnative.yml
+++ b/.github/workflows/nonnative.yml
@@ -36,18 +36,6 @@ jobs:
- run: |
apt-get -y purge clang gcc gdc
apt-get -y autoremove
- python3 -m pip install coverage
- uses: actions/checkout@v4
- name: Run tests
- run: bash -c 'source /ci/env_vars.sh; cd $GITHUB_WORKSPACE; ./tools/run_with_cov.py ./run_tests.py $CI_ARGS --cross ubuntu-armhf.json --cross-only'
-
- - name: Aggregate coverage reports
- run: ./ci/combine_cov.sh
-
- - name: Upload coverage report
- uses: codecov/codecov-action@v3
- with:
- files: .coverage/coverage.xml
- name: "Ubuntu nonnative"
- fail_ci_if_error: false
- verbose: true
+ run: bash -c 'source /ci/env_vars.sh; cd $GITHUB_WORKSPACE; ./run_tests.py $CI_ARGS --cross ubuntu-armhf.json --cross-only'
diff --git a/.github/workflows/os_comp.yml b/.github/workflows/os_comp.yml
index 56859ec3db01..4b9b7a4a6eae 100644
--- a/.github/workflows/os_comp.yml
+++ b/.github/workflows/os_comp.yml
@@ -26,12 +26,6 @@ on:
- ".github/workflows/os_comp.yml"
- "run*tests.py"
-# make GHA actions use node16 which still works with bionic
-# See https://github.blog/changelog/2024-03-07-github-actions-all-actions-will-run-on-node20-instead-of-node16-by-default/
-# Unclear how long this will work though
-env:
- ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
-
permissions:
contents: read
@@ -78,18 +72,7 @@ jobs:
source /ci/env_vars.sh
cd $GITHUB_WORKSPACE
- ./tools/run_with_cov.py ./run_tests.py $CI_ARGS
-
- - name: Aggregate coverage reports
- run: ./ci/combine_cov.sh
-
- - name: Upload coverage report
- uses: codecov/codecov-action@v3
- with:
- files: .coverage/coverage.xml
- name: "OS Comp [${{ matrix.cfg.name }}]"
- fail_ci_if_error: false
- verbose: true
+ ./run_tests.py $CI_ARGS
pypy:
name: 'Arch / PyPy'
@@ -178,15 +161,4 @@ jobs:
update-alternatives --set i686-w64-mingw32-gcc /usr/bin/i686-w64-mingw32-gcc-posix
update-alternatives --set i686-w64-mingw32-g++ /usr/bin/i686-w64-mingw32-g++-posix
- ./tools/run_with_cov.py ./run_tests.py $RUN_TESTS_ARGS -- $MESON_ARGS
-
- - name: Aggregate coverage reports
- run: ./ci/combine_cov.sh
-
- - name: Upload coverage report
- uses: codecov/codecov-action@v3
- with:
- files: .coverage/coverage.xml
- name: "Ubuntu [${{ matrix.cfg.CC }} ${{ matrix.cfg.RUN_TESTS_ARGS }} ${{ matrix.cfg.MESON_ARGS }}]"
- fail_ci_if_error: false
- verbose: true
+ ./run_tests.py $RUN_TESTS_ARGS -- $MESON_ARGS
diff --git a/.github/workflows/unusedargs_missingreturn.yml b/.github/workflows/unusedargs_missingreturn.yml
index 72f39b511f45..4367ce556caf 100644
--- a/.github/workflows/unusedargs_missingreturn.yml
+++ b/.github/workflows/unusedargs_missingreturn.yml
@@ -42,7 +42,7 @@ permissions:
jobs:
linux:
- runs-on: ubuntu-20.04
+ runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
@@ -52,22 +52,10 @@ jobs:
run: |
sudo apt update -yq
sudo apt install -yq --no-install-recommends g++ gfortran ninja-build gobjc gobjc++
- python -m pip install coverage
- - run: ./tools/run_with_cov.py run_project_tests.py --only cmake common fortran platform-linux "objective c" "objective c++"
+ - run: ./run_project_tests.py --only cmake common fortran platform-linux "objective c" "objective c++"
env:
MESON_CI_JOBNAME: linux-ubuntu-gcc-werror
- - name: Aggregate coverage reports
- run: ./ci/combine_cov.sh
-
- - name: Upload coverage report
- uses: codecov/codecov-action@v3
- with:
- files: .coverage/coverage.xml
- name: "UnusedMissingReturn"
- fail_ci_if_error: false
- verbose: true
-
windows:
runs-on: windows-latest
steps:
@@ -76,23 +64,11 @@ jobs:
with:
python-version: '3.x'
- - run: pip install ninja pefile coverage
+ - run: pip install ninja pefile
- - run: python ./tools/run_with_cov.py run_project_tests.py --only platform-windows
+ - run: python ./run_project_tests.py --only platform-windows
env:
CC: gcc
CXX: g++
FC: gfortran
MESON_CI_JOBNAME: msys2-gcc-werror
-
- - name: Aggregate coverage reports
- run: ./ci/combine_cov.sh
- shell: C:\msys64\usr\bin\bash.exe --noprofile --norc -o igncr -eo pipefail '{0}'
-
- - name: Upload coverage report
- uses: codecov/codecov-action@v3
- with:
- files: .coverage/coverage.xml
- name: "UnusedMissingReturn Windows"
- fail_ci_if_error: false
- verbose: true
diff --git a/.github/workflows/website.yml b/.github/workflows/website.yml
index fdb7d1400919..13c690207a91 100644
--- a/.github/workflows/website.yml
+++ b/.github/workflows/website.yml
@@ -45,7 +45,7 @@ jobs:
- name: Install package
run: |
sudo apt-get -y install python3-pip ninja-build libjson-glib-dev
- pip install hotdoc chevron strictyaml
+ pip install hotdoc chevron strictyaml aiohttp
- uses: actions/cache/save@v4
with:
diff --git a/README.md b/README.md
index f3a2657b7d05..3d01b3ff3769 100644
--- a/README.md
+++ b/README.md
@@ -22,7 +22,7 @@ Latest Meson version supporting previous Python versions:
#### Installing from source
-Meson is available on [PyPi](https://pypi.python.org/pypi/meson), so
+Meson is available on [PyPI](https://pypi.python.org/pypi/meson), so
it can be installed with `pip3 install meson`. The exact command to
type to install with `pip` can vary between systems, be sure to use
the Python 3 version of `pip`.
@@ -35,7 +35,7 @@ python3 -m pip install meson
For builds using Ninja, Ninja can be downloaded directly from Ninja
[GitHub release page](https://github.com/ninja-build/ninja/releases)
-or via [PyPi](https://pypi.python.org/pypi/ninja)
+or via [PyPI](https://pypi.python.org/pypi/ninja)
```console
python3 -m pip install ninja
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index ea511f33f94e..86c6b3aa23f9 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -114,8 +114,3 @@ jobs:
filePath: .\ci\run.ps1
env:
MESON_CI_JOBNAME: azure-$(System.JobName)
- - task: PowerShell@2
- displayName: Gathering coverage report
- inputs:
- targetType: 'filePath'
- filePath: .\ci\coverage.ps1
diff --git a/ci/ciimage/arch/install.sh b/ci/ciimage/arch/install.sh
index 8f5245149688..de43cb2ea467 100755
--- a/ci/ciimage/arch/install.sh
+++ b/ci/ciimage/arch/install.sh
@@ -12,9 +12,9 @@ pkgs=(
libelf gcc gcc-fortran gcc-objc vala rust bison flex cython go dlang-dmd
mono boost qt5-base gtkmm3 gtest gmock protobuf gobject-introspection
itstool glib2-devel gtk3 java-environment=8 gtk-doc llvm clang sdl2 graphviz
- doxygen vulkan-validation-layers openssh mercurial gtk-sharp-2 qt5-tools
+ doxygen vulkan-headers vulkan-icd-loader vulkan-validation-layers openssh mercurial gtk-sharp-2 qt5-tools
libwmf cmake netcdf-fortran openmpi nasm gnustep-base gettext
- python-lxml hotdoc rust-bindgen qt6-base qt6-tools wayland wayland-protocols
+ python-lxml hotdoc rust-bindgen qt6-base qt6-tools qt6-declarative wayland wayland-protocols
# cuda
)
@@ -24,6 +24,9 @@ cleanup_pkgs=(go)
AUR_USER=docker
PACMAN_OPTS='--needed --noprogressbar --noconfirm'
+# Workaround for cmake-4.0 vs wxwidgets-gtk2
+export CMAKE_POLICY_VERSION_MINIMUM=3.5
+
# Patch config files
sed -i 's/#Color/Color/g' /etc/pacman.conf
sed -i 's,#MAKEFLAGS="-j2",MAKEFLAGS="-j$(nproc)",g' /etc/makepkg.conf
diff --git a/ci/ciimage/fedora/install.sh b/ci/ciimage/fedora/install.sh
index 65b2012d5000..aa8765561d1e 100755
--- a/ci/ciimage/fedora/install.sh
+++ b/ci/ciimage/fedora/install.sh
@@ -12,8 +12,9 @@ pkgs=(
boost-python3-devel
itstool gtk3-devel java-latest-openjdk-devel gtk-doc llvm-devel clang-devel SDL2-devel graphviz-devel zlib zlib-devel zlib-static
#hdf5-openmpi-devel hdf5-devel netcdf-openmpi-devel netcdf-devel netcdf-fortran-openmpi-devel netcdf-fortran-devel scalapack-openmpi-devel
- doxygen vulkan-devel vulkan-validation-layers-devel openssh objfw mercurial gtk-sharp2-devel libpcap-devel gpgme-devel
+ doxygen vulkan-devel vulkan-validation-layers-devel openssh lksctp-tools-devel objfw mercurial gtk-sharp2-devel libpcap-devel gpgme-devel
qt5-qtbase-devel qt5-qttools-devel qt5-linguist qt5-qtbase-private-devel
+ qt6-qtdeclarative-devel qt6-qtbase-devel qt6-qttools-devel qt6-linguist qt6-qtbase-private-devel
libwmf-devel valgrind cmake openmpi-devel nasm gnustep-base-devel gettext-devel ncurses-devel
libxml2-devel libxslt-devel libyaml-devel glib2-devel json-glib-devel libgcrypt-devel wayland-devel wayland-protocols-devel
)
diff --git a/ci/ciimage/gentoo/install.sh b/ci/ciimage/gentoo/install.sh
index caf21a1fad69..30b0299dac9e 100755
--- a/ci/ciimage/gentoo/install.sh
+++ b/ci/ciimage/gentoo/install.sh
@@ -20,8 +20,7 @@ pkgs_stable=(
dev-lang/vala
dev-lang/python:2.7
dev-java/openjdk-bin
- # requires rustfmt, bin rebuild (TODO: file bug)
- #dev-util/bindgen
+ dev-util/bindgen
dev-libs/elfutils
dev-util/gdbus-codegen
@@ -45,7 +44,6 @@ pkgs_stable=(
# misc
app-admin/sudo
app-text/doxygen
- sys-apps/fakeroot
sys-devel/bison
sys-devel/gettext
@@ -104,8 +102,16 @@ mkdir /etc/portage/binrepos.conf || true
mkdir /etc/portage/profile || true
cat <<-EOF > /etc/portage/package.use/ci
dev-cpp/gtkmm X
-
+ dev-lang/rust clippy rustfmt
+ dev-lang/rust-bin clippy rustfmt
dev-libs/boost python
+
+ # Some of these settings are needed just to get the binpkg but
+ # aren't negative to have anyway
+ sys-devel/gcc ada d
+ >=sys-devel/gcc-13 ada objc objc++
+ sys-devel/gcc pgo lto
+
sys-libs/zlib static-libs
EOF
diff --git a/ci/ciimage/opensuse/install.sh b/ci/ciimage/opensuse/install.sh
index fdfedcb1bf2e..7a76071e5d60 100755
--- a/ci/ciimage/opensuse/install.sh
+++ b/ci/ciimage/opensuse/install.sh
@@ -9,10 +9,11 @@ pkgs=(
ninja make git autoconf automake patch libjpeg-devel
elfutils gcc gcc-c++ gcc-fortran gcc-objc gcc-obj-c++ vala rust bison flex curl lcov
mono-core gtkmm3-devel gtest gmock protobuf-devel wxGTK3-3_2-devel gobject-introspection-devel
- itstool gtk3-devel java-17-openjdk-devel gtk-doc llvm-devel clang-devel libSDL2-devel graphviz-devel zlib-devel zlib-devel-static
+ itstool gtk3-devel java-17-openjdk-devel gtk-doc llvm-devel clang-devel sdl2-compat-devel graphviz-devel zlib-devel zlib-devel-static
#hdf5-devel netcdf-devel libscalapack2-openmpi3-devel libscalapack2-gnu-openmpi3-hpc-devel openmpi3-devel
- doxygen vulkan-devel vulkan-validationlayers openssh mercurial gtk-sharp3-complete gtk-sharp2-complete libpcap-devel libgpgme-devel
+ doxygen vulkan-devel vulkan-validationlayers openssh mercurial libpcap-devel libgpgme-devel
libqt5-qtbase-devel libqt5-qttools-devel libqt5-linguist libqt5-qtbase-private-headers-devel
+ qt6-declarative-devel qt6-base-devel qt6-tools qt6-tools-linguist qt6-declarative-tools qt6-core-private-devel
libwmf-devel valgrind cmake nasm gnustep-base-devel gettext-tools gettext-runtime gettext-csharp ncurses-devel
libxml2-devel libxslt-devel libyaml-devel glib2-devel json-glib-devel
boost-devel libboost_date_time-devel libboost_filesystem-devel libboost_locale-devel libboost_system-devel
@@ -41,11 +42,12 @@ echo 'ulimit -n -S 10000' >> /ci/env_vars.sh
source /ci/env_vars.sh
-dub_fetch urld
-dub build --deep urld --arch=x86_64 --compiler=dmd --build=debug
-dub_fetch dubtestproject
-dub build dubtestproject:test1 --compiler=dmd
-dub build dubtestproject:test2 --compiler=dmd
+dub_fetch dubtestproject@1.2.0
+dub build dubtestproject:test1 --compiler=dmd --arch=x86_64
+dub build dubtestproject:test2 --compiler=dmd --arch=x86_64
+dub build dubtestproject:test3 --compiler=dmd --arch=x86_64
+dub_fetch urld@3.0.0
+dub build urld --compiler=dmd --arch=x86_64
# Cleanup
zypper --non-interactive clean --all
diff --git a/ci/ciimage/ubuntu-rolling/install.sh b/ci/ciimage/ubuntu-rolling/install.sh
index 2066944e4cf5..1c0891c239a4 100755
--- a/ci/ciimage/ubuntu-rolling/install.sh
+++ b/ci/ciimage/ubuntu-rolling/install.sh
@@ -12,6 +12,7 @@ pkgs=(
python3-pip libxml2-dev libxslt1-dev libyaml-dev libjson-glib-dev
wget unzip
qt5-qmake qtbase5-dev qtchooser qtbase5-dev-tools clang
+ qmake6 qt6-base-dev qt6-base-private-dev qt6-declarative-dev qt6-declarative-dev-tools qt6-l10n-tools qt6-base-dev-tools
libomp-dev
llvm lcov
dub ldc
@@ -49,11 +50,12 @@ install_python_packages hotdoc
echo 'ulimit -n -S 10000' >> /ci/env_vars.sh
ulimit -n -S 10000
# dub stuff
-dub_fetch urld
-dub build --deep urld --arch=x86_64 --compiler=gdc --build=debug
-dub_fetch dubtestproject
-dub build dubtestproject:test1 --compiler=ldc2
-dub build dubtestproject:test2 --compiler=ldc2
+dub_fetch dubtestproject@1.2.0
+dub build dubtestproject:test1 --compiler=ldc2 --arch=x86_64
+dub build dubtestproject:test2 --compiler=ldc2 --arch=x86_64
+dub build dubtestproject:test3 --compiler=gdc --arch=x86_64
+dub_fetch urld@3.0.0
+dub build urld --compiler=gdc --arch=x86_64
# Remove debian version of Rust and install latest with rustup.
# This is needed to get the cross toolchain as well.
diff --git a/ci/combine_cov.sh b/ci/combine_cov.sh
deleted file mode 100755
index 99a503bb8db4..000000000000
--- a/ci/combine_cov.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/bash
-
-echo "Combining coverage reports..."
-coverage combine
-
-echo "Generating XML report..."
-coverage xml
-
-echo "Printing report"
-coverage report
diff --git a/ci/coverage.ps1 b/ci/coverage.ps1
deleted file mode 100644
index ebd7cd4c3fd4..000000000000
--- a/ci/coverage.ps1
+++ /dev/null
@@ -1,14 +0,0 @@
-echo ""
-echo ""
-echo "=== Gathering coverage report ==="
-echo ""
-
-python3 -m coverage combine
-python3 -m coverage xml
-python3 -m coverage report
-
-# Currently codecov.py does not handle Azure, use this fork of a fork to get it
-# working without requiring a token
-git clone https://github.com/mensinda/codecov-python
-python3 -m pip install --ignore-installed ./codecov-python
-python3 -m codecov -f .coverage/coverage.xml -n "VS$env:compiler $env:arch $env:backend" -c $env:SOURCE_VERSION
diff --git a/ci/run.ps1 b/ci/run.ps1
index d3fda2d8b70e..5b754bd3647b 100644
--- a/ci/run.ps1
+++ b/ci/run.ps1
@@ -8,22 +8,21 @@ if ($LastExitCode -ne 0) {
$env:Path = ($env:Path.Split(';') | Where-Object { $_ -notmatch 'mingw|Strawberry|Chocolatey|PostgreSQL' }) -join ';'
if ($env:arch -eq 'x64') {
- rustup default 1.77
# Rust puts its shared stdlib in a secret place, but it is needed to run tests.
- $env:Path += ";$HOME/.rustup/toolchains/1.77-x86_64-pc-windows-msvc/bin"
+ $env:Path += ";$HOME/.rustup/toolchains/stable-x86_64-pc-windows-msvc/bin"
} elseif ($env:arch -eq 'x86') {
# Switch to the x86 Rust toolchain
- rustup default 1.77-i686-pc-windows-msvc
+ rustup default stable-i686-pc-windows-msvc
+
+ # Also install clippy
+ rustup component add clippy
# Rust puts its shared stdlib in a secret place, but it is needed to run tests.
- $env:Path += ";$HOME/.rustup/toolchains/1.77-i686-pc-windows-msvc/bin"
+ $env:Path += ";$HOME/.rustup/toolchains/stable-i686-pc-windows-msvc/bin"
# Need 32-bit Python for tests that need the Python dependency
$env:Path = "C:\hostedtoolcache\windows\Python\3.7.9\x86;C:\hostedtoolcache\windows\Python\3.7.9\x86\Scripts;$env:Path"
}
-# Also install clippy
-rustup component add clippy
-
# Set the CI env var for the meson test framework
$env:CI = '1'
@@ -93,7 +92,7 @@ python --version
# Needed for running unit tests in parallel.
echo ""
-python -m pip --disable-pip-version-check install --upgrade pefile pytest-xdist pytest-subtests fastjsonschema coverage
+python -m pip --disable-pip-version-check install --upgrade pefile pytest-xdist pytest-subtests fastjsonschema
# Needed for running the Cython tests
python -m pip --disable-pip-version-check install cython
@@ -103,6 +102,6 @@ echo "=== Start running tests ==="
# Starting from VS2019 Powershell(?) will fail the test run
# if it prints anything to stderr. Python's test runner
# does that by default so we need to forward it.
-cmd /c "python 2>&1 ./tools/run_with_cov.py run_tests.py --backend $env:backend $env:extraargs"
+cmd /c "python 2>&1 run_tests.py --backend $env:backend $env:extraargs"
exit $LastExitCode
diff --git a/ci/usercustomize.py b/ci/usercustomize.py
deleted file mode 100644
index d72c6ad2d4c3..000000000000
--- a/ci/usercustomize.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# SPDX-License-Identifier: Apache-2.0
-# Copyright 2021 The Meson development team
-
-import coverage
-coverage.process_startup()
diff --git a/cross/ubuntu-armhf.txt b/cross/ubuntu-armhf.txt
index 6409e396b577..97a1c21e8682 100644
--- a/cross/ubuntu-armhf.txt
+++ b/cross/ubuntu-armhf.txt
@@ -4,6 +4,7 @@
c = ['/usr/bin/arm-linux-gnueabihf-gcc']
cpp = ['/usr/bin/arm-linux-gnueabihf-g++']
rust = ['rustc', '--target', 'arm-unknown-linux-gnueabihf', '-C', 'linker=/usr/bin/arm-linux-gnueabihf-gcc-7']
+rustdoc = ['rustdoc', '--target', 'arm-unknown-linux-gnueabihf', '-C', 'linker=/usr/bin/arm-linux-gnueabihf-gcc-7']
ar = '/usr/arm-linux-gnueabihf/bin/ar'
strip = '/usr/arm-linux-gnueabihf/bin/strip'
pkg-config = '/usr/bin/arm-linux-gnueabihf-pkg-config'
diff --git a/cross/wasm.txt b/cross/wasm.txt
index 2a64319789a5..1b589d61ba33 100644
--- a/cross/wasm.txt
+++ b/cross/wasm.txt
@@ -2,6 +2,7 @@
c = 'emcc'
cpp = 'em++'
ar = 'emar'
+exe_wrapper = 'node'
[built-in options]
c_args = []
diff --git a/data/macros.meson b/data/macros.meson
index dcac9d98553c..597741092734 100644
--- a/data/macros.meson
+++ b/data/macros.meson
@@ -45,3 +45,11 @@
--num-processes %{_smp_build_ncpus} \
--print-errorlogs \
%{nil}}
+
+# Declarative buildsystem, requires RPM 4.20+ to work
+# https://rpm-software-management.github.io/rpm/manual/buildsystem.html
+%buildsystem_meson_conf() %meson %*
+%buildsystem_meson_generate_buildrequires() %{nil}
+%buildsystem_meson_build() %meson_build %*
+%buildsystem_meson_install() %meson_install %*
+%buildsystem_meson_check() %meson_test %*
diff --git a/data/shell-completions/bash/meson b/data/shell-completions/bash/meson
index 0814342dbe2a..404369818dad 100644
--- a/data/shell-completions/bash/meson
+++ b/data/shell-completions/bash/meson
@@ -408,6 +408,7 @@ _meson-install() {
shortopts=(
h
n
+ q
C
)
diff --git a/data/shell-completions/zsh/_meson b/data/shell-completions/zsh/_meson
index 8178060b4eda..6507b6819b4d 100644
--- a/data/shell-completions/zsh/_meson
+++ b/data/shell-completions/zsh/_meson
@@ -215,9 +215,14 @@ local -a meson_commands=(
local curcontext="$curcontext"
local -a specs=(
"$__meson_cd"
- '--no-rebuild[Do not rebuild before installing]'
- '--only-changed[Do not overwrite files that are older than the copied file]'
- '--quiet[Do not print every file that was installed]'
+ '--no-rebuild[do not rebuild before installing]'
+ '--only-changed[do not overwrite files that are older than the copied file]'
+ '(--quiet -q)'{'--quiet','-q'}'[do not print every file that was installed]'
+ '--destdir[set or override DESTDIR environment]: :_directories'
+ '(--dry-run -d)'{'--dry-run','-d'}'[do not actually install, only print logs]'
+ '--skip-subprojects[do not install files from given subprojects]: : '
+ '--tags[install only targets having one of the given tags]: :_values -s , tag devel runtime python-runtime man doc i18n typelib bin bin-devel tests systemtap'
+ '--strip[strip targets even if strip option was not set during configure]'
)
_arguments \
'(: -)'{'--help','-h'}'[show a help message and quit]' \
@@ -251,7 +256,7 @@ _arguments \
'(-n --name)'{'-n','--name'}'=[the name of the project (defaults to directory name)]'
'(-e --executable)'{'-e','--executable'}'=[the name of the executable target to create (defaults to project name)]'
'(-d --deps)'{'-d','--deps'}'=[comma separated list of dependencies]'
- '(-l --language)'{'-l','--language'}'=[comma separated list of languages (autodetected based on sources if unset)]:languages:_values , (c cpp cs cuda d fortran java objc objcpp rust)'
+ '(-l --language)'{'-l','--language'}'=[comma separated list of languages (autodetected based on sources if unset)]:languages:_values -s , language c cpp cs cuda d fortran java objc objcpp rust'
'(-b --build)'{'-b','--build'}'[build the project immediately after generation]'
'--builddir=[directory for building]:directory:_directories'
'(-f --force)'{'-f','--force'}'[overwrite any existing files and directories]'
diff --git a/docs/markdown/Build-options.md b/docs/markdown/Build-options.md
index d615db6891b0..190705d77802 100644
--- a/docs/markdown/Build-options.md
+++ b/docs/markdown/Build-options.md
@@ -222,7 +222,7 @@ a colon:
$ meson configure -Dsubproject:option=newvalue
```
-**NOTE:** If you cannot call `meson configure` you likely have a old
+**NOTE:** If you cannot call `meson configure` you likely have an old
version of Meson. In that case you can call `mesonconf` instead, but
that is deprecated in newer versions
diff --git a/docs/markdown/Builtin-options.md b/docs/markdown/Builtin-options.md
index f16a46ffebea..faf7a6088d06 100644
--- a/docs/markdown/Builtin-options.md
+++ b/docs/markdown/Builtin-options.md
@@ -76,7 +76,7 @@ machine](#specifying-options-per-machine) section for details.
| -------------------------------------- | ------------- | ----------- | -------------- | ----------------- |
| auto_features {enabled, disabled, auto} | auto | Override value of all 'auto' features | no | no |
| backend {ninja, vs, vs2010, vs2012, vs2013, vs2015, vs2017, vs2019, vs2022, xcode, none} | ninja | Backend to use | no | no |
-| genvslite {vs2022} | vs2022 | Setup multi-builtype ninja build directories and Visual Studio solution | no | no |
+| genvslite {vs2022} | vs2022 | Setup multi-buildtype ninja build directories and Visual Studio solution | no | no |
| buildtype {plain, debug, debugoptimized, release, minsize, custom} | debug | Build type to use | no | no |
| debug | true | Enable debug symbols and other information | no | no |
| default_both_libraries {shared, static, auto} | shared | Default library type for both_libraries | no | no |
@@ -98,6 +98,8 @@ machine](#specifying-options-per-machine) section for details.
| force_fallback_for | [] | Force fallback for those dependencies | no | no |
| vsenv | false | Activate Visual Studio environment | no | no |
+(For the Rust language only, `warning_level=0` disables all warnings).
+
#### Details for `backend`
Several build file formats are supported as command runners to build the
@@ -113,7 +115,7 @@ for a lighter automated build pipeline.
Setup multiple buildtype-suffixed, ninja-backend build directories (e.g.
[builddir]_[debug/release/etc.]) and generate [builddir]_vs containing a Visual
Studio solution with multiple configurations that invoke a meson compile of the
-setup build directories, as appropriate for the current configuration (builtype).
+setup build directories, as appropriate for the current configuration (buildtype).
This has the effect of a simple setup macro of multiple 'meson setup ...'
invocations with a set of different buildtype values. E.g.
@@ -150,7 +152,7 @@ All other combinations of `debug` and `optimization` set `buildtype` to `'custom
#### Details for `warning_level`
-Exact flags per warning level is compiler specific, but there is an approximative
+Exact flags per warning level is compiler specific, but there is an approximate
table for most common compilers.
| Warning level | GCC/Clang | MSVC |
@@ -181,11 +183,16 @@ fails.
#### Details for `default_both_libraries`
-Since `1.6.0`, you can select the default type of library selected when using
-a `both_libraries` object. This can be either 'shared' (default value, compatible
-with previous meson versions), 'static', or 'auto'. With auto, the value from
-`default_library` option is used, unless it is 'both', in which case 'shared'
-is used instead.
+Since `1.6.0`, you can specify the default type of library selected when using a
+`both_libraries` object with `default_both_libraries`. Note that, unlike
+`default_library`, this option does not affect how the library artifacts are
+built, but how they are internally linked to the dependent targets within the
+same project.
+
+The possible values of this option are 'shared' (default value, compatible with
+previous meson versions), 'static', and 'auto'. With auto, the value from the
+`default_library` option is used, unless it is 'both', in which case 'shared' is
+used instead.
When `default_both_libraries` is 'auto', passing a [[@both_libs]] dependency
in [[both_libraries]] will link the static dependency with the static lib,
@@ -224,10 +231,20 @@ available on all platforms or with all compilers:
| b_pie | false | true, false | Build position-independent executables (since 0.49.0) |
| b_vscrt | from_buildtype | none, md, mdd, mt, mtd, from_buildtype, static_from_buildtype | VS runtime library to use (since 0.48.0) (static_from_buildtype since 0.56.0) |
-The value of `b_sanitize` can be one of: `none`, `address`, `thread`,
-`undefined`, `memory`, `leak`, `address,undefined`, but note that some
-compilers might not support all of them. For example Visual Studio
-only supports the address sanitizer.
+The default and possible values of sanitizers changed in 1.8. Before 1.8 they
+were string values, and restricted to a specific subset of values: `none`,
+`address`, `thread`, `undefined`, `memory`, `leak`, or `address,undefined`. In
+1.8 it was changed to a free form array of sanitizers, which are checked by a
+compiler and linker check. For backwards compatibility reasons
+`get_option('b_sanitize')` continues to return a string with the array values
+separated by a comma. Furthermore:
+
+ - If the `b_sanitize` option is empty, the `'none'` string is returned.
+
+ - If it contains only the values `'address'` and `'undefined'`, they are
+ always returned as the `'address,undefined'` string, in this order.
+
+ - Otherwise, the array elements are returned in undefined order.
\* < 0 means disable, == 0 means automatic selection, > 0 sets a specific number to use
@@ -255,8 +272,7 @@ with `b_asneeded`, so that option will be silently disabled.
[[shared_module]]s will not have
bitcode embedded because `-Wl,-bitcode_bundle` is incompatible with
-both `-bundle` and `-Wl,-undefined,dynamic_lookup` which are necessary
-for shared modules to work.
+`-Wl,-undefined,dynamic_lookup` which is necessary for shared modules to work.
## Compiler options
@@ -271,7 +287,7 @@ or compiler being used:
| ------ | ------------- | --------------- | ----------- |
| c_args | | free-form comma-separated list | C compile arguments to use |
| c_link_args | | free-form comma-separated list | C link arguments to use |
-| c_std | none | none, c89, c99, c11, c17, c18, c2x, c23, gnu89, gnu99, gnu11, gnu17, gnu18, gnu2x, gnu23 | C language standard to use |
+| c_std | none | none, c89, c99, c11, c17, c18, c2x, c23, c2y, gnu89, gnu99, gnu11, gnu17, gnu18, gnu2x, gnu23, gnu2y | C language standard to use |
| c_winlibs | see below | free-form comma-separated list | Standard Windows libs to link against |
| c_thread_count | 4 | integer value ≥ 0 | Number of threads to use with emcc when using threads |
| cpp_args | | free-form comma-separated list | C++ compile arguments to use |
diff --git a/docs/markdown/CMake-module.md b/docs/markdown/CMake-module.md
index f8275c981733..982fa35d086e 100644
--- a/docs/markdown/CMake-module.md
+++ b/docs/markdown/CMake-module.md
@@ -138,8 +138,8 @@ and supports the following methods:
`include_type` kwarg *(new in 0.56.0)* controls the include type of the
returned dependency object similar to the same kwarg in the
[[dependency]] function.
- - `include_directories(target)` returns a Meson [[@inc]]
- object for the specified target. Using this method is not necessary
+ - `include_directories(target)` returns an array of Meson [[@inc]]
+ objects for the specified target. Using this method is not necessary
if the dependency object is used.
- `target(target)` returns the raw build target.
- `target_type(target)` returns the type of the target as a string
diff --git a/docs/markdown/Commands.md b/docs/markdown/Commands.md
index 8e34800a44b3..247f2d74e272 100644
--- a/docs/markdown/Commands.md
+++ b/docs/markdown/Commands.md
@@ -473,7 +473,7 @@ The following options are recognized:
- tab_width (int): Width of tab stops, used to compute line length
when `indent_by` uses tab characters (default is 4).
- sort_files (bool): When true, arguments of `files()` function are
- sorted alphabetically (default is false).
+ sorted alphabetically (default is true).
- group_arg_value (bool): When true, string argument with `--` prefix
followed by string argument without `--` prefix are grouped on the
same line, in multiline arguments (default is false).
@@ -489,6 +489,10 @@ When `--recursive` option is specified, `meson.build` files from
`subdir` are also analyzed (must be used in conjunction with `--inplace`
or `--check-only` option).
+*Since 1.7.0* You can use `-` as source file name to read source from standard
+input instead of reading it from a file. This cannot be used with `--recursive`
+or `--inplace` arguments.
+
#### Differences with `muon fmt`
diff --git a/docs/markdown/Compiler-properties.md b/docs/markdown/Compiler-properties.md
index 6d04c8bd79aa..7eea36891db8 100644
--- a/docs/markdown/Compiler-properties.md
+++ b/docs/markdown/Compiler-properties.md
@@ -194,6 +194,18 @@ correctly report the function as missing. Without the header however,
it would lack the necessary availability information and incorrectly
report the function as available.
+## Is a macro defined?
+
+Macro detection can often be useful to determine if non-standard features
+are supported on your target platform. Fortunately, Meson makes it
+easy to check if a macro is defined:
+
+```meson
+if [[#compiler.has_define]]('__SIZEOF_INT128__')
+ # macro is defined, do whatever is required
+endif
+```
+
## Does a structure contain a member?
Some platforms have different standard structures. Here's how one
diff --git a/docs/markdown/Configuring-a-build-directory.md b/docs/markdown/Configuring-a-build-directory.md
index db6fc03ef453..974b7ae998d2 100644
--- a/docs/markdown/Configuring-a-build-directory.md
+++ b/docs/markdown/Configuring-a-build-directory.md
@@ -61,7 +61,7 @@ a sample output for a simple project.
------ ------------- --------------- -----------
c_args [] Extra arguments passed to the C compiler
c_link_args [] Extra arguments passed to the C linker
- c_std c99 [none, c89, c99, c11, c17, c18, c2x, c23, gnu89, gnu99, gnu11, gnu17, gnu18, gnu2x, gnu23] C language standard to use
+ c_std c99 [none, c89, c99, c11, c17, c18, c2x, c23, c2y, gnu89, gnu99, gnu11, gnu17, gnu18, gnu2x, gnu23, gnu2y] C language standard to use
cpp_args [] Extra arguments passed to the C++ compiler
cpp_debugstl false [true, false] STL debug mode
cpp_link_args [] Extra arguments passed to the C++ linker
@@ -119,3 +119,40 @@ by invoking [`meson configure`](Commands.md#configure) with the
project source directory or the path to the root `meson.build`. In
this case, Meson will print the default values of all options similar
to the example output from above.
+
+## Per project subproject options rewrite (Since 1.8)
+
+A common requirement when building large projects with many
+subprojects is to build some (or all) subprojects with project options
+that are different from the "main project". This has been sort of
+possible in a limited way but is now natively supported. Per project
+options can be added, changed and removed at runtime using the command
+line, in other words, without editing existing `meson.build` files.
+
+Starting with version 1.8 you can specify per-project option settings.
+These can be specified for every top level (i.e. not project) option.
+Suppose you have a project that has a single subproject called
+`numbercruncher` that does heavy computation. During development you
+want to build that subproject with optimizations enabled but your main
+project without optimizations. This can be done by specifying a custom
+value to the given subproject:
+
+ meson configure -Dnumbercruncher:optimization=3
+
+Another case might be that you want to build with warnings as errors,
+but some subproject does not support it. To configure `werror` per
+subproject you can do:
+
+ meson configure -Dwerror=true -Dnaughty:werror=false
+
+You can also specify a different value on the top level project. For
+example you could enable optimizations on all subprojects but not the
+top level project:
+
+ meson configure -Doptimization=2 -D:optimization=0
+
+Note the colon after the second `D`.
+
+Subproject specific values can be removed with `-U`:
+
+    meson configure -Usubproject:optionname
diff --git a/docs/markdown/Contributing.md b/docs/markdown/Contributing.md
index 8f796ab65530..729dabf5cca0 100644
--- a/docs/markdown/Contributing.md
+++ b/docs/markdown/Contributing.md
@@ -501,11 +501,58 @@ those are simple.
- indent 4 spaces, no tabs ever
- brace always on the same line as if/for/else/function definition
-## External dependencies
+## Dependency support policy
The goal of Meson is to be as easily usable as possible. The user
experience should be "get Python3 and Ninja, run", even on
-Windows. Unfortunately this means that we can't have dependencies on
+Windows.
+
+Additionally, Meson is popularly used in many core infrastructure packages in a
+Unix (and particularly, Linux) userland. This includes:
+- package managers, such as pacman (Arch Linux) and portage (Gentoo)
+- init systems (systemd, openrc, dinit)
+- graphics stacks (xorg, wayland, libdrm, Mesa, gtk)
+
+As such it needs to be able to run early on when bootstrapping a system from
+scratch.
+
+### Python
+
+We will always support all non EOL versions of CPython. Yes, there are people
+out there using and depending on every old version of python. In fact, there
+are people using and depending on systems that had a brand new python at the
+time of release, but with a much longer support cycle than Python itself. We
+need to balance the tradeoff between supporting those systems and being able to
+improve our own codebase and code quality.
+
+Meson will also be *honest* about what versions of python it supports. When a
+version of CPython becomes EOL, it becomes eligible to be removed from our
+support policy. We cannot guarantee continued support forever for software that
+is not supported by its own developers, even if some deprecated LTS systems out
+there still ship it. However, that doesn't mean we will drop support for those
+versions simply because they are old. If we are not using new functionality
+from new python versions, we will continue to mark Meson as compatible with the
+older version -- and test it in CI!
+
+(Note that contrary to popular belief, it is actually easier to test support
+for very old versions of python than it is to drop support for it. We already
+have the CI setup necessary for testing. Upgrading the CI to use newer versions
+of python, on the other hand, represents mildly painful administrative work
+that has to be done.)
+
+So, in order to start requiring a newer version of python, one should check a
+few factors:
+- are the older versions being dropped, already EOL? [Python EOL chart](https://endoflife.date/python)
+- document the new minimum version of corresponding OSes
+- rationalize the benefit of the change in terms of improvements to development
+ and maintenance of Meson. What new language features will be unlocked by the
+ upgrade, that Meson will be able to make good use of? Not every version has
+ new features requiring an upgrade, and not every new feature is so great we
+ need to drop everything to use it
+
+### External dependencies
+
+Unfortunately this also means that we can't have dependencies on
projects outside of Python's standard library. This applies only to
core functionality, though. For additional helper programs etc the use
of external dependencies may be ok. If you feel that you are dealing
diff --git a/docs/markdown/Creating-OSX-packages.md b/docs/markdown/Creating-OSX-packages.md
index 7e0d05bf42ca..f31249f3f7d3 100644
--- a/docs/markdown/Creating-OSX-packages.md
+++ b/docs/markdown/Creating-OSX-packages.md
@@ -151,7 +151,7 @@ More information is available on the tool's documentation page.
A .dmg installer is similarly quite simple, at its core it is
basically a fancy compressed archive. A good description can be found
-on [this page](https://el-tramo.be/guides/fancy-dmg/). Please read it
+on [this page](https://mko.re/blog/fancy-dmg/). Please read it
and create a template image file according to its instructions.
The actual process of creating the installer is very simple: you mount
diff --git a/docs/markdown/Creating-releases.md b/docs/markdown/Creating-releases.md
index e0c80af5b0e8..44c127eec57b 100644
--- a/docs/markdown/Creating-releases.md
+++ b/docs/markdown/Creating-releases.md
@@ -31,6 +31,13 @@ Meson then takes this archive and tests that it works by doing a full
`compile` + `test` + `install` cycle. If all these pass, Meson will
then create a `SHA-256` checksum file next to the archive.
+## Modifying the dist directory before creating the archive
+
+Modification to the checked out files like generating files or
+setting version info can be done with dist scripts:
+```meson
+[[#meson.add_dist_script]]('distscript.sh')
+```
## Autotools dist VS Meson dist
diff --git a/docs/markdown/Cross-compilation.md b/docs/markdown/Cross-compilation.md
index 0cfef71b9cd0..64196f3bde10 100644
--- a/docs/markdown/Cross-compilation.md
+++ b/docs/markdown/Cross-compilation.md
@@ -85,7 +85,7 @@ their user interface much more complex.
The most complicated case is when you cross-compile a cross compiler.
As an example you can, on a Linux machine, generate a cross compiler
-that runs on Windows but produces binaries on MIPS Linux. In this case
+that runs on Windows but produces binaries for MIPS Linux. In this case
*build machine* is x86 Linux, *host machine* is x86 Windows and
*target machine* is MIPS Linux. This setup is known as the [Canadian
Cross](https://en.wikipedia.org/wiki/Cross_compiler#Canadian_Cross).
diff --git a/docs/markdown/Dependencies.md b/docs/markdown/Dependencies.md
index e624909e0875..097856b4cb00 100644
--- a/docs/markdown/Dependencies.md
+++ b/docs/markdown/Dependencies.md
@@ -475,6 +475,15 @@ accelerate_dep = dependency('accelerate',
)
```
+## atomic (stdatomic)
+
+*(added 1.7.0)*
+
+Provides access to the atomic operations library. This first attempts
+to look for a valid atomic external library before trying to fallback
+to what is provided by the C runtime libraries.
+
+`method` may be `auto`, `builtin` or `system`.
## Blocks
@@ -576,6 +585,34 @@ foreach h : check_headers
endforeach
```
+## DIA SDK
+
+*(added 1.6.0)*
+
+Microsoft's Debug Interface Access SDK (DIA SDK) is available only on Windows,
+when using msvc, clang-cl or clang compiler from Microsoft Visual Studio.
+
+The DIA SDK runtime is not statically linked to target. The default usage
+method requires the runtime DLL (msdiaXXX.dll) to be manually registered in the
+OS with `regsvr32.exe` command, so it can be loaded using `CoCreateInstance`
+Windows function.
+
+Alternatively, you can use meson to copy the DIA runtime DLL to your build
+directory, and load it dynamically using `NoRegCoCreate` function provided by
+the DIA SDK. To facilitate this, you can read DLL path from dependency's
+variable 'dll' and use fs module to copy it. Example:
+
+```meson
+dia = dependency('diasdk', required: true)
+fs = import('fs')
+fs.copyfile(dia.get_variable('dll'))
+
+conf = configuration_data()
+conf.set('msdia_dll_name', fs.name(dia_dll_name))
+```
+
+Only the major version is available (e.g. version is `14` for msdia140.dll).
+
## dl (libdl)
*(added 0.62.0)*
@@ -597,18 +634,18 @@ providing them instead.
GCC will use OpenCoarrays if present to implement coarrays, while Intel and NAG
use internal coarray support.
-## GPGME
-
-*(added 0.51.0)*
-
-`method` may be `auto`, `config-tool` or `pkg-config`.
-
## GL
This finds the OpenGL library in a way appropriate to the platform.
`method` may be `auto`, `pkg-config` or `system`.
+## GPGME
+
+*(added 0.51.0)*
+
+`method` may be `auto`, `config-tool` or `pkg-config`.
+
## GTest and GMock
GTest and GMock come as sources that must be compiled as part of your
@@ -798,6 +835,14 @@ language-specific, you must specify the requested language using the
Meson uses pkg-config to find NetCDF.
+## NumPy
+
+*(added 1.4.0)*
+
+`method` may be `auto`, `pkg-config`, or `config-tool`.
+`dependency('numpy')` supports regular use of the NumPy C API.
+Use of `numpy.f2py` for binding Fortran code isn't yet supported.
+
## ObjFW
*(added 1.5.0)*
@@ -843,14 +888,6 @@ The `language` keyword may used.
`method` may be `auto`, `pkg-config`, `system` or `cmake`.
-## NumPy
-
-*(added 1.4.0)*
-
-`method` may be `auto`, `pkg-config`, or `config-tool`.
-`dependency('numpy')` supports regular use of the NumPy C API.
-Use of `numpy.f2py` for binding Fortran code isn't yet supported.
-
## pcap
*(added 0.42.0)*
@@ -1008,34 +1045,6 @@ version.
*New in 0.54.0* the `system` method.
-## DIA SDK
-
-*(added 1.6.0)*
-
-Microsoft's Debug Interface Access SDK (DIA SDK) is available only on Windows,
-when using msvc, clang-cl or clang compiler from Microsoft Visual Studio.
-
-The DIA SDK runtime is not statically linked to target. The default usage
-method requires the runtime DLL (msdiaXXX.dll) to be manually registered in the
-OS with `regsrv32.exe` command, so it can be loaded using `CoCreateInstance`
-Windows function.
-
-Alternatively, you can use meson to copy the DIA runtime DLL to your build
-directory, and load it dynamically using `NoRegCoCreate` function provided by
-the DIA SDK. To facilitate this, you can read DLL path from dependency's
-variable 'dll' and use fs module to copy it. Example:
-
-```meson
-dia = dependency('diasdk', required: true)
-fs = import('fs')
-fs.copyfile(dia.get_variable('dll'))
-
-conf = configuration_data()
-conf.set('msdia_dll_name', fs.name(dia_dll_name))
-```
-
-Only the major version is available (eg. version is `14` for msdia140.dll).
-
1 : They may appear to be case-insensitive, if the
underlying file system happens to be case-insensitive.
diff --git a/docs/markdown/Design-rationale.md b/docs/markdown/Design-rationale.md
index 67fec0ab36d2..41339794d542 100644
--- a/docs/markdown/Design-rationale.md
+++ b/docs/markdown/Design-rationale.md
@@ -236,7 +236,7 @@ Above we mentioned precompiled headers as a feature not supported by
other build systems. Here's how you would use them.
```meson
-project('pch demo', 'cxx')
+project('pch demo', 'cpp')
executable('myapp', 'myapp.cpp', pch : 'pch/myapp.hh')
```
diff --git a/docs/markdown/Donating.md b/docs/markdown/Donating.md
new file mode 100644
index 000000000000..a7f823ebaf52
--- /dev/null
+++ b/docs/markdown/Donating.md
@@ -0,0 +1,15 @@
+---
+short-description: Donating to support Meson development
+...
+
+# Donating to support Meson development
+
+Currently, meson takes good advantage of various hosting resources for OSS, and
+does not have any real project expenses. As a result, the project does not
+directly accept donations.
+
+The best way to fund Meson development is to directly donate to the core
+maintainers team, whether as a "thank you" for the work we have already done or
+to help make it easier for us to spend more time working on improving Meson. The current Meson maintainers offer the following sponsorship options:
+
+- [Eli Schwartz](https://github.com/eli-schwartz) has enabled [GitHub Sponsors](https://github.com/sponsors/eli-schwartz)
diff --git a/docs/markdown/External-Project-module.md b/docs/markdown/External-Project-module.md
index 615c6c117d0c..39449fe21102 100644
--- a/docs/markdown/External-Project-module.md
+++ b/docs/markdown/External-Project-module.md
@@ -5,6 +5,8 @@
*This is an experimental module, API could change.*
+*Added 0.56.0*
+
This module allows building code that uses build systems other than
Meson. This module is intended to be used to build Autotools
subprojects as fallback if the dependency couldn't be found on the
@@ -47,7 +49,8 @@ Known limitations:
from `-uninstalled.pc` files. This is arguably a bug that could be fixed in
future version of pkg-config/pkgconf.
-*Added 0.56.0*
+*Since 1.7.0* [Meson devenv](Commands.md#devenv) sets up `PATH` and
+`LD_LIBRARY_PATH` to be able to run programs.
## Functions
@@ -78,7 +81,8 @@ Keyword arguments:
added in case some tags are not found in `configure_options`:
`'--prefix=@PREFIX@'`, `'--libdir=@PREFIX@/@LIBDIR@'`, and
`'--includedir=@PREFIX@/@INCLUDEDIR@'`. It was previously considered a fatal
- error to not specify them.
+ error to not specify them. *Since 1.7.0* `@BINDIR@` and `'--bindir=@PREFIX@/@BINDIR@'`
+ default arguments have been added.
- `cross_configure_options`: Extra options appended to `configure_options` only
when cross compiling. special tag `@HOST@` will be replaced by
`'{}-{}-{}'.format(host_machine.cpu_family(), build_machine.system(), host_machine.system()`.
diff --git a/docs/markdown/FAQ.md b/docs/markdown/FAQ.md
index 810e9da23e95..faa4becbe130 100644
--- a/docs/markdown/FAQ.md
+++ b/docs/markdown/FAQ.md
@@ -285,7 +285,7 @@ or they are not called (due to e.g. `if/else`) then nothing is
downloaded.
If this is not sufficient for you, starting from release 0.40.0 Meson
-has a option called `wrap-mode` which can be used to disable wrap
+has an option called `wrap-mode` which can be used to disable wrap
downloads altogether with `--wrap-mode=nodownload`. You can also
disable dependency fallbacks altogether with `--wrap-mode=nofallback`,
which also implies the `nodownload` option.
@@ -333,6 +333,24 @@ that could fulfill these requirements:
Out of these we have chosen Python because it is the best fit for our
needs.
+## Do you at least support my ancient python install?
+
+Yes! :) We have a relatively sedate version support policy. You can read about
+it in the [Contributing documentation](Contributing.md#python)
+
+We are also willing to support old versions of meson as LTS releases,
+particularly, if it is the final version to support a given python version. If
+you have a use case, please discuss it with us and be willing to help backport
+bug fixes.
+
+- python 3.5: [supported through Meson 0.56.2](Release-notes-for-0.56.0.md#python-35-support-will-be-dropped-in-the-next-release)
+- python 3.6: [supported through Meson 0.61.5](Release-notes-for-0.61.0.md#python-36-support-will-be-dropped-in-the-next-release)
+- python 3.7: currently actively supported by Meson
+
+We encourage projects to support a wide range of Meson versions if they are not
+actually using the latest features anyway. In many, many cases it is quite
+practical to support e.g. Meson 0.61.
+
## But I really want a version of Meson that doesn't use python!
Ecosystem diversity is good. We encourage interested users to write this
diff --git a/docs/markdown/Gnome-module.md b/docs/markdown/Gnome-module.md
index 013e8c8241ce..e8953efc9cd5 100644
--- a/docs/markdown/Gnome-module.md
+++ b/docs/markdown/Gnome-module.md
@@ -93,6 +93,7 @@ There are several keyword arguments. Many of these map directly to the
`g-ir-scanner` tool so see its documentation for more information.
* `dependencies`: deps to use during introspection scanning
+* `doc_format`: (*Added 1.8.0*) format of the inline documentation
* `extra_args`: command line arguments to pass to gir compiler
* `env`: (*Added 1.2.0*) environment variables to set, such as
`{'NAME1': 'value1', 'NAME2': 'value2'}` or `['NAME1=value1', 'NAME2=value2']`,
@@ -105,7 +106,8 @@ There are several keyword arguments. Many of these map directly to the
* `identifier_prefix`: the identifier prefix for the gir object,
e.g. `Gtk`
* `includes`: list of gir names to be included, can also be a GirTarget
-* `header`: *(Added 0.43.0)* name of main c header to include for the library, e.g. `glib.h`
+* `header`: *(Added 0.43.0)* name of main c header to include for the library,
+ e.g. `glib.h`, (*Since 0.61.0*) a list of headers is allowed
* `include_directories`: extra include paths to look for gir files
* `install`: if true, install the generated files
* `install_gir`: (*Added 0.61.0*) overrides `install`, whether to install the
diff --git a/docs/markdown/Hotdoc-module.md b/docs/markdown/Hotdoc-module.md
index d33dd3e7c304..cf972ca35186 100644
--- a/docs/markdown/Hotdoc-module.md
+++ b/docs/markdown/Hotdoc-module.md
@@ -33,12 +33,19 @@ Generates documentation using [hotdoc] and installs it into `$prefix/share/doc/h
* `sitemap` ([[@str]] or [[@file]]) (**required**): The hotdoc sitemap file
* `index` ([[@str]] or [[@file]]) (**required**): Location of the index file
-* `dependencies`([[@build_tgt]]): Targets on which the documentation generation depends on.
+* `dependencies`([[@build_tgt]]): Build targets to use when generating documentation.
+* `depends`([[@custom_tgt]]): Custom targets on which this documentation target depends.
* `subprojects`: A list of `HotdocTarget` that are used as subprojects for hotdoc to generate
the documentation.
* ... Any argument of `hotdoc` can be used replacing dashes (`-`) with underscores (`_`).
For a full list of available parameters, just have a look at `hotdoc help`.
+*Changed in 0.64.1:* `depends:` added.
+Previously, all types of targets were accepted by `dependencies:`.
+This is now deprecated.
+Use `dependencies:` only with build targets, to pass their configuration to hotdoc.
+Use `depends:` to set up dependency relationships on custom targets.
+
**Returns:**
`HotdocTarget`: A [[custom_target]] with the
@@ -72,4 +79,4 @@ hotdoc.generate_doc('foobar',
)
```
-[hotdoc]: https://hotdoc.github.io/
\ No newline at end of file
+[hotdoc]: https://hotdoc.github.io/
diff --git a/docs/markdown/IDE-integration.md b/docs/markdown/IDE-integration.md
index 77edb755d388..a8e9197f785c 100644
--- a/docs/markdown/IDE-integration.md
+++ b/docs/markdown/IDE-integration.md
@@ -10,7 +10,7 @@ this problem, Meson provides an API that makes it easy for any IDE or
build tools to integrate Meson builds and provide an experience
comparable to a solution native to the IDE.
-All the resources required for such a IDE integration can be found in
+All the resources required for such an IDE integration can be found in
the `meson-info` directory in the build directory.
The first thing to do when setting up a Meson project in an IDE is to
@@ -93,6 +93,7 @@ can provide code completion for all source files.
```json
{
"language": "language ID",
+ "machine": "build" / "host",
"compiler": ["The", "compiler", "command"],
"parameters": ["list", "of", "compiler", "parameters"],
"sources": ["list", "of", "all", "source", "files", "for", "this", "language"],
@@ -100,6 +101,13 @@ can provide code completion for all source files.
}
```
+*(New in 1.7.0)* The `machine` and `language` keys make it possible
+to access further information about the compiler in the `compilers`
+introspection information. `machine` can be absent if `language` is
+`unknown`. In this case, information about the compiler is not
+available; Meson is therefore unable to know if the output relates
+to either the build or the host machine.
+
It should be noted that the compiler parameters stored in the
`parameters` differ from the actual parameters used to compile the
file. This is because the parameters are optimized for the usage in an
diff --git a/docs/markdown/Include-directories.md b/docs/markdown/Include-directories.md
index f9850ac91451..8b0b420f1dc7 100644
--- a/docs/markdown/Include-directories.md
+++ b/docs/markdown/Include-directories.md
@@ -7,7 +7,7 @@ short-description: Instructions on handling include directories
Most `C`/`C++` projects have headers in different directories than
sources. Thus you need to specify include directories. Let's assume
that we are at some subdirectory and wish to add its `include`
-subdirectory to some target's search path. To create a include
+subdirectory to some target's search path. To create an include
directory object we do this:
```meson
diff --git a/docs/markdown/Installing.md b/docs/markdown/Installing.md
index 2d18c178fccd..0a7ca9f16a49 100644
--- a/docs/markdown/Installing.md
+++ b/docs/markdown/Installing.md
@@ -115,7 +115,7 @@ running `sudo meson install` will drop permissions and rebuild out of date
targets as the original user, not as root.
*(since 1.1.0)* Re-invoking as root will try to guess the user's preferred method for
-re-running commands as root. The order of precedence is: sudo, doas, pkexec
+re-running commands as root. The order of precedence is: sudo, doas, run0, pkexec
(polkit). An elevation tool can be forced by setting `$MESON_ROOT_CMD`.
## DESTDIR support
diff --git a/docs/markdown/Python-module.md b/docs/markdown/Python-module.md
index f3ee9ffc5a98..66081762d795 100644
--- a/docs/markdown/Python-module.md
+++ b/docs/markdown/Python-module.md
@@ -18,7 +18,7 @@ compatible with [PEP-517](https://peps.python.org/pep-0517/), check out
If you are building Python extension modules against a Python interpreter
located in a venv or Conda environment, you probably want to set
-`python.install_venv=auto`;
+`python.install_env=auto`;
see [Python module options](Builtin-options.md#python-module) for details.
*Added 0.46.0*
@@ -87,10 +87,22 @@ added methods.
str py_installation.path()
```
-*Added 0.50.0*
+*(since 0.50.0)*
-Works like the path method of other `ExternalProgram` objects. Was not
-provided prior to 0.50.0 due to a bug.
+*Deprecated in 0.55: use `full_path()` instead*
+
+Works like the path method of `ExternalProgram` objects. Was not provided prior
+to 0.50.0 due to a bug.
+
+#### `full_path()`
+
+```meson
+str py_installation.full_path()
+```
+
+*(since 0.55.0)*
+
+Works like the `full_path()` method of `ExternalProgram` objects: [[external_program.full_path]]
#### `extension_module()`
@@ -121,7 +133,7 @@ Additionally, the following diverge from [[shared_module]]'s default behavior:
- `gnu_symbol_visibility`: if unset, it will default to `'hidden'` on versions
of Python that support this (the python headers define `PyMODINIT_FUNC` has
default visibility).
-
+
Note that Cython support uses `extension_module`, see [the reference for Cython](Cython.md).
*since 0.63.0* `extension_module` automatically adds a dependency to the library
@@ -158,11 +170,8 @@ void py_installation.install_sources(list_of_files, ...)
Install actual python sources (`.py`).
-All positional and keyword arguments are the same as for
-[[install_data]], with the addition of the following:
-
-*Since 0.60.0* `python.platlibdir` and `python.purelibdir` options can be used
-to control the default installation path. See [Python module options](Builtin-options.md#python-module).
+Source files to install are given as positional argument, in the same way as for
+[[install_data]]. Supported keyword arguments are:
- `pure`: On some platforms, architecture independent files are
expected to be placed in a separate directory. However, if the
@@ -177,6 +186,12 @@ to control the default installation path. See [Python module options](Builtin-op
- `install_tag` *(since 0.60.0)*: A string used by `meson install --tags` command
to install only a subset of the files. By default it has the tag `python-runtime`.
+- `preserve_path`: if `true`, disable stripping child-directories from data
+ files when installing. Default is `false`. *(since 0.64.0)*
+
+*Since 0.60.0* `python.platlibdir` and `python.purelibdir` options can be used
+to control the default installation path. See [Python module options](Builtin-options.md#python-module).
+
#### `get_install_dir()`
``` meson
diff --git a/docs/markdown/Qt6-module.md b/docs/markdown/Qt6-module.md
index 7b6f94715882..ebe42a50cc8d 100644
--- a/docs/markdown/Qt6-module.md
+++ b/docs/markdown/Qt6-module.md
@@ -69,6 +69,8 @@ It takes no positional arguments, and the following keyword arguments:
directory. For instance, when a file called `subdir/one.input` is processed
it generates a file `{target private directory}/subdir/one.out` when `true`,
and `{target private directory}/one.out` when `false` (default).
+ - `output_json` bool: *New in 1.7.0*. If `true`, additionally generates a
+ JSON representation which may be used by external tools such as qmltyperegistrar
## preprocess
@@ -111,6 +113,8 @@ This method takes the following keyword arguments:
directory. For instance, when a file called `subdir/one.input` is processed
it generates a file `{target private directory}/subdir/one.out` when `true`,
and `{target private directory}/one.out` when `false` (default).
+ - `moc_output_json` bool: *New in 1.7.0*. If `true`, additionally generates a
+ JSON representation which may be used by external tools such as qmltyperegistrar
It returns an array of targets and sources to pass to a compilation target.
@@ -160,6 +164,81 @@ This method takes the following keyword arguments:
are `moc`, `uic`, `rcc` and `lrelease`. By default `tools` is set to `['moc',
'uic', 'rcc', 'lrelease']`
+## qml_module
+
+*New in 1.7.0*
+
+This function requires one positional argument: the URI of the module as a dotted
+identifier string. For instance `Foo.Bar`
+
+This method takes the following keyword arguments:
+
+ - `version`: string: the module version in the form `Major.Minor` with an
+ optional `.Patch`. For instance `1.0`
+ - `qml_sources` (File | string | custom_target | custom_target index | generator_output)[]:
+ A list of qml files to be embedded in the module
+ - `qml_singletons` (File | string | custom_target | custom_target index | generator_output)[]:
+ A list of qml files to be embedded in the module and marked as singletons
+ - `qml_internals` (File | string | custom_target | custom_target index | generator_output)[]:
+ A list of qml files to be embedded in the module and marked as internal files
+ - `resources_prefix` string: By default `resources_prefix` is set to
+ `qt/qml`. Prefix resources in the generated QRC with the given prefix
+ - `imports`: string[]: List of other QML modules imported by this module. Version
+ can be specified as `Module/1.0` or `Module/auto`. See qmldir documentation
+ - `optional_imports`: string[]: List of other QML modules that may be imported by this
+ module. See `imports` for expected format and qmldir documentation
+ - `default_imports`: string[]: List of QML modules that may be loaded by
+ tooling. See `imports` for expected format and qmldir documentation
+ - `depends_imports`: string[]: List of QML extra dependencies that may not be
+ imported by QML, such as dependencies existing in C++ code. See `imports` for
+ expected format and qmldir documentation
+ - `designer_supported` bool: If `true` specifies that the module supports Qt
+ Quick Designer
+ - `moc_headers` (File | string | custom_target | custom_target index | generator_output)[]:
+ A list of headers to be transpiled into .cpp files. See [Qt
+ documentation](https://doc.qt.io/qt-6/qtqml-cppintegration-definetypes.html)
+ regarding how to register C++ class as Qml elements. Note: due to some
+ limitations of qmltyperegistrar, all headers that declare QML types need to
+ be accessible in the project's include path.
+ - `namespace`: str: optional C++ namespace for plugin and generation code
+ - `typeinfo`: str: optional name for the generated qmltype file, by default it
+ will be generated as `{target_name}.qmltype`
+ - `rcc_extra_arguments`: string[]: Extra arguments to pass directly to `qt-rcc`
+ - `moc_extra_arguments`: string[]: Extra arguments to pass directly to `qt-moc`
+ - `qmlcachegen_extra_arguments`: string[]: Extra arguments to pass directly to
+ `qmlcachegen`
+ - `qmltyperegistrar_extra_arguments`: string[]: Extra arguments to pass directly to
+ `qmltyperegistrar`
+ - `generate_qmldir`: bool: If `true` (default) auto generate the `qmldir` file
+ - `generate_qmltype`: bool: If `true` (default) auto generate `qmltype` file
+ - `cachegen`: bool: If `true` (default) preprocess QML and JS files with
+ qmlcachegen
+ - `method` string: The method to use to detect Qt, see [[dependency]]
+ - `preserve_paths` bool: If `true`, specifies that the output
+ files need to maintain their directory structure inside the target temporary
+ directory. For instance, when a file called `subdir/one.input` is processed
+ it generates a file `{target private directory}/subdir/one.out` when `true`,
+ and `{target private directory}/one.out` when `false` (default).
+ - `dependencies`: dependency objects whose include directories are used by
+ moc.
+ - `include_directories` (string | IncludeDirectory)[]: A list of `include_directory()`
+ objects used when transpiling the .moc files
+ - `install` bool: when true, this target is installed during the install step (optional).
+ - `install_dir` string: directory to install to (optional).
+
+
+Note: Qt uses static initialization to register its resources, if you're
+building a static library you may need to call these entry points
+explicitly. For a module `Foo.Bar42` the generated resources are `Foo_Bar42`
+and `qmlcache_Foo_Bar42` when qmlcache is used; they can be imported using
+`Q_INIT_RESOURCE`. All non-alphanumeric characters from the module name are
+replaced with `_`. Type registration may be invoked explicitly using
+`extern void qml_register_types_Foo_Bar42()`.
+
+See [Qt documentation](https://doc.qt.io/qt-6/resources.html#explicit-loading-and-unloading-of-embedded-resources)
+for more information
+
+
## Dependencies
See [Qt dependencies](Dependencies.md#qt)
@@ -200,4 +279,3 @@ lang_cpp = qt6.compile_translations(qresource: 'lang.qrc')
executable('myprog', 'main.cpp', lang_cpp,
dependencies: qt6_dep)
```
-
diff --git a/docs/markdown/Quick-guide.md b/docs/markdown/Quick-guide.md
index 14914567fda3..9b5cc14090c8 100644
--- a/docs/markdown/Quick-guide.md
+++ b/docs/markdown/Quick-guide.md
@@ -35,11 +35,10 @@ generate native VS and Xcode project files.*
Installation using package manager
--
-Ubuntu:
+Debian or Ubuntu:
```console
-$ sudo apt-get install python3 python3-pip python3-setuptools \
- python3-wheel ninja-build
+$ sudo apt-get install python3 ninja-build meson
```
*Due to our frequent release cycle and development speed, distro packaged software may quickly become outdated.*
@@ -47,13 +46,19 @@ Installation using Python
--
Requirements: **pip3**
-The best way to receive the most up-to-date version of Mesonbuild.
+This is the best way to receive the most up-to-date version of Mesonbuild.
+
+First, install dependencies using the package manager:
+```console
+$ sudo apt-get install python3 python3-pip python3-setuptools \
+ python3-wheel ninja-build
+```
-Install as a local user (recommended):
+Then, install meson as a local user (recommended):
```console
$ pip3 install --user meson
```
-Install as root:
+Or, install meson as root:
```console
# pip3 install meson
```
diff --git a/docs/markdown/Reference-tables.md b/docs/markdown/Reference-tables.md
index 6fec71b6ef1e..a5d27858e473 100644
--- a/docs/markdown/Reference-tables.md
+++ b/docs/markdown/Reference-tables.md
@@ -49,6 +49,7 @@ These are return values of the `get_id` (Compiler family) and
| armasm | Microsoft Macro Assembler for ARM and AARCH64 (Since 0.64.0) | |
| mwasmarm | Metrowerks Assembler for Embedded ARM | |
| mwasmeppc | Metrowerks Assembler for Embedded PowerPC | |
+| tasking | TASKING VX-toolset | |
## Linker ids
@@ -80,6 +81,7 @@ These are return values of the `get_linker_id` method in a compiler object.
| ccomp | CompCert used as the linker driver |
| mwldarm | The Metrowerks Linker with the ARM interface, used with mwccarm only |
| mwldeppc | The Metrowerks Linker with the PowerPC interface, used with mwcceppc only |
+| tasking | TASKING VX-toolset |
For languages that don't have separate dynamic linkers such as C# and Java, the
`get_linker_id` will return the compiler name.
@@ -139,6 +141,7 @@ set in the cross file.
| wasm64 | 64 bit Webassembly |
| x86 | 32 bit x86 processor |
| x86_64 | 64 bit x86 processor |
+| tricore | Tricore 32 bit processor |
Any cpu family not listed in the above list is not guaranteed to
@@ -215,22 +218,23 @@ Meson natively.
These are the parameter names for passing language specific arguments
to your build target.
-| Language | compiler name | linker name |
-| ------------- | ------------- | ----------------- |
-| C | c_args | c_link_args |
-| C++ | cpp_args | cpp_link_args |
-| C# | cs_args | cs_link_args |
-| CUDA | cuda_args | cuda_link_args |
-| D | d_args | d_link_args |
-| Fortran | fortran_args | fortran_link_args |
-| Java | java_args | java_link_args |
-| Objective C | objc_args | objc_link_args |
-| Objective C++ | objcpp_args | objcpp_link_args |
-| Rust | rust_args | rust_link_args |
-| Vala | vala_args | vala_link_args |
-| Cython | cython_args | cython_link_args |
-| NASM | nasm_args | N/A |
-| MASM | masm_args | N/A |
+| Language | compiler name | linker name |
+| ------------- | -------------- | ----------------- |
+| C | c_args | c_link_args |
+| C++ | cpp_args | cpp_link_args |
+| C# | cs_args | cs_link_args |
+| CUDA | cuda_args | cuda_link_args |
+| D | d_args | d_link_args |
+| Fortran | fortran_args | fortran_link_args |
+| Java | java_args | java_link_args |
+| Objective C | objc_args | objc_link_args |
+| Objective C++ | objcpp_args | objcpp_link_args |
+| Rust | rust_args | rust_link_args |
+| Vala | vala_args | vala_link_args |
+| Cython | cython_args | cython_link_args |
+| NASM | nasm_args | N/A |
+| MASM | masm_args | N/A |
+| Linear ASM | linearasm_args | N/A |
All these `_*` options are specified per machine. See in
[specifying options per
@@ -388,6 +392,7 @@ machine](#Environment-variables-per-machine) section for details.
| C# | CSC | CSC | The linker is the compiler |
| Cython | CYTHON | | |
| nasm | NASM | | Uses the C linker |
+| archiver | | AR | |
*The old environment variables are still supported, but are deprecated
and will be removed in a future version of Meson.
diff --git a/docs/markdown/Release-notes-for-1.3.0.md b/docs/markdown/Release-notes-for-1.3.0.md
index 0c7660f230ca..c2687039ab59 100644
--- a/docs/markdown/Release-notes-for-1.3.0.md
+++ b/docs/markdown/Release-notes-for-1.3.0.md
@@ -6,7 +6,7 @@ short-description: Release notes for 1.3.0
# New features
Meson 1.3.0 was released on 19 November 2023
-## Clarify of implicitly-included headers in C-like compiler checks
+## Clarify implicitly-included headers in C-like compiler checks
Compiler check methods `compiler.compute_int()`, `compiler.alignment()`
and `compiler.sizeof()` now have their implicitly-included headers
diff --git a/docs/markdown/Release-notes-for-1.6.0.md b/docs/markdown/Release-notes-for-1.6.0.md
index 9e5cea6d17d5..4b2948771cc5 100644
--- a/docs/markdown/Release-notes-for-1.6.0.md
+++ b/docs/markdown/Release-notes-for-1.6.0.md
@@ -126,7 +126,7 @@ valid tools are `moc`, `uic`, `rcc` and `lrelease`.
## Simple tool to test build reproducibility
Meson now ships with a command for testing whether your project can be
-[built reprodicibly](https://reproducible-builds.org/). It can be used
+[built reproducibly](https://reproducible-builds.org/). It can be used
by running a command like the following in the source root of your
project:
diff --git a/docs/markdown/Release-notes-for-1.7.0.md b/docs/markdown/Release-notes-for-1.7.0.md
new file mode 100644
index 000000000000..5024b32c20f1
--- /dev/null
+++ b/docs/markdown/Release-notes-for-1.7.0.md
@@ -0,0 +1,155 @@
+---
+title: Release 1.7.0
+short-description: Release notes for 1.7.0
+...
+
+# New features
+
+Meson 1.7.0 was released on 26 January 2025
+
+## Call for testing for next release
+
+At the beginning of next cycle we aim to merge the [option refactor
+branch](https://github.com/mesonbuild/meson/pull/13441). This is a
+_huge_ change that will touch pretty much all code.
+
+The main change it brings is that you can override any builtin option
+value for any subproject (even the top one) entirely from the command
+line. This means that you can, for example, enable optimizations on
+all subprojects but not on the top level project.
+
+We have done extensive testing and all our tests currently
+pass. However it is expected that this will break some workflows. So
+please test the branch when it lands and report issues. We want to fix
+all regressions as soon as possible, preferably far before the next rc
+release.
+
+## New custom dependency for atomic
+
+```
+dependency('atomic')
+```
+
+checks for the availability of the atomic operation library. First, it looks
+for the atomic library. If that is not found, then it will try to use what is
+provided by the libc.
+
+## `--cap-lints allow` used for Cargo subprojects
+
+Similar to Cargo itself, all downloaded Cargo subprojects automatically
+add the `--cap-lints allow` compiler argument, thus hiding any warnings
+from the compiler.
+
+Related to this, `warning_level=0` now translates into `--cap-lints allow`
+for Rust targets instead of `-A warnings`.
+
+## Cargo features are resolved globally
+
+When configuring a Cargo dependency, Meson will now resolve its complete
+dependency tree and feature set before generating the subproject AST.
+This solves many cases of Cargo subprojects being configured with missing
+features that the main project had to enable by hand using e.g.
+`default_options: ['foo-rs:feature-default=true']`.
+
+Note that there could still be issues in the case there are multiple Cargo
+entry points. That happens if the main Meson project makes multiple `dependency()`
+calls for different Cargo crates that have common dependencies.
+
+Breaks: This change removes per feature Meson options that were previously
+possible to set as shown above or from command line `-Dfoo-rs:feature-foo=true`.
+
+## Meson can run "clippy" on Rust projects
+
+Meson now defines a `clippy` target if the project uses the Rust programming
+language. The target runs clippy on all Rust sources, using the `clippy-driver`
+program from the same Rust toolchain as the `rustc` compiler.
+
+Using `clippy-driver` as the Rust compiler will now emit a warning, as it
+is not meant to be a general-purpose compiler front-end.
+
+## Devenv support in external project module
+
+The [external project module](External-Project-module.md) now sets up `PATH` and
+`LD_LIBRARY_PATH` to be able to run programs.
+
+`@BINDIR@` is now substituted in arguments and a `'--bindir=@PREFIX@/@BINDIR@'`
+default argument has been added.
+
+## Fixed `sizeof` and `find_library` methods for Fortran compilers
+
+The implementation of the `.sizeof()` method has been fixed for Fortran
+compilers (it was previously broken since it would try to compile a C code
+snippet). Note that this functionality requires Fortran 2008 support.
+
+Incidentally this also fixes the `.find_library()` method for Fortran compilers
+when the `prefer_static` built-in option is set to true.
+
+## format command now accepts stdin argument
+
+You can now use `-` argument for `meson format` to read input from stdin
+instead of reading it from a file.
+
+## "machine" entry in target introspection data
+
+The JSON data returned by `meson introspect --targets` now has a `machine`
+entry in each `target_sources` block. The new entry can be one of `build`
+or `host` for compiler-built targets, or absent for `custom_target` targets.
+
+## Add new language Linear Asm
+
+TI C6000 compiler supports a dialect of TI asm, so we add a new language for it.
+
+## Control the number of child processes with an environment variable
+
+Previously, `meson test` checked the `MESON_TESTTHREADS` variable to control
+the number of parallel jobs to run; this was useful when `meson test` is
+invoked through `ninja test` for example. With this version, a new variable
+`MESON_NUM_PROCESSES` is supported with a broader scope: in addition to
+`meson test`, it is also used by the `external_project` module and by
+Ninja targets that invoke `clang-tidy`, `clang-format` and `clippy`.
+
+## Support for Rust 2024
+
+Meson can now request the compiler to use the 2024 edition of Rust. Use
+`rust_std=2024` to activate it. Rust 2024 requires the 1.85.0 version
+(or newer) of the compiler.
+
+## Support TASKING VX-Toolset
+
+Meson now supports the TASKING VX-Toolset compiler family for the Tricore cpu family.
+
+## Test targets no longer built by default
+
+`meson test` and the `ninja all` rule have been reworked to no longer force
+unnecessary rebuilds.
+
+`meson test` was invoking `ninja all` due to a bug if the chosen set of tests
+had no build dependencies. The behavior is now the same as when tests do have
+build dependencies, i.e. to only build the actual set of targets that are used
+by the test. This change could cause failures when upgrading to Meson 1.7.0, if
+the dependencies are not specified correctly in meson.build. Using `ninja test`
+has always been guaranteed to "do the right thing" and rebuild `all` as well;
+this continues to work.
+
+`ninja all` does not rebuild all tests anymore; it should be noted that this
+change means test programs are no longer guaranteed to have been built,
+depending on whether those test programs were *also* defined to build by
+default / marked as installable. This avoids building test-only binaries as
+part of installing the project (`ninja && ninja install`), which is unnecessary
+and has no use case.
+
+Some users might have been relying on the "all" target building test
+dependencies in combination with `meson test --no-rebuild` in order to skip
+calling out to ninja when running tests. This might break with this change
+because, when given `--no-rebuild`, Meson provides no guarantee that test
+dependencies are present and up to date. The recommended workflow is to use
+either `ninja test` or `ninja && meson test` but, if you wish to build test
+programs and dependencies in a separate stage, you can use for example `ninja
+all meson-test-prereq meson-benchmark-prereq` before `meson test --no-rebuild`.
+These prereq targets have been available since meson 0.63.0.
+
+## Install vcs_tag() output
+
+[[vcs_tag]] now has `install`, `install_dir`, `install_tag` and `install_mode`
+keyword arguments to install the generated file.
+
diff --git a/docs/markdown/Releasing.md b/docs/markdown/Releasing.md
new file mode 100644
index 000000000000..5fec13c6e6f7
--- /dev/null
+++ b/docs/markdown/Releasing.md
@@ -0,0 +1,24 @@
+---
+short-description: Release Policy
+...
+
+# Releasing a new Meson version
+
+For each new meson release, several different artifacts are created:
+
+- Github Releases:
+ - canonical source tarball, PGP signed: `packaging/builddist.sh`
+ - Windows installer: `packaging/createmsi.py`
+ - macOS installer: `packaging/createpkg.py`
+- PyPI:
+ - pip install-compatible release, as produced by builddist.sh
+- Debian package: `packaging/mpackage.py`
+
+# Release team
+
+
+- Jussi Pakkanen. PGP key: [19E2D6D9B46D8DAA6288F877C24E631BABB1FE70](https://keyserver.ubuntu.com/pks/lookup?search=0x19E2D6D9B46D8DAA6288F877C24E631BABB1FE70&op=index)
+- Eli Schwartz. PGP key: [BD27B07A5EF45C2ADAF70E0484818A6819AF4A9B](https://keyserver.ubuntu.com/pks/lookup?search=0xBD27B07A5EF45C2ADAF70E0484818A6819AF4A9B&op=index)
+- Dylan Baker. PGP key: [71C4B75620BC75708B4BDB254C95FAAB3EB073EC](https://keyserver.ubuntu.com/pks/lookup?search=0x71C4B75620BC75708B4BDB254C95FAAB3EB073EC&op=index)
+
+The default release manager for new versions of Meson is Jussi Pakkanen. Starting with meson 1.8.0, the release team has been expanded with fallback options to reduce the bus factor, but will continue to be done by Jussi when possible.
diff --git a/docs/markdown/Rust-module.md b/docs/markdown/Rust-module.md
index ee095e9d6da1..35eaf39c3ac9 100644
--- a/docs/markdown/Rust-module.md
+++ b/docs/markdown/Rust-module.md
@@ -36,11 +36,40 @@ It also takes the following keyword arguments:
- `dependencies`: a list of test-only Dependencies
- `link_with`: a list of additional build Targets to link with (*since 1.2.0*)
+- `link_whole`: a list of additional build Targets to link with in their entirety (*since 1.8.0*)
- `rust_args`: a list of extra arguments passed to the Rust compiler (*since 1.2.0*)
This function also accepts all of the keyword arguments accepted by the
[[test]] function except `protocol`, it will set that automatically.
+### doctest()
+
+```meson
+rustmod.doctest(name, target, ...)
+```
+
+*Since 1.8.0*
+
+This function creates a new `test()` target from an existing rust
+based library target. The test will use `rustdoc` to extract and run
+the doctests that are included in `target`'s sources.
+
+This function takes two positional arguments, the first is the name of the
+test and the second is the library or executable that is the rust based target.
+It also takes the following keyword arguments:
+
+- `dependencies`: a list of test-only Dependencies
+- `link_with`: a list of additional build Targets to link with
+- `link_whole`: a list of additional build Targets to link with in their entirety
+- `rust_args`: a list of extra arguments passed to the Rust compiler
+
+The target is linked automatically into the doctests.
+
+This function also accepts all of the keyword arguments accepted by the
+[[test]] function except `protocol`, it will set that automatically.
+However, arguments are limited to strings that do not contain spaces
+due to limitations of `rustdoc`.
+
### bindgen()
This function wraps bindgen to simplify creating rust bindings around C
diff --git a/docs/markdown/Syntax.md b/docs/markdown/Syntax.md
index 59ec5f7ba07d..05f503880e23 100644
--- a/docs/markdown/Syntax.md
+++ b/docs/markdown/Syntax.md
@@ -150,6 +150,8 @@ combined = str1 + '_' + str2 # combined is now abc_xyz
You can concatenate any two strings using `/` as an operator to build paths.
This will always use `/` as the path separator on all platforms.
+If any one of the individual segments is an absolute path, all segments before
+it are dropped. For example:
```meson
joined = '/usr/share' / 'projectname' # => /usr/share/projectname
diff --git a/docs/markdown/Tutorial.md b/docs/markdown/Tutorial.md
index 7aff164a266a..355dd125a1a0 100644
--- a/docs/markdown/Tutorial.md
+++ b/docs/markdown/Tutorial.md
@@ -120,7 +120,7 @@ use GTK+. The new version looks like this.
#include
//
-// Should provided the active view for a GTK application
+// Should provide the active view for a GTK application
//
static void activate(GtkApplication* app, gpointer user_data)
{
diff --git a/docs/markdown/Unit-tests.md b/docs/markdown/Unit-tests.md
index 898366095b05..9654bc4fc099 100644
--- a/docs/markdown/Unit-tests.md
+++ b/docs/markdown/Unit-tests.md
@@ -83,16 +83,18 @@ possible.
By default Meson uses as many concurrent processes as there are cores
on the test machine. You can override this with the environment
-variable `MESON_TESTTHREADS` like this.
+variable `MESON_TESTTHREADS` or, *since 1.7.0*, `MESON_NUM_PROCESSES`:
```console
-$ MESON_TESTTHREADS=5 meson test
+$ MESON_NUM_PROCESSES=5 meson test
```
-Setting `MESON_TESTTHREADS` to 0 enables the default behavior (core
+Setting `MESON_NUM_PROCESSES` to 0 enables the default behavior (core
count), whereas setting an invalid value results in setting the job
count to 1.
+If both environment variables are present, `MESON_NUM_PROCESSES` prevails.
+
## Priorities
*(added in version 0.52.0)*
@@ -204,6 +206,11 @@ name(s), the test name(s) must be contained in the suite(s). This
however is redundant-- it would be more useful to specify either
specific test names or suite(s).
+Since version *1.8.0*, you can pass `--slice i/n` to split up the set of tests
+into `n` slices and execute the `ith` such slice. This allows you to distribute
+a set of long-running tests across multiple machines to decrease the overall
+runtime of tests.
+
### Other test options
Sometimes you need to run the tests multiple times, which is done like this:
@@ -216,16 +223,16 @@ Meson will set the `MESON_TEST_ITERATION` environment variable to the
current iteration of the test *(added 1.5.0)*.
Invoking tests via a helper executable such as Valgrind can be done with the
-`--wrap` argument
+`--wrapper` argument
```console
-$ meson test --wrap=valgrind testname
+$ meson test --wrapper=valgrind testname
```
Arguments to the wrapper binary can be given like this:
```console
-$ meson test --wrap='valgrind --tool=helgrind' testname
+$ meson test --wrapper='valgrind --tool=helgrind' testname
```
Meson also supports running the tests under GDB. Just doing this:
@@ -259,6 +266,11 @@ be specified *(added 0.52.0)*:
$ meson test --gdb --gdb-path /path/to/gdb testname
```
+Meson can print the error logs produced by failing tests via the
+`--print-errorlogs` option. The logs can include stack traces and environmental
+variables. This is especially useful when you run the tests on GitHub, Travis,
+Jenkins and the like:
+
```console
$ meson test --print-errorlogs
```
@@ -273,11 +285,6 @@ shell is spawned if it fails *(added 1.5.0)*:
$ meson test --interactive testname
```
-Meson will report the output produced by the failing tests along with
-other useful information as the environmental variables. This is
-useful, for example, when you run the tests on Travis-CI, Jenkins and
-the like.
-
By default, the output from tests will be limited to the last 100 lines. The
maximum number of lines to show can be configured with the `--max-lines` option
*(added 1.5.0)*:
@@ -305,7 +312,7 @@ For further information see the command line help of Meson by running
## Legacy notes
-If `meson test` does not work for you, you likely have a old version
+If `meson test` does not work for you, you likely have an old version
of Meson. In that case you should call `mesontest` instead. If
`mesontest` doesn't work either you have a very old version prior to
0.37.0 and should upgrade.
diff --git a/docs/markdown/Users.md b/docs/markdown/Users.md
index 0b1de42f4daa..a515b24af7bb 100644
--- a/docs/markdown/Users.md
+++ b/docs/markdown/Users.md
@@ -2,14 +2,13 @@
title: Users
...
-# List of projects using Meson
+# Notable projects using Meson
-If you have a project that uses Meson that you want to add to this
-list, please [file a
-pull-request](https://github.com/mesonbuild/meson/edit/master/docs/markdown/Users.md)
-for it. All the software on this list is tested for regressions before
-release, so it's highly recommended that projects add themselves
-here. Some additional projects are listed in the [`meson` GitHub
+If you're aware of a notable project that uses Meson, please
+[file a pull-request](https://github.com/mesonbuild/meson/edit/master/docs/markdown/Users.md)
+for it. For other projects using Meson, you may be interested in this
+[GitHub search](https://github.com/search?q=path%3A%2F%28%5E%7C%5C%2F%29meson%5C.build%24%2F&type=code).
+Some additional projects are listed in the [`meson` GitHub
topic](https://github.com/topics/meson).
- [2048.cpp](https://github.com/plibither8/2048.cpp), a fully featured terminal version of the game "2048" written in C++
@@ -51,8 +50,9 @@ topic](https://github.com/topics/meson).
- [Frida](https://github.com/frida/frida-core), a dynamic binary instrumentation toolkit
- [fwupd](https://github.com/hughsie/fwupd), a simple daemon to allow session software to update firmware
- [GameMode](https://github.com/FeralInteractive/gamemode), a daemon/lib combo for Linux that allows games to request a set of optimisations be temporarily applied to the host OS
- - [Geary](https://wiki.gnome.org/Apps/Geary), an email application built around conversations, for the GNOME 3 desktop.
+ - [Geary](https://wiki.gnome.org/Apps/Geary), an email application built around conversations, for the GNOME 3 desktop
- [GIMP](https://gitlab.gnome.org/GNOME/gimp), an image manipulation program (master branch)
+ - [Git](https://git-scm.com/), ["the information manager from hell"](https://github.com/git/git/commit/e83c5163316f89bfbde7d9ab23ca2e25604af290)
- [GLib](https://gitlab.gnome.org/GNOME/glib), cross-platform C library used by GTK+ and GStreamer
- [Glorytun](https://github.com/angt/glorytun), a multipath UDP tunnel
- [GNOME Boxes](https://gitlab.gnome.org/GNOME/gnome-boxes), a GNOME hypervisor
@@ -97,10 +97,10 @@ topic](https://github.com/topics/meson).
- [Libgit2-glib](https://git.gnome.org/browse/libgit2-glib), a GLib wrapper for libgit2
- [libglvnd](https://gitlab.freedesktop.org/glvnd/libglvnd), Vendor neutral OpenGL dispatch library for Unix-like OSes
- [Libhttpseverywhere](https://git.gnome.org/browse/libhttpseverywhere), a library to enable httpseverywhere on any desktop app
- - [libmodulemd](https://github.com/fedora-modularity/libmodulemd), a GObject Introspected library for managing [Fedora Project](https://getfedora.org/) module metadata.
+ - [libmodulemd](https://github.com/fedora-modularity/libmodulemd), a GObject Introspected library for managing [Fedora Project](https://getfedora.org/) module metadata
- [Libosmscout](https://github.com/Framstag/libosmscout), a C++ library for offline map rendering, routing and location
lookup based on OpenStreetMap data
- - [libratbag](https://github.com/libratbag/libratbag), provides a DBus daemon to configure input devices, mainly gaming mice.
+ - [libratbag](https://github.com/libratbag/libratbag), provides a DBus daemon to configure input devices, mainly gaming mice
- [libspng](https://github.com/randy408/libspng), a C library for reading and writing Portable Network Graphics (PNG)
format files
- [libSRTP](https://github.com/cisco/libsrtp) (from Cisco Systems), a library for SRTP (Secure Realtime Transport Protocol)
@@ -109,6 +109,7 @@ format files
- [libvips](https://github.com/libvips/libvips), a fast image processing library with low memory needs
- [Libvirt](https://libvirt.org), a toolkit to manage virtualization platforms
- [Libzim](https://github.com/openzim/libzim), the reference implementation for the ZIM file format
+ - [Linux PAM](https://github.com/linux-pam/linux-pam), The Pluggable Authentication Modules project for Linux
- [LXC](https://github.com/lxc/lxc), Linux container runtime
- [Marker](https://github.com/fabiocolacio/Marker), a GTK-3 markdown editor
- [mcfgthread](https://github.com/lhmouse/mcfgthread), cornerstone library for C++11 threading on mingw-w64
@@ -119,15 +120,15 @@ format files
- [mrsh](https://github.com/emersion/mrsh), a minimal POSIX shell
- [Nautilus](https://gitlab.gnome.org/GNOME/nautilus), the GNOME file manager
- [Nemo](https://github.com/linuxmint/nemo), the file manager for the Cinnamon desktop environment
+ - [netatalk](https://netatalk.io/), a free and open source AFP file server for Mac and Apple II
- [NetPanzer](https://github.com/netpanzer/netpanzer), a 2D online multiplayer tactical warfare game designed for fast action combat
- [NumPy](https://numpy.org/), a Python package for scientific computing
- [nvme-cli](https://github.com/linux-nvme/nvme-cli), NVMe management command line interface
- - [OcherBook](https://github.com/ccoffing/OcherBook), an open source book reader for Kobo devices
- [oomd](https://github.com/facebookincubator/oomd), a userspace Out-Of-Memory (OOM) killer for Linux systems
- [OpenH264](https://github.com/cisco/openh264), open source H.264 codec
- [OpenHMD](https://github.com/OpenHMD/OpenHMD), a free and open source API and drivers for immersive technology, such as head mounted displays with built in head tracking
- [OpenRC](https://github.com/OpenRC/openrc), an init system for Unix-like operating systems
- - [OpenTitan](https://github.com/lowRISC/opentitan), an open source silicon Root of Trust (RoT) project.
+ - [OpenTitan](https://github.com/lowRISC/opentitan), an open source silicon Root of Trust (RoT) project
- [Orc](https://gitlab.freedesktop.org/gstreamer/orc), the Optimized Inner Loop Runtime Compiler
- [OTS](https://github.com/khaledhosny/ots), the OpenType Sanitizer, parses and serializes OpenType files (OTF, TTF) and WOFF and WOFF2 font files, validating and sanitizing them as it goes. Used by Chromium and Firefox
- [Outlier](https://github.com/kerolasa/outlier), a small Hello World style Meson example project
@@ -138,7 +139,6 @@ format files
- [Peek](https://github.com/phw/peek), simple animated GIF screen recorder with an easy to use interface
- [PicoLibc](https://github.com/keith-packard/picolibc), a standard C library for small embedded systems with limited RAM
- [PipeWire](https://github.com/PipeWire/pipewire), a framework for video and audio for containerized applications
- - [Paper Rock Scissors](https://github.com/michaelbrockus/paper_rock_scissors), a game with weapons themed at home paper rock scissors style.
- [Pistache](https://github.com/pistacheio/pistache), a high performance REST toolkit written in C++
- [Pithos](https://github.com/pithos/pithos), a Pandora Radio client
- [Pitivi](https://github.com/pitivi/pitivi/), a nonlinear video editor
@@ -149,9 +149,9 @@ format files
- [qboot](https://github.com/bonzini/qboot), a minimal x86 firmware for booting Linux kernels
- [QEMU](https://qemu.org), a processor emulator and virtualizer
- [radare2](https://github.com/radare/radare2), unix-like reverse engineering framework and commandline tools (not the default)
- - [refivar](https://github.com/nvinson/refivar), A reimplementation of efivar in Rust.
+ - [refivar](https://github.com/nvinson/refivar), A reimplementation of efivar in Rust
- [Rizin](https://rizin.re), Free and Open Source Reverse Engineering Framework
- - [rmw](https://remove-to-waste.info), safe-remove utility for the command line
+ - [rmw](https://theimpossibleastronaut.com/rmw-website/), safe-remove utility for the command line
- [RxDock](https://gitlab.com/rxdock/rxdock), a protein-ligand docking software designed for high throughput virtual screening (fork of rDock)
- [SciPy](https://scipy.org/), an open-source software for mathematics, science, and engineering
- [scrcpy](https://github.com/Genymobile/scrcpy), a cross platform application that provides display and control of Android devices connected on USB or over TCP/IP
@@ -169,7 +169,7 @@ format files
- [ThorVG](https://www.thorvg.org/), vector-based scenes and animations library
- [Tilix](https://github.com/gnunn1/tilix), a tiling terminal emulator for Linux using GTK+ 3
- [Tizonia](https://github.com/tizonia/tizonia-openmax-il), a command-line cloud music player for Linux with support for Spotify, Google Play Music, YouTube, SoundCloud, TuneIn, Plex servers and Chromecast devices
- - [Fossil Logic](https://github.com/fossillogic), Fossil Logic is a cutting-edge software development company specializing in C/C++, Python, programming, Android development using Kotlin, and SQL solutions.
+ - [Fossil Logic](https://github.com/fossillogic), Fossil Logic is a cutting-edge software development company specializing in C/C++, Python, programming, Android development using Kotlin, and SQL solutions
- [UFJF-MLTK](https://github.com/mateus558/UFJF-Machine-Learning-Toolkit), A C++ cross-platform framework for machine learning algorithms development and testing
- [Vala Language Server](https://github.com/benwaffle/vala-language-server), code intelligence engine for the Vala and Genie programming languages
- [Valum](https://github.com/valum-framework/valum), a micro web framework written in Vala
diff --git a/docs/markdown/Wayland-module.md b/docs/markdown/Wayland-module.md
index 679329373713..ca7b567178f0 100644
--- a/docs/markdown/Wayland-module.md
+++ b/docs/markdown/Wayland-module.md
@@ -1,21 +1,18 @@
-# Unstable Wayland Module
+# Wayland Module
-This module is available since version 0.62.0.
+This module is available since version 0.62.0, and has been stable since version
+1.8.0.
This module provides helper functions to find wayland protocol
xmls and to generate .c and .h files using wayland-scanner
-**Note**: this module is unstable. It is only provided as a technology
-preview. Its API may change in arbitrary ways between releases or it
-might be removed from Meson altogether.
-
## Quick Usage
```meson
project('hello-wayland', 'c')
wl_dep = dependency('wayland-client')
-wl_mod = import('unstable-wayland')
+wl_mod = import('wayland')
xml = wl_mod.find_protocol('xdg-shell')
xdg_shell = wl_mod.scan_xml(xml)
@@ -53,7 +50,8 @@ generated = wl_mod.scan_xml(
include_core_only : true,
)
```
-This function accepts one or more arguments of either string or file type.
+This function accepts one or more arguments of either string or file type, so
+it can be used in conjunction with `find_protocol` or not.
It takes the following keyword arguments:
- `public` Optional arg that specifies the scope of the generated code.
@@ -63,7 +61,7 @@ It takes the following keyword arguments:
- `server` Optional arg that specifies if server side header file is
generated. The default is false.
- `include_core_only` Optional arg that specifies that generated headers only include
- `wayland--core.h` instead of `wayland-.h`.
+ `wayland--core.h` instead of `wayland-.h`.
The default is true. Since *0.64.0*
**Returns**: a list of [[@custom_tgt]] in the order source, client side header,
diff --git a/docs/markdown/Wrap-dependency-system-manual.md b/docs/markdown/Wrap-dependency-system-manual.md
index d84e4aa186d2..73358e7cfc2f 100644
--- a/docs/markdown/Wrap-dependency-system-manual.md
+++ b/docs/markdown/Wrap-dependency-system-manual.md
@@ -66,7 +66,7 @@ An example wrap-git will look like this:
```ini
[wrap-git]
url = https://github.com/libfoobar/libfoobar.git
-revision = head
+revision = HEAD
depth = 1
```
@@ -124,7 +124,7 @@ case, the directory will be copied into `subprojects/` before applying patches.
- `url` - name of the wrap-git repository to clone. Required.
- `revision` - name of the revision to checkout. Must be either: a
valid value (such as a git tag) for the VCS's `checkout` command, or
- (for git) `head` to track upstream's default branch. Required.
+ (for git) `HEAD` to track upstream's default branch. Required.
### Specific to wrap-git
- `depth` - shallowly clone the repository to X number of commits. This saves bandwidth and disk
@@ -354,27 +354,6 @@ method = cargo
dependency_names = foo-bar-0.1-rs
```
-Cargo features are exposed as Meson boolean options, with the `feature-` prefix.
-For example the `default` feature is named `feature-default` and can be set from
-the command line with `-Dfoo-1-rs:feature-default=false`. When a cargo subproject
-depends on another cargo subproject, it will automatically enable features it
-needs using the `dependency('foo-1-rs', default_options: ...)` mechanism. However,
-unlike Cargo, the set of enabled features is not managed globally. Let's assume
-the main project depends on `foo-1-rs` and `bar-1-rs`, and they both depend on
-`common-1-rs`. The main project will first look up `foo-1-rs` which itself will
-configure `common-rs` with a set of features. Later, when `bar-1-rs` does a lookup
-for `common-1-rs` it has already been configured and the set of features cannot be
-changed. If `bar-1-rs` wants extra features from `common-1-rs`, Meson will error out.
-It is currently the responsibility of the main project to resolve those
-issues by enabling extra features on each subproject:
-```meson
-project(...,
- default_options: {
- 'common-1-rs:feature-something': true,
- },
-)
-```
-
In addition, if the file `meson/meson.build` exists, Meson will call `subdir('meson')`
where the project can add manual logic that would usually be part of `build.rs`.
Some naming conventions need to be respected:
diff --git a/docs/markdown/howtox.md b/docs/markdown/howtox.md
index 4a57e8569137..ba6a3b8f8d63 100644
--- a/docs/markdown/howtox.md
+++ b/docs/markdown/howtox.md
@@ -239,6 +239,26 @@ And then pass it through the variable (remember to use absolute path):
$ SCANBUILD=$(pwd)/my-scan-build.sh ninja -C builddir scan-build
```
+## Use clippy
+
+If your project includes Rust targets, you can invoke clippy like this:
+
+```console
+$ meson setup builddir
+$ ninja -C builddir clippy
+```
+
+Clippy will also obey the `werror` [builtin option](Builtin-options.md#core-options).
+
+By default Meson uses as many concurrent processes as there are cores
+on the test machine. You can override this with the environment
+variable `MESON_NUM_PROCESSES`.
+
+Meson will look for `clippy-driver` in the same directory as `rustc`,
+or try to invoke it using `rustup` if `rustc` points to a `rustup`
+binary. If `clippy-driver` is not detected properly, you can add it to
+a [machine file](Machine-files.md).
+
## Use profile guided optimization
Using profile guided optimization with GCC is a two phase
diff --git a/docs/markdown/i18n-module.md b/docs/markdown/i18n-module.md
index a939a34738b5..da6fce74e875 100644
--- a/docs/markdown/i18n-module.md
+++ b/docs/markdown/i18n-module.md
@@ -74,3 +74,50 @@ for normal keywords. In addition it accepts these keywords:
* `mo_targets` *required*: mo file generation targets as returned by `i18n.gettext()`.
*Added 0.62.0*
+
+
+### i18n.xgettext()
+
+``` meson
+i18n.xgettext(name, sources..., args: [...], recursive: false)
+```
+
+Invokes the `xgettext` program on given sources, to generate a `.pot` file.
+This function is to be used when the `gettext` function workflow is not suitable
+for your project. For example, it can be used to produce separate `.pot` files
+for each executable.
+
+Positional arguments are the following:
+
+* name `str`: the name of the resulting pot file.
+* sources `list[str|File|build_tgt|custom_tgt]`:
+ source files or targets. May be a list of `string`, `File`, [[@build_tgt]],
+ or [[@custom_tgt]] returned from other calls to this function.
+
+Keyword arguments are the following:
+
+- recursive `bool`:
+ if `true`, will merge the resulting pot file with extracted pot files
+ related to dependencies of the given source targets. For instance,
+ if you build an executable, then you may want to merge the executable
+ translations with the translations from the dependent libraries.
+- install `bool`: if `true`, will add the resulting pot file to install targets.
+- install_tag `str`: install tag to use for the install target.
+- install_dir `str`: directory where to install the resulting pot file.
+
+The `i18n.xgettext()` function returns a [[@custom_tgt]].
+
+Usually, you want to pass one build target as sources, and the list of header files
+for that target. If the number of source files would result in a command line that
+is too long, the list of source files is written to a file at config time, to be
+used as input for the `xgettext` program.
+
+The `recursive: true` argument is to be given to targets that will actually read
+the resulting `.mo` file. Each time you call the `i18n.xgettext()` function,
+it maps the source targets to the resulting pot file. When `recursive: true` is
+given, all generated pot files from dependencies of the source targets are
+included to generate the final pot file. Therefore, adding a dependency to
+source target will automatically add the translations of that dependency to the
+needed translations for that source target.
+
+*Added 1.8.0*
diff --git a/docs/markdown/index.md b/docs/markdown/index.md
index 427cc0508f6e..cbb79018fc84 100644
--- a/docs/markdown/index.md
+++ b/docs/markdown/index.md
@@ -17,7 +17,7 @@ code.
## Features
* multiplatform support for Linux, macOS, Windows, GCC, Clang, Visual Studio and others
-* supported languages include C, C++, D, Fortran, Java, Rust
+* supported languages include C, C++, C#, D, Fortran, Java, Rust
* build definitions in a very readable and user friendly non-Turing complete DSL
* cross compilation for many operating systems as well as bare metal
* optimized for extremely fast full and incremental builds without sacrificing correctness
@@ -29,6 +29,21 @@ code.
Are you an absolute beginner when it comes to programming? No worries,
read [this beginner guide](SimpleStart.md) to get started.
+## Tutorials
+
+- [Get it](Getting-meson.md)
+- [Build a Gtk app from scratch](Tutorial.md)
+- [Build a SDL app from scratch](GuiTutorial.md)
+- [How do I do X in Meson?](howtox.md)
+
+## Manual
+
+- [Usage manual](Manual.md)
+- [API reference manual](Reference-manual.md)
+- [Modules documentation](Modules.md)
+- [Frequently Asked Questions](FAQ.md)
+- [Release Notes](Release-notes.md)
+
## Community
The easiest way for most people to connect to other Meson developers is
@@ -42,6 +57,16 @@ Google Groups) and the
[Discussions](https://github.com/mesonbuild/meson/discussions) section
of the Meson GitHub repository.
+### Development
+
+All development on Meson is done on the [GitHub
+project](https://github.com/mesonbuild/meson). Instructions for
+contributing can be found on the [contribution page](Contributing.md).
+
+You do not need to sign a CLA to contribute to Meson.
+
+The release process is separately covered at [Releasing](Releasing.md).
+
### [Projects using Meson](Users.md)
Many projects are using Meson and they're
@@ -51,12 +76,3 @@ converting existing projects to Meson.
[A short list of Meson users can be found here](Users.md)
but there are many more. We would love to hear about your success
stories too and how things could be improved too!
-
-## Development
-
-All development on Meson is done on the [GitHub
-project](https://github.com/mesonbuild/meson). Instructions for
-contributing can be found on the [contribution page](Contributing.md).
-
-
-You do not need to sign a CLA to contribute to Meson.
diff --git a/docs/markdown/snippets/android-exe-type.md b/docs/markdown/snippets/android-exe-type.md
new file mode 100644
index 000000000000..ce4d94647a84
--- /dev/null
+++ b/docs/markdown/snippets/android-exe-type.md
@@ -0,0 +1,10 @@
+## New argument `android_exe_type` for executables
+
+Android application executables actually need to be linked
+as a shared object, which is loaded from a pre-warmed JVM.
+Meson projects can now specify a new argument `android_exe_type`
+and set it to `application`, in order to produce such a shared library
+only on Android targets.
+
+This makes it possible to use the same `meson.build` file
+for both Android and non-Android systems.
diff --git a/docs/markdown/snippets/b_sanitizer_changes.md b/docs/markdown/snippets/b_sanitizer_changes.md
new file mode 100644
index 000000000000..f726d700ae32
--- /dev/null
+++ b/docs/markdown/snippets/b_sanitizer_changes.md
@@ -0,0 +1,17 @@
+## Changes to the b_sanitize option
+
+Before 1.8 the `b_sanitize` option was a combo option, which is an enumerated
+set of values. In 1.8 this was changed to a free-form array of options where
+available sanitizers are not hardcoded anymore but instead verified via a
+compiler check.
+
+This solves a number of longstanding issues such as:
+
+ - Sanitizers may be supported by a compiler, but not on a specific platform
+ (OpenBSD).
+ - New sanitizers are not recognized by Meson.
+ - Using sanitizers in previously-unsupported combinations.
+
+To not break backwards compatibility, calling `get_option('b_sanitize')`
+continues to return the configured value as a string, with a guarantee that
+`address,undefined` remains ordered.
diff --git a/docs/markdown/snippets/c2y.md b/docs/markdown/snippets/c2y.md
new file mode 100644
index 000000000000..4b647f8cac23
--- /dev/null
+++ b/docs/markdown/snippets/c2y.md
@@ -0,0 +1,4 @@
+## New C standard `c2y` (and `gnu2y`)
+
+The `c2y` standard and its companion `gnu2y` are now supported
+when using either Clang 19.0.0 or newer, or GCC 15.0.0 or newer.
diff --git a/docs/markdown/snippets/i18n_xgettext.md b/docs/markdown/snippets/i18n_xgettext.md
new file mode 100644
index 000000000000..0ad0a14b1f13
--- /dev/null
+++ b/docs/markdown/snippets/i18n_xgettext.md
@@ -0,0 +1,12 @@
+## i18n module xgettext
+
+There is a new `xgettext` function in `i18n` module that acts as a
+wrapper around `xgettext`. It allows extracting strings to translate from
+source files.
+
+This function is convenient, because:
+- It can find the source files from a build target;
+- It will use an intermediate file when the number of source files is too
+ big to be handled directly from the command line;
+- It is able to get strings to translate from the dependencies of the given
+ targets.
diff --git a/docs/markdown/snippets/multiple_version_compare.md b/docs/markdown/snippets/multiple_version_compare.md
new file mode 100644
index 000000000000..5e8c7582f074
--- /dev/null
+++ b/docs/markdown/snippets/multiple_version_compare.md
@@ -0,0 +1,8 @@
+## `version_compare` now accept multiple compare strings
+
+It is now possible to compare a version against multiple values, for instance
+to check for a range of versions.
+
+```meson
+'1.5'.version_compare('>=1', '<2')
+```
diff --git a/docs/markdown/snippets/objc-cpp.md b/docs/markdown/snippets/objc-cpp.md
new file mode 100644
index 000000000000..3d22ccb7ae7e
--- /dev/null
+++ b/docs/markdown/snippets/objc-cpp.md
@@ -0,0 +1,8 @@
+## Improvements to Objective-C and Objective-C++
+
+Meson does not assume anymore that gcc/g++ always support
+Objective-C and Objective-C++, and instead checks that they
+can actually do a basic compile.
+
+Furthermore, Objective-C and Objective-C++ now support the
+same language standards as C and C++ respectively.
diff --git a/docs/markdown/snippets/optionrefactor.md b/docs/markdown/snippets/optionrefactor.md
new file mode 100644
index 000000000000..53dbdbc42a33
--- /dev/null
+++ b/docs/markdown/snippets/optionrefactor.md
@@ -0,0 +1,19 @@
+## Per project subproject options rewrite
+
+You can now define per-subproject values for all shared configuration
+options. As an example you might want to enable optimizations on only
+one subproject:
+
+ meson configure -Dnumbercruncher:optimization=3
+
+Subproject specific values can be removed with -U
+
+ meson configure -Unumbercruncher:optimization
+
+This is a major change in how options are handled, and the
+implementation will evolve over the next few releases of Meson. If
+this change causes an error in your builds, please [report an issue on
+GitHub](https://github.com/mesonbuild/meson/issues/new).
+
+We have tried to keep backwards compatibility as much as possible, but
+this may lead to some build breakage.
diff --git a/docs/markdown/snippets/rust-objects.md b/docs/markdown/snippets/rust-objects.md
new file mode 100644
index 000000000000..575e1f6e38ed
--- /dev/null
+++ b/docs/markdown/snippets/rust-objects.md
@@ -0,0 +1,4 @@
+## `objects` added correctly to Rust executables
+
+Any objects included in a Rust executable were previously ignored. They
+are now added correctly.
diff --git a/docs/markdown/snippets/rust-test-link-whole.md b/docs/markdown/snippets/rust-test-link-whole.md
new file mode 100644
index 000000000000..f3d006d5389d
--- /dev/null
+++ b/docs/markdown/snippets/rust-test-link-whole.md
@@ -0,0 +1,4 @@
+## `rust.test` now supports `link_whole`
+
+The `test` function in the `rust` module now supports the `link_whole`
+keyword argument in addition to `link_with` and `dependencies`.
diff --git a/docs/markdown/snippets/rustdoc.md b/docs/markdown/snippets/rustdoc.md
new file mode 100644
index 000000000000..b0b64aaeb7b1
--- /dev/null
+++ b/docs/markdown/snippets/rustdoc.md
@@ -0,0 +1,6 @@
+## Meson can run "rustdoc" on Rust projects
+
+Meson now defines a `rustdoc` target if the project
+uses the Rust programming language. The target runs rustdoc on all Rust
+sources, using the `rustdoc` program from the same Rust toolchain as the
+`rustc` compiler.
diff --git a/docs/markdown/snippets/stabilized-wayland.md b/docs/markdown/snippets/stabilized-wayland.md
new file mode 100644
index 000000000000..3b132e68d542
--- /dev/null
+++ b/docs/markdown/snippets/stabilized-wayland.md
@@ -0,0 +1,4 @@
+## The Wayland module is stable
+
+The Wayland module has been tested in several projects and had the
+last breaking change in Meson 0.64.0; it is now marked as stable.
diff --git a/docs/markdown/snippets/swift-std.md b/docs/markdown/snippets/swift-std.md
new file mode 100644
index 000000000000..64fe70c174ee
--- /dev/null
+++ b/docs/markdown/snippets/swift-std.md
@@ -0,0 +1,4 @@
+## New `swift_std` compiler option
+
+A new compiler option allows setting the language version that is passed
+to swiftc (`none`, `4`, `4.2`, `5` or `6`).
diff --git a/docs/markdown/snippets/test-slicing.md b/docs/markdown/snippets/test-slicing.md
new file mode 100644
index 000000000000..180b9ace513d
--- /dev/null
+++ b/docs/markdown/snippets/test-slicing.md
@@ -0,0 +1,6 @@
+## New option to execute a slice of tests
+
+When tests take a long time to run, a common strategy is to slice up the tests
+into multiple sets, where each set is executed on a separate machine. You can
+now use the `--slice i/n` argument for `meson test` to create `n` slices and
+execute the `ith` slice.
diff --git a/docs/markdown/snippets/valgrind_test.md b/docs/markdown/snippets/valgrind_test.md
new file mode 100644
index 000000000000..078730058033
--- /dev/null
+++ b/docs/markdown/snippets/valgrind_test.md
@@ -0,0 +1,6 @@
+## Valgrind now fails tests if errors are found
+
+Valgrind does not reflect an error in its exit code by default, meaning
+a test may silently pass despite memory errors. Meson now exports
+`VALGRIND_OPTS` such that Valgrind will exit with status 1 to indicate
+an error if `VALGRIND_OPTS` is not set in the environment.
diff --git a/docs/meson.build b/docs/meson.build
index 3ad12b7fd71f..c476b59dbaa6 100644
--- a/docs/meson.build
+++ b/docs/meson.build
@@ -145,3 +145,5 @@ run_target('upload',
],
depends: documentation,
)
+
+test('validate_links', find_program('./validatelinks.py'), args: meson.current_source_dir() / 'markdown' / 'Users.md')
diff --git a/docs/refman/templates/notes.mustache b/docs/refman/templates/notes.mustache
index de550c53c759..2172aafd5d30 100644
--- a/docs/refman/templates/notes.mustache
+++ b/docs/refman/templates/notes.mustache
@@ -1,14 +1,25 @@
{{#notes}}
-
-
Note:
+
{{/notes}}
+
{{#warnings}}
-
-
Warning:
+
+
+
+ Warning:
+
+
{{&.}}
+
{{/warnings}}
diff --git a/docs/sitemap.txt b/docs/sitemap.txt
index 2e80840578f6..a82c3671ddf0 100644
--- a/docs/sitemap.txt
+++ b/docs/sitemap.txt
@@ -89,6 +89,7 @@ index.md
Wrap-best-practices-and-tips.md
Shipping-prebuilt-binaries-as-wraps.md
Release-notes.md
+ Release-notes-for-1.7.0.md
Release-notes-for-1.6.0.md
Release-notes-for-1.5.0.md
Release-notes-for-1.4.0.md
@@ -134,6 +135,7 @@ index.md
Contact-information.md
Continuous-Integration.md
Design-rationale.md
+ Donating.md
IndepthTutorial.md
In-the-press.md
Mixing-build-systems.md
@@ -147,5 +149,6 @@ index.md
Contributing.md
Yaml-RefMan.md
MesonCI.md
+ Releasing.md
legal.md
Videos.md
diff --git a/docs/theme/extra/css/notes.css b/docs/theme/extra/css/notes.css
new file mode 100644
index 000000000000..70f364e0a0af
--- /dev/null
+++ b/docs/theme/extra/css/notes.css
@@ -0,0 +1,41 @@
+.note {
+ border: 0.2rem solid;
+ border-left-width: 0.5rem;
+ border-radius: 0.2rem;
+ margin: 2rem 1rem;
+}
+.note-topbar {
+ padding: 0.4rem 1rem;
+ border-radius: 1rem;
+ position: relative;
+ top: -1rem;
+ left: -1rem;
+ margin-right: auto;
+ min-width: min-content;
+ font-weight: bold;
+ font-size: 120%;
+ color: #000;
+}
+.note-topbar .glyphicon {
+ top: 2px;
+}
+
+.note-content {
+ padding: 0 1rem;
+ margin-top: -0.5rem;
+}
+
+/* Colors taken from hotdoc_bootstrap_theme */
+.note-info {
+ border-color: #3dced8;
+}
+.note-topbar-info {
+ background-color: #3dced8;
+}
+
+.note-warning {
+ border-color: #e96506;
+}
+.note-topbar-warning {
+ background-color: #e96506;
+}
diff --git a/docs/validatelinks.py b/docs/validatelinks.py
new file mode 100644
index 000000000000..69544ab0e98b
--- /dev/null
+++ b/docs/validatelinks.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python3
+
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2025 The Meson development team
+
+import sys
+import re
+import aiohttp
+import asyncio
+
+LINK = re.compile(r'\[(?P<name>[A-Za-z0-9 ]+)\]\((?P<url>.*?)\)')
+
+
+async def fetch(session, name, url, timeout):
+ try:
+ async with session.get(url, timeout=timeout) as r:
+ if not r.ok:
+ return (name, url, r.status)
+ except Exception as e:
+ return (name, url, str(e))
+
+
+async def main(filename):
+ with open(filename) as f:
+ text = f.read()
+ timeout = aiohttp.ClientTimeout(total=60)
+ async with aiohttp.ClientSession() as session:
+ tasks = []
+ for link in LINK.finditer(text):
+ name, url = link.groups()
+ task = asyncio.ensure_future(fetch(session, name, url, timeout))
+ tasks.append(task)
+ responses = asyncio.gather(*tasks)
+ errors = [r for r in await responses if r is not None]
+ for name, url, result in errors:
+ print(f'"{name}" {url} {result}')
+ if errors:
+ sys.exit(1)
+
+
+if __name__ == '__main__':
+ asyncio.run(main(sys.argv[1]))
diff --git a/docs/yaml/elementary/str.yml b/docs/yaml/elementary/str.yml
index 9d059cc092d3..44aa74240943 100644
--- a/docs/yaml/elementary/str.yml
+++ b/docs/yaml/elementary/str.yml
@@ -306,6 +306,12 @@ methods:
It is best to be unambiguous and specify the full revision level to compare.
+ *Since 1.8.0* multiple versions can be compared:
+
+ ```meson
+ '3.6'.version_compare('>=3', '<4.0') == true
+ ```
+
posargs:
compare_string:
type: str
diff --git a/docs/yaml/functions/declare_dependency.yaml b/docs/yaml/functions/declare_dependency.yaml
index 9d085fdf8251..848082d1beb4 100644
--- a/docs/yaml/functions/declare_dependency.yaml
+++ b/docs/yaml/functions/declare_dependency.yaml
@@ -3,10 +3,12 @@ returns: dep
description: |
This function returns a [[@dep]] object that
behaves like the return value of [[dependency]] but is
- internal to the current build. The main use case for this is in
+ internal to the current build. One use case for this is in
subprojects. This allows a subproject to easily specify how it should
be used. This makes it interchangeable with the same dependency that
- is provided externally by the system.
+ is provided externally by the system. Another common use case is to
+ declare project targets as dependencies so they may be used as
+ dependencies of other build targets.
kwargs:
compile_args:
@@ -49,7 +51,7 @@ kwargs:
description: |
extra files to add to targets.
mostly used for IDE integration.
-
+
version:
type: str
description: |
diff --git a/docs/yaml/functions/executable.yaml b/docs/yaml/functions/executable.yaml
index abbc5feee909..df71b79fadc5 100644
--- a/docs/yaml/functions/executable.yaml
+++ b/docs/yaml/functions/executable.yaml
@@ -21,6 +21,17 @@ varargs_inherit: _build_target_base
kwargs_inherit: _build_target_base
kwargs:
+ android_exe_type:
+ type: str
+ default: "'executable'"
+ since: 1.8.0
+ description: |
+ Specifies the intended target of the executable. This can either be
+ `executable`, if the intended use case is to run the executable using
+ fork + exec, or `application` if the executable is supposed to be
+ loaded as shared object by the android runtime.
+
+
export_dynamic:
type: bool
since: 0.45.0
diff --git a/docs/yaml/functions/get_option.yaml b/docs/yaml/functions/get_option.yaml
index 0934758bc868..b8b1fc4cdddf 100644
--- a/docs/yaml/functions/get_option.yaml
+++ b/docs/yaml/functions/get_option.yaml
@@ -20,6 +20,11 @@ description: |
See [`feature` options](Build-options.md#features)
documentation for more details.
+ For options that are [specified
+ per-machine](Builtin-options.md#specifying-options-per-machine)
+ `get_option()` retrieves the value of the option for the
+ build machine if the argument starts with `build.`.
+
posargs:
option_name:
type: str
diff --git a/docs/yaml/functions/vcs_tag.yaml b/docs/yaml/functions/vcs_tag.yaml
index b4aad12c60de..3a3568429edc 100644
--- a/docs/yaml/functions/vcs_tag.yaml
+++ b/docs/yaml/functions/vcs_tag.yaml
@@ -55,3 +55,34 @@ kwargs:
type: str
default: "'@VCS_TAG@'"
description: String in the input file to substitute with the commit information.
+
+ install:
+ type: bool
+ default: false
+ since: 1.7.0
+ description: |
+ When true, this generated file is installed during
+ the install step, and `install_dir` must be set and not empty.
+
+ install_dir:
+ type: str
+ since: 1.7.0
+ description: |
+ The subdirectory to install the generated file to (e.g. `share/myproject`).
+
+ install_mode:
+ type: list[str | int]
+ since: 1.7.0
+ description: |
+ Specify the file mode in symbolic format
+ and optionally the owner/uid and group/gid for the installed files.
+
+ See the `install_mode` kwarg of [[install_data]] for more information.
+
+ install_tag:
+ type: str
+ since: 1.7.0
+ description: |
+ A string used by the `meson install --tags` command
+ to install only a subset of the files. By default the file has no install
+ tag which means it is not being installed when `--tags` argument is specified.
diff --git a/man/meson.1 b/man/meson.1
index 9ecbb99d0439..caf0f5b4baf9 100644
--- a/man/meson.1
+++ b/man/meson.1
@@ -1,4 +1,4 @@
-.TH MESON "1" "December 2024" "meson 1.6.1" "User Commands"
+.TH MESON "1" "July 2025" "meson 1.8.3" "User Commands"
.SH NAME
meson - a high productivity build system
.SH DESCRIPTION
@@ -40,18 +40,13 @@ your build dir. After that you just run the build command. Meson will
autodetect changes in your source tree and regenerate all files
needed to build the project.
-The setup command is the default operation. If no actual command is
-specified, Meson will assume you meant to do a setup. That means
-that you can set up a build directory without the setup command
-like this:
+Meson includes many built-in options that can be used to tweak the
+configuration of a build directory; projects can also add their
+own options. To set values for the options, use the \-D command
+line argument like this:
-.B meson [
-.I options
-.B ] [
-.I build directory
-.B ] [
-.I source directory
-.B ]
+.B meson setup \-Dopt1=value1 \-Dopt2=value2
+.I rest of the command line...
.SS "options:"
.TP
@@ -328,6 +323,9 @@ a multiplier to use for test timeout values (usually something like 100 for Valg
.TP
\fB\-\-setup\fR
use the specified test setup
+.TP
+\fB\-\-slice SLICE/NUM_SLICES\fR
+Split tests into NUM_SLICES slices and execute slice number SLICE. (Since 1.8.0)
.SH The wrap command
@@ -655,6 +653,13 @@ try to read configuration from .editorconfig
\fB-o OUTPUT, --output OUTPUT\fR
output file (implies having exactly one input)
+.SH When no command is specified
+
+If you run Meson without a subcommand, it will assume you meant
+\fBmeson setup\fR. However, this syntax is deprecated, and Meson
+will print a warning message if it is used. You should always use
+\fBmeson setup\fR explicitly, instead of relying on the default.
+
.SH EXIT STATUS
.TP
diff --git a/mesonbuild/arglist.py b/mesonbuild/arglist.py
index 54d7157e2ccf..4f4d18c55485 100644
--- a/mesonbuild/arglist.py
+++ b/mesonbuild/arglist.py
@@ -29,7 +29,7 @@ class Dedup(enum.Enum):
same is true for include paths and library paths with -I and -L.
UNIQUE - Arguments that once specified cannot be undone, such as `-c` or
`-pipe`. New instances of these can be completely skipped.
- NO_DEDUP - Whether it matters where or how many times on the command-line
+ NO_DEDUP - When it matters where or how many times on the command-line
a particular argument is present. This can matter for symbol
resolution in static or shared libraries, so we cannot de-dup or
reorder them.
@@ -74,12 +74,12 @@ class CompilerArgs(T.MutableSequence[str]):
# Arg prefixes that override by prepending instead of appending
prepend_prefixes: T.Tuple[str, ...] = ()
- # Arg prefixes and args that must be de-duped by returning 2
+ # Arg prefixes and standalone args that must be de-duped by returning 2
dedup2_prefixes: T.Tuple[str, ...] = ()
dedup2_suffixes: T.Tuple[str, ...] = ()
dedup2_args: T.Tuple[str, ...] = ()
- # Arg prefixes and args that must be de-duped by returning 1
+ # Arg prefixes and standalone args that must be de-duped by returning 1
#
# NOTE: not thorough. A list of potential corner cases can be found in
# https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038
@@ -97,15 +97,32 @@ class CompilerArgs(T.MutableSequence[str]):
def __init__(self, compiler: T.Union['Compiler', 'StaticLinker'],
iterable: T.Optional[T.Iterable[str]] = None):
self.compiler = compiler
+
+ if isinstance(iterable, CompilerArgs):
+ iterable.flush_pre_post()
+ # list(iter(x)) is over two times slower than list(x), so
+ # pass the underlying list to list() directly, instead of an iterator
+ iterable = iterable._container
self._container: T.List[str] = list(iterable) if iterable is not None else []
+
self.pre: T.Deque[str] = collections.deque()
- self.post: T.Deque[str] = collections.deque()
+ self.post: T.List[str] = []
+ self.needs_override_check: bool = False
# Flush the saved pre and post list into the _container list
#
# This correctly deduplicates the entries after _can_dedup definition
# Note: This function is designed to work without delete operations, as deletions are worsening the performance a lot.
def flush_pre_post(self) -> None:
+ if not self.needs_override_check:
+ if self.pre:
+ self._container[0:0] = self.pre
+ self.pre.clear()
+ if self.post:
+ self._container.extend(self.post)
+ self.post.clear()
+ return
+
new: T.List[str] = []
pre_flush_set: T.Set[str] = set()
post_flush: T.Deque[str] = collections.deque()
@@ -127,19 +144,18 @@ def flush_pre_post(self) -> None:
#pre and post will overwrite every element that is in the container
#only copy over args that are in _container but not in the post flush or pre flush set
- if pre_flush_set or post_flush_set:
- for a in self._container:
- if a not in post_flush_set and a not in pre_flush_set:
- new.append(a)
- else:
- new.extend(self._container)
+ for a in self._container:
+ if a not in post_flush_set and a not in pre_flush_set:
+ new.append(a)
new.extend(post_flush)
self._container = new
self.pre.clear()
self.post.clear()
+ self.needs_override_check = False
def __iter__(self) -> T.Iterator[str]:
+ # see also __init__, where this method is essentially inlined
self.flush_pre_post()
return iter(self._container)
@@ -193,15 +209,16 @@ def _can_dedup(cls, arg: str) -> Dedup:
with other linkers.
"""
- # A standalone argument must never be deduplicated because it is
- # defined by what comes _after_ it. Thus deduping this:
+ # Argument prefixes that are actually not used as a prefix must never
+ # be deduplicated because they are defined by what comes _after_ them.
+ # Thus deduping this:
# -D FOO -D BAR
# would yield either
# -D FOO BAR
# or
# FOO -D BAR
# both of which are invalid.
- if arg in cls.dedup2_prefixes:
+ if arg in cls.dedup1_prefixes or arg in cls.dedup2_prefixes:
return Dedup.NO_DEDUP
if arg in cls.dedup2_args or \
arg.startswith(cls.dedup2_prefixes) or \
@@ -287,6 +304,8 @@ def __iadd__(self, args: T.Iterable[str]) -> 'CompilerArgs':
# Argument already exists and adding a new instance is useless
if arg in self._container or arg in self.pre or arg in self.post:
continue
+ elif dedup is Dedup.OVERRIDDEN:
+ self.needs_override_check = True
if self._should_prepend(arg):
tmp_pre.appendleft(arg)
else:
diff --git a/mesonbuild/ast/interpreter.py b/mesonbuild/ast/interpreter.py
index 5edd9b3d972c..cd8156a3f235 100644
--- a/mesonbuild/ast/interpreter.py
+++ b/mesonbuild/ast/interpreter.py
@@ -86,10 +86,9 @@ class MockRunTarget(MesonInterpreterObject):
class AstInterpreter(InterpreterBase):
- def __init__(self, source_root: str, subdir: str, subproject: SubProject, visitors: T.Optional[T.List[AstVisitor]] = None):
- super().__init__(source_root, subdir, subproject)
+ def __init__(self, source_root: str, subdir: str, subproject: SubProject, subproject_dir: str, env: environment.Environment, visitors: T.Optional[T.List[AstVisitor]] = None):
+ super().__init__(source_root, subdir, subproject, subproject_dir, env)
self.visitors = visitors if visitors is not None else []
- self.processed_buildfiles: T.Set[str] = set()
self.assignments: T.Dict[str, BaseNode] = {}
self.assign_vals: T.Dict[str, T.Any] = {}
self.reverse_assignment: T.Dict[str, BaseNode] = {}
@@ -174,33 +173,14 @@ def func_subdir(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str
sys.stderr.write(f'Unable to evaluate subdir({args}) in AstInterpreter --> Skipping\n')
return
- prev_subdir = self.subdir
- subdir = os.path.join(prev_subdir, args[0])
- absdir = os.path.join(self.source_root, subdir)
- buildfilename = os.path.join(subdir, environment.build_filename)
- absname = os.path.join(self.source_root, buildfilename)
- symlinkless_dir = os.path.realpath(absdir)
- build_file = os.path.join(symlinkless_dir, 'meson.build')
- if build_file in self.processed_buildfiles:
+ subdir, is_new = self._resolve_subdir(self.source_root, args[0])
+ if not is_new:
sys.stderr.write('Trying to enter {} which has already been visited --> Skipping\n'.format(args[0]))
return
- self.processed_buildfiles.add(build_file)
- if not os.path.isfile(absname):
+ if not self._evaluate_subdir(self.source_root, subdir, self.visitors):
+ buildfilename = os.path.join(subdir, environment.build_filename)
sys.stderr.write(f'Unable to find build file {buildfilename} --> Skipping\n')
- return
- code = self.read_buildfile(absname, buildfilename)
- try:
- codeblock = mparser.Parser(code, absname).parse()
- except mesonlib.MesonException as me:
- me.file = absname
- raise me
-
- self.subdir = subdir
- for i in self.visitors:
- codeblock.accept(i)
- self.evaluate_codeblock(codeblock)
- self.subdir = prev_subdir
def method_call(self, node: BaseNode) -> bool:
return True
@@ -424,16 +404,5 @@ def flatten_args(self, args_raw: T.Union[TYPE_var, T.Sequence[TYPE_var]], includ
flattened_args += [i]
return flattened_args
- def flatten_kwargs(self, kwargs: T.Dict[str, TYPE_var], include_unknown_args: bool = False) -> T.Dict[str, TYPE_var]:
- flattened_kwargs = {}
- for key, val in kwargs.items():
- if isinstance(val, BaseNode):
- resolved = self.resolve_node(val, include_unknown_args)
- if resolved is not None:
- flattened_kwargs[key] = resolved
- elif isinstance(val, (str, bool, int, float)) or include_unknown_args:
- flattened_kwargs[key] = val
- return flattened_kwargs
-
def evaluate_testcase(self, node: TestCaseClauseNode) -> Disabler | None:
return Disabler(subproject=self.subproject)
diff --git a/mesonbuild/ast/introspection.py b/mesonbuild/ast/introspection.py
index b2eb1f8cd633..4eb3fec3e352 100644
--- a/mesonbuild/ast/introspection.py
+++ b/mesonbuild/ast/introspection.py
@@ -1,6 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2018 The Meson development team
-# Copyright © 2024 Intel Corporation
+# Copyright © 2024-2025 Intel Corporation
# This class contains the basic functionality needed to run any interpreter
# or an interpreter-based tool
@@ -10,7 +10,7 @@
import os
import typing as T
-from .. import compilers, environment, mesonlib, optinterpreter, options
+from .. import compilers, environment, mesonlib, options
from .. import coredata as cdata
from ..build import Executable, Jar, SharedLibrary, SharedModule, StaticLibrary
from ..compilers import detect_compiler_for
@@ -55,17 +55,11 @@ def __init__(self,
subproject: SubProject = SubProject(''),
subproject_dir: str = 'subprojects',
env: T.Optional[environment.Environment] = None):
- visitors = visitors if visitors is not None else []
- super().__init__(source_root, subdir, subproject, visitors=visitors)
-
options = IntrospectionHelper(cross_file)
+ env_ = env or environment.Environment(source_root, None, options)
+ super().__init__(source_root, subdir, subproject, subproject_dir, env_, visitors=visitors)
+
self.cross_file = cross_file
- if env is None:
- self.environment = environment.Environment(source_root, None, options)
- else:
- self.environment = env
- self.subproject_dir = subproject_dir
- self.coredata = self.environment.get_coredata()
self.backend = backend
self.default_options = {OptionKey('backend'): self.backend}
self.project_data: T.Dict[str, T.Any] = {}
@@ -118,20 +112,26 @@ def _str_list(node: T.Any) -> T.Optional[T.List[str]]:
proj_license_files = _str_list(kwargs.get('license_files', None)) or []
self.project_data = {'descriptive_name': proj_name, 'version': proj_vers, 'license': proj_license, 'license_files': proj_license_files}
- optfile = os.path.join(self.source_root, self.subdir, 'meson.options')
- if not os.path.exists(optfile):
- optfile = os.path.join(self.source_root, self.subdir, 'meson_options.txt')
- if os.path.exists(optfile):
- oi = optinterpreter.OptionInterpreter(self.coredata.optstore, self.subproject)
- oi.process(optfile)
- assert isinstance(proj_name, str), 'for mypy'
- self.coredata.update_project_options(oi.options, T.cast('SubProject', proj_name))
+ self._load_option_file()
def_opts = self.flatten_args(kwargs.get('default_options', []))
_project_default_options = mesonlib.stringlistify(def_opts)
- self.project_default_options = cdata.create_options_dict(_project_default_options, self.subproject)
+ string_dict = cdata.create_options_dict(_project_default_options, self.subproject)
+ self.project_default_options = {OptionKey(s): v for s, v in string_dict.items()}
self.default_options.update(self.project_default_options)
- self.coredata.set_default_options(self.default_options, self.subproject, self.environment)
+ if self.environment.first_invocation or (self.subproject != '' and self.subproject not in self.coredata.initialized_subprojects):
+ if self.subproject == '':
+ self.coredata.optstore.initialize_from_top_level_project_call(
+ T.cast('T.Dict[T.Union[OptionKey, str], str]', string_dict),
+ {}, # TODO: not handled by this Interpreter.
+ self.environment.options)
+ else:
+ self.coredata.optstore.initialize_from_subproject_call(
+ self.subproject,
+ {}, # TODO: this isn't handled by the introspection interpreter...
+ T.cast('T.Dict[T.Union[OptionKey, str], str]', string_dict),
+ {}) # TODO: this isn't handled by the introspection interpreter...
+ self.coredata.initialized_subprojects.add(self.subproject)
if not self.is_subproject() and 'subproject_dir' in kwargs:
spdirname = kwargs['subproject_dir']
@@ -284,7 +284,7 @@ def traverse_nodes(inqueue: T.List[BaseNode]) -> T.List[BaseNode]:
kwargs_reduced = {k: v for k, v in kwargs.items() if k in targetclass.known_kwargs and k in {'install', 'build_by_default', 'build_always'}}
kwargs_reduced = {k: v.value if isinstance(v, ElementaryNode) else v for k, v in kwargs_reduced.items()}
kwargs_reduced = {k: v for k, v in kwargs_reduced.items() if not isinstance(v, BaseNode)}
- for_machine = MachineChoice.HOST
+ for_machine = MachineChoice.BUILD if kwargs.get('native', False) else MachineChoice.HOST
objects: T.List[T.Any] = []
empty_sources: T.List[T.Any] = []
# Passing the unresolved sources list causes errors
@@ -295,6 +295,7 @@ def traverse_nodes(inqueue: T.List[BaseNode]) -> T.List[BaseNode]:
new_target = {
'name': target.get_basename(),
+ 'machine': target.for_machine.get_lower_case_name(),
'id': target.get_id(),
'type': target.get_typename(),
'defined_in': os.path.normpath(os.path.join(self.source_root, self.subdir, environment.build_filename)),
@@ -312,7 +313,7 @@ def traverse_nodes(inqueue: T.List[BaseNode]) -> T.List[BaseNode]:
return new_target
def build_library(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Optional[T.Dict[str, T.Any]]:
- default_library = self.coredata.get_option(OptionKey('default_library'))
+ default_library = self.coredata.optstore.get_value_for(OptionKey('default_library'))
if default_library == 'shared':
return self.build_target(node, args, kwargs, SharedLibrary)
elif default_library == 'static':
@@ -389,3 +390,14 @@ def extract_subproject_dir(self) -> T.Optional[str]:
if isinstance(val, StringNode):
return val.value
return None
+
+ def flatten_kwargs(self, kwargs: T.Dict[str, TYPE_var], include_unknown_args: bool = False) -> T.Dict[str, TYPE_var]:
+ flattened_kwargs = {}
+ for key, val in kwargs.items():
+ if isinstance(val, BaseNode):
+ resolved = self.resolve_node(val, include_unknown_args)
+ if resolved is not None:
+ flattened_kwargs[key] = resolved
+ elif isinstance(val, (str, bool, int, float)) or include_unknown_args:
+ flattened_kwargs[key] = val
+ return flattened_kwargs
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py
index 079b62dbdeb9..9eac7daa4121 100644
--- a/mesonbuild/backend/backends.py
+++ b/mesonbuild/backend/backends.py
@@ -24,10 +24,12 @@
from .. import programs
from .. import mesonlib
from .. import mlog
-from ..compilers import LANGUAGES_USING_LDFLAGS, detect
+from ..compilers import LANGUAGES_USING_LDFLAGS, detect, lang_suffixes
from ..mesonlib import (
- File, MachineChoice, MesonException, OrderedSet,
- ExecutableSerialisation, classify_unity_sources,
+ File, MachineChoice, MesonException, MesonBugException, OrderedSet,
+ ExecutableSerialisation, EnvironmentException,
+ classify_unity_sources, get_compiler_for_source,
+ is_parent_path, get_rsp_threshold,
)
from ..options import OptionKey
@@ -40,14 +42,16 @@
from ..interpreter import Interpreter, Test
from ..linkers.linkers import StaticLinker
from ..mesonlib import FileMode, FileOrString
+ from ..options import ElementaryOptionValues
- from typing_extensions import TypedDict
+ from typing_extensions import TypedDict, NotRequired
_ALL_SOURCES_TYPE = T.List[T.Union[File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]]
class TargetIntrospectionData(TypedDict):
language: str
+ machine: NotRequired[str]
compiler: T.List[str]
parameters: T.List[str]
sources: T.List[str]
@@ -369,7 +373,7 @@ def get_target_dir(self, target: T.Union[build.Target, build.CustomTargetIndex])
if isinstance(target, build.RunTarget):
# this produces no output, only a dummy top-level name
dirname = ''
- elif self.environment.coredata.get_option(OptionKey('layout')) == 'mirror':
+ elif self.environment.coredata.optstore.get_value_for(OptionKey('layout')) == 'mirror':
dirname = target.get_subdir()
else:
dirname = 'meson-out'
@@ -422,7 +426,7 @@ def generate_unity_files(self, target: build.BuildTarget, unity_src: str) -> T.L
abs_files: T.List[str] = []
result: T.List[mesonlib.File] = []
compsrcs = classify_unity_sources(target.compilers.values(), unity_src)
- unity_size = target.get_option(OptionKey('unity_size'))
+ unity_size = self.get_target_option(target, 'unity_size')
assert isinstance(unity_size, int), 'for mypy'
def init_language_file(suffix: str, unity_file_number: int) -> T.TextIO:
@@ -470,8 +474,8 @@ def flatten_object_list(self, target: build.BuildTarget, proj_dir_to_build_root:
obj_list, deps = self._flatten_object_list(target, target.get_objects(), proj_dir_to_build_root)
return list(dict.fromkeys(obj_list)), deps
- def determine_ext_objs(self, objects: build.ExtractedObjects, proj_dir_to_build_root: str = '') -> T.List[str]:
- obj_list, _ = self._flatten_object_list(objects.target, [objects], proj_dir_to_build_root)
+ def determine_ext_objs(self, objects: build.ExtractedObjects) -> T.List[str]:
+ obj_list, _ = self._flatten_object_list(objects.target, [objects], '')
return list(dict.fromkeys(obj_list))
def _flatten_object_list(self, target: build.BuildTarget,
@@ -498,7 +502,12 @@ def _flatten_object_list(self, target: build.BuildTarget,
objs, d = self._flatten_object_list(obj.target, obj.objlist, proj_dir_to_build_root)
obj_list.extend(objs)
deps.extend(d)
- obj_list.extend(self._determine_ext_objs(obj, proj_dir_to_build_root))
+ new_objs = self._determine_ext_objs(obj)
+ if proj_dir_to_build_root:
+ for o in new_objs:
+ obj_list.append(os.path.join(proj_dir_to_build_root, o))
+ else:
+ obj_list.extend(new_objs)
deps.append(obj.target)
else:
raise MesonException('Unknown data type in object list.')
@@ -524,6 +533,7 @@ def get_executable_serialisation(
capture: T.Optional[str] = None,
feed: T.Optional[str] = None,
env: T.Optional[mesonlib.EnvironmentVariables] = None,
+ can_use_rsp_file: bool = False,
tag: T.Optional[str] = None,
verbose: bool = False,
installdir_map: T.Optional[T.Dict[str, str]] = None) -> 'ExecutableSerialisation':
@@ -573,7 +583,7 @@ def get_executable_serialisation(
is_cross_built = not self.environment.machines.matches_build_machine(exe_for_machine)
if is_cross_built and self.environment.need_exe_wrapper():
if not self.environment.has_exe_wrapper():
- msg = 'An exe_wrapper is needed but was not found. Please define one ' \
+ msg = 'An exe_wrapper is needed for ' + exe_cmd[0] + ' but was not found. Please define one ' \
'in cross file and check the command and/or add it to PATH.'
raise MesonException(msg)
exe_wrapper = self.environment.get_exe_wrapper()
@@ -585,6 +595,21 @@ def get_executable_serialisation(
exe_wrapper = None
workdir = workdir or self.environment.get_build_dir()
+
+ # Must include separators as well
+ needs_rsp_file = can_use_rsp_file and sum(len(i) + 1 for i in cmd_args) >= get_rsp_threshold()
+
+ if needs_rsp_file:
+ hasher = hashlib.sha1()
+ args = ' '.join(mesonlib.quote_arg(arg) for arg in cmd_args)
+ hasher.update(args.encode(encoding='utf-8', errors='ignore'))
+ digest = hasher.hexdigest()
+ scratch_file = f'meson_rsp_{digest}.rsp'
+ rsp_file = os.path.join(self.environment.get_scratch_dir(), scratch_file)
+ with open(rsp_file, 'w', encoding='utf-8', newline='\n') as f:
+ f.write(args)
+ cmd_args = [f'@{rsp_file}']
+
return ExecutableSerialisation(exe_cmd + cmd_args, env,
exe_wrapper, workdir,
extra_paths, capture, feed, tag, verbose, installdir_map)
@@ -597,6 +622,7 @@ def as_meson_exe_cmdline(self, exe: T.Union[str, mesonlib.File, build.BuildTarge
feed: T.Optional[str] = None,
force_serialize: bool = False,
env: T.Optional[mesonlib.EnvironmentVariables] = None,
+ can_use_rsp_file: bool = False,
verbose: bool = False) -> T.Tuple[T.List[str], str]:
'''
Serialize an executable for running with a generator or a custom target
@@ -604,7 +630,7 @@ def as_meson_exe_cmdline(self, exe: T.Union[str, mesonlib.File, build.BuildTarge
cmd: T.List[T.Union[str, mesonlib.File, build.BuildTarget, build.CustomTarget, programs.ExternalProgram]] = []
cmd.append(exe)
cmd.extend(cmd_args)
- es = self.get_executable_serialisation(cmd, workdir, extra_bdeps, capture, feed, env, verbose=verbose)
+ es = self.get_executable_serialisation(cmd, workdir, extra_bdeps, capture, feed, env, can_use_rsp_file, verbose=verbose)
reasons: T.List[str] = []
if es.extra_paths:
reasons.append('to set PATH')
@@ -644,6 +670,9 @@ def as_meson_exe_cmdline(self, exe: T.Union[str, mesonlib.File, build.BuildTarge
envlist.append(f'{k}={v}')
return ['env'] + envlist + es.cmd_args, ', '.join(reasons)
+ if any(a.startswith('@') for a in es.cmd_args):
+ reasons.append('because command is too long')
+
if not force_serialize:
if not capture and not feed:
return es.cmd_args, ''
@@ -789,12 +818,7 @@ def rpaths_for_non_system_absolute_shared_libraries(self, target: build.BuildTar
):
continue
- try:
- commonpath = os.path.commonpath((libdir, srcdir))
- except ValueError: # when paths are on different drives on Windows
- commonpath = ''
-
- if commonpath == srcdir:
+ if is_parent_path(srcdir, libdir):
rel_to_src = libdir[len(srcdir) + 1:]
assert not os.path.isabs(rel_to_src), f'rel_to_src: {rel_to_src} is absolute'
paths.add(os.path.join(self.build_to_src, rel_to_src))
@@ -811,7 +835,7 @@ def rpaths_for_non_system_absolute_shared_libraries(self, target: build.BuildTar
def determine_rpath_dirs(self, target: T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex]
) -> T.Tuple[str, ...]:
result: OrderedSet[str]
- if self.environment.coredata.get_option(OptionKey('layout')) == 'mirror':
+ if self.environment.coredata.optstore.get_value_for(OptionKey('layout')) == 'mirror':
# Need a copy here
result = OrderedSet(target.get_link_dep_subdirs())
else:
@@ -823,8 +847,11 @@ def determine_rpath_dirs(self, target: T.Union[build.BuildTarget, build.CustomTa
return tuple(result)
@staticmethod
+ @lru_cache(maxsize=None)
def canonicalize_filename(fname: str) -> str:
- parts = Path(fname).parts
+ if os.path.altsep is not None:
+ fname = fname.replace(os.path.altsep, os.path.sep)
+ parts = fname.split(os.path.sep)
hashed = ''
if len(parts) > 5:
temp = '/'.join(parts[-5:])
@@ -836,7 +863,7 @@ def canonicalize_filename(fname: str) -> str:
fname = fname.replace(ch, '_')
return hashed + fname
- def object_filename_from_source(self, target: build.BuildTarget, source: 'FileOrString', targetdir: T.Optional[str] = None) -> str:
+ def object_filename_from_source(self, target: build.BuildTarget, compiler: Compiler, source: 'FileOrString', targetdir: T.Optional[str] = None) -> str:
assert isinstance(source, mesonlib.File)
if isinstance(target, build.CompileTarget):
return target.sources_map[source]
@@ -867,12 +894,23 @@ def object_filename_from_source(self, target: build.BuildTarget, source: 'FileOr
gen_source = os.path.relpath(os.path.join(build_dir, rel_src),
os.path.join(self.environment.get_source_dir(), target.get_subdir()))
machine = self.environment.machines[target.for_machine]
- ret = self.canonicalize_filename(gen_source) + '.' + machine.get_object_suffix()
+ object_suffix = machine.get_object_suffix()
+ # For the TASKING compiler, in case of LTO or prelinking the object suffix has to be .mil
+ if compiler.get_id() == 'tasking':
+ use_lto = self.get_target_option(target, 'b_lto')
+ if use_lto or (isinstance(target, build.StaticLibrary) and target.prelink):
+ if not source.rsplit('.', 1)[1] in lang_suffixes['c']:
+ if isinstance(target, build.StaticLibrary) and not target.prelink:
+ raise EnvironmentException('Tried using MIL linking for a static library with an assembly file. This can only be done if the static library is prelinked or \'b_lto\' is disabled.')
+ else:
+ object_suffix = 'mil'
+ ret = self.canonicalize_filename(gen_source) + '.' + object_suffix
if targetdir is not None:
return os.path.join(targetdir, ret)
return ret
- def _determine_ext_objs(self, extobj: 'build.ExtractedObjects', proj_dir_to_build_root: str) -> T.List[str]:
+ @lru_cache(maxsize=None)
+ def _determine_ext_objs(self, extobj: 'build.ExtractedObjects') -> T.List[str]:
result: T.List[str] = []
targetdir = self.get_target_private_dir(extobj.target)
@@ -882,8 +920,7 @@ def _determine_ext_objs(self, extobj: 'build.ExtractedObjects', proj_dir_to_buil
for gensrc in extobj.genlist:
for r in gensrc.get_outputs():
path = self.get_target_generated_dir(extobj.target, gensrc, r)
- dirpart, fnamepart = os.path.split(path)
- raw_sources.append(File(True, dirpart, fnamepart))
+ raw_sources.append(File.from_built_relative(path))
# Filter out headers and all non-source files
sources: T.List['FileOrString'] = []
@@ -899,7 +936,7 @@ def _determine_ext_objs(self, extobj: 'build.ExtractedObjects', proj_dir_to_buil
compiler = extobj.target.compilers[lang]
if compiler.get_argument_syntax() == 'msvc':
objname = self.get_msvc_pch_objname(lang, pch)
- result.append(os.path.join(proj_dir_to_build_root, targetdir, objname))
+ result.append(os.path.join(targetdir, objname))
# extobj could contain only objects and no sources
if not sources:
@@ -908,10 +945,10 @@ def _determine_ext_objs(self, extobj: 'build.ExtractedObjects', proj_dir_to_buil
# With unity builds, sources don't map directly to objects,
# we only support extracting all the objects in this mode,
# so just return all object files.
- if extobj.target.is_unity:
+ if self.is_unity(extobj.target):
compsrcs = classify_unity_sources(extobj.target.compilers.values(), sources)
sources = []
- unity_size = extobj.target.get_option(OptionKey('unity_size'))
+ unity_size = self.get_target_option(extobj.target, 'unity_size')
assert isinstance(unity_size, int), 'for mypy'
for comp, srcs in compsrcs.items():
@@ -924,9 +961,9 @@ def _determine_ext_objs(self, extobj: 'build.ExtractedObjects', proj_dir_to_buil
sources.append(_src)
for osrc in sources:
- objname = self.object_filename_from_source(extobj.target, osrc, targetdir)
- objpath = os.path.join(proj_dir_to_build_root, objname)
- result.append(objpath)
+ compiler = get_compiler_for_source(extobj.target.compilers.values(), osrc)
+ objname = self.object_filename_from_source(extobj.target, compiler, osrc, targetdir)
+ result.append(objname)
return result
@@ -964,7 +1001,7 @@ def create_msvc_pch_implementation(self, target: build.BuildTarget, lang: str, p
def target_uses_pch(self, target: build.BuildTarget) -> bool:
try:
- return T.cast('bool', target.get_option(OptionKey('b_pch')))
+ return T.cast('bool', self.get_target_option(target, 'b_pch'))
except (KeyError, AttributeError):
return False
@@ -990,7 +1027,6 @@ def generate_basic_compiler_args(self, target: build.BuildTarget, compiler: 'Com
# starting from hard-coded defaults followed by build options and so on.
commands = compiler.compiler_args()
- copt_proxy = target.get_options()
# First, the trivial ones that are impossible to override.
#
# Add -nostdinc/-nostdinc++ if needed; can't be overridden
@@ -998,22 +1034,23 @@ def generate_basic_compiler_args(self, target: build.BuildTarget, compiler: 'Com
# Add things like /NOLOGO or -pipe; usually can't be overridden
commands += compiler.get_always_args()
# warning_level is a string, but mypy can't determine that
- commands += compiler.get_warn_args(T.cast('str', target.get_option(OptionKey('warning_level'))))
+ commands += compiler.get_warn_args(T.cast('str', self.get_target_option(target, 'warning_level')))
# Add -Werror if werror=true is set in the build options set on the
# command-line or default_options inside project(). This only sets the
# action to be done for warnings if/when they are emitted, so it's ok
# to set it after or get_warn_args().
- if target.get_option(OptionKey('werror')):
+ if self.get_target_option(target, 'werror'):
commands += compiler.get_werror_args()
# Add compile args for c_* or cpp_* build options set on the
# command-line or default_options inside project().
- commands += compiler.get_option_compile_args(copt_proxy)
+ commands += compiler.get_option_compile_args(target, self.environment, target.subproject)
+ commands += compiler.get_option_std_args(target, self.environment, target.subproject)
- optimization = target.get_option(OptionKey('optimization'))
+ optimization = self.get_target_option(target, 'optimization')
assert isinstance(optimization, str), 'for mypy'
commands += compiler.get_optimization_args(optimization)
- debug = target.get_option(OptionKey('debug'))
+ debug = self.get_target_option(target, 'debug')
assert isinstance(debug, bool), 'for mypy'
commands += compiler.get_debug_args(debug)
@@ -1179,25 +1216,31 @@ def determine_windows_extra_paths(
links to and return them so they can be used in unit
tests.
"""
- result: T.Set[str] = set()
prospectives: T.Set[build.BuildTargetTypes] = set()
+ internal_deps: T.Set[str] = set()
+ external_deps: T.Set[str] = set()
+
if isinstance(target, build.BuildTarget):
- prospectives.update(target.get_transitive_link_deps())
- # External deps
- result.update(self.extract_dll_paths(target))
+ prospectives.update(target.get_all_link_deps())
for bdep in extra_bdeps:
prospectives.add(bdep)
if isinstance(bdep, build.BuildTarget):
- prospectives.update(bdep.get_transitive_link_deps())
+ prospectives.update(bdep.get_all_link_deps())
+
# Internal deps
for ld in prospectives:
dirseg = os.path.join(self.environment.get_build_dir(), self.get_target_dir(ld))
- result.add(dirseg)
- if (isinstance(target, build.BuildTarget) and
- not self.environment.machines.matches_build_machine(target.for_machine)):
- result.update(self.get_mingw_extra_paths(target))
- return list(result)
+ internal_deps.add(dirseg)
+
+ if isinstance(target, build.BuildTarget):
+ # External deps
+ external_deps.update(self.extract_dll_paths(target))
+
+ if not self.environment.machines.matches_build_machine(target.for_machine):
+ external_deps.update(self.get_mingw_extra_paths(target))
+
+ return list(internal_deps) + list(external_deps)
def write_benchmark_file(self, datafile: T.BinaryIO) -> None:
self.write_test_serialisation(self.build.get_benchmarks(), datafile)
@@ -1235,12 +1278,9 @@ def create_test_serialisation(self, tests: T.List['Test']) -> T.List[TestSeriali
extra_bdeps: T.List[T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex]] = []
if isinstance(exe, build.CustomTarget):
extra_bdeps = list(exe.get_transitive_build_target_deps())
+ extra_bdeps.extend(t.depends)
+ extra_bdeps.extend(a for a in t.cmd_args if isinstance(a, build.BuildTarget))
extra_paths = self.determine_windows_extra_paths(exe, extra_bdeps)
- for a in t.cmd_args:
- if isinstance(a, build.BuildTarget):
- for p in self.determine_windows_extra_paths(a, []):
- if p not in extra_paths:
- extra_paths.append(p)
else:
extra_paths = []
@@ -1266,8 +1306,12 @@ def create_test_serialisation(self, tests: T.List['Test']) -> T.List[TestSeriali
else:
raise MesonException('Bad object in test command.')
+ # set LD_LIBRARY_PATH for
+ # a) dependencies, as relying on rpath is not very safe:
+ # https://github.com/mesonbuild/meson/pull/11119
+ # b) depends and targets passed via args.
t_env = copy.deepcopy(t.env)
- if not machine.is_windows() and not machine.is_cygwin() and not machine.is_darwin():
+ if not machine.is_windows() and not machine.is_cygwin():
ld_lib_path_libs: T.Set[build.SharedLibrary] = set()
for d in depends:
if isinstance(d, build.BuildTarget):
@@ -1280,6 +1324,8 @@ def create_test_serialisation(self, tests: T.List['Test']) -> T.List[TestSeriali
if ld_lib_path:
t_env.prepend('LD_LIBRARY_PATH', list(ld_lib_path), ':')
+ if machine.is_darwin():
+ t_env.prepend('DYLD_LIBRARY_PATH', list(ld_lib_path), ':')
ts = TestSerialisation(t.get_name(), t.project_name, t.suite, cmd, is_cross,
exe_wrapper, self.environment.need_exe_wrapper(),
@@ -1319,7 +1365,7 @@ def construct_target_rel_paths(self, t: T.Union[build.Target, build.CustomTarget
def generate_depmf_install(self, d: InstallData) -> None:
depmf_path = self.build.dep_manifest_name
if depmf_path is None:
- option_dir = self.environment.coredata.get_option(OptionKey('licensedir'))
+ option_dir = self.environment.coredata.optstore.get_value_for(OptionKey('licensedir'))
assert isinstance(option_dir, str), 'for mypy'
if option_dir:
depmf_path = os.path.join(option_dir, 'depmf.json')
@@ -1338,9 +1384,9 @@ def generate_depmf_install(self, d: InstallData) -> None:
d.data.append(InstallDataBase(ifilename, ofilename, out_name, None, '',
tag='devel', data_type='depmf'))
for m in self.build.dep_manifest.values():
- for ifilename, name in m.license_files:
- ofilename = os.path.join(odirname, name.relative_name())
- out_name = os.path.join(out_dir, name.relative_name())
+ for ifilename, name in m.license_mapping():
+ ofilename = os.path.join(odirname, name)
+ out_name = os.path.join(out_dir, name)
d.data.append(InstallDataBase(ifilename, ofilename, out_name, None,
m.subproject, tag='devel', data_type='depmf'))
@@ -1539,7 +1585,7 @@ def get_custom_target_dir_include_args(
def eval_custom_target_command(
self, target: build.CustomTarget, absolute_outputs: bool = False) -> \
- T.Tuple[T.List[str], T.List[str], T.List[str]]:
+ T.Tuple[T.List[str], T.List[str], T.List[str | programs.ExternalProgram]]:
# We want the outputs to be absolute only when using the VS backend
# XXX: Maybe allow the vs backend to use relative paths too?
source_root = self.build_to_src
@@ -1552,7 +1598,7 @@ def eval_custom_target_command(
outputs = [os.path.join(outdir, i) for i in target.get_outputs()]
inputs = self.get_custom_target_sources(target)
# Evaluate the command list
- cmd: T.List[str] = []
+ cmd: T.List[str | programs.ExternalProgram] = []
for i in target.command:
if isinstance(i, build.BuildTarget):
cmd += self.build_target_to_cmd_array(i)
@@ -1588,6 +1634,9 @@ def eval_custom_target_command(
if not target.absolute_paths:
pdir = self.get_target_private_dir(target)
i = i.replace('@PRIVATE_DIR@', pdir)
+ elif isinstance(i, programs.ExternalProgram):
+ # Let it pass and be extended elsewhere
+ pass
else:
raise RuntimeError(f'Argument {i} is of unknown type {type(i)}')
cmd.append(i)
@@ -1612,7 +1661,7 @@ def eval_custom_target_command(
# fixed.
#
# https://github.com/mesonbuild/meson/pull/737
- cmd = [i.replace('\\', '/') for i in cmd]
+ cmd = [i.replace('\\', '/') if isinstance(i, str) else i for i in cmd]
return inputs, outputs, cmd
def get_introspect_command(self) -> str:
@@ -1650,7 +1699,7 @@ def create_install_data(self) -> InstallData:
# TODO go through all candidates, like others
strip_bin = [detect.defaults['strip'][0]]
- umask = self.environment.coredata.get_option(OptionKey('install_umask'))
+ umask = self.environment.coredata.optstore.get_value_for(OptionKey('install_umask'))
assert isinstance(umask, (str, int)), 'for mypy'
d = InstallData(self.environment.get_source_dir(),
@@ -1682,7 +1731,7 @@ def guess_install_tag(self, fname: str, outdir: T.Optional[str] = None) -> T.Opt
bindir = Path(prefix, self.environment.get_bindir())
libdir = Path(prefix, self.environment.get_libdir())
incdir = Path(prefix, self.environment.get_includedir())
- _ldir = self.environment.coredata.get_option(OptionKey('localedir'))
+ _ldir = self.environment.coredata.optstore.get_value_for(OptionKey('localedir'))
assert isinstance(_ldir, str), 'for mypy'
localedir = Path(prefix, _ldir)
dest_path = Path(prefix, outdir, Path(fname).name) if outdir else Path(prefix, fname)
@@ -1738,7 +1787,7 @@ def generate_target_install(self, d: InstallData) -> None:
# TODO: Create GNUStrip/AppleStrip/etc. hierarchy for more
# fine-grained stripping of static archives.
can_strip = not isinstance(t, build.StaticLibrary)
- should_strip = can_strip and t.get_option(OptionKey('strip'))
+ should_strip = can_strip and self.get_target_option(t, 'strip')
assert isinstance(should_strip, bool), 'for mypy'
# Install primary build output (library/executable/jar, etc)
# Done separately because of strip/aliases/rpath
@@ -1973,6 +2022,8 @@ def get_introspection_data(self, target_id: str, target: build.Target) -> T.List
compiler += [j]
elif isinstance(j, (build.BuildTarget, build.CustomTarget)):
compiler += j.get_outputs()
+ elif isinstance(j, programs.ExternalProgram):
+ compiler += j.get_command()
else:
raise RuntimeError(f'Type "{type(j).__name__}" is not supported in get_introspection_data. This is a bug')
@@ -1987,49 +2038,36 @@ def get_introspection_data(self, target_id: str, target: build.Target) -> T.List
return []
def get_devenv(self) -> mesonlib.EnvironmentVariables:
- env = mesonlib.EnvironmentVariables()
extra_paths = set()
library_paths = set()
- build_machine = self.environment.machines[MachineChoice.BUILD]
host_machine = self.environment.machines[MachineChoice.HOST]
- need_wine = not build_machine.is_windows() and host_machine.is_windows()
for t in self.build.get_targets().values():
- in_default_dir = t.should_install() and not t.get_install_dir()[2]
- if t.for_machine != MachineChoice.HOST or not in_default_dir:
+ if t.for_machine is not MachineChoice.HOST or not t.should_install():
continue
+
+ if (host_machine.is_windows() or host_machine.is_cygwin()) and isinstance(t, (build.Executable, build.SharedModule)):
+ # On windows we cannot rely on rpath to run executables from build
+ # directory. We have to add in PATH the location of every DLL needed.
+ library_paths.update(self.determine_windows_extra_paths(t, []))
+
+ if t.get_install_dir()[2]:
+ # Do not update paths for target installed in non default location
+ continue
+
tdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(t))
if isinstance(t, build.Executable):
# Add binaries that are going to be installed in bindir into PATH
# so they get used by default instead of searching on system when
# in developer environment.
extra_paths.add(tdir)
- if host_machine.is_windows() or host_machine.is_cygwin():
- # On windows we cannot rely on rpath to run executables from build
- # directory. We have to add in PATH the location of every DLL needed.
- library_paths.update(self.determine_windows_extra_paths(t, []))
+
elif isinstance(t, build.SharedLibrary):
# Add libraries that are going to be installed in libdir into
# LD_LIBRARY_PATH. This allows running system applications using
# that library.
library_paths.add(tdir)
- if need_wine:
- # Executable paths should be in both PATH and WINEPATH.
- # - Having them in PATH makes bash completion find it,
- # and make running "foo.exe" find it when wine-binfmt is installed.
- # - Having them in WINEPATH makes "wine foo.exe" find it.
- library_paths.update(extra_paths)
- if library_paths:
- if need_wine:
- env.prepend('WINEPATH', list(library_paths), separator=';')
- elif host_machine.is_windows() or host_machine.is_cygwin():
- extra_paths.update(library_paths)
- elif host_machine.is_darwin():
- env.prepend('DYLD_LIBRARY_PATH', list(library_paths))
- else:
- env.prepend('LD_LIBRARY_PATH', list(library_paths))
- if extra_paths:
- env.prepend('PATH', list(extra_paths))
- return env
+
+ return self.environment.get_env_for_paths(library_paths, extra_paths)
def compiler_to_generator_args(self, target: build.BuildTarget,
compiler: 'Compiler', output: str = '@OUTPUT@',
@@ -2059,6 +2097,12 @@ def compiler_to_generator_args(self, target: build.BuildTarget,
commands += [input]
return commands
+ def have_language(self, langname: str) -> bool:
+ for for_machine in MachineChoice:
+ if langname in self.environment.coredata.compilers[for_machine]:
+ return True
+ return False
+
def compiler_to_generator(self, target: build.BuildTarget,
compiler: 'Compiler',
sources: _ALL_SOURCES_TYPE,
@@ -2069,9 +2113,8 @@ def compiler_to_generator(self, target: build.BuildTarget,
Some backends don't support custom compilers. This is a convenience
method to convert a Compiler to a Generator.
'''
- exelist = compiler.get_exelist()
- exe = programs.ExternalProgram(exelist[0])
- args = exelist[1:]
+ exe = programs.ExternalProgram(compiler.get_exe())
+ args = compiler.get_exe_args()
commands = self.compiler_to_generator_args(target, compiler)
generator = build.Generator(exe, args + commands.to_native(),
[output_templ], depfile='@PLAINNAME@.d',
@@ -2082,3 +2125,24 @@ def compile_target_to_generator(self, target: build.CompileTarget) -> build.Gene
all_sources = T.cast('_ALL_SOURCES_TYPE', target.sources) + T.cast('_ALL_SOURCES_TYPE', target.generated)
return self.compiler_to_generator(target, target.compiler, all_sources,
target.output_templ, target.depends)
+
+ def is_unity(self, target: build.BuildTarget) -> bool:
+ if isinstance(target, build.CompileTarget):
+ return False
+ val = self.get_target_option(target, 'unity')
+ if val == 'on':
+ return True
+ if val == 'off':
+ return False
+ if val == 'subprojects':
+ return target.subproject != ''
+ raise MesonException(f'Internal error: invalid option type for "unity": {val}')
+
+ def get_target_option(self, target: build.BuildTarget, name: T.Union[str, OptionKey]) -> ElementaryOptionValues:
+ if isinstance(name, str):
+ key = OptionKey(name, subproject=target.subproject)
+ elif isinstance(name, OptionKey):
+ key = name
+ else:
+ raise MesonBugException('Internal error: invalid option type.')
+ return self.environment.coredata.get_option_for_target(target, key)
diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py
index b78ad076d24b..b783417cad2d 100644
--- a/mesonbuild/backend/ninjabackend.py
+++ b/mesonbuild/backend/ninjabackend.py
@@ -1,10 +1,10 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2012-2017 The Meson development team
-# Copyright © 2023-2024 Intel Corporation
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
-from collections import OrderedDict
+from collections import defaultdict, OrderedDict
from dataclasses import dataclass
from enum import Enum, unique
from functools import lru_cache
@@ -31,7 +31,7 @@
File, LibType, MachineChoice, MesonBugException, MesonException, OrderedSet, PerMachine,
ProgressBar, quote_arg
)
-from ..mesonlib import get_compiler_for_source, has_path_sep
+from ..mesonlib import get_compiler_for_source, has_path_sep, is_parent_path
from ..options import OptionKey
from .backends import CleanTrees
from ..build import GeneratedList, InvalidArguments
@@ -45,6 +45,7 @@
from ..linkers.linkers import DynamicLinker, StaticLinker
from ..compilers.cs import CsCompiler
from ..compilers.fortran import FortranCompiler
+ from ..compilers.rust import RustCompiler
from ..mesonlib import FileOrString
from .backends import TargetIntrospectionData
@@ -91,27 +92,8 @@ def gcc_rsp_quote(s: str) -> str:
return quote_func(s)
-def get_rsp_threshold() -> int:
- '''Return a conservative estimate of the commandline size in bytes
- above which a response file should be used. May be overridden for
- debugging by setting environment variable MESON_RSP_THRESHOLD.'''
-
- if mesonlib.is_windows():
- # Usually 32k, but some projects might use cmd.exe,
- # and that has a limit of 8k.
- limit = 8192
- else:
- # Unix-like OSes usually have very large command line limits, (On Linux,
- # for example, this is limited by the kernel's MAX_ARG_STRLEN). However,
- # some programs place much lower limits, notably Wine which enforces a
- # 32k limit like Windows. Therefore, we limit the command line to 32k.
- limit = 32768
- # Be conservative
- limit = limit // 2
- return int(os.environ.get('MESON_RSP_THRESHOLD', limit))
-
# a conservative estimate of the command-line length limit
-rsp_threshold = get_rsp_threshold()
+rsp_threshold = mesonlib.get_rsp_threshold()
# ninja variables whose value should remain unquoted. The value of these ninja
# variables (or variables we use them in) is interpreted directly by ninja
@@ -123,13 +105,6 @@ def get_rsp_threshold() -> int:
NINJA_QUOTE_VAR_PAT = re.compile(r"[$ \n]")
def ninja_quote(text: str, is_build_line: bool = False) -> str:
- if is_build_line:
- quote_re = NINJA_QUOTE_BUILD_PAT
- else:
- quote_re = NINJA_QUOTE_VAR_PAT
- # Fast path for when no quoting is necessary
- if not quote_re.search(text):
- return text
if '\n' in text:
errmsg = f'''Ninja does not support newlines in rules. The content was:
@@ -137,7 +112,12 @@ def ninja_quote(text: str, is_build_line: bool = False) -> str:
Please report this error with a test case to the Meson bug tracker.'''
raise MesonException(errmsg)
- return quote_re.sub(r'$\g<0>', text)
+
+ quote_re = NINJA_QUOTE_BUILD_PAT if is_build_line else NINJA_QUOTE_VAR_PAT
+ if ' ' in text or '$' in text or (is_build_line and ':' in text):
+ return quote_re.sub(r'$\g<0>', text)
+
+ return text
@dataclass
@@ -228,6 +208,9 @@ def strToCommandArg(c: T.Union[NinjaCommandArg, str]) -> NinjaCommandArg:
self.refcount = 0
self.rsprefcount = 0
self.rspfile_quote_style = rspfile_quote_style
+ self.command_str = ' '.join([self._quoter(x) for x in self.command + self.args])
+ self.var_refs = [m for m in re.finditer(r'(\${\w+}|\$\w+)?[^$]*', self.command_str)
+ if m.start(1) != -1]
if self.depfile == '$DEPFILE':
self.depfile += '_UNQUOTED'
@@ -245,7 +228,7 @@ def _quoter(x: NinjaCommandArg, qf: T.Callable[[str], str] = quote_func) -> str:
def write(self, outfile: T.TextIO) -> None:
rspfile_args = self.args
rspfile_quote_func: T.Callable[[str], str]
- if self.rspfile_quote_style is RSPFileSyntax.MSVC:
+ if self.rspfile_quote_style in {RSPFileSyntax.MSVC, RSPFileSyntax.TASKING}:
rspfile_quote_func = cmd_quote
rspfile_args = [NinjaCommandArg('$in_newline', arg.quoting) if arg.s == '$in' else arg for arg in rspfile_args]
else:
@@ -260,11 +243,14 @@ def rule_iter() -> T.Iterable[str]:
for rsp in rule_iter():
outfile.write(f'rule {self.name}{rsp}\n')
if rsp == '_RSP':
- outfile.write(' command = {} @$out.rsp\n'.format(' '.join([self._quoter(x) for x in self.command])))
+ if self.rspfile_quote_style is RSPFileSyntax.TASKING:
+ outfile.write(' command = {} --option-file=$out.rsp\n'.format(' '.join([self._quoter(x) for x in self.command])))
+ else:
+ outfile.write(' command = {} @$out.rsp\n'.format(' '.join([self._quoter(x) for x in self.command])))
outfile.write(' rspfile = $out.rsp\n')
outfile.write(' rspfile_content = {}\n'.format(' '.join([self._quoter(x, rspfile_quote_func) for x in rspfile_args])))
else:
- outfile.write(' command = {}\n'.format(' '.join([self._quoter(x) for x in self.command + self.args])))
+ outfile.write(' command = {}\n'.format(self.command_str))
if self.deps:
outfile.write(f' deps = {self.deps}\n')
if self.depfile:
@@ -291,18 +277,16 @@ def length_estimate(self, infiles: str, outfiles: str,
ninja_vars['out'] = [outfiles]
# expand variables in command
- command = ' '.join([self._quoter(x) for x in self.command + self.args])
- estimate = len(command)
- for m in re.finditer(r'(\${\w+}|\$\w+)?[^$]*', command):
- if m.start(1) != -1:
- estimate -= m.end(1) - m.start(1)
- chunk = m.group(1)
- if chunk[1] == '{':
- chunk = chunk[2:-1]
- else:
- chunk = chunk[1:]
- chunk = ninja_vars.get(chunk, []) # undefined ninja variables are empty
- estimate += len(' '.join(chunk))
+ estimate = len(self.command_str)
+ for m in self.var_refs:
+ estimate -= m.end(1) - m.start(1)
+ chunk = m.group(1)
+ if chunk[1] == '{':
+ chunk = chunk[2:-1]
+ else:
+ chunk = chunk[1:]
+ chunk = ninja_vars.get(chunk, []) # undefined ninja variables are empty
+ estimate += len(' '.join(chunk))
# determine command length
return estimate
@@ -323,8 +307,8 @@ def __init__(self, all_outputs: T.Set[str], outfilenames, rulename, infilenames,
self.infilenames = [infilenames]
else:
self.infilenames = infilenames
- self.deps = OrderedSet()
- self.orderdeps = OrderedSet()
+ self.deps = set()
+ self.orderdeps = set()
self.elems = []
self.all_outputs = all_outputs
self.output_errors = ''
@@ -353,6 +337,7 @@ def add_item(self, name: str, elems: T.Union[str, T.List[str], CompilerArgs]) ->
if name == 'DEPFILE':
self.elems.append((name + '_UNQUOTED', elems))
+ @mesonlib.lazy_property
def _should_use_rspfile(self) -> bool:
# 'phony' is a rule built-in to ninja
if self.rulename == 'phony':
@@ -370,7 +355,7 @@ def _should_use_rspfile(self) -> bool:
def count_rule_references(self) -> None:
if self.rulename != 'phony':
- if self._should_use_rspfile():
+ if self._should_use_rspfile:
self.rule.rsprefcount += 1
else:
self.rule.refcount += 1
@@ -383,7 +368,7 @@ def write(self, outfile: T.TextIO) -> None:
implicit_outs = ' '.join([ninja_quote(i, True) for i in self.implicit_outfilenames])
if implicit_outs:
implicit_outs = ' | ' + implicit_outs
- use_rspfile = self._should_use_rspfile()
+ use_rspfile = self._should_use_rspfile
if use_rspfile:
rulename = self.rulename + '_RSP'
mlog.debug(f'Command line for building {self.outfilenames} is long, using a response file')
@@ -412,7 +397,7 @@ def write(self, outfile: T.TextIO) -> None:
outfile.write(line)
if use_rspfile:
- if self.rule.rspfile_quote_style is RSPFileSyntax.MSVC:
+ if self.rule.rspfile_quote_style in {RSPFileSyntax.MSVC, RSPFileSyntax.TASKING}:
qf = cmd_quote
else:
qf = gcc_rsp_quote
@@ -463,6 +448,8 @@ class RustCrate:
display_name: str
root_module: str
+ crate_type: str
+ target_name: str
edition: RUST_EDITIONS
deps: T.List[RustDep]
cfg: T.List[str]
@@ -498,6 +485,8 @@ def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Inter
self.ninja_filename = 'build.ninja'
self.fortran_deps: T.Dict[str, T.Dict[str, File]] = {}
self.all_outputs: T.Set[str] = set()
+ self.all_pch: T.Dict[str, T.Set[str]] = defaultdict(set)
+ self.all_structured_sources: T.Set[str] = set()
self.introspection_data = {}
self.created_llvm_ir_rule = PerMachine(False, False)
self.rust_crates: T.Dict[str, RustCrate] = {}
@@ -511,11 +500,6 @@ def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Inter
# - https://github.com/mesonbuild/meson/pull/9453
# - https://github.com/mesonbuild/meson/issues/9479#issuecomment-953485040
self.allow_thin_archives = PerMachine[bool](True, True)
- if self.environment:
- for for_machine in MachineChoice:
- if 'cuda' in self.environment.coredata.compilers[for_machine]:
- mlog.debug('cuda enabled globally, disabling thin archives for {}, since nvcc/nvlink cannot handle thin archives natively'.format(for_machine))
- self.allow_thin_archives[for_machine] = False
def create_phony_target(self, dummy_outfile: str, rulename: str, phony_infilename: str) -> NinjaBuildElement:
'''
@@ -606,8 +590,14 @@ def generate(self, capture: bool = False, vslite_ctx: T.Optional[T.Dict] = None)
# We don't yet have a use case where we'd expect to make use of this,
# so no harm in catching and reporting something unexpected.
raise MesonBugException('We do not expect the ninja backend to be given a valid \'vslite_ctx\'')
+ if self.environment:
+ for for_machine in MachineChoice:
+ if 'cuda' in self.environment.coredata.compilers[for_machine]:
+ mlog.debug('cuda enabled globally, disabling thin archives for {}, since nvcc/nvlink cannot handle thin archives natively'.format(for_machine))
+ self.allow_thin_archives[for_machine] = False
+
ninja = environment.detect_ninja_command_and_version(log=True)
- if self.environment.coredata.get_option(OptionKey('vsenv')):
+ if self.environment.coredata.optstore.get_value_for(OptionKey('vsenv')):
builddir = Path(self.environment.get_build_dir())
try:
# For prettier printing, reduce to a relative path. If
@@ -623,6 +613,7 @@ def generate(self, capture: bool = False, vslite_ctx: T.Optional[T.Dict] = None)
if ninja is None:
raise MesonException('Could not detect Ninja v1.8.2 or newer')
(self.ninja_command, self.ninja_version) = ninja
+ self.ninja_has_dyndeps = mesonlib.version_compare(self.ninja_version, '>=1.10.0')
outfilename = os.path.join(self.environment.get_build_dir(), self.ninja_filename)
tempfilename = outfilename + '~'
with open(tempfilename, 'w', encoding='utf-8') as outfile:
@@ -631,7 +622,7 @@ def generate(self, capture: bool = False, vslite_ctx: T.Optional[T.Dict] = None)
outfile.write('# Do not edit by hand.\n\n')
outfile.write('ninja_required_version = 1.8.2\n\n')
- num_pools = self.environment.coredata.optstore.get_value('backend_max_links')
+ num_pools = self.environment.coredata.optstore.get_value_for('backend_max_links')
if num_pools > 0:
outfile.write(f'''pool link_pool
depth = {num_pools}
@@ -664,8 +655,8 @@ def generate(self, capture: bool = False, vslite_ctx: T.Optional[T.Dict] = None)
self.generate_dist()
mlog.log_timestamp("Dist generated")
key = OptionKey('b_coverage')
- if (key in self.environment.coredata.optstore and
- self.environment.coredata.optstore.get_value(key)):
+ if key in self.environment.coredata.optstore and\
+ self.environment.coredata.optstore.get_value_for('b_coverage'):
gcovr_exe, gcovr_version, lcov_exe, lcov_version, genhtml_exe, llvm_cov_exe = environment.find_coverage_tools(self.environment.coredata)
mlog.debug(f'Using {gcovr_exe} ({gcovr_version}), {lcov_exe} and {llvm_cov_exe} for code coverage')
if gcovr_exe or (lcov_exe and genhtml_exe):
@@ -729,6 +720,11 @@ def generate_compdb(self) -> None:
for ext in ['', '_RSP']]
rules += [f"{rule}{ext}" for rule in [self.compiler_to_pch_rule_name(compiler)]
for ext in ['', '_RSP']]
+ # Add custom MIL link rules to get the files compiled by the TASKING compiler family to MIL files included in the database
+ if compiler.get_id() == 'tasking':
+ rule = self.get_compiler_rule_name('tasking_mil_compile', compiler.for_machine)
+ rules.append(rule)
+ rules.append(f'{rule}_RSP')
compdb_options = ['-x'] if mesonlib.version_compare(self.ninja_version, '>=1.9') else []
ninja_compdb = self.ninja_command + ['-t', 'compdb'] + compdb_options + rules
builddir = self.environment.get_build_dir()
@@ -836,6 +832,7 @@ def create_target_source_introspection(self, target: build.Target, comp: compile
# The new entry
src_block = {
'language': lang,
+ 'machine': comp.for_machine.get_lower_case_name(),
'compiler': comp.get_exelist(),
'parameters': parameters,
'sources': [],
@@ -944,7 +941,7 @@ def generate_target(self, target) -> None:
# Generate rules for building the remaining source files in this target
outname = self.get_target_filename(target)
obj_list = []
- is_unity = target.is_unity
+ is_unity = self.is_unity(target)
header_deps = []
unity_src = []
unity_deps = [] # Generated sources that must be built before compiling a Unity target.
@@ -1012,14 +1009,20 @@ def generate_target(self, target) -> None:
pch_objects = []
o, od = self.flatten_object_list(target)
- obj_targets = [t for t in od if t.uses_fortran()]
obj_list.extend(o)
+ fortran_order_deps = self.get_fortran_order_deps(od)
- fortran_order_deps = [File(True, *os.path.split(self.get_target_filename(t))) for t in obj_targets]
fortran_inc_args: T.List[str] = []
if target.uses_fortran():
fortran_inc_args = mesonlib.listify([target.compilers['fortran'].get_include_args(
- self.get_target_private_dir(t), is_system=False) for t in obj_targets])
+ self.get_target_private_dir(t), is_system=False) for t in od if t.uses_fortran()])
+
+ # add the private directories of all transitive dependencies, which
+ # are needed for their mod files
+ fc = target.compilers['fortran']
+ for t in target.get_all_linked_targets():
+ fortran_inc_args.extend(fc.get_include_args(
+ self.get_target_private_dir(t), False))
# Generate compilation targets for sources generated by transpilers.
#
@@ -1074,12 +1077,15 @@ def generate_target(self, target) -> None:
# Skip the link stage for this special type of target
return
linker, stdlib_args = self.determine_linker_and_stdlib_args(target)
- if isinstance(target, build.StaticLibrary) and target.prelink:
+
+ if not isinstance(target, build.StaticLibrary):
+ final_obj_list = obj_list
+ elif target.prelink:
final_obj_list = self.generate_prelink(target, obj_list)
else:
final_obj_list = obj_list
elem = self.generate_link(target, outname, final_obj_list, linker, pch_objects, stdlib_args=stdlib_args)
- self.generate_dependency_scan_target(target, compiled_sources, source2object, generated_source_files, fortran_order_deps)
+ self.generate_dependency_scan_target(target, compiled_sources, source2object, fortran_order_deps)
self.add_build(elem)
#In AIX, we archive shared libraries. If the instance is a shared library, we add a command to archive the shared library
#object and create the build element.
@@ -1089,7 +1095,7 @@ def generate_target(self, target) -> None:
self.add_build(elem)
def should_use_dyndeps_for_target(self, target: 'build.BuildTarget') -> bool:
- if mesonlib.version_compare(self.ninja_version, '<1.10.0'):
+ if not self.ninja_has_dyndeps:
return False
if 'fortran' in target.compilers:
return True
@@ -1101,7 +1107,9 @@ def should_use_dyndeps_for_target(self, target: 'build.BuildTarget') -> bool:
cpp = target.compilers['cpp']
if cpp.get_id() != 'msvc':
return False
- cppversion = target.get_option(OptionKey('cpp_std', machine=target.for_machine))
+ cppversion = self.get_target_option(target, OptionKey('cpp_std',
+ machine=target.for_machine,
+ subproject=target.subproject))
if cppversion not in ('latest', 'c++latest', 'vc++latest'):
return False
if not mesonlib.current_vs_supports_modules():
@@ -1113,12 +1121,11 @@ def should_use_dyndeps_for_target(self, target: 'build.BuildTarget') -> bool:
def generate_dependency_scan_target(self, target: build.BuildTarget,
compiled_sources: T.List[str],
source2object: T.Dict[str, str],
- generated_source_files: T.List[mesonlib.File],
object_deps: T.List[FileOrString]) -> None:
if not self.should_use_dyndeps_for_target(target):
return
self._uses_dyndeps = True
- depscan_file = self.get_dep_scan_file_for(target)
+ json_file, depscan_file = self.get_dep_scan_file_for(target)
pickle_base = target.name + '.dat'
pickle_file = os.path.join(self.get_target_private_dir(target), pickle_base).replace('\\', '/')
pickle_abs = os.path.join(self.get_target_private_dir_abs(target), pickle_base).replace('\\', '/')
@@ -1138,20 +1145,35 @@ def generate_dependency_scan_target(self, target: build.BuildTarget,
with open(pickle_abs, 'wb') as p:
pickle.dump(scaninfo, p)
- elem = NinjaBuildElement(self.all_outputs, depscan_file, rule_name, pickle_file)
- # Add any generated outputs to the order deps of the scan target, so
- # that those sources are present
- for g in generated_source_files:
- elem.orderdeps.add(g.relative_name())
+ elem = NinjaBuildElement(self.all_outputs, json_file, rule_name, pickle_file)
+ # A full dependency is required on all scanned sources, if any of them
+ # are updated we need to rescan, as they may have changed the modules
+ # they use or export.
+ for s in scan_sources:
+ elem.deps.add(s[0])
elem.orderdeps.update(object_deps)
+ elem.add_item('name', target.name)
self.add_build(elem)
- def select_sources_to_scan(self, compiled_sources: T.List[str]
+ infiles: T.Set[str] = set()
+ for t in target.get_all_linked_targets():
+ if self.should_use_dyndeps_for_target(t):
+ infiles.add(self.get_dep_scan_file_for(t)[0])
+ _, od = self.flatten_object_list(target)
+ infiles.update({self.get_dep_scan_file_for(t)[0] for t in od if t.uses_fortran()})
+
+ elem = NinjaBuildElement(self.all_outputs, depscan_file, 'depaccumulate', [json_file] + sorted(infiles))
+ elem.add_item('name', target.name)
+ self.add_build(elem)
+
+ def select_sources_to_scan(self, compiled_sources: T.List[str],
) -> T.Iterable[T.Tuple[str, Literal['cpp', 'fortran']]]:
# in practice pick up C++ and Fortran files. If some other language
# requires scanning (possibly Java to deal with inner class files)
# then add them here.
for source in compiled_sources:
+ if isinstance(source, mesonlib.File):
+ source = source.rel_to_builddir(self.build_to_src)
ext = os.path.splitext(source)[1][1:]
if ext.lower() in compilers.lang_suffixes['cpp'] or ext == 'C':
yield source, 'cpp'
@@ -1202,6 +1224,7 @@ def generate_custom_target(self, target: build.CustomTarget) -> None:
capture=ofilenames[0] if target.capture else None,
feed=srcs[0] if target.feed else None,
env=target.env,
+ can_use_rsp_file=target.rspable,
verbose=target.console)
if reason:
cmd_type = f' (wrapped by meson {reason})'
@@ -1282,38 +1305,38 @@ def generate_coverage_command(self, elem: NinjaBuildElement, outputs: T.List[str
def generate_coverage_rules(self, gcovr_exe: T.Optional[str], gcovr_version: T.Optional[str], llvm_cov_exe: T.Optional[str]) -> None:
e = self.create_phony_target('coverage', 'CUSTOM_COMMAND', 'PHONY')
self.generate_coverage_command(e, [], gcovr_exe, llvm_cov_exe)
- e.add_item('description', 'Generates coverage reports')
+ e.add_item('description', 'Generating coverage reports')
self.add_build(e)
self.generate_coverage_legacy_rules(gcovr_exe, gcovr_version, llvm_cov_exe)
def generate_coverage_legacy_rules(self, gcovr_exe: T.Optional[str], gcovr_version: T.Optional[str], llvm_cov_exe: T.Optional[str]) -> None:
e = self.create_phony_target('coverage-html', 'CUSTOM_COMMAND', 'PHONY')
self.generate_coverage_command(e, ['--html'], gcovr_exe, llvm_cov_exe)
- e.add_item('description', 'Generates HTML coverage report')
+ e.add_item('description', 'Generating HTML coverage report')
self.add_build(e)
if gcovr_exe:
e = self.create_phony_target('coverage-xml', 'CUSTOM_COMMAND', 'PHONY')
self.generate_coverage_command(e, ['--xml'], gcovr_exe, llvm_cov_exe)
- e.add_item('description', 'Generates XML coverage report')
+ e.add_item('description', 'Generating XML coverage report')
self.add_build(e)
e = self.create_phony_target('coverage-text', 'CUSTOM_COMMAND', 'PHONY')
self.generate_coverage_command(e, ['--text'], gcovr_exe, llvm_cov_exe)
- e.add_item('description', 'Generates text coverage report')
+ e.add_item('description', 'Generating text coverage report')
self.add_build(e)
if mesonlib.version_compare(gcovr_version, '>=4.2'):
e = self.create_phony_target('coverage-sonarqube', 'CUSTOM_COMMAND', 'PHONY')
self.generate_coverage_command(e, ['--sonarqube'], gcovr_exe, llvm_cov_exe)
- e.add_item('description', 'Generates Sonarqube XML coverage report')
+ e.add_item('description', 'Generating Sonarqube XML coverage report')
self.add_build(e)
def generate_install(self) -> None:
self.create_install_data_files()
elem = self.create_phony_target('install', 'CUSTOM_COMMAND', 'PHONY')
elem.add_dep('all')
- elem.add_item('DESC', 'Installing files.')
+ elem.add_item('DESC', 'Installing files')
elem.add_item('COMMAND', self.environment.get_build_command() + ['install', '--no-rebuild'])
elem.add_item('pool', 'console')
self.add_build(elem)
@@ -1321,13 +1344,13 @@ def generate_install(self) -> None:
def generate_tests(self) -> None:
self.serialize_tests()
cmd = self.environment.get_build_command(True) + ['test', '--no-rebuild']
- if not self.environment.coredata.get_option(OptionKey('stdsplit')):
+ if not self.environment.coredata.optstore.get_value_for(OptionKey('stdsplit')):
cmd += ['--no-stdsplit']
- if self.environment.coredata.get_option(OptionKey('errorlogs')):
+ if self.environment.coredata.optstore.get_value_for(OptionKey('errorlogs')):
cmd += ['--print-errorlogs']
- elem = self.create_phony_target('test', 'CUSTOM_COMMAND', ['all', 'PHONY'])
+ elem = self.create_phony_target('test', 'CUSTOM_COMMAND', ['all', 'meson-test-prereq', 'PHONY'])
elem.add_item('COMMAND', cmd)
- elem.add_item('DESC', 'Running all tests.')
+ elem.add_item('DESC', 'Running all tests')
elem.add_item('pool', 'console')
self.add_build(elem)
@@ -1335,9 +1358,9 @@ def generate_tests(self) -> None:
cmd = self.environment.get_build_command(True) + [
'test', '--benchmark', '--logbase',
'benchmarklog', '--num-processes=1', '--no-rebuild']
- elem = self.create_phony_target('benchmark', 'CUSTOM_COMMAND', ['all', 'PHONY'])
+ elem = self.create_phony_target('benchmark', 'CUSTOM_COMMAND', ['all', 'meson-benchmark-prereq', 'PHONY'])
elem.add_item('COMMAND', cmd)
- elem.add_item('DESC', 'Running benchmark suite.')
+ elem.add_item('DESC', 'Running benchmark suite')
elem.add_item('pool', 'console')
self.add_build(elem)
@@ -1372,7 +1395,7 @@ def generate_rules(self) -> None:
'.']
self.add_rule(NinjaRule('REGENERATE_BUILD',
c, [],
- 'Regenerating build files.',
+ 'Regenerating build files',
extra='generator = 1'))
def add_rule_comment(self, comment: NinjaComment) -> None:
@@ -1694,7 +1717,7 @@ def generate_vala_compile(self, target: build.BuildTarget) -> \
# Check if the vala file is in a subdir of --basedir
abs_srcbasedir = os.path.join(self.environment.get_source_dir(), target.get_subdir())
abs_vala_file = os.path.join(self.environment.get_build_dir(), vala_file)
- if PurePath(os.path.commonpath((abs_srcbasedir, abs_vala_file))) == PurePath(abs_srcbasedir):
+ if is_parent_path(abs_srcbasedir, abs_vala_file):
vala_c_subdir = PurePath(abs_vala_file).parent.relative_to(abs_srcbasedir)
vala_c_file = os.path.join(str(vala_c_subdir), vala_c_file)
else:
@@ -1709,7 +1732,7 @@ def generate_vala_compile(self, target: build.BuildTarget) -> \
valac_outputs.append(vala_c_file)
args = self.generate_basic_compiler_args(target, valac)
- args += valac.get_colorout_args(target.get_option(OptionKey('b_colorout')))
+ args += valac.get_colorout_args(self.get_target_option(target, 'b_colorout'))
# Tell Valac to output everything in our private directory. Sadly this
# means it will also preserve the directory components of Vala sources
# found inside the build tree (generated sources).
@@ -1721,7 +1744,7 @@ def generate_vala_compile(self, target: build.BuildTarget) -> \
# Outputted header
hname = os.path.join(self.get_target_dir(target), target.vala_header)
args += ['--header', hname]
- if target.is_unity:
+ if self.is_unity(target):
# Without this the declarations will get duplicated in the .c
# files and cause a build failure when all of them are
# #include-d in one .c file.
@@ -1744,6 +1767,9 @@ def generate_vala_compile(self, target: build.BuildTarget) -> \
girname = os.path.join(self.get_target_dir(target), target.vala_gir)
args += ['--gir', os.path.join('..', target.vala_gir)]
valac_outputs.append(girname)
+ shared_target = target.get('shared')
+ if isinstance(shared_target, build.SharedLibrary):
+ args += ['--shared-library', self.get_target_filename_for_linking(shared_target)]
# Install GIR to default location if requested by user
if len(target.install_dir) > 3 and target.install_dir[3] is True:
target.install_dir[3] = os.path.join(self.environment.get_datadir(), 'gir-1.0')
@@ -1754,7 +1780,7 @@ def generate_vala_compile(self, target: build.BuildTarget) -> \
gres_xml, = self.get_custom_target_sources(gensrc)
args += ['--gresources=' + gres_xml]
for source_dir in gensrc.source_dirs:
- gres_dirs += [os.path.join(self.get_target_dir(gensrc), source_dir)]
+ gres_dirs += [source_dir]
# Ensure that resources are built before vala sources
# This is required since vala code using [GtkTemplate] effectively depends on .ui files
# GResourceHeaderTarget is not suitable due to lacking depfile
@@ -1787,14 +1813,15 @@ def generate_cython_transpile(self, target: build.BuildTarget) -> \
args: T.List[str] = []
args += cython.get_always_args()
- args += cython.get_debug_args(target.get_option(OptionKey('debug')))
- args += cython.get_optimization_args(target.get_option(OptionKey('optimization')))
- args += cython.get_option_compile_args(target.get_options())
+ args += cython.get_debug_args(self.get_target_option(target, 'debug'))
+ args += cython.get_optimization_args(self.get_target_option(target, 'optimization'))
+ args += cython.get_option_compile_args(target, self.environment, target.subproject)
+ args += cython.get_option_std_args(target, self.environment, target.subproject)
args += self.build.get_global_args(cython, target.for_machine)
args += self.build.get_project_args(cython, target.subproject, target.for_machine)
args += target.get_extra_args('cython')
- ext = target.get_option(OptionKey('cython_language', machine=target.for_machine))
+ ext = self.get_target_option(target, OptionKey('cython_language', machine=target.for_machine))
pyx_sources = [] # Keep track of sources we're adding to build
@@ -1876,6 +1903,7 @@ def __generate_sources_structure(self, root: Path, structured_sources: build.Str
return orderdeps, first_file
def _add_rust_project_entry(self, name: str, main_rust_file: str, args: CompilerArgs,
+ crate_type: str, target_name: str,
from_subproject: bool, proc_macro_dylib_path: T.Optional[str],
deps: T.List[RustDep]) -> None:
raw_edition: T.Optional[str] = mesonlib.first(reversed(args), lambda x: x.startswith('--edition'))
@@ -1893,6 +1921,8 @@ def _add_rust_project_entry(self, name: str, main_rust_file: str, args: Compiler
len(self.rust_crates),
name,
main_rust_file,
+ crate_type,
+ target_name,
edition,
deps,
cfg,
@@ -1909,31 +1939,17 @@ def _get_rust_dependency_name(self, target: build.BuildTarget, dependency: LibTy
# in Rust
return target.rust_dependency_map.get(dependency.name, dependency.name).replace('-', '_')
- def generate_rust_target(self, target: build.BuildTarget) -> None:
- rustc = target.compilers['rust']
+ def generate_rust_sources(self, target: build.BuildTarget) -> T.Tuple[T.List[str], str]:
+ orderdeps: T.List[str] = []
+
# Rust compiler takes only the main file as input and
# figures out what other files are needed via import
# statements and magic.
- base_proxy = target.get_options()
- args = rustc.compiler_args()
- # Compiler args for compiling this target
- args += compilers.get_base_compile_args(base_proxy, rustc, self.environment)
- self.generate_generator_list_rules(target)
-
- # dependencies need to cause a relink, they're not just for ordering
- deps: T.List[str] = []
-
- # Dependencies for rust-project.json
- project_deps: T.List[RustDep] = []
-
- orderdeps: T.List[str] = []
-
main_rust_file = None
if target.structured_sources:
if target.structured_sources.needs_copy():
_ods, main_rust_file = self.__generate_sources_structure(Path(
self.get_target_private_dir(target)) / 'structured', target.structured_sources)
- orderdeps.extend(_ods)
else:
# The only way to get here is to have only files in the "root"
# positional argument, which are all generated into the same
@@ -1947,22 +1963,21 @@ def generate_rust_target(self, target: build.BuildTarget) -> None:
else:
main_rust_file = os.path.join(g.get_subdir(), g.get_outputs()[0])
+ _ods = []
for f in target.structured_sources.as_list():
if isinstance(f, File):
- orderdeps.append(f.rel_to_builddir(self.build_to_src))
+ _ods.append(f.rel_to_builddir(self.build_to_src))
else:
- orderdeps.extend([os.path.join(self.build_to_src, f.subdir, s)
- for s in f.get_outputs()])
+ _ods.extend([os.path.join(self.build_to_src, f.subdir, s)
+ for s in f.get_outputs()])
+ self.all_structured_sources.update(_ods)
+ orderdeps.extend(_ods)
for i in target.get_sources():
- if not rustc.can_compile(i):
- raise InvalidArguments(f'Rust target {target.get_basename()} contains a non-rust source file.')
if main_rust_file is None:
main_rust_file = i.rel_to_builddir(self.build_to_src)
for g in target.get_generated_sources():
for i in g.get_outputs():
- if not rustc.can_compile(i):
- raise InvalidArguments(f'Rust target {target.get_basename()} contains a non-rust source file.')
if isinstance(g, GeneratedList):
fname = os.path.join(self.get_target_private_dir(target), i)
else:
@@ -1970,35 +1985,43 @@ def generate_rust_target(self, target: build.BuildTarget) -> None:
if main_rust_file is None:
main_rust_file = fname
orderdeps.append(fname)
- if main_rust_file is None:
- raise RuntimeError('A Rust target has no Rust sources. This is weird. Also a bug. Please report')
- target_name = os.path.join(target.subdir, target.get_filename())
- cratetype = target.rust_crate_type
- args.extend(['--crate-type', cratetype])
+
+ return orderdeps, main_rust_file
+
+ def get_rust_compiler_args(self, target: build.BuildTarget, rustc: Compiler, src_crate_type: str,
+ depfile: T.Optional[str] = None) -> T.List[str]:
+ # Compiler args for compiling this target
+ args = compilers.get_base_compile_args(target, rustc, self.environment)
+
+ target_name = self.get_target_filename(target)
+ args.extend(['--crate-type', src_crate_type])
# If we're dynamically linking, add those arguments
- #
- # Rust is super annoying, calling -C link-arg foo does not work, it has
- # to be -C link-arg=foo
- if cratetype in {'bin', 'dylib'}:
+ if target.rust_crate_type in {'bin', 'dylib'}:
args.extend(rustc.get_linker_always_args())
args += self.generate_basic_compiler_args(target, rustc)
# Rustc replaces - with _. spaces or dots are not allowed, so we replace them with underscores
args += ['--crate-name', target.name.replace('-', '_').replace(' ', '_').replace('.', '_')]
- depfile = os.path.join(target.subdir, target.name + '.d')
- args += ['--emit', f'dep-info={depfile}', '--emit', f'link={target_name}']
- args += ['--out-dir', self.get_target_private_dir(target)]
+ if depfile:
+ args += rustc.get_dependency_gen_args(target_name, depfile)
+ args += rustc.get_output_args(target_name)
args += ['-C', 'metadata=' + target.get_id()]
args += target.get_extra_args('rust')
+ return args
+
+ def get_rust_compiler_deps_and_args(self, target: build.BuildTarget, rustc: Compiler) -> T.Tuple[T.List[str], T.List[str], T.List[RustDep], T.List[str]]:
+ deps: T.List[str] = []
+ project_deps: T.List[RustDep] = []
+ args: T.List[str] = []
# Rustc always use non-debug Windows runtime. Inject the one selected
# by Meson options instead.
# https://github.com/rust-lang/rust/issues/39016
if not isinstance(target, build.StaticLibrary):
try:
- buildtype = target.get_option(OptionKey('buildtype'))
- crt = target.get_option(OptionKey('b_vscrt'))
+ buildtype = self.get_target_option(target, 'buildtype')
+ crt = self.get_target_option(target, 'b_vscrt')
args += rustc.get_crt_link_args(crt, buildtype)
except (KeyError, AttributeError):
pass
@@ -2019,6 +2042,12 @@ def _link_library(libname: str, static: bool, bundle: bool = False):
type_ += ':' + ','.join(modifiers)
args.append(f'-l{type_}={libname}')
+ objs, od = self.flatten_object_list(target)
+ for o in objs:
+ args.append(f'-Clink-arg={o}')
+ deps.append(o)
+ fortran_order_deps = self.get_fortran_order_deps(od)
+
linkdirs = mesonlib.OrderedSet()
external_deps = target.external_deps.copy()
target_deps = target.get_dependencies()
@@ -2035,7 +2064,7 @@ def _link_library(libname: str, static: bool, bundle: bool = False):
# dependency, so that collisions with libraries in rustc's
# sysroot don't cause ambiguity
d_name = self._get_rust_dependency_name(target, d)
- args += ['--extern', '{}={}'.format(d_name, os.path.join(d.subdir, d.filename))]
+ args += ['--extern', '{}={}'.format(d_name, self.get_target_filename(d))]
project_deps.append(RustDep(d_name, self.rust_crates[d.name].order))
continue
@@ -2095,44 +2124,49 @@ def _link_library(libname: str, static: bool, bundle: bool = False):
and dep.rust_crate_type == 'dylib'
for dep in target_deps)
- if cratetype in {'dylib', 'proc-macro'} or has_rust_shared_deps:
+ if target.rust_crate_type in {'dylib', 'proc-macro'} or has_rust_shared_deps:
# add prefer-dynamic if any of the Rust libraries we link
# against are dynamic or this is a dynamic library itself,
# otherwise we'll end up with multiple implementations of libstd.
args += ['-C', 'prefer-dynamic']
if isinstance(target, build.SharedLibrary) or has_shared_deps:
- # build the usual rpath arguments as well...
-
- # Set runtime-paths so we can run executables without needing to set
- # LD_LIBRARY_PATH, etc in the environment. Doesn't work on Windows.
- if has_path_sep(target.name):
- # Target names really should not have slashes in them, but
- # unfortunately we did not check for that and some downstream projects
- # now have them. Once slashes are forbidden, remove this bit.
- target_slashname_workaround_dir = os.path.join(os.path.dirname(target.name),
- self.get_target_dir(target))
- else:
- target_slashname_workaround_dir = self.get_target_dir(target)
- rpath_args, target.rpath_dirs_to_remove = (
- rustc.build_rpath_args(self.environment,
- self.environment.get_build_dir(),
- target_slashname_workaround_dir,
- self.determine_rpath_dirs(target),
- target.build_rpath,
- target.install_rpath))
- # ... but then add rustc's sysroot to account for rustup
- # installations
- for rpath_arg in rpath_args:
- args += ['-C', 'link-arg=' + rpath_arg + ':' + rustc.get_target_libdir()]
+ args += self.get_build_rpath_args(target, rustc)
+
+ return deps, fortran_order_deps, project_deps, args
+
+ def generate_rust_target(self, target: build.BuildTarget) -> None:
+ rustc = T.cast('RustCompiler', target.compilers['rust'])
+ self.generate_generator_list_rules(target)
+
+ for i in target.get_sources():
+ if not rustc.can_compile(i):
+ raise InvalidArguments(f'Rust target {target.get_basename()} contains a non-rust source file.')
+ for g in target.get_generated_sources():
+ for i in g.get_outputs():
+ if not rustc.can_compile(i):
+ raise InvalidArguments(f'Rust target {target.get_basename()} contains a non-rust source file.')
+
+ orderdeps, main_rust_file = self.generate_rust_sources(target)
+ target_name = self.get_target_filename(target)
+ if main_rust_file is None:
+ raise RuntimeError('A Rust target has no Rust sources. This is weird. Also a bug. Please report')
+
+ args = rustc.compiler_args()
+
+ depfile = os.path.join(self.get_target_private_dir(target), target.name + '.d')
+ args += self.get_rust_compiler_args(target, rustc, target.rust_crate_type, depfile)
+
+ deps, fortran_order_deps, project_deps, deps_args = self.get_rust_compiler_deps_and_args(target, rustc)
+ args += deps_args
proc_macro_dylib_path = None
- if getattr(target, 'rust_crate_type', '') == 'proc-macro':
- proc_macro_dylib_path = os.path.abspath(os.path.join(target.subdir, target.get_filename()))
+ if target.rust_crate_type == 'proc-macro':
+ proc_macro_dylib_path = self.get_target_filename_abs(target)
self._add_rust_project_entry(target.name,
os.path.abspath(os.path.join(self.environment.build_dir, main_rust_file)),
- args,
+ args, target.rust_crate_type, target_name,
bool(target.subproject),
proc_macro_dylib_path,
project_deps)
@@ -2141,16 +2175,27 @@ def _link_library(libname: str, static: bool, bundle: bool = False):
element = NinjaBuildElement(self.all_outputs, target_name, compiler_name, main_rust_file)
if orderdeps:
element.add_orderdep(orderdeps)
+ if fortran_order_deps:
+ element.add_orderdep(fortran_order_deps)
if deps:
+ # dependencies need to cause a relink, they're not just for ordering
element.add_dep(deps)
element.add_item('ARGS', args)
element.add_item('targetdep', depfile)
- element.add_item('cratetype', cratetype)
self.add_build(element)
if isinstance(target, build.SharedLibrary):
self.generate_shsym(target)
self.create_target_source_introspection(target, rustc, args, [main_rust_file], [])
+ if target.doctests:
+ assert target.doctests.target is not None
+ rustdoc = rustc.get_rustdoc(self.environment)
+ args = rustdoc.get_exe_args()
+ args += self.get_rust_compiler_args(target.doctests.target, rustdoc, target.rust_crate_type)
+ _, _, _, deps_args = self.get_rust_compiler_deps_and_args(target.doctests.target, rustdoc)
+ args += deps_args
+ target.doctests.cmd_args = args.to_native() + [main_rust_file] + target.doctests.cmd_args
+
@staticmethod
def get_rule_suffix(for_machine: MachineChoice) -> str:
return PerMachine('_FOR_BUILD', '')[for_machine]
@@ -2220,7 +2265,6 @@ def generate_swift_target(self, target) -> None:
raise InvalidArguments(f'Swift target {target.get_basename()} contains a non-swift source file.')
os.makedirs(self.get_target_private_dir_abs(target), exist_ok=True)
compile_args = self.generate_basic_compiler_args(target, swiftc)
- compile_args += swiftc.get_compile_only_args()
compile_args += swiftc.get_module_args(module_name)
for i in reversed(target.get_include_dirs()):
basedir = i.get_curdir()
@@ -2268,12 +2312,16 @@ def generate_swift_target(self, target) -> None:
elem = NinjaBuildElement(self.all_outputs, rel_objects, rulename, abssrc)
elem.add_dep(in_module_files + rel_generated)
elem.add_dep(abs_headers)
- elem.add_item('ARGS', compile_args + header_imports + abs_generated + module_includes)
+ elem.add_item('ARGS', swiftc.get_compile_only_args() + compile_args + header_imports + abs_generated + module_includes)
elem.add_item('RUNDIR', rundir)
self.add_build(elem)
+
+ # -g makes swiftc create a .o file whose name can collide with one of the .o files generated by the compile target.
+ mod_gen_args = [el for el in compile_args if el != '-g']
+
elem = NinjaBuildElement(self.all_outputs, out_module_name, rulename, abssrc)
elem.add_dep(in_module_files + rel_generated)
- elem.add_item('ARGS', compile_args + abs_generated + module_includes + swiftc.get_mod_gen_args())
+ elem.add_item('ARGS', swiftc.get_mod_gen_args() + mod_gen_args + abs_generated + module_includes)
elem.add_item('RUNDIR', rundir)
self.add_build(elem)
if isinstance(target, build.StaticLibrary):
@@ -2304,7 +2352,7 @@ def _rsp_options(self, tool: T.Union['Compiler', 'StaticLinker', 'DynamicLinker'
return options
def generate_static_link_rules(self) -> None:
- num_pools = self.environment.coredata.optstore.get_value('backend_max_links')
+ num_pools = self.environment.coredata.optstore.get_value_for('backend_max_links')
if 'java' in self.environment.coredata.compilers.host:
self.generate_java_link()
for for_machine in MachineChoice:
@@ -2352,7 +2400,7 @@ def generate_static_link_rules(self) -> None:
self.add_rule(NinjaRule(rule, cmdlist, args, description, **options, extra=pool))
def generate_dynamic_link_rules(self) -> None:
- num_pools = self.environment.coredata.optstore.get_value('backend_max_links')
+ num_pools = self.environment.coredata.optstore.get_value_for('backend_max_links')
for for_machine in MachineChoice:
complist = self.environment.coredata.compilers[for_machine]
for langname, compiler in complist.items():
@@ -2437,21 +2485,37 @@ def generate_rust_compile_rules(self, compiler) -> None:
def generate_swift_compile_rules(self, compiler) -> None:
rule = self.compiler_to_rule_name(compiler)
- full_exe = self.environment.get_build_command() + [
- '--internal',
- 'dirchanger',
- '$RUNDIR',
- ]
- invoc = full_exe + compiler.get_exelist()
+ wd_args = compiler.get_working_directory_args('$RUNDIR')
+
+ if wd_args is not None:
+ invoc = compiler.get_exelist() + wd_args
+ else:
+ full_exe = self.environment.get_build_command() + [
+ '--internal',
+ 'dirchanger',
+ '$RUNDIR',
+ ]
+ invoc = full_exe + compiler.get_exelist()
+
command = invoc + ['$ARGS', '$in']
description = 'Compiling Swift source $in'
- self.add_rule(NinjaRule(rule, command, [], description))
+ self.add_rule(NinjaRule(rule, command, [], description, extra='restat = 1'))
def use_dyndeps_for_fortran(self) -> bool:
'''Use the new Ninja feature for scanning dependencies during build,
rather than up front. Remove this and all old scanning code once Ninja
minimum version is bumped to 1.10.'''
- return mesonlib.version_compare(self.ninja_version, '>=1.10.0')
+ return self.ninja_has_dyndeps
+
+ def get_fortran_order_deps(self, deps: T.List[build.BuildTarget]) -> T.List[File]:
+ # We don't need this order dep if we're using dyndeps, as the
+ # depscanner will handle this for us, which produces a better dependency
+ # graph
+ if self.use_dyndeps_for_fortran():
+ return []
+
+ return [File(True, *os.path.split(self.get_target_filename(t))) for t in deps
+ if t.uses_fortran()]
def generate_fortran_dep_hack(self, crstr: str) -> None:
if self.use_dyndeps_for_fortran():
@@ -2479,6 +2543,33 @@ def generate_llvm_ir_compile_rule(self, compiler) -> None:
self.add_rule(NinjaRule(rule, command, args, description, **options))
self.created_llvm_ir_rule[compiler.for_machine] = True
+ def generate_tasking_mil_compile_rules(self, compiler: Compiler) -> None:
+ rule = self.get_compiler_rule_name('tasking_mil_compile', compiler.for_machine)
+ depargs = NinjaCommandArg.list(compiler.get_dependency_gen_args('$out', '$DEPFILE'), Quoting.none)
+ command = compiler.get_exelist()
+ args = ['$ARGS'] + depargs + NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + ['-cm', '$in']
+ description = 'Compiling to C object $in'
+ if compiler.get_argument_syntax() == 'msvc':
+ deps = 'msvc'
+ depfile = None
+ else:
+ deps = 'gcc'
+ depfile = '$DEPFILE'
+
+ options = self._rsp_options(compiler)
+
+ self.add_rule(NinjaRule(rule, command, args, description, **options, deps=deps, depfile=depfile))
+
+ def generate_tasking_mil_link_rules(self, compiler: Compiler) -> None:
+ rule = self.get_compiler_rule_name('tasking_mil_link', compiler.for_machine)
+ command = compiler.get_exelist()
+ args = ['$ARGS', '--mil-link'] + NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + ['-c', '$in']
+ description = 'MIL linking object $out'
+
+ options = self._rsp_options(compiler)
+
+ self.add_rule(NinjaRule(rule, command, args, description, **options))
+
def generate_compile_rule_for(self, langname: str, compiler: Compiler) -> None:
if langname == 'java':
self.generate_java_compile_rule(compiler)
@@ -2556,10 +2647,19 @@ def generate_scanner_rules(self) -> None:
if rulename in self.ruledict:
# Scanning command is the same for native and cross compilation.
return
+
command = self.environment.get_build_command() + \
['--internal', 'depscan']
args = ['$picklefile', '$out', '$in']
- description = 'Module scanner.'
+ description = 'Scanning target $name for modules'
+ rule = NinjaRule(rulename, command, args, description)
+ self.add_rule(rule)
+
+ rulename = 'depaccumulate'
+ command = self.environment.get_build_command() + \
+ ['--internal', 'depaccumulate']
+ args = ['$out', '$in']
+ description = 'Generating dynamic dependency information for target $name'
rule = NinjaRule(rulename, command, args, description)
self.add_rule(rule)
@@ -2569,6 +2669,8 @@ def generate_compile_rules(self) -> None:
for langname, compiler in clist.items():
if compiler.get_id() == 'clang':
self.generate_llvm_ir_compile_rule(compiler)
+ if compiler.get_id() == 'tasking':
+ self.generate_tasking_mil_compile_rules(compiler)
self.generate_compile_rule_for(langname, compiler)
self.generate_pch_rule_for(langname, compiler)
for mode in compiler.get_modes():
@@ -2800,11 +2902,10 @@ def get_link_debugfile_args(self, linker: T.Union[Compiler, StaticLinker], targe
return []
def generate_llvm_ir_compile(self, target, src: FileOrString):
- base_proxy = target.get_options()
compiler = get_compiler_for_source(target.compilers.values(), src)
commands = compiler.compiler_args()
# Compiler args for compiling this target
- commands += compilers.get_base_compile_args(base_proxy, compiler, self.environment)
+ commands += compilers.get_base_compile_args(target, compiler, self.environment)
if isinstance(src, File):
if src.is_built:
src_filename = os.path.join(src.subdir, src.fname)
@@ -2860,7 +2961,6 @@ def _generate_single_compile(self, target: build.BuildTarget, compiler: Compiler
return commands
def _generate_single_compile_base_args(self, target: build.BuildTarget, compiler: 'Compiler') -> 'CompilerArgs':
- base_proxy = target.get_options()
# Create an empty commands list, and start adding arguments from
# various sources in the order in which they must override each other
commands = compiler.compiler_args()
@@ -2869,7 +2969,7 @@ def _generate_single_compile_base_args(self, target: build.BuildTarget, compiler
# Add compiler args for compiling this target derived from 'base' build
# options passed on the command-line, in default_options, etc.
# These have the lowest priority.
- commands += compilers.get_base_compile_args(base_proxy,
+ commands += compilers.get_base_compile_args(target,
compiler, self.environment)
return commands
@@ -3008,7 +3108,7 @@ def generate_single_compile(self, target: build.BuildTarget, src,
raise AssertionError(f'BUG: broken generated source file handling for {src!r}')
else:
raise InvalidArguments(f'Invalid source type: {src!r}')
- obj_basename = self.object_filename_from_source(target, src)
+ obj_basename = self.object_filename_from_source(target, compiler, src)
rel_obj = os.path.join(self.get_target_private_dir(target), obj_basename)
dep_file = compiler.depfile_for_object(rel_obj)
@@ -3029,8 +3129,17 @@ def generate_single_compile(self, target: build.BuildTarget, src,
i = os.path.join(self.get_target_private_dir(target), compiler.get_pch_name(pchlist[0]))
arr.append(i)
pch_dep = arr
-
- compiler_name = self.compiler_to_rule_name(compiler)
+ # If the TASKING compiler family is used and MIL linking is enabled for the
+ # target, then the compilation rule name is a special one that outputs MIL
+ # files instead of object files for .c files
+ if compiler.get_id() == 'tasking':
+ target_lto = self.get_target_option(target, OptionKey('b_lto', machine=target.for_machine, subproject=target.subproject))
+ if ((isinstance(target, build.StaticLibrary) and target.prelink) or target_lto) and src.rsplit('.', 1)[1] in compilers.lang_suffixes['c']:
+ compiler_name = self.get_compiler_rule_name('tasking_mil_compile', compiler.for_machine)
+ else:
+ compiler_name = self.compiler_to_rule_name(compiler)
+ else:
+ compiler_name = self.compiler_to_rule_name(compiler)
extra_deps = []
if compiler.get_language() == 'fortran':
# Can't read source file to scan for deps if it's generated later
@@ -3069,8 +3178,9 @@ def generate_single_compile(self, target: build.BuildTarget, src,
d = os.path.join(self.get_target_private_dir(target), d)
element.add_orderdep(d)
element.add_dep(pch_dep)
- for i in self.get_fortran_orderdeps(target, compiler):
- element.add_orderdep(i)
+ if not self.use_dyndeps_for_fortran():
+ for i in self.get_fortran_module_deps(target, compiler):
+ element.add_dep(i)
if dep_file:
element.add_item('DEPFILE', dep_file)
if compiler.get_language() == 'cuda':
@@ -3113,12 +3223,13 @@ def add_dependency_scanner_entries_to_element(self, target: build.BuildTarget, c
extension = extension.lower()
if not (extension in compilers.lang_suffixes['fortran'] or extension in compilers.lang_suffixes['cpp']):
return
- dep_scan_file = self.get_dep_scan_file_for(target)
+ dep_scan_file = self.get_dep_scan_file_for(target)[1]
element.add_item('dyndep', dep_scan_file)
element.add_orderdep(dep_scan_file)
- def get_dep_scan_file_for(self, target: build.BuildTarget) -> str:
- return os.path.join(self.get_target_private_dir(target), 'depscan.dd')
+ def get_dep_scan_file_for(self, target: build.BuildTarget) -> T.Tuple[str, str]:
+ priv = self.get_target_private_dir(target)
+ return os.path.join(priv, 'depscan.json'), os.path.join(priv, 'depscan.dd')
def add_header_deps(self, target, ninja_element, header_deps):
for d in header_deps:
@@ -3126,7 +3237,7 @@ def add_header_deps(self, target, ninja_element, header_deps):
d = d.rel_to_builddir(self.build_to_src)
elif not self.has_dir_part(d):
d = os.path.join(self.get_target_private_dir(target), d)
- ninja_element.add_dep(d)
+ ninja_element.add_orderdep(d)
def has_dir_part(self, fname: FileOrString) -> bool:
# FIXME FIXME: The usage of this is a terrible and unreliable hack
@@ -3137,11 +3248,15 @@ def has_dir_part(self, fname: FileOrString) -> bool:
# Fortran is a bit weird (again). When you link against a library, just compiling a source file
# requires the mod files that are output when single files are built. To do this right we would need to
# scan all inputs and write out explicit deps for each file. That is too slow and too much effort so
- # instead just have an ordered dependency on the library. This ensures all required mod files are created.
+ # instead just have a full dependency on the library. This ensures all required mod files are created.
# The real deps are then detected via dep file generation from the compiler. This breaks on compilers that
- # produce incorrect dep files but such is life.
- def get_fortran_orderdeps(self, target, compiler):
- if compiler.language != 'fortran':
+ # produce incorrect dep files but such is life. A full dependency is
+ # required to ensure that if a new module is added to an existing file
+ # we correctly rebuild
+ def get_fortran_module_deps(self, target: build.BuildTarget, compiler: Compiler) -> T.List[str]:
+ # If we have dyndeps then we don't need this, since the depscanner will
+ # do all of things described above.
+ if compiler.language != 'fortran' or self.use_dyndeps_for_fortran():
return []
return [
os.path.join(self.get_target_dir(lt), lt.get_filename())
@@ -3228,6 +3343,7 @@ def generate_pch(self, target, header_deps=None):
elem.add_item('ARGS', commands)
elem.add_item('DEPFILE', dep)
self.add_build(elem)
+ self.all_pch[compiler.id].update(objs + [dst])
return pch_objects
def get_target_shsym_filename(self, target):
@@ -3237,6 +3353,12 @@ def get_target_shsym_filename(self, target):
def generate_shsym(self, target) -> None:
target_file = self.get_target_filename(target)
+ if isinstance(target, build.SharedLibrary) and target.aix_so_archive:
+ if self.environment.machines[target.for_machine].is_aix():
+ linker, stdlib_args = target.get_clink_dynamic_linker_and_stdlibs()
+ target.get_outputs()[0] = linker.get_archive_name(target.get_outputs()[0])
+ target_file = target.get_outputs()[0]
+ target_file = os.path.join(self.get_target_dir(target), target_file)
symname = self.get_target_shsym_filename(target)
elem = NinjaBuildElement(self.all_outputs, symname, 'SHSYM', target_file)
# The library we will actually link to, which is an import library on Windows (not the DLL)
@@ -3265,7 +3387,7 @@ def get_target_type_link_args(self, target, linker):
commands += linker.gen_vs_module_defs_args(target.vs_module_defs.rel_to_builddir(self.build_to_src))
elif isinstance(target, build.SharedLibrary):
if isinstance(target, build.SharedModule):
- commands += linker.get_std_shared_module_link_args(target.get_options())
+ commands += linker.get_std_shared_module_link_args(target)
else:
commands += linker.get_std_shared_lib_link_args()
# All shared libraries are PIC
@@ -3312,7 +3434,7 @@ def get_link_whole_args(self, linker: DynamicLinker, target):
objects_from_static_libs: T.List[ExtractedObjects] = []
for dep in target.link_whole_targets:
l = dep.extract_all_objects(False)
- objects_from_static_libs += self.determine_ext_objs(l, '')
+ objects_from_static_libs += self.determine_ext_objs(l)
objects_from_static_libs.extend(self.flatten_object_list(dep)[0])
return objects_from_static_libs
@@ -3407,13 +3529,37 @@ def generate_prelink(self, target, obj_list):
prelinker = target.get_prelinker()
cmd = prelinker.exelist[:]
- cmd += prelinker.get_prelink_args(prelink_name, obj_list)
+ obj_list, args = prelinker.get_prelink_args(prelink_name, obj_list)
+ cmd += args
+ if prelinker.get_prelink_append_compile_args():
+ compile_args = self._generate_single_compile_base_args(target, prelinker)
+ compile_args += self._generate_single_compile_target_args(target, prelinker)
+ compile_args = compile_args.compiler.compiler_args(compile_args)
+ cmd += compile_args.to_native()
cmd = self.replace_paths(target, cmd)
elem.add_item('COMMAND', cmd)
- elem.add_item('description', f'Prelinking {prelink_name}.')
+ elem.add_item('description', f'Prelinking {prelink_name}')
self.add_build(elem)
- return [prelink_name]
+ return obj_list
+
+ def get_build_rpath_args(self, target: build.BuildTarget, linker: T.Union[Compiler, StaticLinker]) -> T.List[str]:
+ if has_path_sep(target.name):
+ # Target names really should not have slashes in them, but
+ # unfortunately we did not check for that and some downstream projects
+ # now have them. Once slashes are forbidden, remove this bit.
+ target_slashname_workaround_dir = os.path.join(os.path.dirname(target.name),
+ self.get_target_dir(target))
+ else:
+ target_slashname_workaround_dir = self.get_target_dir(target)
+ (rpath_args, target.rpath_dirs_to_remove) = (
+ linker.build_rpath_args(self.environment,
+ self.environment.get_build_dir(),
+ target_slashname_workaround_dir,
+ self.determine_rpath_dirs(target),
+ target.build_rpath,
+ target.install_rpath))
+ return rpath_args
def generate_link(self, target: build.BuildTarget, outname, obj_list, linker: T.Union['Compiler', 'StaticLinker'], extra_args=None, stdlib_args=None):
extra_args = extra_args if extra_args is not None else []
@@ -3440,20 +3586,19 @@ def generate_link(self, target: build.BuildTarget, outname, obj_list, linker: T.
# options passed on the command-line, in default_options, etc.
# These have the lowest priority.
if isinstance(target, build.StaticLibrary):
- commands += linker.get_base_link_args(target.get_options())
+ commands += linker.get_base_link_args(target, linker, self.environment)
else:
- commands += compilers.get_base_link_args(target.get_options(),
+ commands += compilers.get_base_link_args(target,
linker,
- isinstance(target, build.SharedModule),
- self.environment.get_build_dir())
+ self.environment)
# Add -nostdlib if needed; can't be overridden
commands += self.get_no_stdlib_link_args(target, linker)
# Add things like /NOLOGO; usually can't be overridden
commands += linker.get_linker_always_args()
# Add buildtype linker args: optimization level, etc.
- commands += linker.get_optimization_link_args(target.get_option(OptionKey('optimization')))
+ commands += linker.get_optimization_link_args(self.get_target_option(target, 'optimization'))
# Add /DEBUG and the pdb filename when using MSVC
- if target.get_option(OptionKey('debug')):
+ if self.get_target_option(target, 'debug'):
commands += self.get_link_debugfile_args(linker, target)
debugfile = self.get_link_debugfile_name(linker, target)
if debugfile is not None:
@@ -3480,23 +3625,7 @@ def generate_link(self, target: build.BuildTarget, outname, obj_list, linker: T.
# Set runtime-paths so we can run executables without needing to set
# LD_LIBRARY_PATH, etc in the environment. Doesn't work on Windows.
- if has_path_sep(target.name):
- # Target names really should not have slashes in them, but
- # unfortunately we did not check for that and some downstream projects
- # now have them. Once slashes are forbidden, remove this bit.
- target_slashname_workaround_dir = os.path.join(
- os.path.dirname(target.name),
- self.get_target_dir(target))
- else:
- target_slashname_workaround_dir = self.get_target_dir(target)
- (rpath_args, target.rpath_dirs_to_remove) = (
- linker.build_rpath_args(self.environment,
- self.environment.get_build_dir(),
- target_slashname_workaround_dir,
- self.determine_rpath_dirs(target),
- target.build_rpath,
- target.install_rpath))
- commands += rpath_args
+ commands += self.get_build_rpath_args(target, linker)
# Add link args to link to all internal libraries (link_with:) and
# internal dependencies needed by this target.
@@ -3523,7 +3652,15 @@ def generate_link(self, target: build.BuildTarget, outname, obj_list, linker: T.
for d in target.get_dependencies():
if isinstance(d, build.StaticLibrary):
for dep in d.get_external_deps():
- commands.extend_preserving_lflags(linker.get_dependency_link_args(dep))
+ link_args = linker.get_dependency_link_args(dep)
+ # Ensure that native static libraries use Unix-style naming if necessary.
+ # Depending on the target/linker, rustc --print native-static-libs may
+ # output MSVC-style names. Converting these to Unix-style is safe, as the
+ # list contains only native static libraries.
+ if dep.name == '_rust_native_static_libs' and linker.get_argument_syntax() != 'msvc':
+ from ..linkers.linkers import VisualStudioLikeLinker
+ link_args = VisualStudioLikeLinker.native_args_to_unix(link_args)
+ commands.extend_preserving_lflags(link_args)
# Add link args specific to this BuildTarget type that must not be overridden by dependencies
commands += self.get_target_type_link_args_post_dependencies(target, linker)
@@ -3540,7 +3677,7 @@ def generate_link(self, target: build.BuildTarget, outname, obj_list, linker: T.
#
# We shouldn't check whether we are making a static library, because
# in the LTO case we do use a real compiler here.
- commands += linker.get_option_link_args(target.get_options())
+ commands += linker.get_option_link_args(target, self.environment)
dep_targets = []
dep_targets.extend(self.guess_external_link_dependencies(linker, target, commands, internal))
@@ -3555,6 +3692,20 @@ def generate_link(self, target: build.BuildTarget, outname, obj_list, linker: T.
for t in target.link_depends])
elem = NinjaBuildElement(self.all_outputs, outname, linker_rule, obj_list, implicit_outs=implicit_outs)
elem.add_dep(dep_targets + custom_target_libraries)
+ if linker.get_id() == 'tasking':
+ if len([x for x in dep_targets + custom_target_libraries if x.endswith('.ma')]) > 0 and not self.get_target_option(target, OptionKey('b_lto', target.subproject, target.for_machine)):
+ raise MesonException(f'Tried to link the target named \'{target.name}\' with a MIL archive without LTO enabled! This causes the compiler to ignore the archive.')
+
+ # Compiler args must be included in TI C28x linker commands.
+ if linker.get_id() in {'c2000', 'c6000', 'ti'}:
+ compile_args = []
+ for for_machine in MachineChoice:
+ clist = self.environment.coredata.compilers[for_machine]
+ for langname, compiler in clist.items():
+ if langname in {'c', 'cpp'} and compiler.get_id() in {'c2000', 'c6000', 'ti'}:
+ compile_args += self.generate_basic_compiler_args(target, compiler)
+ elem.add_item('ARGS', compile_args)
+
elem.add_item('LINK_ARGS', commands)
self.create_target_linker_introspection(target, linker, commands)
return elem
@@ -3610,7 +3761,9 @@ def generate_gcov_clean(self) -> None:
gcda_elem.add_item('description', 'Deleting gcda files')
self.add_build(gcda_elem)
- def get_user_option_args(self) -> T.List[str]:
+ def get_user_option_args(self, shut_up_pylint: bool = True) -> T.List[str]:
+ if shut_up_pylint:
+ return []
cmds = []
for k, v in self.environment.coredata.optstore.items():
if self.environment.coredata.optstore.is_project_option(k):
@@ -3627,6 +3780,36 @@ def generate_dist(self) -> None:
elem.add_item('pool', 'console')
self.add_build(elem)
+ def generate_clippy(self) -> None:
+ if 'clippy' in self.all_outputs or not self.have_language('rust'):
+ return
+
+ cmd = self.environment.get_build_command() + \
+ ['--internal', 'clippy', self.environment.build_dir]
+ elem = self.create_phony_target('clippy', 'CUSTOM_COMMAND', 'PHONY')
+ elem.add_item('COMMAND', cmd)
+ elem.add_item('pool', 'console')
+ for crate in self.rust_crates.values():
+ if crate.crate_type in {'rlib', 'dylib', 'proc-macro'}:
+ elem.add_dep(crate.target_name)
+ elem.add_dep(list(self.all_structured_sources))
+ self.add_build(elem)
+
+ def generate_rustdoc(self) -> None:
+ if 'rustdoc' in self.all_outputs or not self.have_language('rust'):
+ return
+
+ cmd = self.environment.get_build_command() + \
+ ['--internal', 'rustdoc', self.environment.build_dir]
+ elem = self.create_phony_target('rustdoc', 'CUSTOM_COMMAND', 'PHONY')
+ elem.add_item('COMMAND', cmd)
+ elem.add_item('pool', 'console')
+ for crate in self.rust_crates.values():
+ if crate.crate_type in {'rlib', 'dylib', 'proc-macro'}:
+ elem.add_dep(crate.target_name)
+ elem.add_dep(list(self.all_structured_sources))
+ self.add_build(elem)
+
def generate_scanbuild(self) -> None:
if not environment.detect_scanbuild():
return
@@ -3640,23 +3823,31 @@ def generate_scanbuild(self) -> None:
elem.add_item('pool', 'console')
self.add_build(elem)
- def generate_clangtool(self, name: str, extra_arg: T.Optional[str] = None) -> None:
+ def generate_clangtool(self, name: str, extra_arg: T.Optional[str] = None, need_pch: bool = False) -> None:
target_name = 'clang-' + name
extra_args = []
if extra_arg:
target_name += f'-{extra_arg}'
extra_args.append(f'--{extra_arg}')
+ colorout = self.environment.coredata.optstore.get_value('b_colorout') \
+ if OptionKey('b_colorout') in self.environment.coredata.optstore else 'always'
+ extra_args.extend(['--color', colorout])
if not os.path.exists(os.path.join(self.environment.source_dir, '.clang-' + name)) and \
not os.path.exists(os.path.join(self.environment.source_dir, '_clang-' + name)):
return
if target_name in self.all_outputs:
return
+ if need_pch and not set(self.all_pch.keys()) <= {'clang'}:
+ return
+
cmd = self.environment.get_build_command() + \
['--internal', 'clang' + name, self.environment.source_dir, self.environment.build_dir] + \
extra_args
elem = self.create_phony_target(target_name, 'CUSTOM_COMMAND', 'PHONY')
elem.add_item('COMMAND', cmd)
elem.add_item('pool', 'console')
+ if need_pch:
+ elem.add_dep(list(self.all_pch['clang']))
self.add_build(elem)
def generate_clangformat(self) -> None:
@@ -3668,10 +3859,10 @@ def generate_clangformat(self) -> None:
def generate_clangtidy(self) -> None:
if not environment.detect_clangtidy():
return
- self.generate_clangtool('tidy')
+ self.generate_clangtool('tidy', need_pch=True)
if not environment.detect_clangapply():
return
- self.generate_clangtool('tidy', 'fix')
+ self.generate_clangtool('tidy', 'fix', need_pch=True)
def generate_tags(self, tool: str, target_name: str) -> None:
import shutil
@@ -3691,6 +3882,8 @@ def generate_utils(self) -> None:
self.generate_scanbuild()
self.generate_clangformat()
self.generate_clangtidy()
+ self.generate_clippy()
+ self.generate_rustdoc()
self.generate_tags('etags', 'TAGS')
self.generate_tags('ctags', 'ctags')
self.generate_tags('cscope', 'cscope')
@@ -3706,10 +3899,6 @@ def generate_ending(self) -> None:
('meson-test-prereq', self.get_testlike_targets()),
('meson-benchmark-prereq', self.get_testlike_targets(True))]:
targetlist = []
- # These must also be built by default.
- # XXX: Sometime in the future these should be built only before running tests.
- if targ == 'all':
- targetlist.extend(['meson-test-prereq', 'meson-benchmark-prereq'])
for t in deps.values():
# Add the first output of each target to the 'all' target so that
# they are all built
@@ -3743,7 +3932,7 @@ def generate_ending(self) -> None:
elem.add_dep(self.generate_custom_target_clean(ctlist))
if OptionKey('b_coverage') in self.environment.coredata.optstore and \
- self.environment.coredata.optstore.get_value('b_coverage'):
+ self.environment.coredata.optstore.get_value_for('b_coverage'):
self.generate_gcov_clean()
elem.add_dep('clean-gcda')
elem.add_dep('clean-gcno')
diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py
index 08a19c659e44..fbb113faf998 100644
--- a/mesonbuild/backend/vs2010backend.py
+++ b/mesonbuild/backend/vs2010backend.py
@@ -1,5 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2014-2016 The Meson development team
+# Copyright © 2023-2024 Intel Corporation
from __future__ import annotations
import copy
@@ -146,6 +147,9 @@ def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Inter
self.handled_target_deps = {}
self.gen_lite = gen_lite # Synonymous with generating the simpler makefile-style multi-config projects that invoke 'meson compile' builds, avoiding native MSBuild complications
+ def detect_toolset(self) -> None:
+ pass
+
def get_target_private_dir(self, target):
return os.path.join(self.get_target_dir(target), target.get_id())
@@ -226,6 +230,7 @@ def generate(self,
# Check for (currently) unexpected capture arg use cases -
if capture:
raise MesonBugException('We do not expect any vs backend to generate with \'capture = True\'')
+ self.detect_toolset()
host_machine = self.environment.machines.host.cpu_family
if host_machine in {'64', 'x86_64'}:
# amd64 or x86_64
@@ -266,13 +271,13 @@ def generate(self,
else:
raise MesonException('Unsupported Visual Studio platform: ' + build_machine)
- self.buildtype = self.environment.coredata.get_option(OptionKey('buildtype'))
- self.optimization = self.environment.coredata.get_option(OptionKey('optimization'))
- self.debug = self.environment.coredata.get_option(OptionKey('debug'))
+ self.buildtype = self.environment.coredata.optstore.get_value_for(OptionKey('buildtype'))
+ self.optimization = self.environment.coredata.optstore.get_value_for(OptionKey('optimization'))
+ self.debug = self.environment.coredata.optstore.get_value_for(OptionKey('debug'))
try:
- self.sanitize = self.environment.coredata.get_option(OptionKey('b_sanitize'))
- except MesonException:
- self.sanitize = 'none'
+ self.sanitize = self.environment.coredata.optstore.get_value_for(OptionKey('b_sanitize'))
+ except KeyError:
+ self.sanitize = []
sln_filename = os.path.join(self.environment.get_build_dir(), self.build.project_name + '.sln')
projlist = self.generate_projects(vslite_ctx)
self.gen_testproj()
@@ -421,7 +426,7 @@ def generate_solution(self, sln_filename: str, projlist: T.List[Project]) -> Non
ofile.write('# Visual Studio %s\n' % self.sln_version_comment)
prj_templ = 'Project("{%s}") = "%s", "%s", "{%s}"\n'
for prj in projlist:
- if self.environment.coredata.get_option(OptionKey('layout')) == 'mirror':
+ if self.environment.coredata.optstore.get_value_for(OptionKey('layout')) == 'mirror':
self.generate_solution_dirs(ofile, prj[1].parents)
target = self.build.targets[prj[0]]
lang = 'default'
@@ -691,9 +696,8 @@ def create_basic_project(self, target_name, *,
if target_ext:
ET.SubElement(direlem, 'TargetExt').text = target_ext
- ET.SubElement(direlem, 'EmbedManifest').text = 'false'
- if not gen_manifest:
- ET.SubElement(direlem, 'GenerateManifest').text = 'false'
+ ET.SubElement(direlem, 'EmbedManifest').text = 'true' if gen_manifest == 'embed' else 'false'
+ ET.SubElement(direlem, 'GenerateManifest').text = 'true' if gen_manifest else 'false'
return (root, type_config)
@@ -996,9 +1000,11 @@ def get_args_defines_and_inc_dirs(self, target, compiler, generated_files_includ
for l, comp in target.compilers.items():
if l in file_args:
file_args[l] += compilers.get_base_compile_args(
- target.get_options(), comp, self.environment)
+ target, comp, self.environment)
file_args[l] += comp.get_option_compile_args(
- target.get_options())
+ target, self.environment, target.subproject)
+ file_args[l] += comp.get_option_std_args(
+ target, self.environment, target.subproject)
# Add compile args added using add_project_arguments()
for l, args in self.build.projects_args[target.for_machine].get(target.subproject, {}).items():
@@ -1012,7 +1018,7 @@ def get_args_defines_and_inc_dirs(self, target, compiler, generated_files_includ
# Compile args added from the env or cross file: CFLAGS/CXXFLAGS, etc. We want these
# to override all the defaults, but not the per-target compile args.
for lang in file_args.keys():
- file_args[lang] += target.get_option(OptionKey(f'{lang}_args', machine=target.for_machine))
+ file_args[lang] += self.get_target_option(target, OptionKey(f'{lang}_args', machine=target.for_machine))
for args in file_args.values():
# This is where Visual Studio will insert target_args, target_defines,
# etc, which are added later from external deps (see below).
@@ -1302,7 +1308,7 @@ def add_non_makefile_vcxproj_elements(
if True in ((dep.name == 'openmp') for dep in target.get_external_deps()):
ET.SubElement(clconf, 'OpenMPSupport').text = 'true'
# CRT type; debug or release
- vscrt_type = target.get_option(OptionKey('b_vscrt'))
+ vscrt_type = self.get_target_option(target, 'b_vscrt')
vscrt_val = compiler.get_crt_val(vscrt_type, self.buildtype)
if vscrt_val == 'mdd':
ET.SubElement(type_config, 'UseDebugLibraries').text = 'true'
@@ -1340,7 +1346,7 @@ def add_non_makefile_vcxproj_elements(
# Exception handling has to be set in the xml in addition to the "AdditionalOptions" because otherwise
# cl will give warning D9025: overriding '/Ehs' with cpp_eh value
if 'cpp' in target.compilers:
- eh = target.get_option(OptionKey('cpp_eh', machine=target.for_machine))
+ eh = self.environment.coredata.get_option_for_target(target, OptionKey('cpp_eh', machine=target.for_machine))
if eh == 'a':
ET.SubElement(clconf, 'ExceptionHandling').text = 'Async'
elif eh == 's':
@@ -1358,10 +1364,10 @@ def add_non_makefile_vcxproj_elements(
ET.SubElement(clconf, 'PreprocessorDefinitions').text = ';'.join(target_defines)
ET.SubElement(clconf, 'FunctionLevelLinking').text = 'true'
# Warning level
- warning_level = T.cast('str', target.get_option(OptionKey('warning_level')))
+ warning_level = T.cast('str', self.get_target_option(target, 'warning_level'))
warning_level = 'EnableAllWarnings' if warning_level == 'everything' else 'Level' + str(1 + int(warning_level))
ET.SubElement(clconf, 'WarningLevel').text = warning_level
- if target.get_option(OptionKey('werror')):
+ if self.get_target_option(target, 'werror'):
ET.SubElement(clconf, 'TreatWarningAsError').text = 'true'
# Optimization flags
o_flags = split_o_flags_args(build_args)
@@ -1402,7 +1408,7 @@ def add_non_makefile_vcxproj_elements(
ET.SubElement(link, 'GenerateDebugInformation').text = 'false'
if not isinstance(target, build.StaticLibrary):
if isinstance(target, build.SharedModule):
- extra_link_args += compiler.get_std_shared_module_link_args(target.get_options())
+ extra_link_args += compiler.get_std_shared_module_link_args(target)
# Add link args added using add_project_link_arguments()
extra_link_args += self.build.get_project_link_args(compiler, target.subproject, target.for_machine)
# Add link args added using add_global_link_arguments()
@@ -1435,7 +1441,7 @@ def add_non_makefile_vcxproj_elements(
# to be after all internal and external libraries so that unresolved
# symbols from those can be found here. This is needed when the
# *_winlibs that we want to link to are static mingw64 libraries.
- extra_link_args += compiler.get_option_link_args(target.get_options())
+ extra_link_args += compiler.get_option_link_args(target, self.environment, target.subproject)
(additional_libpaths, additional_links, extra_link_args) = self.split_link_args(extra_link_args.to_native())
# Add more libraries to be linked if needed
@@ -1463,7 +1469,7 @@ def add_non_makefile_vcxproj_elements(
if self.environment.is_source(src):
target_private_dir = self.relpath(self.get_target_private_dir(t),
self.get_target_dir(t))
- rel_obj = self.object_filename_from_source(t, src, target_private_dir)
+ rel_obj = self.object_filename_from_source(t, compiler, src, target_private_dir)
extra_link_args.append(rel_obj)
extra_link_args.extend(self.flatten_object_list(t))
@@ -1490,8 +1496,9 @@ def add_non_makefile_vcxproj_elements(
additional_links.append(self.relpath(lib, self.get_target_dir(target)))
if len(extra_link_args) > 0:
- extra_link_args.append('%(AdditionalOptions)')
- ET.SubElement(link, "AdditionalOptions").text = ' '.join(extra_link_args)
+ args = [self.escape_additional_option(arg) for arg in extra_link_args]
+ args.append('%(AdditionalOptions)')
+ ET.SubElement(link, "AdditionalOptions").text = ' '.join(args)
if len(additional_libpaths) > 0:
additional_libpaths.insert(0, '%(AdditionalLibraryDirectories)')
ET.SubElement(link, 'AdditionalLibraryDirectories').text = ';'.join(additional_libpaths)
@@ -1534,7 +1541,8 @@ def add_non_makefile_vcxproj_elements(
# /nologo
ET.SubElement(link, 'SuppressStartupBanner').text = 'true'
# /release
- if not target.get_option(OptionKey('debug')):
+ addchecksum = self.get_target_option(target, 'buildtype') != 'debug'
+ if addchecksum:
ET.SubElement(link, 'SetChecksum').text = 'true'
# Visual studio doesn't simply allow the src files of a project to be added with the 'Condition=...' attribute,
@@ -1596,7 +1604,7 @@ def gen_vcxproj(self, target: build.BuildTarget, ofname: str, guid: str, vslite_
raise MesonException(f'Unknown target type for {target.get_basename()}')
(sources, headers, objects, _languages) = self.split_sources(target.sources)
- if target.is_unity:
+ if self.is_unity(target):
sources = self.generate_unity_files(target, sources)
if target.for_machine is MachineChoice.BUILD:
platform = self.build_platform
@@ -1725,7 +1733,7 @@ def path_normalize_add(path, lis):
self.add_preprocessor_defines(lang, inc_cl, file_defines)
self.add_include_dirs(lang, inc_cl, file_inc_dirs)
ET.SubElement(inc_cl, 'ObjectFileName').text = "$(IntDir)" + \
- self.object_filename_from_source(target, s)
+ self.object_filename_from_source(target, compiler, s)
for s in gen_src:
if path_normalize_add(s, previous_sources):
inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=s)
@@ -1739,7 +1747,7 @@ def path_normalize_add(path, lis):
self.add_include_dirs(lang, inc_cl, file_inc_dirs)
s = File.from_built_file(target.get_subdir(), s)
ET.SubElement(inc_cl, 'ObjectFileName').text = "$(IntDir)" + \
- self.object_filename_from_source(target, s)
+ self.object_filename_from_source(target, compiler, s)
for lang, headers in pch_sources.items():
impl = headers[1]
if impl and path_normalize_add(impl, previous_sources):
@@ -1789,7 +1797,7 @@ def path_normalize_add(path, lis):
# build system as possible.
self.add_target_deps(root, target)
self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
- if self.environment.coredata.get_option(OptionKey('layout')) == 'mirror':
+ if self.environment.coredata.optstore.get_value_for(OptionKey('layout')) == 'mirror':
self.gen_vcxproj_filters(target, ofname)
return True
@@ -1958,9 +1966,9 @@ def gen_testproj(self):
meson_build_dir_for_buildtype = build_dir_tail[:-2] + buildtype # Get the buildtype suffixed 'builddir_[debug/release/etc]' from 'builddir_vs', for example.
proj_to_build_dir_for_buildtype = str(os.path.join(proj_to_multiconfigured_builds_parent_dir, meson_build_dir_for_buildtype))
test_cmd = f'{nmake_base_meson_command} test -C "{proj_to_build_dir_for_buildtype}" --no-rebuild'
- if not self.environment.coredata.get_option(OptionKey('stdsplit')):
+ if not self.environment.coredata.optstore.get_value_for(OptionKey('stdsplit')):
test_cmd += ' --no-stdsplit'
- if self.environment.coredata.get_option(OptionKey('errorlogs')):
+ if self.environment.coredata.optstore.get_value_for(OptionKey('errorlogs')):
test_cmd += ' --print-errorlogs'
condition = f'\'$(Configuration)|$(Platform)\'==\'{buildtype}|{self.platform}\''
prop_group = ET.SubElement(root, 'PropertyGroup', Condition=condition)
@@ -1982,9 +1990,9 @@ def gen_testproj(self):
ET.SubElement(midl, 'ProxyFileName').text = '%(Filename)_p.c'
# FIXME: No benchmarks?
test_command = self.environment.get_build_command() + ['test', '--no-rebuild']
- if not self.environment.coredata.get_option(OptionKey('stdsplit')):
+ if not self.environment.coredata.optstore.get_value_for(OptionKey('stdsplit')):
test_command += ['--no-stdsplit']
- if self.environment.coredata.get_option(OptionKey('errorlogs')):
+ if self.environment.coredata.optstore.get_value_for(OptionKey('errorlogs')):
test_command += ['--print-errorlogs']
self.serialize_tests()
self.add_custom_build(root, 'run_tests', '"%s"' % ('" "'.join(test_command)))
@@ -2092,6 +2100,7 @@ def generate_lang_standard_info(self, file_args: T.Dict[str, CompilerArgs], clco
pass
# Returns if a target generates a manifest or not.
+ # Returns 'embed' if the generated manifest is embedded.
def get_gen_manifest(self, target):
if not isinstance(target, build.BuildTarget):
return True
@@ -2109,6 +2118,8 @@ def get_gen_manifest(self, target):
arg = arg.upper()
if arg == '/MANIFEST:NO':
return False
+ if arg.startswith('/MANIFEST:EMBED'):
+ return 'embed'
if arg == '/MANIFEST' or arg.startswith('/MANIFEST:'):
break
return True
diff --git a/mesonbuild/backend/vs2012backend.py b/mesonbuild/backend/vs2012backend.py
index 307964bdd1c2..922cd60d49d9 100644
--- a/mesonbuild/backend/vs2012backend.py
+++ b/mesonbuild/backend/vs2012backend.py
@@ -21,6 +21,8 @@ def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter
self.vs_version = '2012'
self.sln_file_version = '12.00'
self.sln_version_comment = '2012'
+
+ def detect_toolset(self) -> None:
if self.environment is not None:
# TODO: we assume host == build
comps = self.environment.coredata.compilers.host
diff --git a/mesonbuild/backend/vs2013backend.py b/mesonbuild/backend/vs2013backend.py
index ae0b68bbef2f..cf5d5980e8bb 100644
--- a/mesonbuild/backend/vs2013backend.py
+++ b/mesonbuild/backend/vs2013backend.py
@@ -20,6 +20,8 @@ def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter
self.vs_version = '2013'
self.sln_file_version = '12.00'
self.sln_version_comment = '2013'
+
+ def detect_toolset(self) -> None:
if self.environment is not None:
# TODO: we assume host == build
comps = self.environment.coredata.compilers.host
diff --git a/mesonbuild/backend/vs2015backend.py b/mesonbuild/backend/vs2015backend.py
index 4c515cca6f1e..1862def1abba 100644
--- a/mesonbuild/backend/vs2015backend.py
+++ b/mesonbuild/backend/vs2015backend.py
@@ -21,6 +21,8 @@ def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter
self.vs_version = '2015'
self.sln_file_version = '12.00'
self.sln_version_comment = '14'
+
+ def detect_toolset(self) -> None:
if self.environment is not None:
# TODO: we assume host == build
comps = self.environment.coredata.compilers.host
diff --git a/mesonbuild/backend/vs2017backend.py b/mesonbuild/backend/vs2017backend.py
index 393544febb60..372e1ce0d097 100644
--- a/mesonbuild/backend/vs2017backend.py
+++ b/mesonbuild/backend/vs2017backend.py
@@ -24,6 +24,8 @@ def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter
self.vs_version = '2017'
self.sln_file_version = '12.00'
self.sln_version_comment = '15'
+
+ def detect_toolset(self) -> None:
# We assume that host == build
if self.environment is not None:
comps = self.environment.coredata.compilers.host
diff --git a/mesonbuild/backend/vs2019backend.py b/mesonbuild/backend/vs2019backend.py
index 4d6e226d16ff..61ad75d5ea7e 100644
--- a/mesonbuild/backend/vs2019backend.py
+++ b/mesonbuild/backend/vs2019backend.py
@@ -22,6 +22,8 @@ def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter
super().__init__(build, interpreter)
self.sln_file_version = '12.00'
self.sln_version_comment = 'Version 16'
+
+ def detect_toolset(self) -> None:
if self.environment is not None:
comps = self.environment.coredata.compilers.host
if comps and all(c.id == 'clang-cl' for c in comps.values()):
diff --git a/mesonbuild/backend/vs2022backend.py b/mesonbuild/backend/vs2022backend.py
index 27e0438c7075..ca449a4e57f8 100644
--- a/mesonbuild/backend/vs2022backend.py
+++ b/mesonbuild/backend/vs2022backend.py
@@ -22,6 +22,8 @@ def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter
super().__init__(build, interpreter, gen_lite=gen_lite)
self.sln_file_version = '12.00'
self.sln_version_comment = 'Version 17'
+
+ def detect_toolset(self) -> None:
if self.environment is not None:
comps = self.environment.coredata.compilers.host
if comps and all(c.id == 'clang-cl' for c in comps.values()):
diff --git a/mesonbuild/backend/xcodebackend.py b/mesonbuild/backend/xcodebackend.py
index 31fd272b3f0b..587404a01330 100644
--- a/mesonbuild/backend/xcodebackend.py
+++ b/mesonbuild/backend/xcodebackend.py
@@ -230,7 +230,7 @@ class XCodeBackend(backends.Backend):
def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]):
super().__init__(build, interpreter)
self.project_uid = self.environment.coredata.lang_guids['default'].replace('-', '')[:24]
- self.buildtype = T.cast('str', self.environment.coredata.get_option(OptionKey('buildtype')))
+ self.buildtype = T.cast('str', self.environment.coredata.optstore.get_value_for(OptionKey('buildtype')))
self.project_conflist = self.gen_id()
self.maingroup_id = self.gen_id()
self.all_id = self.gen_id()
@@ -272,7 +272,7 @@ def gen_id(self) -> str:
@functools.lru_cache(maxsize=None)
def get_target_dir(self, target: T.Union[build.Target, build.CustomTargetIndex]) -> str:
- dirname = os.path.join(target.get_subdir(), T.cast('str', self.environment.coredata.get_option(OptionKey('buildtype'))))
+ dirname = os.path.join(target.get_subdir(), T.cast('str', self.environment.coredata.optstore.get_value_for(OptionKey('buildtype'))))
#os.makedirs(os.path.join(self.environment.get_build_dir(), dirname), exist_ok=True)
return dirname
@@ -1686,9 +1686,9 @@ def generate_single_build_target(self, objects_dict, target_name, target) -> Non
if compiler is None:
continue
# Start with warning args
- warn_args = compiler.get_warn_args(target.get_option(OptionKey('warning_level')))
- copt_proxy = target.get_options()
- std_args = compiler.get_option_compile_args(copt_proxy)
+ warn_args = compiler.get_warn_args(self.get_target_option(target, 'warning_level'))
+ std_args = compiler.get_option_compile_args(target, self.environment, target.subproject)
+ std_args += compiler.get_option_std_args(target, self.environment, target.subproject)
# Add compile args added using add_project_arguments()
pargs = self.build.projects_args[target.for_machine].get(target.subproject, {}).get(lang, [])
# Add compile args added using add_global_arguments()
@@ -1703,12 +1703,12 @@ def generate_single_build_target(self, objects_dict, target_name, target) -> Non
for d in swift_dep_dirs:
args += compiler.get_include_args(d, False)
if args:
- lang_cargs = cargs
+ cti_args = []
if compiler and target.implicit_include_directories:
# It is unclear what is the cwd when xcode runs. -I. does not seem to
# add the root build dir to the search path. So add an absolute path instead.
# This may break reproducible builds, in which case patches are welcome.
- lang_cargs += self.get_custom_target_dir_include_args(target, compiler, absolute_path=True)
+ cti_args = self.get_custom_target_dir_include_args(target, compiler, absolute_path=True)
# Xcode cannot handle separate compilation flags for C and ObjectiveC. They are both
# put in OTHER_CFLAGS. Same with C++ and ObjectiveC++.
if lang == 'objc':
@@ -1716,12 +1716,9 @@ def generate_single_build_target(self, objects_dict, target_name, target) -> Non
elif lang == 'objcpp':
lang = 'cpp'
langname = LANGNAMEMAP[lang]
- if langname in langargs:
- langargs[langname] += args
- else:
- langargs[langname] = args
- langargs[langname] += lang_cargs
- symroot = os.path.join(self.environment.get_build_dir(), target.subdir)
+ langargs.setdefault(langname, [])
+ langargs[langname] = cargs + cti_args + args
+ symroot = os.path.join(self.environment.get_build_dir(), target.subdir).rstrip('/')
bt_dict = PbxDict()
objects_dict.add_item(valid, bt_dict, buildtype)
bt_dict.add_item('isa', 'XCBuildConfiguration')
@@ -1739,9 +1736,9 @@ def generate_single_build_target(self, objects_dict, target_name, target) -> Non
if target.suffix:
suffix = '.' + target.suffix
settings_dict.add_item('EXECUTABLE_SUFFIX', suffix)
- settings_dict.add_item('GCC_GENERATE_DEBUGGING_SYMBOLS', BOOL2XCODEBOOL[target.get_option(OptionKey('debug'))])
+ settings_dict.add_item('GCC_GENERATE_DEBUGGING_SYMBOLS', BOOL2XCODEBOOL[self.get_target_option(target, 'debug')])
settings_dict.add_item('GCC_INLINES_ARE_PRIVATE_EXTERN', 'NO')
- opt_flag = OPT2XCODEOPT[target.get_option(OptionKey('optimization'))]
+ opt_flag = OPT2XCODEOPT[self.get_target_option(target, 'optimization')]
if opt_flag is not None:
settings_dict.add_item('GCC_OPTIMIZATION_LEVEL', opt_flag)
if target.has_pch:
@@ -1759,19 +1756,12 @@ def generate_single_build_target(self, objects_dict, target_name, target) -> Non
settings_dict.add_item('GCC_PREFIX_HEADER', f'$(PROJECT_DIR)/{relative_pch_path}')
settings_dict.add_item('GCC_PREPROCESSOR_DEFINITIONS', '')
settings_dict.add_item('GCC_SYMBOLS_PRIVATE_EXTERN', 'NO')
- header_arr = PbxArray()
- unquoted_headers = []
- unquoted_headers.append(self.get_target_private_dir_abs(target))
+ unquoted_headers = [self.get_target_private_dir_abs(target)]
if target.implicit_include_directories:
unquoted_headers.append(os.path.join(self.environment.get_build_dir(), target.get_subdir()))
unquoted_headers.append(os.path.join(self.environment.get_source_dir(), target.get_subdir()))
- if headerdirs:
- for i in headerdirs:
- i = os.path.normpath(i)
- unquoted_headers.append(i)
- for i in unquoted_headers:
- header_arr.add_item(f'"{i}"')
- settings_dict.add_item('HEADER_SEARCH_PATHS', header_arr)
+ unquoted_headers += headerdirs
+ settings_dict.add_item('HEADER_SEARCH_PATHS', self.normalize_header_search_paths(unquoted_headers))
settings_dict.add_item('INSTALL_PATH', install_path)
settings_dict.add_item('LIBRARY_SEARCH_PATHS', '')
if isinstance(target, build.SharedModule):
@@ -1799,6 +1789,15 @@ def generate_single_build_target(self, objects_dict, target_name, target) -> Non
warn_array.add_item('"$(inherited)"')
bt_dict.add_item('name', buildtype)
+ def normalize_header_search_paths(self, header_dirs) -> PbxArray:
+ header_arr = PbxArray()
+ for i in header_dirs:
+ np = os.path.normpath(i)
+ # Make sure Xcode will not split single path into separate entries, escaping space with a slash is not enough
+ item = f'"\\\"{np}\\\""' if ' ' in np else f'"{np}"'
+ header_arr.add_item(item)
+ return header_arr
+
def add_otherargs(self, settings_dict, langargs):
for langname, args in langargs.items():
if args:
diff --git a/mesonbuild/build.py b/mesonbuild/build.py
index f5be34daa05d..7cf85de97ddb 100644
--- a/mesonbuild/build.py
+++ b/mesonbuild/build.py
@@ -2,10 +2,11 @@
# Copyright 2012-2017 The Meson development team
from __future__ import annotations
-from collections import defaultdict, OrderedDict
-from dataclasses import dataclass, field, InitVar
+from collections import defaultdict, deque, OrderedDict
+from dataclasses import dataclass, field
from functools import lru_cache
import abc
+import copy
import hashlib
import itertools, pathlib
import os
@@ -23,8 +24,8 @@
File, MesonException, MachineChoice, PerMachine, OrderedSet, listify,
extract_as_list, typeslistify, stringlistify, classify_unity_sources,
get_filenames_templates_dict, substitute_values, has_path_sep,
- PerMachineDefaultable,
- MesonBugException, EnvironmentVariables, pickle_load,
+ is_parent_path, PerMachineDefaultable,
+ MesonBugException, EnvironmentVariables, pickle_load, lazy_property,
)
from .options import OptionKey
@@ -42,7 +43,7 @@
from .backend.backends import Backend
from .compilers import Compiler
from .interpreter.interpreter import SourceOutputs, Interpreter
- from .interpreter.interpreterobjects import Test
+ from .interpreter.interpreterobjects import Test, Doctest
from .interpreterbase import SubProject
from .linkers.linkers import StaticLinker
from .mesonlib import ExecutableSerialisation, FileMode, FileOrString
@@ -111,7 +112,7 @@ class DFeatures(TypedDict):
rust_kwargs |
cs_kwargs)
-known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic', 'pie', 'vs_module_defs'}
+known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic', 'pie', 'vs_module_defs', 'android_exe_type'}
known_shlib_kwargs = known_build_target_kwargs | {'version', 'soversion', 'vs_module_defs', 'darwin_versions', 'rust_abi'}
known_shmod_kwargs = known_build_target_kwargs | {'vs_module_defs', 'rust_abi'}
known_stlib_kwargs = known_build_target_kwargs | {'pic', 'prelink', 'rust_abi'}
@@ -222,11 +223,18 @@ class DepManifest:
license_files: T.List[T.Tuple[str, File]]
subproject: str
+ def license_mapping(self) -> T.List[T.Tuple[str, str]]:
+ ret = []
+ for ifilename, name in self.license_files:
+ fname = os.path.join(*(x for x in pathlib.PurePath(os.path.normpath(name.fname)).parts if x != '..'))
+ ret.append((ifilename, os.path.join(name.subdir, fname)))
+ return ret
+
def to_json(self) -> T.Dict[str, T.Union[str, T.List[str]]]:
return {
'version': self.version,
'license': self.license,
- 'license_files': [l[1].relative_name() for l in self.license_files],
+ 'license_files': [l[1] for l in self.license_mapping()],
}
@@ -275,7 +283,11 @@ def __init__(self, environment: environment.Environment):
self.dependency_overrides: PerMachine[T.Dict[T.Tuple, DependencyOverride]] = PerMachineDefaultable.default(
environment.is_cross_build(), {}, {})
self.devenv: T.List[EnvironmentVariables] = []
- self.modules: T.List[str] = []
+ self.modules: T.Set[str] = set()
+ """Used to track which modules are enabled in all subprojects.
+
+ Needed for tracking whether a module's options need to be exposed to the user.
+ """
def get_build_targets(self):
build_targets = OrderedDict()
@@ -416,10 +428,6 @@ class ExtractedObjects(HoldableObject):
recursive: bool = True
pch: bool = False
- def __post_init__(self) -> None:
- if self.target.is_unity:
- self.check_unity_compatible()
-
def __repr__(self) -> str:
r = '<{0} {1!r}: {2}>'
return r.format(self.__class__.__name__, self.target.name, self.srclist)
@@ -520,7 +528,6 @@ class Target(HoldableObject, metaclass=abc.ABCMeta):
install: bool = False
build_always_stale: bool = False
extra_files: T.List[File] = field(default_factory=list)
- override_options: InitVar[T.Optional[T.Dict[OptionKey, str]]] = None
@abc.abstractproperty
def typename(self) -> str:
@@ -530,13 +537,7 @@ def typename(self) -> str:
def type_suffix(self) -> str:
pass
- def __post_init__(self, overrides: T.Optional[T.Dict[OptionKey, str]]) -> None:
- if overrides:
- ovr = {k.evolve(machine=self.for_machine) if k.lang else k: v
- for k, v in overrides.items()}
- else:
- ovr = {}
- self.options = coredata.OptionsView(self.environment.coredata.optstore, self.subproject, ovr)
+ def __post_init__(self) -> None:
# XXX: this should happen in the interpreter
if has_path_sep(self.name):
# Fix failing test 53 when this becomes an error.
@@ -633,13 +634,17 @@ def construct_id_from_path(subdir: str, name: str, type_suffix: str) -> str:
return subdir_part + '@@' + my_id
return my_id
- def get_id(self) -> str:
+ @lazy_property
+ def id(self) -> str:
name = self.name
if getattr(self, 'name_suffix_set', False):
name += '.' + self.suffix
return self.construct_id_from_path(
self.subdir, name, self.type_suffix())
+ def get_id(self) -> str:
+ return self.id
+
def process_kwargs_base(self, kwargs: T.Dict[str, T.Any]) -> None:
if 'build_by_default' in kwargs:
self.build_by_default = kwargs['build_by_default']
@@ -651,36 +656,13 @@ def process_kwargs_base(self, kwargs: T.Dict[str, T.Any]) -> None:
# set, use the value of 'install' if it's enabled.
self.build_by_default = True
- self.set_option_overrides(self.parse_overrides(kwargs))
-
- def is_compiler_option_hack(self, key):
- # FIXME this method must be deleted when OptionsView goes away.
- # At that point the build target only stores the original string.
- # The decision on how to use those pieces of data is done elsewhere.
- from .compilers import all_languages
- if '_' not in key.name:
- return False
- prefix = key.name.split('_')[0]
- return prefix in all_languages
-
- def set_option_overrides(self, option_overrides: T.Dict[OptionKey, str]) -> None:
- self.options.overrides = {}
- for k, v in option_overrides.items():
- if self.is_compiler_option_hack(k):
- self.options.overrides[k.evolve(machine=self.for_machine)] = v
- else:
- self.options.overrides[k] = v
-
- def get_options(self) -> coredata.OptionsView:
- return self.options
+ self.raw_overrides = self.parse_overrides(kwargs)
- def get_option(self, key: 'OptionKey') -> T.Union[str, int, bool]:
- # TODO: if it's possible to annotate get_option or validate_option_value
- # in the future we might be able to remove the cast here
- return T.cast('T.Union[str, int, bool]', self.options.get_value(key))
+ def get_override(self, name: str) -> T.Optional[str]:
+ return self.raw_overrides.get(name, None)
@staticmethod
- def parse_overrides(kwargs: T.Dict[str, T.Any]) -> T.Dict[OptionKey, str]:
+ def parse_overrides(kwargs: T.Dict[str, T.Any]) -> T.Dict[str, str]:
opts = kwargs.get('override_options', [])
# In this case we have an already parsed and ready to go dictionary
@@ -688,15 +670,13 @@ def parse_overrides(kwargs: T.Dict[str, T.Any]) -> T.Dict[OptionKey, str]:
if isinstance(opts, dict):
return T.cast('T.Dict[OptionKey, str]', opts)
- result: T.Dict[OptionKey, str] = {}
+ result: T.Dict[str, str] = {}
overrides = stringlistify(opts)
for o in overrides:
if '=' not in o:
raise InvalidArguments('Overrides must be of form "key=value"')
k, v = o.split('=', 1)
- key = OptionKey.from_string(k.strip())
- v = v.strip()
- result[key] = v
+ result[k] = v
return result
def is_linkable_target(self) -> bool:
@@ -750,6 +730,7 @@ def __init__(
self.name_prefix_set = False
self.name_suffix_set = False
self.filename = 'no_name'
+ self.doctests: T.Optional[Doctest] = None
# The debugging information file this target will generate
self.debug_filename = None
# The list of all files outputted by this target. Useful in cases such
@@ -828,11 +809,6 @@ def __repr__(self):
def __str__(self):
return f"{self.name}"
- @property
- def is_unity(self) -> bool:
- unity_opt = self.get_option(OptionKey('unity'))
- return unity_opt == 'on' or (unity_opt == 'subprojects' and self.subproject != '')
-
def validate_install(self):
if self.for_machine is MachineChoice.BUILD and self.install:
if self.environment.is_cross_build():
@@ -1015,9 +991,14 @@ def process_compilers(self) -> T.List[str]:
if 'vala' in self.compilers and 'c' not in self.compilers:
self.compilers['c'] = self.all_compilers['c']
if 'cython' in self.compilers:
- key = OptionKey('cython_language', machine=self.for_machine)
- value = self.get_option(key)
-
+ # Not great, but we can't ask for the override value from "the system"
+ # because this object is currently being constructed so it is not
+ # yet placed in the data store. Grab it directly from override strings
+ # instead.
+ value = self.get_override('cython_language')
+ if value is None:
+ key = OptionKey('cython_language', machine=self.for_machine)
+ value = self.environment.coredata.optstore.get_value_for(key)
try:
self.compilers[value] = self.all_compilers[value]
except KeyError:
@@ -1053,7 +1034,7 @@ def process_link_depends(self, sources):
'Link_depends arguments must be strings, Files, '
'or a Custom Target, or lists thereof.')
- def extract_objects(self, srclist: T.List[T.Union['FileOrString', 'GeneratedTypes']]) -> ExtractedObjects:
+ def extract_objects(self, srclist: T.List[T.Union['FileOrString', 'GeneratedTypes']], is_unity: bool) -> ExtractedObjects:
sources_set = set(self.sources)
generated_set = set(self.generated)
@@ -1076,21 +1057,71 @@ def extract_objects(self, srclist: T.List[T.Union['FileOrString', 'GeneratedType
obj_gen.append(src)
else:
raise MesonException(f'Object extraction arguments must be strings, Files or targets (got {type(src).__name__}).')
- return ExtractedObjects(self, obj_src, obj_gen)
+ eobjs = ExtractedObjects(self, obj_src, obj_gen)
+ if is_unity:
+ eobjs.check_unity_compatible()
+ return eobjs
def extract_all_objects(self, recursive: bool = True) -> ExtractedObjects:
return ExtractedObjects(self, self.sources, self.generated, self.objects,
recursive, pch=True)
+ @lru_cache(maxsize=None)
def get_all_link_deps(self) -> ImmutableListProtocol[BuildTargetTypes]:
- return self.get_transitive_link_deps()
+ """ Get all shared library dependencies
+ This returns all shared libraries in the entire dependency tree. Those
+ are libraries needed at runtime, which is different from the set needed
+ at link time; see get_dependencies() for that.
+ """
+ result: OrderedSet[BuildTargetTypes] = OrderedSet()
+ stack: T.Deque[BuildTargetTypes] = deque()
+ stack.appendleft(self)
+ while stack:
+ t = stack.pop()
+ if t in result:
+ continue
+ if isinstance(t, CustomTargetIndex):
+ stack.appendleft(t.target)
+ continue
+ if isinstance(t, SharedLibrary):
+ result.add(t)
+ if isinstance(t, BuildTarget):
+ stack.extendleft(t.link_targets)
+ stack.extendleft(t.link_whole_targets)
+ return list(result)
@lru_cache(maxsize=None)
- def get_transitive_link_deps(self) -> ImmutableListProtocol[BuildTargetTypes]:
- result: T.List[Target] = []
- for i in self.link_targets:
- result += i.get_all_link_deps()
- return result
+ def get_all_linked_targets(self) -> ImmutableListProtocol[BuildTargetTypes]:
+ """Get all targets that have been linked with this one.
+
+ This is useful for cases where we need to analyze these links, such as
+ for module information.
+
+ This includes static libraries and static libraries linked with static
+ libraries. This differs from :method:`get_all_link_deps` in that it does
+ add static libs, and differs from :method:`get_dependencies`, which
+ does not look for targets that are not directly linked, such as those
+ that are added with `link_whole`.
+
+ :returns: An immutable list of BuildTargets
+ """
+ result: OrderedSet[BuildTargetTypes] = OrderedSet()
+ stack: T.Deque[BuildTargetTypes] = deque()
+ stack.extendleft(self.link_targets)
+ stack.extendleft(self.link_whole_targets)
+ while stack:
+ t = stack.pop()
+ if t in result:
+ continue
+ if isinstance(t, CustomTargetIndex):
+ stack.appendleft(t.target)
+ continue
+ if isinstance(t, BuildTarget):
+ result.add(t)
+ stack.extendleft(t.link_targets)
+ stack.extendleft(t.link_whole_targets)
+ assert self not in result, 'should not have self'
+ return list(result)
def get_link_deps_mapping(self, prefix: str) -> T.Mapping[str, str]:
return self.get_transitive_link_deps_mapping(prefix)
@@ -1258,7 +1289,7 @@ def _extract_pic_pie(self, kwargs: T.Dict[str, T.Any], arg: str, option: str) ->
if kwargs.get(arg) is not None:
val = T.cast('bool', kwargs[arg])
elif k in self.environment.coredata.optstore:
- val = self.environment.coredata.optstore.get_value(k)
+ val = self.environment.coredata.optstore.get_value_for(k.name, k.subproject)
else:
val = False
@@ -1319,7 +1350,7 @@ def get_dependencies_recurse(self, result: OrderedSet[BuildTargetTypes], include
def get_source_subdir(self):
return self.subdir
- def get_sources(self):
+ def get_sources(self) -> T.List[File]:
return self.sources
def get_objects(self) -> T.List[T.Union[str, 'File', 'ExtractedObjects']]:
@@ -1360,7 +1391,8 @@ def add_deps(self, deps):
[],
dep.get_compile_args(),
dep.get_link_args(),
- [], [], [], [], [], {}, [], [], [])
+ [], [], [], [], [], {}, [], [], [],
+ dep.name)
self.external_deps.append(extpart)
# Deps of deps.
self.add_deps(dep.ext_deps)
@@ -1441,7 +1473,6 @@ def link_whole(self, targets: T.List[BuildTargetTypes], promoted: bool = False)
msg = f"Can't link non-PIC static library {t.name!r} into shared library {self.name!r}. "
msg += "Use the 'pic' option to static_library to build with PIC."
raise InvalidArguments(msg)
-
self.check_can_link_together(t)
if isinstance(self, StaticLibrary):
# When we're a static library and we link_whole: to another static
@@ -1463,29 +1494,11 @@ def get_internal_static_libraries(self) -> OrderedSet[BuildTargetTypes]:
def get_internal_static_libraries_recurse(self, result: OrderedSet[BuildTargetTypes]) -> None:
for t in self.link_targets:
if t.is_internal() and t not in result:
- self.check_can_extract_objects(t, origin, promoted=True)
result.add(t)
- t.get_internal_static_libraries_recurse(result, origin)
+ t.get_internal_static_libraries_recurse(result)
for t in self.link_whole_targets:
if t.is_internal():
- t.get_internal_static_libraries_recurse(result, origin)
-
- def check_can_extract_objects(self, t: T.Union[Target, CustomTargetIndex], origin: StaticLibrary, promoted: bool = False) -> None:
- if isinstance(t, (CustomTarget, CustomTargetIndex)) or t.uses_rust():
- # To extract objects from a custom target we would have to extract
- # the archive, WIP implementation can be found in
- # https://github.com/mesonbuild/meson/pull/9218.
- # For Rust C ABI we could in theory have access to objects, but there
- # are several meson issues that need to be fixed:
- # https://github.com/mesonbuild/meson/issues/10722
- # https://github.com/mesonbuild/meson/issues/10723
- # https://github.com/mesonbuild/meson/issues/10724
- m = (f'Cannot link_whole a custom or Rust target {t.name!r} into a static library {origin.name!r}. '
- 'Instead, pass individual object files with the "objects:" keyword argument if possible.')
- if promoted:
- m += (f' Meson had to promote link to link_whole because {origin.name!r} is installed but not {t.name!r},'
- f' and thus has to include objects from {t.name!r} to be usable.')
- raise InvalidArguments(m)
+ t.get_internal_static_libraries_recurse(result)
def _bundle_static_library(self, t: T.Union[BuildTargetTypes], promoted: bool = False) -> None:
if self.uses_rust():
@@ -1514,7 +1527,7 @@ def check_can_link_together(self, t: BuildTargetTypes) -> None:
if not self.uses_rust() and links_with_rust_abi:
raise InvalidArguments(f'Try to link Rust ABI library {t.name!r} with a non-Rust target {self.name!r}')
if self.for_machine is not t.for_machine and (not links_with_rust_abi or t.rust_crate_type != 'proc-macro'):
- msg = f'Tried to tied to mix a {t.for_machine} library ("{t.name}") with a {self.for_machine} target "{self.name}"'
+ msg = f'Tried to mix a {t.for_machine} library ("{t.name}") with a {self.for_machine} target "{self.name}"'
if self.environment.is_cross_build():
raise InvalidArguments(msg + ' This is not possible in a cross build.')
else:
@@ -1754,7 +1767,7 @@ def process_vs_module_defs_kw(self, kwargs: T.Dict[str, T.Any]) -> None:
self.process_link_depends(path)
def extract_targets_as_list(self, kwargs: T.Dict[str, T.Union[LibTypes, T.Sequence[LibTypes]]], key: T.Literal['link_with', 'link_whole']) -> T.List[LibTypes]:
- bl_type = self.environment.coredata.get_option(OptionKey('default_both_libraries'))
+ bl_type = self.environment.coredata.optstore.get_value_for(OptionKey('default_both_libraries'))
if bl_type == 'auto':
if isinstance(self, StaticLibrary):
bl_type = 'static'
@@ -1771,7 +1784,7 @@ def extract_targets_as_list(self, kwargs: T.Dict[str, T.Union[LibTypes, T.Sequen
lib_list.append(lib)
return lib_list
- def get(self, lib_type: T.Literal['static', 'shared', 'auto']) -> LibTypes:
+ def get(self, lib_type: T.Literal['static', 'shared']) -> LibTypes:
"""Base case used by BothLibraries"""
return self
@@ -1852,14 +1865,6 @@ def get_arglist(self, inname: str) -> T.List[str]:
basename = os.path.splitext(plainname)[0]
return [x.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname) for x in self.arglist]
- @staticmethod
- def is_parent_path(parent: str, trial: str) -> bool:
- try:
- common = os.path.commonpath((parent, trial))
- except ValueError: # Windows on different drives
- return False
- return pathlib.PurePath(common) == pathlib.PurePath(parent)
-
def process_files(self, files: T.Iterable[T.Union[str, File, 'CustomTarget', 'CustomTargetIndex', 'GeneratedList']],
state: T.Union['Interpreter', 'ModuleState'],
preserve_path_from: T.Optional[str] = None,
@@ -1889,7 +1894,7 @@ def process_files(self, files: T.Iterable[T.Union[str, File, 'CustomTarget', 'Cu
for f in fs:
if preserve_path_from:
abs_f = f.absolute_path(state.environment.source_dir, state.environment.build_dir)
- if not self.is_parent_path(preserve_path_from, abs_f):
+ if not is_parent_path(preserve_path_from, abs_f):
raise InvalidArguments('generator.process: When using preserve_path_from, all input files must be in a subdirectory of the given dir.')
f = FileMaybeInTargetPrivateDir(f)
output.add_file(f, state)
@@ -1987,10 +1992,11 @@ def __init__(
kwargs):
key = OptionKey('b_pie')
if 'pie' not in kwargs and key in environment.coredata.optstore:
- kwargs['pie'] = environment.coredata.optstore.get_value(key)
+ kwargs['pie'] = environment.coredata.optstore.get_value_for(key)
super().__init__(name, subdir, subproject, for_machine, sources, structured_sources, objects,
environment, compilers, kwargs)
self.win_subsystem = kwargs.get('win_subsystem') or 'console'
+ assert kwargs.get('android_exe_type') is None or kwargs.get('android_exe_type') in {'application', 'executable'}
# Check for export_dynamic
self.export_dynamic = kwargs.get('export_dynamic', False)
if not isinstance(self.export_dynamic, bool):
@@ -2033,9 +2039,13 @@ def post_init(self) -> None:
elif ('c' in self.compilers and self.compilers['c'].get_id() in {'mwccarm', 'mwcceppc'} or
'cpp' in self.compilers and self.compilers['cpp'].get_id() in {'mwccarm', 'mwcceppc'}):
self.suffix = 'nef'
+ elif ('c' in self.compilers and self.compilers['c'].get_id() == 'tasking'):
+ self.suffix = 'elf'
else:
self.suffix = machine.get_exe_suffix()
self.filename = self.name
+ if self.prefix:
+ self.filename = self.prefix + self.filename
if self.suffix:
self.filename += '.' + self.suffix
self.outputs[0] = self.filename
@@ -2060,7 +2070,7 @@ def post_init(self) -> None:
machine.is_windows()
and ('cs' in self.compilers or self.uses_rust() or self.get_using_msvc())
# .pdb file is created only when debug symbols are enabled
- and self.environment.coredata.get_option(OptionKey("debug"))
+ and self.environment.coredata.optstore.get_value_for(OptionKey("debug"))
)
if create_debug_file:
# If the target is has a standard exe extension (i.e. 'foo.exe'),
@@ -2166,7 +2176,8 @@ def post_init(self) -> None:
rustc = self.compilers['rust']
d = dependencies.InternalDependency('undefined', [], [],
rustc.native_static_libs,
- [], [], [], [], [], {}, [], [], [])
+ [], [], [], [], [], {}, [], [], [],
+ '_rust_native_static_libs')
self.external_deps.append(d)
# By default a static library is named libfoo.a even on Windows because
# MSVC does not have a consistent convention for what static libraries
@@ -2189,6 +2200,15 @@ def post_init(self) -> None:
self.suffix = 'a'
else:
self.suffix = 'a'
+ if 'c' in self.compilers and self.compilers['c'].get_id() == 'tasking' and not self.prelink:
+ key = OptionKey('b_lto', self.subproject, self.for_machine)
+ try:
+ v = self.environment.coredata.get_option_for_target(self, key)
+ except KeyError:
+ v = self.environment.coredata.optstore.get_value_for(key)
+ assert isinstance(v, bool), 'for mypy'
+ if v:
+ self.suffix = 'ma'
self.filename = self.prefix + self.name + '.' + self.suffix
self.outputs[0] = self.filename
@@ -2225,12 +2245,16 @@ def is_internal(self) -> bool:
return not self.install
def set_shared(self, shared_library: SharedLibrary) -> None:
- self.both_lib = shared_library
+ self.both_lib = copy.copy(shared_library)
+ self.both_lib.both_lib = None
- def get(self, lib_type: T.Literal['static', 'shared', 'auto']) -> LibTypes:
+ def get(self, lib_type: T.Literal['static', 'shared'], recursive: bool = False) -> LibTypes:
+ result = self
if lib_type == 'shared':
- return self.both_lib or self
- return self
+ result = self.both_lib or self
+ if recursive:
+ result.link_targets = [t.get(lib_type, True) for t in self.link_targets]
+ return result
class SharedLibrary(BuildTarget):
known_kwargs = known_shlib_kwargs
@@ -2335,14 +2359,14 @@ def determine_filenames(self):
# Import library is called foo.dll.lib
import_filename_tpl = '{0.prefix}{0.name}.dll.lib'
# .pdb file is only created when debug symbols are enabled
- create_debug_file = self.environment.coredata.get_option(OptionKey("debug"))
+ create_debug_file = self.environment.coredata.optstore.get_value_for(OptionKey("debug"))
elif self.get_using_msvc():
# Shared library is of the form foo.dll
prefix = ''
# Import library is called foo.lib
import_filename_tpl = '{0.prefix}{0.name}.lib'
# .pdb file is only created when debug symbols are enabled
- create_debug_file = self.environment.coredata.get_option(OptionKey("debug"))
+ create_debug_file = self.environment.coredata.optstore.get_value_for(OptionKey("debug"))
# Assume GCC-compatible naming
else:
# Shared library is of the form libfoo.dll
@@ -2457,9 +2481,6 @@ def get_debug_filename(self) -> T.Optional[str]:
"""
return self.debug_filename
- def get_all_link_deps(self):
- return [self] + self.get_transitive_link_deps()
-
def get_aliases(self) -> T.List[T.Tuple[str, str, str]]:
"""
If the versioned library name is libfoo.so.0.100.0, aliases are:
@@ -2499,12 +2520,16 @@ def is_linkable_target(self):
return True
def set_static(self, static_library: StaticLibrary) -> None:
- self.both_lib = static_library
+ self.both_lib = copy.copy(static_library)
+ self.both_lib.both_lib = None
- def get(self, lib_type: T.Literal['static', 'shared']) -> LibTypes:
+ def get(self, lib_type: T.Literal['static', 'shared'], recursive: bool = False) -> LibTypes:
+ result = self
if lib_type == 'static':
- return self.both_lib or self
- return self
+ result = self.both_lib or self
+ if recursive:
+ result.link_targets = [t.get(lib_type, True) for t in self.link_targets]
+ return result
# A shared library that is meant to be used with dlopen rather than linking
# into something else.
@@ -2551,7 +2576,7 @@ def __init__(self, shared: SharedLibrary, static: StaticLibrary, preferred_libra
def __repr__(self) -> str:
return f''
- def get(self, lib_type: T.Literal['static', 'shared', 'auto']) -> LibTypes:
+ def get(self, lib_type: T.Literal['static', 'shared']) -> T.Union[StaticLibrary, SharedLibrary]:
if lib_type == 'static':
return self.static
if lib_type == 'shared':
@@ -2575,7 +2600,7 @@ class CommandBase:
subproject: str
def flatten_command(self, cmd: T.Sequence[T.Union[str, File, programs.ExternalProgram, BuildTargetTypes]]) -> \
- T.List[T.Union[str, File, BuildTarget, 'CustomTarget']]:
+ T.List[T.Union[str, File, BuildTarget, CustomTarget, programs.ExternalProgram]]:
cmd = listify(cmd)
final_cmd: T.List[T.Union[str, File, BuildTarget, 'CustomTarget']] = []
for c in cmd:
@@ -2592,7 +2617,8 @@ def flatten_command(self, cmd: T.Sequence[T.Union[str, File, programs.ExternalPr
# Can only add a dependency on an external program which we
# know the absolute path of
self.depend_files.append(File.from_absolute_file(path))
- final_cmd += c.get_command()
+ # Do NOT flatten -- it is needed for later parsing
+ final_cmd.append(c)
elif isinstance(c, (BuildTarget, CustomTarget)):
self.dependencies.append(c)
final_cmd.append(c)
@@ -2625,7 +2651,10 @@ def get_internal_static_libraries(self) -> OrderedSet[BuildTargetTypes]:
def get_internal_static_libraries_recurse(self, result: OrderedSet[BuildTargetTypes]) -> None:
pass
- def get(self, lib_type: T.Literal['static', 'shared', 'auto']) -> LibTypes:
+ def get_all_linked_targets(self) -> ImmutableListProtocol[BuildTargetTypes]:
+ return []
+
+ def get(self, lib_type: T.Literal['static', 'shared'], recursive: bool = False) -> LibTypes:
"""Base case used by BothLibraries"""
return self
@@ -2659,6 +2688,7 @@ def __init__(self,
install_dir: T.Optional[T.List[T.Union[str, Literal[False]]]] = None,
install_mode: T.Optional[FileMode] = None,
install_tag: T.Optional[T.List[T.Optional[str]]] = None,
+ rspable: bool = False,
absolute_paths: bool = False,
backend: T.Optional['Backend'] = None,
description: str = 'Generating {} with a custom command',
@@ -2691,6 +2721,9 @@ def __init__(self,
# Whether to use absolute paths for all files on the commandline
self.absolute_paths = absolute_paths
+ # Whether to enable using response files for the underlying tool
+ self.rspable = rspable
+
def get_default_install_dir(self) -> T.Union[T.Tuple[str, str], T.Tuple[None, None]]:
return None, None
@@ -2878,10 +2911,6 @@ def __init__(self,
def type_suffix(self) -> str:
return "@compile"
- @property
- def is_unity(self) -> bool:
- return False
-
def _add_output(self, f: File) -> None:
plainname = os.path.basename(f.fname)
basename = os.path.splitext(plainname)[0]
diff --git a/mesonbuild/cargo/__init__.py b/mesonbuild/cargo/__init__.py
index 10cb0be103c0..0a4d5f2abdcb 100644
--- a/mesonbuild/cargo/__init__.py
+++ b/mesonbuild/cargo/__init__.py
@@ -1,6 +1,6 @@
__all__ = [
- 'interpret',
+ 'Interpreter',
'load_wraps',
]
-from .interpreter import interpret, load_wraps
+from .interpreter import Interpreter, load_wraps
diff --git a/mesonbuild/cargo/interpreter.py b/mesonbuild/cargo/interpreter.py
index a95ee5be39be..78bce95d227e 100644
--- a/mesonbuild/cargo/interpreter.py
+++ b/mesonbuild/cargo/interpreter.py
@@ -11,30 +11,39 @@
from __future__ import annotations
import dataclasses
-import glob
import importlib
-import itertools
import json
import os
import shutil
import collections
import urllib.parse
+import itertools
import typing as T
from . import builder
from . import version
from ..mesonlib import MesonException, Popen_safe
-from ..options import OptionKey
-from .. import coredata, options, mlog
+from .. import coredata, mlog
from ..wrap.wrap import PackageDefinition
if T.TYPE_CHECKING:
from types import ModuleType
- from typing import Any
+
+ from typing_extensions import Protocol, Self
from . import manifest
from .. import mparser
from ..environment import Environment
+ from ..interpreterbase import SubProject
+
+ # Copied from typeshed. Blarg that they don't expose this
+ class DataclassInstance(Protocol):
+ __dataclass_fields__: T.ClassVar[dict[str, dataclasses.Field[T.Any]]]
+
+ _UnknownKeysT = T.TypeVar('_UnknownKeysT', manifest.FixedPackage,
+ manifest.FixedDependency, manifest.FixedLibTarget,
+ manifest.FixedBuildTarget)
+
# tomllib is present in python 3.11, before that it is a pypi module called tomli,
# we try to import tomllib, then tomli,
@@ -55,6 +64,14 @@
toml2json = shutil.which('toml2json')
+_EXTRA_KEYS_WARNING = (
+ "This may (unlikely) be an error in the cargo manifest, or may be a missing "
+ "implementation in Meson. If this issue can be reproduced with the latest "
+ "version of Meson, please help us by opening an issue at "
+ "https://github.com/mesonbuild/meson/issues. Please include the crate and "
+ "version that is generating this warning if possible."
+)
+
class TomlImplementationMissing(MesonException):
pass
@@ -119,6 +136,30 @@ def _fixup_raw_mappings(d: T.Union[manifest.BuildTarget, manifest.LibTarget, man
return T.cast('T.Union[manifest.FixedBuildTarget, manifest.FixedLibTarget, manifest.FixedDependency]', raw)
+def _handle_unknown_keys(data: _UnknownKeysT, cls: T.Union[DataclassInstance, T.Type[DataclassInstance]],
+ msg: str) -> _UnknownKeysT:
+ """Remove and warn on keys that are coming from cargo, but are unknown to
+ our representations.
+
+ This is intended to give users the possibility of things proceeding when a
+ new key is added to Cargo.toml that we don't yet handle, but to still warn
+ them that things might not work.
+
+ :param data: The raw data to look at
+ :param cls: The Dataclass derived type that will be created
+ :param msg: the header for the error message. Usually something like "In N structure".
+ :return: The original data structure, but with all unknown keys removed.
+ """
+ unexpected = set(data) - {x.name for x in dataclasses.fields(cls)}
+ if unexpected:
+ mlog.warning(msg, 'has unexpected keys', '"{}".'.format(', '.join(sorted(unexpected))),
+ _EXTRA_KEYS_WARNING)
+ for k in unexpected:
+ # Mypy and Pyright can't prove that this is okay
+ del data[k] # type: ignore[misc]
+ return data
+
+
@dataclasses.dataclass
class Package:
@@ -145,14 +186,63 @@ class Package:
exclude: T.List[str] = dataclasses.field(default_factory=list)
include: T.List[str] = dataclasses.field(default_factory=list)
publish: bool = True
- metadata: T.Dict[str, T.Dict[str, str]] = dataclasses.field(default_factory=dict)
+ metadata: T.Dict[str, T.Any] = dataclasses.field(default_factory=dict)
default_run: T.Optional[str] = None
autolib: bool = True
autobins: bool = True
autoexamples: bool = True
autotests: bool = True
autobenches: bool = True
+ api: str = dataclasses.field(init=False)
+ def __post_init__(self) -> None:
+ self.api = _version_to_api(self.version)
+
+ @classmethod
+ def from_raw(cls, raw: manifest.Package) -> Self:
+ pkg = T.cast('manifest.FixedPackage',
+ {fixup_meson_varname(k): v for k, v in raw.items()})
+ pkg = _handle_unknown_keys(pkg, cls, f'Package entry {pkg["name"]}')
+ return cls(**pkg)
+
+@dataclasses.dataclass
+class SystemDependency:
+
+ """ Representation of a Cargo system-deps entry
+ https://docs.rs/system-deps/latest/system_deps
+ """
+
+ name: str
+ version: T.List[str]
+ optional: bool = False
+ feature: T.Optional[str] = None
+ feature_overrides: T.Dict[str, T.Dict[str, str]] = dataclasses.field(default_factory=dict)
+
+ @classmethod
+ def from_raw(cls, name: str, raw: T.Any) -> SystemDependency:
+ if isinstance(raw, str):
+ return cls(name, SystemDependency.convert_version(raw))
+ name = raw.get('name', name)
+ version = SystemDependency.convert_version(raw.get('version'))
+ optional = raw.get('optional', False)
+ feature = raw.get('feature')
+ # Everything else are overrides when certain features are enabled.
+ feature_overrides = {k: v for k, v in raw.items() if k not in {'name', 'version', 'optional', 'feature'}}
+ return cls(name, version, optional, feature, feature_overrides)
+
+ @staticmethod
+ def convert_version(version: T.Optional[str]) -> T.List[str]:
+ vers = version.split(',') if version is not None else []
+ result: T.List[str] = []
+ for v in vers:
+ v = v.strip()
+ if v[0] not in '><=':
+ v = f'>={v}'
+ result.append(v)
+ return result
+
+ def enabled(self, features: T.Set[str]) -> bool:
+ return self.feature is None or self.feature in features
@dataclasses.dataclass
class Dependency:
@@ -193,7 +283,8 @@ def from_raw(cls, name: str, raw: manifest.DependencyV) -> Dependency:
"""Create a dependency from a raw cargo dictionary"""
if isinstance(raw, str):
return cls(name, version.convert(raw))
- return cls(name, **_fixup_raw_mappings(raw))
+ fixed = _handle_unknown_keys(_fixup_raw_mappings(raw), cls, f'Dependency entry {name}')
+ return cls(name, **fixed)
@dataclasses.dataclass
@@ -224,6 +315,11 @@ class BuildTarget:
required_features: T.List[str] = dataclasses.field(default_factory=list)
plugin: bool = False
+ @classmethod
+ def from_raw(cls, raw: manifest.BuildTarget) -> Self:
+ name = raw.get('name', '')
+ build = _handle_unknown_keys(_fixup_raw_mappings(raw), cls, f'Binary entry {name}')
+ return cls(**build)
@dataclasses.dataclass
class Library(BuildTarget):
@@ -237,6 +333,18 @@ class Library(BuildTarget):
crate_type: T.List[manifest.CRATE_TYPE] = dataclasses.field(default_factory=lambda: ['lib'])
doc_scrape_examples: bool = True
+ @classmethod
+ def from_raw(cls, raw: manifest.LibTarget, fallback_name: str) -> Self: # type: ignore[override]
+ fixed = _fixup_raw_mappings(raw)
+
+ # We need to set the name field if it's not set manually, including if
+ # other fields are set in the lib section
+ if 'name' not in fixed:
+ fixed['name'] = fallback_name
+ fixed = _handle_unknown_keys(fixed, cls, f'Library entry {fixed["name"]}')
+
+ return cls(**fixed)
+
@dataclasses.dataclass
class Binary(BuildTarget):
@@ -281,7 +389,6 @@ class Manifest:
Cargo subprojects can contain what Meson wants to treat as multiple,
interdependent, subprojects.
- :param subdir: the subdirectory that this cargo project is in
:param path: the path within the cargo subproject.
"""
@@ -289,6 +396,7 @@ class Manifest:
dependencies: T.Dict[str, Dependency]
dev_dependencies: T.Dict[str, Dependency]
build_dependencies: T.Dict[str, Dependency]
+ system_dependencies: T.Dict[str, SystemDependency] = dataclasses.field(init=False)
lib: Library
bin: T.List[Binary]
test: T.List[Test]
@@ -296,83 +404,31 @@ class Manifest:
example: T.List[Example]
features: T.Dict[str, T.List[str]]
target: T.Dict[str, T.Dict[str, Dependency]]
- subdir: str
path: str = ''
def __post_init__(self) -> None:
self.features.setdefault('default', [])
+ self.system_dependencies = {k: SystemDependency.from_raw(k, v) for k, v in self.package.metadata.get('system-deps', {}).items()}
def _convert_manifest(raw_manifest: manifest.Manifest, subdir: str, path: str = '') -> Manifest:
- # This cast is a bit of a hack to deal with proc-macro
- lib = _fixup_raw_mappings(raw_manifest.get('lib', {}))
-
- # We need to set the name field if it's not set manually,
- # including if other fields are set in the lib section
- lib.setdefault('name', raw_manifest['package']['name'])
-
- pkg = T.cast('manifest.FixedPackage',
- {fixup_meson_varname(k): v for k, v in raw_manifest['package'].items()})
-
return Manifest(
- Package(**pkg),
+ Package.from_raw(raw_manifest['package']),
{k: Dependency.from_raw(k, v) for k, v in raw_manifest.get('dependencies', {}).items()},
{k: Dependency.from_raw(k, v) for k, v in raw_manifest.get('dev-dependencies', {}).items()},
{k: Dependency.from_raw(k, v) for k, v in raw_manifest.get('build-dependencies', {}).items()},
- Library(**lib),
- [Binary(**_fixup_raw_mappings(b)) for b in raw_manifest.get('bin', {})],
- [Test(**_fixup_raw_mappings(b)) for b in raw_manifest.get('test', {})],
- [Benchmark(**_fixup_raw_mappings(b)) for b in raw_manifest.get('bench', {})],
- [Example(**_fixup_raw_mappings(b)) for b in raw_manifest.get('example', {})],
+ Library.from_raw(raw_manifest.get('lib', {}), raw_manifest['package']['name']),
+ [Binary.from_raw(b) for b in raw_manifest.get('bin', {})],
+ [Test.from_raw(b) for b in raw_manifest.get('test', {})],
+ [Benchmark.from_raw(b) for b in raw_manifest.get('bench', {})],
+ [Example.from_raw(b) for b in raw_manifest.get('example', {})],
raw_manifest.get('features', {}),
{k: {k2: Dependency.from_raw(k2, v2) for k2, v2 in v.get('dependencies', {}).items()}
for k, v in raw_manifest.get('target', {}).items()},
- subdir,
path,
)
-def _load_manifests(subdir: str) -> T.Dict[str, Manifest]:
- filename = os.path.join(subdir, 'Cargo.toml')
- raw = load_toml(filename)
-
- manifests: T.Dict[str, Manifest] = {}
-
- raw_manifest: T.Union[manifest.Manifest, manifest.VirtualManifest]
- if 'package' in raw:
- raw_manifest = T.cast('manifest.Manifest', raw)
- manifest_ = _convert_manifest(raw_manifest, subdir)
- manifests[manifest_.package.name] = manifest_
- else:
- raw_manifest = T.cast('manifest.VirtualManifest', raw)
-
- if 'workspace' in raw_manifest:
- # XXX: need to verify that python glob and cargo globbing are the
- # same and probably write a glob implementation. Blarg
-
- # We need to chdir here to make the glob work correctly
- pwd = os.getcwd()
- os.chdir(subdir)
- members: T.Iterable[str]
- try:
- members = itertools.chain.from_iterable(
- glob.glob(m) for m in raw_manifest['workspace']['members'])
- finally:
- os.chdir(pwd)
- if 'exclude' in raw_manifest['workspace']:
- members = (x for x in members if x not in raw_manifest['workspace']['exclude'])
-
- for m in members:
- filename = os.path.join(subdir, m, 'Cargo.toml')
- raw = load_toml(filename)
-
- raw_manifest = T.cast('manifest.Manifest', raw)
- man = _convert_manifest(raw_manifest, subdir, m)
- manifests[man.package.name] = man
-
- return manifests
-
-
def _version_to_api(version: str) -> str:
# x.y.z -> x
# 0.x.y -> 0.x
@@ -394,18 +450,6 @@ def _dependency_varname(package_name: str) -> str:
return f'{fixup_meson_varname(package_name)}_dep'
-_OPTION_NAME_PREFIX = 'feature-'
-
-
-def _option_name(feature: str) -> str:
- # Add a prefix to avoid collision with Meson reserved options (e.g. "debug")
- return _OPTION_NAME_PREFIX + feature
-
-
-def _options_varname(depname: str) -> str:
- return f'{fixup_meson_varname(depname)}_options'
-
-
def _extra_args_varname() -> str:
return 'extra_args'
@@ -414,128 +458,222 @@ def _extra_deps_varname() -> str:
return 'extra_deps'
-def _create_project(cargo: Manifest, build: builder.Builder) -> T.List[mparser.BaseNode]:
- """Create a function call
-
- :param cargo: The Manifest to generate from
- :param build: The AST builder
- :return: a list nodes
- """
- args: T.List[mparser.BaseNode] = []
- args.extend([
- build.string(cargo.package.name),
- build.string('rust'),
- ])
- kwargs: T.Dict[str, mparser.BaseNode] = {
- 'version': build.string(cargo.package.version),
- # Always assume that the generated meson is using the latest features
- # This will warn when when we generate deprecated code, which is helpful
- # for the upkeep of the module
- 'meson_version': build.string(f'>= {coredata.stable_version}'),
- 'default_options': build.array([build.string(f'rust_std={cargo.package.edition}')]),
- }
- if cargo.package.license:
- kwargs['license'] = build.string(cargo.package.license)
- elif cargo.package.license_file:
- kwargs['license_files'] = build.string(cargo.package.license_file)
-
- return [build.function('project', args, kwargs)]
-
-
-def _process_feature(cargo: Manifest, feature: str) -> T.Tuple[T.Set[str], T.Dict[str, T.Set[str]], T.Set[str]]:
- # Set of features that must also be enabled if this feature is enabled.
- features: T.Set[str] = set()
- # Map dependency name to a set of features that must also be enabled on that
- # dependency if this feature is enabled.
- dep_features: T.Dict[str, T.Set[str]] = collections.defaultdict(set)
- # Set of dependencies that are required if this feature is enabled.
- required_deps: T.Set[str] = set()
- # Set of features that must be processed recursively.
- to_process: T.Set[str] = {feature}
- while to_process:
- f = to_process.pop()
- if '/' in f:
- dep, dep_f = f.split('/', 1)
- if dep[-1] == '?':
- dep = dep[:-1]
- else:
- required_deps.add(dep)
- dep_features[dep].add(dep_f)
- elif f.startswith('dep:'):
- required_deps.add(f[4:])
- elif f not in features:
- features.add(f)
- to_process.update(cargo.features.get(f, []))
- # A feature can also be a dependency
- if f in cargo.dependencies:
- required_deps.add(f)
- return features, dep_features, required_deps
-
-
-def _create_features(cargo: Manifest, build: builder.Builder) -> T.List[mparser.BaseNode]:
- # https://doc.rust-lang.org/cargo/reference/features.html#the-features-section
-
- # Declare a dict that map enabled features to true. One for current project
- # and one per dependency.
- ast: T.List[mparser.BaseNode] = []
- ast.append(build.assign(build.dict({}), 'features'))
- for depname in cargo.dependencies:
- ast.append(build.assign(build.dict({}), _options_varname(depname)))
-
- # Declare a dict that map required dependencies to true
- ast.append(build.assign(build.dict({}), 'required_deps'))
-
- for feature in cargo.features:
- # if get_option(feature)
- # required_deps += {'dep': true, ...}
- # features += {'foo': true, ...}
- # xxx_options += {'feature-foo': true, ...}
- # ...
- # endif
- features, dep_features, required_deps = _process_feature(cargo, feature)
- lines: T.List[mparser.BaseNode] = [
- build.plusassign(
- build.dict({build.string(d): build.bool(True) for d in required_deps}),
- 'required_deps'),
- build.plusassign(
- build.dict({build.string(f): build.bool(True) for f in features}),
- 'features'),
+class PackageState:
+ def __init__(self, manifest: Manifest, downloaded: bool) -> None:
+ self.manifest = manifest
+ self.downloaded = downloaded
+ self.features: T.Set[str] = set()
+ self.required_deps: T.Set[str] = set()
+ self.optional_deps_features: T.Dict[str, T.Set[str]] = collections.defaultdict(set)
+
+
+@dataclasses.dataclass(frozen=True)
+class PackageKey:
+ package_name: str
+ api: str
+
+
+class Interpreter:
+ def __init__(self, env: Environment) -> None:
+ self.environment = env
+ # Map Cargo.toml's subdir to loaded manifest.
+ self.manifests: T.Dict[str, Manifest] = {}
+ # Map of cargo package (name + api) to its state
+ self.packages: T.Dict[PackageKey, PackageState] = {}
+
+ def interpret(self, subdir: str) -> mparser.CodeBlockNode:
+ manifest = self._load_manifest(subdir)
+ pkg, cached = self._fetch_package(manifest.package.name, manifest.package.api)
+ if not cached:
+ # This is an entry point, always enable the 'default' feature.
+ # FIXME: We should have a Meson option similar to `cargo build --no-default-features`
+ self._enable_feature(pkg, 'default')
+
+ # Build an AST for this package
+ filename = os.path.join(self.environment.source_dir, subdir, 'Cargo.toml')
+ build = builder.Builder(filename)
+ ast = self._create_project(pkg, build)
+ ast += [
+ build.assign(build.function('import', [build.string('rust')]), 'rust'),
+ build.function('message', [
+ build.string('Enabled features:'),
+ build.array([build.string(f) for f in pkg.features]),
+ ]),
]
- for depname, enabled_features in dep_features.items():
- lines.append(build.plusassign(
- build.dict({build.string(_option_name(f)): build.bool(True) for f in enabled_features}),
- _options_varname(depname)))
-
- ast.append(build.if_(build.function('get_option', [build.string(_option_name(feature))]), build.block(lines)))
-
- ast.append(build.function('message', [
- build.string('Enabled features:'),
- build.method('keys', build.identifier('features'))],
- ))
-
- return ast
-
-
-def _create_dependencies(cargo: Manifest, build: builder.Builder) -> T.List[mparser.BaseNode]:
- ast: T.List[mparser.BaseNode] = []
- for name, dep in cargo.dependencies.items():
- # xxx_options += {'feature-default': true, ...}
- extra_options: T.Dict[mparser.BaseNode, mparser.BaseNode] = {
- build.string(_option_name('default')): build.bool(dep.default_features),
- }
+ ast += self._create_dependencies(pkg, build)
+ ast += self._create_meson_subdir(build)
+
+ # Libs are always auto-discovered and there's no other way to handle them,
+        # which is unfortunate for reproducibility
+ if os.path.exists(os.path.join(self.environment.source_dir, subdir, pkg.manifest.path, pkg.manifest.lib.path)):
+ for crate_type in pkg.manifest.lib.crate_type:
+ ast.extend(self._create_lib(pkg, build, crate_type))
+
+ return build.block(ast)
+
+ def _fetch_package(self, package_name: str, api: str) -> T.Tuple[PackageState, bool]:
+ key = PackageKey(package_name, api)
+ pkg = self.packages.get(key)
+ if pkg:
+ return pkg, True
+ meson_depname = _dependency_name(package_name, api)
+ subdir, _ = self.environment.wrap_resolver.resolve(meson_depname)
+ subprojects_dir = os.path.join(subdir, 'subprojects')
+ self.environment.wrap_resolver.load_and_merge(subprojects_dir, T.cast('SubProject', meson_depname))
+ manifest = self._load_manifest(subdir)
+ downloaded = \
+ meson_depname in self.environment.wrap_resolver.wraps and \
+ self.environment.wrap_resolver.wraps[meson_depname].type is not None
+ pkg = PackageState(manifest, downloaded)
+ self.packages[key] = pkg
+ # Fetch required dependencies recursively.
+ for depname, dep in manifest.dependencies.items():
+ if not dep.optional:
+ self._add_dependency(pkg, depname)
+ return pkg, False
+
+ def _dep_package(self, dep: Dependency) -> PackageState:
+ return self.packages[PackageKey(dep.package, dep.api)]
+
+ def _load_manifest(self, subdir: str) -> Manifest:
+ manifest_ = self.manifests.get(subdir)
+ if not manifest_:
+ filename = os.path.join(self.environment.source_dir, subdir, 'Cargo.toml')
+ raw = load_toml(filename)
+ if 'package' in raw:
+ raw_manifest = T.cast('manifest.Manifest', raw)
+ manifest_ = _convert_manifest(raw_manifest, subdir)
+ self.manifests[subdir] = manifest_
+ else:
+ raise MesonException(f'{subdir}/Cargo.toml does not have [package] section')
+ return manifest_
+
+ def _add_dependency(self, pkg: PackageState, depname: str) -> None:
+ if depname in pkg.required_deps:
+ return
+ dep = pkg.manifest.dependencies.get(depname)
+ if not dep:
+ if depname in itertools.chain(pkg.manifest.dev_dependencies, pkg.manifest.build_dependencies):
+ # FIXME: Not supported yet
+ return
+ raise MesonException(f'Dependency {depname} not defined in {pkg.manifest.package.name} manifest')
+ pkg.required_deps.add(depname)
+ dep_pkg, _ = self._fetch_package(dep.package, dep.api)
+ if dep.default_features:
+ self._enable_feature(dep_pkg, 'default')
for f in dep.features:
- extra_options[build.string(_option_name(f))] = build.bool(True)
- ast.append(build.plusassign(build.dict(extra_options), _options_varname(name)))
-
+ self._enable_feature(dep_pkg, f)
+ for f in pkg.optional_deps_features[depname]:
+ self._enable_feature(dep_pkg, f)
+
+ def _enable_feature(self, pkg: PackageState, feature: str) -> None:
+ if feature in pkg.features:
+ return
+ pkg.features.add(feature)
+ # A feature can also be a dependency.
+ if feature in pkg.manifest.dependencies:
+ self._add_dependency(pkg, feature)
+ # Recurse on extra features and dependencies this feature pulls.
+ # https://doc.rust-lang.org/cargo/reference/features.html#the-features-section
+ for f in pkg.manifest.features.get(feature, []):
+ if '/' in f:
+ depname, dep_f = f.split('/', 1)
+ if depname[-1] == '?':
+ depname = depname[:-1]
+ if depname in pkg.required_deps:
+ dep = pkg.manifest.dependencies[depname]
+ dep_pkg = self._dep_package(dep)
+ self._enable_feature(dep_pkg, dep_f)
+ else:
+ # This feature will be enabled only if that dependency
+ # is later added.
+ pkg.optional_deps_features[depname].add(dep_f)
+ else:
+ self._add_dependency(pkg, depname)
+ dep = pkg.manifest.dependencies.get(depname)
+ if dep:
+ dep_pkg = self._dep_package(dep)
+ self._enable_feature(dep_pkg, dep_f)
+ elif f.startswith('dep:'):
+ self._add_dependency(pkg, f[4:])
+ else:
+ self._enable_feature(pkg, f)
+
+ def _create_project(self, pkg: PackageState, build: builder.Builder) -> T.List[mparser.BaseNode]:
+ """Create the project() function call
+
+ :param pkg: The package to generate from
+ :param build: The AST builder
+        :return: a list of nodes
+ """
+ default_options: T.List[mparser.BaseNode] = []
+ default_options.append(build.string(f'rust_std={pkg.manifest.package.edition}'))
+ default_options.append(build.string(f'build.rust_std={pkg.manifest.package.edition}'))
+ if pkg.downloaded:
+ default_options.append(build.string('warning_level=0'))
+
+ args: T.List[mparser.BaseNode] = []
+ args.extend([
+ build.string(pkg.manifest.package.name),
+ build.string('rust'),
+ ])
+ kwargs: T.Dict[str, mparser.BaseNode] = {
+ 'version': build.string(pkg.manifest.package.version),
+ # Always assume that the generated meson is using the latest features
+            # This will warn when we generate deprecated code, which is helpful
+ # for the upkeep of the module
+ 'meson_version': build.string(f'>= {coredata.stable_version}'),
+ 'default_options': build.array(default_options),
+ }
+ if pkg.manifest.package.license:
+ kwargs['license'] = build.string(pkg.manifest.package.license)
+ elif pkg.manifest.package.license_file:
+ kwargs['license_files'] = build.string(pkg.manifest.package.license_file)
+
+ return [build.function('project', args, kwargs)]
+
+ def _create_dependencies(self, pkg: PackageState, build: builder.Builder) -> T.List[mparser.BaseNode]:
+ ast: T.List[mparser.BaseNode] = []
+ for depname in pkg.required_deps:
+ dep = pkg.manifest.dependencies[depname]
+ ast += self._create_dependency(dep, build)
+ ast.append(build.assign(build.array([]), 'system_deps_args'))
+ for name, sys_dep in pkg.manifest.system_dependencies.items():
+ if sys_dep.enabled(pkg.features):
+ ast += self._create_system_dependency(name, sys_dep, build)
+ return ast
+
+ def _create_system_dependency(self, name: str, dep: SystemDependency, build: builder.Builder) -> T.List[mparser.BaseNode]:
kw = {
'version': build.array([build.string(s) for s in dep.version]),
- 'default_options': build.identifier(_options_varname(name)),
+ 'required': build.bool(not dep.optional),
}
- if dep.optional:
- kw['required'] = build.method('get', build.identifier('required_deps'), [
- build.string(name), build.bool(False)
- ])
+ varname = f'{fixup_meson_varname(name)}_system_dep'
+ cfg = f'system_deps_have_{fixup_meson_varname(name)}'
+ return [
+ build.assign(
+ build.function(
+ 'dependency',
+ [build.string(dep.name)],
+ kw,
+ ),
+ varname,
+ ),
+ build.if_(
+ build.method('found', build.identifier(varname)), build.block([
+ build.plusassign(
+ build.array([build.string('--cfg'), build.string(cfg)]),
+ 'system_deps_args'
+ ),
+ ])
+ ),
+ ]
+ def _create_dependency(self, dep: Dependency, build: builder.Builder) -> T.List[mparser.BaseNode]:
+ pkg = self._dep_package(dep)
+ kw = {
+ 'version': build.array([build.string(s) for s in dep.version]),
+ }
# Lookup for this dependency with the features we want in default_options kwarg.
#
# However, this subproject could have been previously configured with a
@@ -547,8 +685,8 @@ def _create_dependencies(cargo: Manifest, build: builder.Builder) -> T.List[mpar
# otherwise abort with an error message. The user has to set the corresponding
# option manually with -Dxxx-rs:feature-yyy=true, or the main project can do
# that in its project(..., default_options: ['xxx-rs:feature-yyy=true']).
- ast.extend([
- # xxx_dep = dependency('xxx', version : ..., default_options : xxx_options)
+ return [
+ # xxx_dep = dependency('xxx', version : ...)
build.assign(
build.function(
'dependency',
@@ -557,188 +695,138 @@ def _create_dependencies(cargo: Manifest, build: builder.Builder) -> T.List[mpar
),
_dependency_varname(dep.package),
),
- # if xxx_dep.found()
- build.if_(build.method('found', build.identifier(_dependency_varname(dep.package))), build.block([
- # actual_features = xxx_dep.get_variable('features', default_value : '').split(',')
- build.assign(
+ # actual_features = xxx_dep.get_variable('features', default_value : '').split(',')
+ build.assign(
+ build.method(
+ 'split',
build.method(
- 'split',
- build.method(
- 'get_variable',
- build.identifier(_dependency_varname(dep.package)),
- [build.string('features')],
- {'default_value': build.string('')}
- ),
- [build.string(',')],
+ 'get_variable',
+ build.identifier(_dependency_varname(dep.package)),
+ [build.string('features')],
+ {'default_value': build.string('')}
),
- 'actual_features'
+ [build.string(',')],
),
- # needed_features = []
- # foreach f, _ : xxx_options
- # needed_features += f.substring(8)
- # endforeach
- build.assign(build.array([]), 'needed_features'),
- build.foreach(['f', 'enabled'], build.identifier(_options_varname(name)), build.block([
- build.if_(build.identifier('enabled'), build.block([
- build.plusassign(
- build.method('substring', build.identifier('f'), [build.number(len(_OPTION_NAME_PREFIX))]),
- 'needed_features'),
- ])),
- ])),
- # foreach f : needed_features
- # if f not in actual_features
- # error()
- # endif
- # endforeach
- build.foreach(['f'], build.identifier('needed_features'), build.block([
- build.if_(build.not_in(build.identifier('f'), build.identifier('actual_features')), build.block([
- build.function('error', [
- build.string('Dependency'),
- build.string(_dependency_name(dep.package, dep.api)),
- build.string('previously configured with features'),
- build.identifier('actual_features'),
- build.string('but need'),
- build.identifier('needed_features'),
- ])
- ]))
- ])),
+ 'actual_features'
+ ),
+ # needed_features = [f1, f2, ...]
+ # foreach f : needed_features
+ # if f not in actual_features
+ # error()
+ # endif
+ # endforeach
+ build.assign(build.array([build.string(f) for f in pkg.features]), 'needed_features'),
+ build.foreach(['f'], build.identifier('needed_features'), build.block([
+ build.if_(build.not_in(build.identifier('f'), build.identifier('actual_features')), build.block([
+ build.function('error', [
+ build.string('Dependency'),
+ build.string(_dependency_name(dep.package, dep.api)),
+ build.string('previously configured with features'),
+ build.identifier('actual_features'),
+ build.string('but need'),
+ build.identifier('needed_features'),
+ ])
+ ]))
])),
- ])
- return ast
-
-
-def _create_meson_subdir(cargo: Manifest, build: builder.Builder) -> T.List[mparser.BaseNode]:
- # Allow Cargo subprojects to add extra Rust args in meson/meson.build file.
- # This is used to replace build.rs logic.
-
- # extra_args = []
- # extra_deps = []
- # fs = import('fs')
- # if fs.is_dir('meson')
- # subdir('meson')
- # endif
- return [
- build.assign(build.array([]), _extra_args_varname()),
- build.assign(build.array([]), _extra_deps_varname()),
- build.assign(build.function('import', [build.string('fs')]), 'fs'),
- build.if_(build.method('is_dir', build.identifier('fs'), [build.string('meson')]),
- build.block([build.function('subdir', [build.string('meson')])]))
- ]
-
-
-def _create_lib(cargo: Manifest, build: builder.Builder, crate_type: manifest.CRATE_TYPE) -> T.List[mparser.BaseNode]:
- dependencies: T.List[mparser.BaseNode] = []
- dependency_map: T.Dict[mparser.BaseNode, mparser.BaseNode] = {}
- for name, dep in cargo.dependencies.items():
- dependencies.append(build.identifier(_dependency_varname(dep.package)))
- if name != dep.package:
- dependency_map[build.string(fixup_meson_varname(dep.package))] = build.string(name)
-
- rust_args: T.List[mparser.BaseNode] = [
- build.identifier('features_args'),
- build.identifier(_extra_args_varname())
- ]
-
- dependencies.append(build.identifier(_extra_deps_varname()))
-
- posargs: T.List[mparser.BaseNode] = [
- build.string(fixup_meson_varname(cargo.package.name)),
- build.string(cargo.lib.path),
- ]
-
- kwargs: T.Dict[str, mparser.BaseNode] = {
- 'dependencies': build.array(dependencies),
- 'rust_dependency_map': build.dict(dependency_map),
- 'rust_args': build.array(rust_args),
- }
-
- lib: mparser.BaseNode
- if cargo.lib.proc_macro or crate_type == 'proc-macro':
- lib = build.method('proc_macro', build.identifier('rust'), posargs, kwargs)
- else:
- if crate_type in {'lib', 'rlib', 'staticlib'}:
- target_type = 'static_library'
- elif crate_type in {'dylib', 'cdylib'}:
- target_type = 'shared_library'
+ ]
+
+ def _create_meson_subdir(self, build: builder.Builder) -> T.List[mparser.BaseNode]:
+ # Allow Cargo subprojects to add extra Rust args in meson/meson.build file.
+ # This is used to replace build.rs logic.
+
+ # extra_args = []
+ # extra_deps = []
+ # fs = import('fs')
+ # if fs.is_dir('meson')
+ # subdir('meson')
+ # endif
+ return [
+ build.assign(build.array([]), _extra_args_varname()),
+ build.assign(build.array([]), _extra_deps_varname()),
+ build.assign(build.function('import', [build.string('fs')]), 'fs'),
+ build.if_(build.method('is_dir', build.identifier('fs'), [build.string('meson')]),
+ build.block([build.function('subdir', [build.string('meson')])]))
+ ]
+
+ def _create_lib(self, pkg: PackageState, build: builder.Builder, crate_type: manifest.CRATE_TYPE) -> T.List[mparser.BaseNode]:
+ dependencies: T.List[mparser.BaseNode] = []
+ dependency_map: T.Dict[mparser.BaseNode, mparser.BaseNode] = {}
+ for name in pkg.required_deps:
+ dep = pkg.manifest.dependencies[name]
+ dependencies.append(build.identifier(_dependency_varname(dep.package)))
+ if name != dep.package:
+ dep_pkg = self._dep_package(dep)
+ dep_lib_name = dep_pkg.manifest.lib.name
+ dependency_map[build.string(fixup_meson_varname(dep_lib_name))] = build.string(name)
+ for name, sys_dep in pkg.manifest.system_dependencies.items():
+ if sys_dep.enabled(pkg.features):
+ dependencies.append(build.identifier(f'{fixup_meson_varname(name)}_system_dep'))
+
+ rust_args: T.List[mparser.BaseNode] = [
+ build.identifier('features_args'),
+ build.identifier(_extra_args_varname()),
+ build.identifier('system_deps_args'),
+ ]
+
+ dependencies.append(build.identifier(_extra_deps_varname()))
+
+ posargs: T.List[mparser.BaseNode] = [
+ build.string(fixup_meson_varname(pkg.manifest.lib.name)),
+ build.string(pkg.manifest.lib.path),
+ ]
+
+ kwargs: T.Dict[str, mparser.BaseNode] = {
+ 'dependencies': build.array(dependencies),
+ 'rust_dependency_map': build.dict(dependency_map),
+ 'rust_args': build.array(rust_args),
+ }
+
+ lib: mparser.BaseNode
+ if pkg.manifest.lib.proc_macro or crate_type == 'proc-macro':
+ lib = build.method('proc_macro', build.identifier('rust'), posargs, kwargs)
else:
- raise MesonException(f'Unsupported crate type {crate_type}')
- if crate_type in {'staticlib', 'cdylib'}:
- kwargs['rust_abi'] = build.string('c')
- lib = build.function(target_type, posargs, kwargs)
-
- # features_args = []
- # foreach f, _ : features
- # features_args += ['--cfg', 'feature="' + f + '"']
- # endforeach
- # lib = xxx_library()
- # dep = declare_dependency()
- # meson.override_dependency()
- return [
- build.assign(build.array([]), 'features_args'),
- build.foreach(['f', '_'], build.identifier('features'), build.block([
- build.plusassign(
- build.array([
- build.string('--cfg'),
- build.plus(build.string('feature="'), build.plus(build.identifier('f'), build.string('"'))),
- ]),
- 'features_args')
- ])
- ),
- build.assign(lib, 'lib'),
- build.assign(
- build.function(
- 'declare_dependency',
- kw={
- 'link_with': build.identifier('lib'),
- 'variables': build.dict({
- build.string('features'): build.method('join', build.string(','), [build.method('keys', build.identifier('features'))]),
- })
- },
+ if crate_type in {'lib', 'rlib', 'staticlib'}:
+ target_type = 'static_library'
+ elif crate_type in {'dylib', 'cdylib'}:
+ target_type = 'shared_library'
+ else:
+ raise MesonException(f'Unsupported crate type {crate_type}')
+ if crate_type in {'staticlib', 'cdylib'}:
+ kwargs['rust_abi'] = build.string('c')
+ lib = build.function(target_type, posargs, kwargs)
+
+ features_args: T.List[mparser.BaseNode] = []
+ for f in pkg.features:
+ features_args += [build.string('--cfg'), build.string(f'feature="{f}"')]
+
+ # features_args = ['--cfg', 'feature="f1"', ...]
+ # lib = xxx_library()
+ # dep = declare_dependency()
+ # meson.override_dependency()
+ return [
+ build.assign(build.array(features_args), 'features_args'),
+ build.assign(lib, 'lib'),
+ build.assign(
+ build.function(
+ 'declare_dependency',
+ kw={
+ 'link_with': build.identifier('lib'),
+ 'variables': build.dict({
+ build.string('features'): build.string(','.join(pkg.features)),
+ })
+ },
+ ),
+ 'dep'
+ ),
+ build.method(
+ 'override_dependency',
+ build.identifier('meson'),
+ [
+ build.string(_dependency_name(pkg.manifest.package.name, pkg.manifest.package.api)),
+ build.identifier('dep'),
+ ],
),
- 'dep'
- ),
- build.method(
- 'override_dependency',
- build.identifier('meson'),
- [
- build.string(_dependency_name(cargo.package.name, _version_to_api(cargo.package.version))),
- build.identifier('dep'),
- ],
- ),
- ]
-
-
-def interpret(subp_name: str, subdir: str, env: Environment) -> T.Tuple[mparser.CodeBlockNode, dict[OptionKey, options.UserOption[Any]]]:
- # subp_name should be in the form "foo-0.1-rs"
- package_name = subp_name.rsplit('-', 2)[0]
- manifests = _load_manifests(os.path.join(env.source_dir, subdir))
- cargo = manifests.get(package_name)
- if not cargo:
- raise MesonException(f'Cargo package {package_name!r} not found in {subdir}')
-
- filename = os.path.join(cargo.subdir, cargo.path, 'Cargo.toml')
- build = builder.Builder(filename)
-
- # Generate project options
- project_options: T.Dict[OptionKey, options.UserOption] = {}
- for feature in cargo.features:
- key = OptionKey(_option_name(feature), subproject=subp_name)
- enabled = feature == 'default'
- project_options[key] = options.UserBooleanOption(key.name, f'Cargo {feature} feature', enabled)
-
- ast = _create_project(cargo, build)
- ast += [build.assign(build.function('import', [build.string('rust')]), 'rust')]
- ast += _create_features(cargo, build)
- ast += _create_dependencies(cargo, build)
- ast += _create_meson_subdir(cargo, build)
-
- # Libs are always auto-discovered and there's no other way to handle them,
- # which is unfortunate for reproducibility
- if os.path.exists(os.path.join(env.source_dir, cargo.subdir, cargo.path, cargo.lib.path)):
- for crate_type in cargo.lib.crate_type:
- ast.extend(_create_lib(cargo, build, crate_type))
-
- return build.block(ast), project_options
+ ]
def load_wraps(source_dir: str, subproject_dir: str) -> T.List[PackageDefinition]:
diff --git a/mesonbuild/cargo/manifest.py b/mesonbuild/cargo/manifest.py
index 95b0d4bb8cb2..d95df7f4fe3c 100644
--- a/mesonbuild/cargo/manifest.py
+++ b/mesonbuild/cargo/manifest.py
@@ -195,7 +195,7 @@ class Workspace(TypedDict):
Manifest = TypedDict(
'Manifest',
{
- 'package': Package,
+ 'package': Required[Package],
'badges': T.Dict[str, Badge],
'dependencies': T.Dict[str, DependencyV],
'dev-dependencies': T.Dict[str, DependencyV],
diff --git a/mesonbuild/cmake/common.py b/mesonbuild/cmake/common.py
index d9ff559971f7..b7ab1ba2288f 100644
--- a/mesonbuild/cmake/common.py
+++ b/mesonbuild/cmake/common.py
@@ -19,6 +19,7 @@
'cuda': 'CUDA',
'objc': 'OBJC',
'objcpp': 'OBJCXX',
+ 'nasm': 'ASM_NASM',
'cs': 'CSharp',
'java': 'Java',
'fortran': 'Fortran',
@@ -52,14 +53,14 @@
]
def cmake_is_debug(env: 'Environment') -> bool:
- if OptionKey('b_vscrt') in env.coredata.optstore:
- is_debug = env.coredata.get_option(OptionKey('buildtype')) == 'debug'
- if env.coredata.optstore.get_value('b_vscrt') in {'mdd', 'mtd'}:
+ if 'b_vscrt' in env.coredata.optstore:
+ is_debug = env.coredata.optstore.get_value_for('buildtype') == 'debug'
+ if env.coredata.optstore.get_value_for('b_vscrt') in {'mdd', 'mtd'}:
is_debug = True
return is_debug
else:
# Don't directly assign to is_debug to make mypy happy
- debug_opt = env.coredata.get_option(OptionKey('debug'))
+ debug_opt = env.coredata.optstore.get_value_for('debug')
assert isinstance(debug_opt, bool)
return debug_opt
@@ -105,7 +106,7 @@ def _flags_to_list(raw: str) -> T.List[str]:
return res
def cmake_get_generator_args(env: 'Environment') -> T.List[str]:
- backend_name = env.coredata.get_option(OptionKey('backend'))
+ backend_name = env.coredata.optstore.get_value_for(OptionKey('backend'))
assert isinstance(backend_name, str)
assert backend_name in backend_generator_map
return ['-G', backend_generator_map[backend_name]]
diff --git a/mesonbuild/cmake/executor.py b/mesonbuild/cmake/executor.py
index cbe75f36c688..0c704f94ab0f 100644
--- a/mesonbuild/cmake/executor.py
+++ b/mesonbuild/cmake/executor.py
@@ -11,7 +11,6 @@
from .. import mlog
from ..mesonlib import PerMachine, Popen_safe, version_compare, is_windows
-from ..options import OptionKey
from ..programs import find_external_program, NonExistingExternalProgram
if T.TYPE_CHECKING:
@@ -52,7 +51,9 @@ def __init__(self, environment: 'Environment', version: str, for_machine: Machin
self.cmakebin = None
return
- self.prefix_paths = self.environment.coredata.optstore.get_value(OptionKey('cmake_prefix_path', machine=self.for_machine))
+ prefpath = self.environment.coredata.optstore.get_value_for('cmake_prefix_path')
+ assert isinstance(prefpath, list)
+ self.prefix_paths = prefpath
if self.prefix_paths:
self.extra_cmake_args += ['-DCMAKE_PREFIX_PATH={}'.format(';'.join(self.prefix_paths))]
diff --git a/mesonbuild/cmake/interpreter.py b/mesonbuild/cmake/interpreter.py
index fafee86abd4f..609038ddf930 100644
--- a/mesonbuild/cmake/interpreter.py
+++ b/mesonbuild/cmake/interpreter.py
@@ -19,6 +19,7 @@
from .traceparser import CMakeTraceParser
from .tracetargets import resolve_cmake_trace_targets
from .. import mlog, mesonlib
+from .. import options
from ..mesonlib import MachineChoice, OrderedSet, path_is_in_root, relative_to_if_possible
from ..options import OptionKey
from ..mesondata import DataFile
@@ -222,6 +223,7 @@ def __init__(self, target: CMakeTarget, env: 'Environment', for_machine: Machine
self.install = target.install
self.install_dir: T.Optional[Path] = None
self.link_libraries = target.link_libraries
+ self.link_targets: T.List[str] = []
self.link_flags = target.link_flags + target.link_lang_flags
self.public_link_flags: T.List[str] = []
self.depends_raw: T.List[str] = []
@@ -294,6 +296,17 @@ def __init__(self, target: CMakeTarget, env: 'Environment', for_machine: Machine
else:
self.sources += i.sources
+ self.clib_compiler = None
+ compilers = self.env.coredata.compilers[self.for_machine]
+
+ for lang in ['objcpp', 'cpp', 'objc', 'fortran', 'c']:
+ if lang in self.languages:
+ try:
+ self.clib_compiler = compilers[lang]
+ break
+ except KeyError:
+ pass
+
def __repr__(self) -> str:
return f'<{self.__class__.__name__}: {self.name}>'
@@ -345,12 +358,14 @@ def postprocess(self, output_target_map: OutputTargetMap, root_src_dir: Path, su
if tgt:
self.depends_raw = trace.targets[self.cmake_name].depends
- rtgt = resolve_cmake_trace_targets(self.cmake_name, trace, self.env)
+ rtgt = resolve_cmake_trace_targets(self.cmake_name, trace, self.env, clib_compiler=self.clib_compiler)
self.includes += [Path(x) for x in rtgt.include_directories]
self.link_flags += rtgt.link_flags
self.public_link_flags += rtgt.public_link_flags
self.public_compile_opts += rtgt.public_compile_opts
self.link_libraries += rtgt.libraries
+ self.depends_raw += rtgt.target_dependencies
+ self.link_targets += rtgt.target_dependencies
elif self.type.upper() not in ['EXECUTABLE', 'OBJECT_LIBRARY']:
mlog.warning('CMake: Target', mlog.bold(self.cmake_name), 'not found in CMake trace. This can lead to build errors')
@@ -420,9 +435,8 @@ def rel_path(x: Path, is_header: bool, is_generated: bool) -> T.Optional[Path]:
def non_optional(inputs: T.Iterable[T.Optional[Path]]) -> T.List[Path]:
return [p for p in inputs if p is not None]
- build_dir_rel = self.build_dir.relative_to(Path(self.env.get_build_dir()) / subdir)
self.generated_raw = non_optional(rel_path(x, False, True) for x in self.generated_raw)
- self.includes = non_optional(itertools.chain((rel_path(x, True, False) for x in OrderedSet(self.includes)), [build_dir_rel]))
+ self.includes = non_optional(itertools.chain((rel_path(x, True, False) for x in OrderedSet(self.includes))))
self.sys_includes = non_optional(rel_path(x, True, False) for x in OrderedSet(self.sys_includes))
self.sources = non_optional(rel_path(x, False, False) for x in self.sources)
@@ -533,17 +547,12 @@ def _all_source_suffixes(self) -> 'ImmutableListProtocol[str]':
@lru_cache(maxsize=None)
def _all_lang_stds(self, lang: str) -> 'ImmutableListProtocol[str]':
try:
- res = self.env.coredata.optstore.get_value_object(OptionKey(f'{lang}_std', machine=MachineChoice.BUILD)).choices
+ opt = self.env.coredata.optstore.get_value_object(OptionKey(f'{lang}_std', machine=MachineChoice.BUILD))
+ assert isinstance(opt, (options.UserStdOption, options.UserComboOption)), 'for mypy'
+ return opt.choices or []
except KeyError:
return []
- # TODO: Get rid of this once we have proper typing for options
- assert isinstance(res, list)
- for i in res:
- assert isinstance(i, str)
-
- return res
-
def process_inter_target_dependencies(self) -> None:
# Move the dependencies from all TRANSFER_DEPENDENCIES_FROM to the target
to_process = list(self.depends)
@@ -832,7 +841,7 @@ def configure(self, extra_cmake_options: T.List[str]) -> CMakeExecutor:
cmake_args += extra_cmake_options
if not any(arg.startswith('-DCMAKE_BUILD_TYPE=') for arg in cmake_args):
# Our build type is favored over any CMAKE_BUILD_TYPE environment variable
- buildtype = T.cast('str', self.env.coredata.get_option(OptionKey('buildtype')))
+ buildtype = T.cast('str', self.env.coredata.optstore.get_value_for(OptionKey('buildtype')))
if buildtype in BUILDTYPE_MAP:
cmake_args += [f'-DCMAKE_BUILD_TYPE={BUILDTYPE_MAP[buildtype]}']
trace_args = self.trace.trace_args()
@@ -951,17 +960,27 @@ def analyse(self) -> None:
object_libs += [tgt]
self.languages += [x for x in tgt.languages if x not in self.languages]
- # Second pass: Detect object library dependencies
+ # Second pass: Populate link_with project internal targets
+ for tgt in self.targets:
+ for i in tgt.link_targets:
+ # Handle target-based link libraries
+ link_with = self.output_target_map.target(i)
+ if not link_with or isinstance(link_with, ConverterCustomTarget):
+ # Generated file etc.
+ continue
+ tgt.link_with.append(link_with)
+
+ # Third pass: Detect object library dependencies
for tgt in self.targets:
tgt.process_object_libs(object_libs, self._object_lib_workaround)
- # Third pass: Reassign dependencies to avoid some loops
+ # Fourth pass: Reassign dependencies to avoid some loops
for tgt in self.targets:
tgt.process_inter_target_dependencies()
for ctgt in self.custom_targets:
ctgt.process_inter_target_dependencies()
- # Fourth pass: Remove rassigned dependencies
+ # Fifth pass: Remove reassigned dependencies
for tgt in self.targets:
tgt.cleanup_dependencies()
diff --git a/mesonbuild/cmake/toolchain.py b/mesonbuild/cmake/toolchain.py
index 9eb961c52bc7..11a00be5d5f5 100644
--- a/mesonbuild/cmake/toolchain.py
+++ b/mesonbuild/cmake/toolchain.py
@@ -9,6 +9,7 @@
from .common import language_map, cmake_get_generator_args
from .. import mlog
+import os.path
import shutil
import typing as T
from enum import Enum
@@ -174,7 +175,12 @@ def make_abs(exe: str) -> str:
# Set the compiler variables
for lang, comp_obj in self.compilers.items():
- prefix = 'CMAKE_{}_'.format(language_map.get(lang, lang.upper()))
+ language = language_map.get(lang, None)
+
+ if not language:
+ continue # unsupported language
+
+ prefix = 'CMAKE_{}_'.format(language)
exe_list = comp_obj.get_exelist()
if not exe_list:
@@ -198,7 +204,7 @@ def is_cmdline_option(compiler: 'Compiler', arg: str) -> bool:
if compiler.get_argument_syntax() == 'msvc':
return arg.startswith('/')
else:
- if compiler.exelist[0] == 'zig' and arg in {'ar', 'cc', 'c++', 'dlltool', 'lib', 'ranlib', 'objcopy', 'rc'}:
+ if os.path.basename(compiler.get_exe()) == 'zig' and arg in {'ar', 'cc', 'c++', 'dlltool', 'lib', 'ranlib', 'objcopy', 'rc'}:
return True
return arg.startswith('-')
@@ -210,7 +216,7 @@ def update_cmake_compiler_state(self) -> None:
# Generate the CMakeLists.txt
mlog.debug('CMake Toolchain: Calling CMake once to generate the compiler state')
languages = list(self.compilers.keys())
- lang_ids = [language_map.get(x, x.upper()) for x in languages]
+ lang_ids = [language_map.get(x) for x in languages if x in language_map]
cmake_content = dedent(f'''
cmake_minimum_required(VERSION 3.10)
project(CompInfo {' '.join(lang_ids)})
diff --git a/mesonbuild/cmake/tracetargets.py b/mesonbuild/cmake/tracetargets.py
index 2cc0c1722c3a..2b2b93de7eb5 100644
--- a/mesonbuild/cmake/tracetargets.py
+++ b/mesonbuild/cmake/tracetargets.py
@@ -45,6 +45,7 @@ def __init__(self) -> None:
self.public_link_flags: T.List[str] = []
self.public_compile_opts: T.List[str] = []
self.libraries: T.List[str] = []
+ self.target_dependencies: T.List[str] = []
def resolve_cmake_trace_targets(target_name: str,
trace: 'CMakeTraceParser',
@@ -86,6 +87,7 @@ def resolve_cmake_trace_targets(target_name: str,
curr_path = Path(*path_to_framework)
framework_path = curr_path.parent
framework_name = curr_path.stem
+ res.public_compile_opts += [f"-F{framework_path}"]
res.libraries += [f'-F{framework_path}', '-framework', framework_name]
else:
res.libraries += [curr]
@@ -144,9 +146,13 @@ def resolve_cmake_trace_targets(target_name: str,
targets += [x for x in tgt.properties['IMPORTED_LOCATION'] if x]
if 'LINK_LIBRARIES' in tgt.properties:
- targets += [x for x in tgt.properties['LINK_LIBRARIES'] if x]
+ link_libraries = [x for x in tgt.properties['LINK_LIBRARIES'] if x]
+ targets += link_libraries
+ res.target_dependencies += link_libraries
if 'INTERFACE_LINK_LIBRARIES' in tgt.properties:
- targets += [x for x in tgt.properties['INTERFACE_LINK_LIBRARIES'] if x]
+ link_libraries = [x for x in tgt.properties['INTERFACE_LINK_LIBRARIES'] if x]
+ targets += link_libraries
+ res.target_dependencies += link_libraries
if f'IMPORTED_LINK_DEPENDENT_LIBRARIES_{cfg}' in tgt.properties:
targets += [x for x in tgt.properties[f'IMPORTED_LINK_DEPENDENT_LIBRARIES_{cfg}'] if x]
diff --git a/mesonbuild/compilers/__init__.py b/mesonbuild/compilers/__init__.py
index 078617867d60..ac5a7f73d7ea 100644
--- a/mesonbuild/compilers/__init__.py
+++ b/mesonbuild/compilers/__init__.py
@@ -8,7 +8,6 @@
'RunResult',
'all_languages',
- 'base_options',
'clib_langs',
'clink_langs',
'c_suffixes',
@@ -21,6 +20,7 @@
'is_llvm_ir',
'is_object',
'is_source',
+ 'is_java',
'is_known_suffix',
'lang_suffixes',
'LANGUAGES_USING_LDFLAGS',
@@ -50,7 +50,6 @@
CompileResult,
RunResult,
all_languages,
- base_options,
clib_langs,
clink_langs,
c_suffixes,
@@ -59,6 +58,7 @@
get_base_link_args,
is_header,
is_source,
+ is_java,
is_assembly,
is_llvm_ir,
is_object,
diff --git a/mesonbuild/compilers/asm.py b/mesonbuild/compilers/asm.py
index 8cd5e28dc47f..d358ca992a17 100644
--- a/mesonbuild/compilers/asm.py
+++ b/mesonbuild/compilers/asm.py
@@ -7,6 +7,7 @@
from ..options import OptionKey
from .compilers import Compiler
from .mixins.metrowerks import MetrowerksCompiler, mwasmarm_instruction_set_args, mwasmeppc_instruction_set_args
+from .mixins.ti import TICompiler
if T.TYPE_CHECKING:
from ..environment import Environment
@@ -259,6 +260,34 @@ def depfile_for_object(self, objfile: str) -> T.Optional[str]:
return None
+# https://downloads.ti.com/docs/esd/SPRUI04/
+class TILinearAsmCompiler(TICompiler, Compiler):
+ language = 'linearasm'
+
+ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str,
+ for_machine: MachineChoice, info: MachineInfo,
+ linker: T.Optional[DynamicLinker] = None,
+ full_version: T.Optional[str] = None, is_cross: bool = False):
+ Compiler.__init__(self, ccache, exelist, version, for_machine, info, linker, full_version, is_cross)
+ TICompiler.__init__(self)
+
+ def needs_static_linker(self) -> bool:
+ return True
+
+ def get_always_args(self) -> T.List[str]:
+ return []
+
+ def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]:
+ return []
+
+ def sanity_check(self, work_dir: str, environment: Environment) -> None:
+ if self.info.cpu_family not in {'c6000'}:
+ raise EnvironmentException(f'TI Linear ASM compiler {self.id!r} does not support {self.info.cpu_family} CPU family')
+
+ def get_depfile_suffix(self) -> str:
+ return 'd'
+
+
class MetrowerksAsmCompiler(MetrowerksCompiler, Compiler):
language = 'nasm'
diff --git a/mesonbuild/compilers/c.py b/mesonbuild/compilers/c.py
index f67281f04a51..424b61251618 100644
--- a/mesonbuild/compilers/c.py
+++ b/mesonbuild/compilers/c.py
@@ -1,5 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2012-2020 The Meson development team
+# Copyright © 2024-2025 Intel Corporation
from __future__ import annotations
@@ -7,10 +8,11 @@
import typing as T
from .. import options
+from ..options import OptionKey
from .. import mlog
from ..mesonlib import MesonException, version_compare
from .c_function_attributes import C_FUNC_ATTRIBUTES
-from .mixins.apple import AppleCompilerMixin
+from .mixins.apple import AppleCompilerMixin, AppleCStdsMixin
from .mixins.clike import CLikeCompiler
from .mixins.ccrx import CcrxCompiler
from .mixins.xc16 import Xc16Compiler
@@ -18,15 +20,16 @@
from .mixins.ti import TICompiler
from .mixins.arm import ArmCompiler, ArmclangCompiler
from .mixins.visualstudio import MSVCCompiler, ClangClCompiler
-from .mixins.gnu import GnuCompiler
+from .mixins.gnu import GnuCompiler, GnuCStds
from .mixins.gnu import gnu_common_warning_args, gnu_c_warning_args
from .mixins.intel import IntelGnuLikeCompiler, IntelVisualStudioLikeCompiler
-from .mixins.clang import ClangCompiler
+from .mixins.clang import ClangCompiler, ClangCStds
from .mixins.elbrus import ElbrusCompiler
from .mixins.pgi import PGICompiler
from .mixins.emscripten import EmscriptenMixin
from .mixins.metrowerks import MetrowerksCompiler
from .mixins.metrowerks import mwccarm_instruction_set_args, mwcceppc_instruction_set_args
+from .mixins.tasking import TaskingCompiler
from .compilers import (
gnu_winlibs,
msvc_winlibs,
@@ -34,21 +37,22 @@
)
if T.TYPE_CHECKING:
- from ..coredata import MutableKeyedOptionDictType, KeyedOptionDictType
+ from ..options import MutableKeyedOptionDictType
from ..dependencies import Dependency
from ..envconfig import MachineInfo
from ..environment import Environment
from ..linkers.linkers import DynamicLinker
from ..mesonlib import MachineChoice
from .compilers import CompileCheckMode
+ from ..build import BuildTarget
CompilerMixinBase = Compiler
else:
CompilerMixinBase = object
-_ALL_STDS = ['c89', 'c9x', 'c90', 'c99', 'c1x', 'c11', 'c17', 'c18', 'c2x', 'c23']
-_ALL_STDS += [f'gnu{std[1:]}' for std in _ALL_STDS]
-_ALL_STDS += ['iso9899:1990', 'iso9899:199409', 'iso9899:1999', 'iso9899:2011', 'iso9899:2017', 'iso9899:2018']
+ALL_STDS = ['c89', 'c9x', 'c90', 'c99', 'c1x', 'c11', 'c17', 'c18', 'c2x', 'c23', 'c2y']
+ALL_STDS += [f'gnu{std[1:]}' for std in ALL_STDS]
+ALL_STDS += ['iso9899:1990', 'iso9899:199409', 'iso9899:1999', 'iso9899:2011', 'iso9899:2017', 'iso9899:2018']
class CCompiler(CLikeCompiler, Compiler):
@@ -97,45 +101,12 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
opts = super().get_options()
key = self.form_compileropt_key('std')
opts.update({
- key: options.UserStdOption('C', _ALL_STDS),
+ key: options.UserStdOption('c', ALL_STDS),
})
return opts
-class _ClangCStds(CompilerMixinBase):
-
- """Mixin class for clang based compilers for setting C standards.
-
- This is used by both ClangCCompiler and ClangClCompiler, as they share
- the same versions
- """
-
- _C17_VERSION = '>=6.0.0'
- _C18_VERSION = '>=8.0.0'
- _C2X_VERSION = '>=9.0.0'
- _C23_VERSION = '>=18.0.0'
-
- def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = super().get_options()
- stds = ['c89', 'c99', 'c11']
- # https://releases.llvm.org/6.0.0/tools/clang/docs/ReleaseNotes.html
- # https://en.wikipedia.org/wiki/Xcode#Latest_versions
- if version_compare(self.version, self._C17_VERSION):
- stds += ['c17']
- if version_compare(self.version, self._C18_VERSION):
- stds += ['c18']
- if version_compare(self.version, self._C2X_VERSION):
- stds += ['c2x']
- if version_compare(self.version, self._C23_VERSION):
- stds += ['c23']
- key = self.form_compileropt_key('std')
- std_opt = opts[key]
- assert isinstance(std_opt, options.UserStdOption), 'for mypy'
- std_opt.set_versions(stds, gnu=True)
- return opts
-
-
-class ClangCCompiler(_ClangCStds, ClangCompiler, CCompiler):
+class ClangCCompiler(ClangCStds, ClangCompiler, CCompiler):
def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
info: 'MachineInfo',
@@ -154,29 +125,26 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = super().get_options()
if self.info.is_windows() or self.info.is_cygwin():
- self.update_options(
- opts,
- self.create_option(options.UserArrayOption,
- self.form_compileropt_key('winlibs'),
- 'Standard Win libraries to link against',
- gnu_winlibs),
- )
+ key = self.form_compileropt_key('winlibs')
+ opts[key] = options.UserStringArrayOption(
+ self.make_option_name(key),
+ 'Standard Windows libraries to link against',
+ gnu_winlibs)
return opts
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
if std != 'none':
args.append('-std=' + std)
return args
- def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_link_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
if self.info.is_windows() or self.info.is_cygwin():
- # without a typedict mypy can't understand this.
- key = self.form_compileropt_key('winlibs')
- libs = options.get_value(key).copy()
- assert isinstance(libs, list)
+ retval = self.get_compileropt_value('winlibs', env, target, subproject)
+ assert isinstance(retval, list)
+ libs: T.List[str] = retval.copy()
for l in libs:
assert isinstance(l, str)
return libs
@@ -188,7 +156,7 @@ class ArmLtdClangCCompiler(ClangCCompiler):
id = 'armltdclang'
-class AppleClangCCompiler(AppleCompilerMixin, ClangCCompiler):
+class AppleClangCCompiler(AppleCompilerMixin, AppleCStdsMixin, ClangCCompiler):
"""Handle the differences between Apple Clang and Vanilla Clang.
@@ -196,10 +164,6 @@ class AppleClangCCompiler(AppleCompilerMixin, ClangCCompiler):
C standards were added.
"""
- _C17_VERSION = '>=10.0.0'
- _C18_VERSION = '>=11.0.0'
- _C2X_VERSION = '>=11.0.0'
-
class EmscriptenCCompiler(EmscriptenMixin, ClangCCompiler):
@@ -249,30 +213,27 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
'everything': ['-Weverything']}
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = CCompiler.get_options(self)
+ opts = super().get_options()
key = self.form_compileropt_key('std')
std_opt = opts[key]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
std_opt.set_versions(['c90', 'c99', 'c11'], gnu=True)
return opts
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
if std != 'none':
args.append('-std=' + std)
return args
- def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_link_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
return []
-class GnuCCompiler(GnuCompiler, CCompiler):
+class GnuCCompiler(GnuCStds, GnuCompiler, CCompiler):
- _C18_VERSION = '>=8.0.0'
- _C2X_VERSION = '>=9.0.0'
- _C23_VERSION = '>=14.0.0'
_INVALID_PCH_VERSION = ">=3.4.0"
def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
@@ -294,42 +255,31 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
self.supported_warn_args(gnu_c_warning_args))}
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = CCompiler.get_options(self)
- stds = ['c89', 'c99', 'c11']
- if version_compare(self.version, self._C18_VERSION):
- stds += ['c17', 'c18']
- if version_compare(self.version, self._C2X_VERSION):
- stds += ['c2x']
- if version_compare(self.version, self._C23_VERSION):
- stds += ['c23']
- key = self.form_compileropt_key('std')
- std_opt = opts[key]
- assert isinstance(std_opt, options.UserStdOption), 'for mypy'
- std_opt.set_versions(stds, gnu=True)
+ opts = super().get_options()
if self.info.is_windows() or self.info.is_cygwin():
- self.update_options(
- opts,
- self.create_option(options.UserArrayOption,
- key.evolve('c_winlibs'),
- 'Standard Win libraries to link against',
- gnu_winlibs),
- )
+ key = self.form_compileropt_key('winlibs')
+ opts[key] = options.UserStringArrayOption(
+ self.make_option_name(key),
+ 'Standard Windows libraries to link against',
+ gnu_winlibs)
return opts
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ key = OptionKey('c_std', machine=self.for_machine)
+ std = self.get_compileropt_value(key, env, target, subproject)
+ assert isinstance(std, str)
if std != 'none':
args.append('-std=' + std)
return args
- def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_link_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
if self.info.is_windows() or self.info.is_cygwin():
# without a typeddict mypy can't figure this out
- key = self.form_compileropt_key('winlibs')
- libs: T.List[str] = options.get_value(key).copy()
- assert isinstance(libs, list)
+ retval = self.get_compileropt_value('winlibs', env, target, subproject)
+
+ assert isinstance(retval, list)
+ libs: T.List[str] = retval.copy()
for l in libs:
assert isinstance(l, str)
return libs
@@ -362,7 +312,7 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
PGICompiler.__init__(self)
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = CCompiler.get_options(self)
+ opts = super().get_options()
cppstd_choices = ['c89', 'c90', 'c99', 'c11', 'c17', 'c18']
std_opt = opts[self.form_compileropt_key('std')]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
@@ -381,7 +331,7 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
ElbrusCompiler.__init__(self)
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = CCompiler.get_options(self)
+ opts = super().get_options()
stds = ['c89', 'c9x', 'c99', 'gnu89', 'gnu9x', 'gnu99']
stds += ['iso9899:1990', 'iso9899:199409', 'iso9899:1999']
if version_compare(self.version, '>=1.20.00'):
@@ -428,7 +378,7 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
'everything': default_warn_args + ['-Wextra']}
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = CCompiler.get_options(self)
+ opts = super().get_options()
stds = ['c89', 'c99']
if version_compare(self.version, '>=16.0.0'):
stds += ['c11']
@@ -438,10 +388,10 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
std_opt.set_versions(stds, gnu=True)
return opts
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
- args = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
+ args: T.List[str] = []
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
if std != 'none':
args.append('-std=' + std)
return args
@@ -457,21 +407,18 @@ class VisualStudioLikeCCompilerMixin(CompilerMixinBase):
"""Shared methods that apply to MSVC-like C compilers."""
def get_options(self) -> MutableKeyedOptionDictType:
- return self.update_options(
- super().get_options(),
- self.create_option(
- options.UserArrayOption,
- self.form_compileropt_key('winlibs'),
- 'Windows libs to link against.',
- msvc_winlibs,
- ),
- )
-
- def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
- # need a TypeDict to make this work
+ opts = super().get_options()
key = self.form_compileropt_key('winlibs')
- libs = options.get_value(key).copy()
- assert isinstance(libs, list)
+ opts[key] = options.UserStringArrayOption(
+ self.make_option_name(key),
+ 'Standard Windows libraries to link against',
+ msvc_winlibs)
+ return opts
+
+ def get_option_link_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
+ retval = self.get_compileropt_value('winlibs', env, target, subproject)
+ assert isinstance(retval, list)
+ libs: T.List[str] = retval.copy()
for l in libs:
assert isinstance(l, str)
return libs
@@ -504,19 +451,19 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
std_opt.set_versions(stds, gnu=True, gnu_deprecated=True)
return opts
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ std = self.get_compileropt_value('std', env, target, subproject)
+
# As of MVSC 16.8, /std:c11 and /std:c17 are the only valid C standard options.
- if std == 'c11':
+ if std in {'c11'}:
args.append('/std:c11')
elif std in {'c17', 'c18'}:
args.append('/std:c17')
return args
-class ClangClCCompiler(_ClangCStds, ClangClCompiler, VisualStudioLikeCCompilerMixin, CCompiler):
+class ClangClCCompiler(ClangCStds, ClangClCompiler, VisualStudioLikeCCompilerMixin, CCompiler):
def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
is_cross: bool, info: 'MachineInfo', target: str,
linker: T.Optional['DynamicLinker'] = None,
@@ -526,9 +473,9 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic
full_version=full_version)
ClangClCompiler.__init__(self, target)
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
if std != "none":
return [f'/clang:-std={std}']
return []
@@ -550,18 +497,15 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic
def get_options(self) -> 'MutableKeyedOptionDictType':
opts = super().get_options()
key = self.form_compileropt_key('std')
- # To shut up mypy.
- if isinstance(opts, dict):
- raise RuntimeError('This is a transitory issue that should not happen. Please report with full backtrace.')
- std_opt = opts.get_value_object(key)
+ std_opt = opts[key]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
std_opt.set_versions(['c89', 'c99', 'c11'])
return opts
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
- args = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
+ args: T.List[str] = []
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
if std == 'c89':
mlog.log("ICL doesn't explicitly implement c89, setting the standard to 'none', which is close.", once=True)
elif std != 'none':
@@ -585,17 +529,17 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
ArmCompiler.__init__(self)
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = CCompiler.get_options(self)
+ opts = super().get_options()
key = self.form_compileropt_key('std')
std_opt = opts[key]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
std_opt.set_versions(['c89', 'c99', 'c11'])
return opts
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
if std != 'none':
args.append('--' + std)
return args
@@ -615,7 +559,7 @@ def get_always_args(self) -> T.List[str]:
return ['-nologo']
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = CCompiler.get_options(self)
+ opts = super().get_options()
key = self.form_compileropt_key('std')
std_opt = opts[key]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
@@ -625,10 +569,10 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
def get_no_stdinc_args(self) -> T.List[str]:
return []
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
if std == 'c89':
args.append('-lang=c')
elif std == 'c99':
@@ -663,7 +607,7 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
Xc16Compiler.__init__(self)
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = CCompiler.get_options(self)
+ opts = super().get_options()
key = self.form_compileropt_key('std')
std_opt = opts[key]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
@@ -673,10 +617,10 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
def get_no_stdinc_args(self) -> T.List[str]:
return []
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
if std != 'none':
args.append('-ansi')
args.append('-std=' + std)
@@ -709,16 +653,13 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
CompCertCompiler.__init__(self)
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = CCompiler.get_options(self)
+ opts = super().get_options()
key = self.form_compileropt_key('std')
std_opt = opts[key]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
std_opt.set_versions(['c89', 'c99'])
return opts
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
- return []
-
def get_no_optimization_args(self) -> T.List[str]:
return ['-O0']
@@ -747,7 +688,7 @@ def get_always_args(self) -> T.List[str]:
return []
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = CCompiler.get_options(self)
+ opts = super().get_options()
key = self.form_compileropt_key('std')
std_opt = opts[key]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
@@ -757,10 +698,10 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
def get_no_stdinc_args(self) -> T.List[str]:
return []
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
if std != 'none':
args.append('--' + std)
return args
@@ -787,16 +728,14 @@ def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[st
return mwccarm_instruction_set_args.get(instruction_set, None)
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = CCompiler.get_options(self)
- c_stds = ['c99']
- key = self.form_compileropt_key('std')
- opts[key].choices = ['none'] + c_stds
+ opts = super().get_options()
+ self._update_language_stds(opts, ['c99'])
return opts
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
if std != 'none':
args.append('-lang')
args.append(std)
@@ -817,16 +756,25 @@ def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[st
return mwcceppc_instruction_set_args.get(instruction_set, None)
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = CCompiler.get_options(self)
- c_stds = ['c99']
- key = self.form_compileropt_key('std')
- opts[key].choices = ['none'] + c_stds
+ opts = super().get_options()
+ self._update_language_stds(opts, ['c99'])
return opts
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
if std != 'none':
args.append('-lang ' + std)
return args
+
+class TaskingCCompiler(TaskingCompiler, CCompiler):
+ id = 'tasking'
+
+ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo',
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ CCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross,
+ info, linker=linker, full_version=full_version)
+ TaskingCompiler.__init__(self)
diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py
index 603a3eb484de..00be201de5d2 100644
--- a/mesonbuild/compilers/compilers.py
+++ b/mesonbuild/compilers/compilers.py
@@ -1,6 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2012-2022 The Meson development team
-# Copyright © 2023-2024 Intel Corporation
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
@@ -20,15 +20,13 @@
EnvironmentException, MesonException,
Popen_safe_logged, LibType, TemporaryDirectoryWinProof,
)
-
from ..options import OptionKey
-
from ..arglist import CompilerArgs
if T.TYPE_CHECKING:
from .. import coredata
from ..build import BuildTarget, DFeatures
- from ..coredata import MutableKeyedOptionDictType, KeyedOptionDictType
+ from ..options import MutableKeyedOptionDictType
from ..envconfig import MachineInfo
from ..environment import Environment
from ..linkers import RSPFileSyntax
@@ -37,8 +35,8 @@
from ..dependencies import Dependency
CompilerType = T.TypeVar('CompilerType', bound='Compiler')
- _T = T.TypeVar('_T')
- UserOptionType = T.TypeVar('UserOptionType', bound=options.UserOption)
+
+_T = T.TypeVar('_T')
"""This file contains the data files of all compilers Meson knows
about. To support a new compiler, add its information below.
@@ -69,6 +67,7 @@
'cython': ('pyx', ),
'nasm': ('asm', 'nasm',),
'masm': ('masm',),
+ 'linearasm': ('sa',),
}
all_languages = lang_suffixes.keys()
c_cpp_suffixes = {'h'}
@@ -133,11 +132,15 @@ def is_header(fname: 'mesonlib.FileOrString') -> bool:
def is_source_suffix(suffix: str) -> bool:
return suffix in source_suffixes
+@lru_cache(maxsize=None)
+def cached_is_source_by_name(fname: str) -> bool:
+ suffix = fname.split('.')[-1].lower()
+ return is_source_suffix(suffix)
+
def is_source(fname: 'mesonlib.FileOrString') -> bool:
if isinstance(fname, mesonlib.File):
fname = fname.fname
- suffix = fname.split('.')[-1].lower()
- return is_source_suffix(suffix)
+ return cached_is_source_by_name(fname)
def is_assembly(fname: 'mesonlib.FileOrString') -> bool:
if isinstance(fname, mesonlib.File):
@@ -145,6 +148,12 @@ def is_assembly(fname: 'mesonlib.FileOrString') -> bool:
suffix = fname.split('.')[-1]
return suffix in assembler_suffixes
+def is_java(fname: mesonlib.FileOrString) -> bool:
+ if isinstance(fname, mesonlib.File):
+ fname = fname.fname
+ suffix = fname.split('.')[-1]
+ return suffix in lang_suffixes['java']
+
def is_llvm_ir(fname: 'mesonlib.FileOrString') -> bool:
if isinstance(fname, mesonlib.File):
fname = fname.fname
@@ -152,14 +161,14 @@ def is_llvm_ir(fname: 'mesonlib.FileOrString') -> bool:
return suffix in llvm_ir_suffixes
@lru_cache(maxsize=None)
-def cached_by_name(fname: 'mesonlib.FileOrString') -> bool:
+def cached_is_object_by_name(fname: str) -> bool:
suffix = fname.split('.')[-1]
return suffix in obj_suffixes
def is_object(fname: 'mesonlib.FileOrString') -> bool:
if isinstance(fname, mesonlib.File):
fname = fname.fname
- return cached_by_name(fname)
+ return cached_is_object_by_name(fname)
def is_library(fname: 'mesonlib.FileOrString') -> bool:
if isinstance(fname, mesonlib.File):
@@ -209,64 +218,26 @@ class CompileCheckMode(enum.Enum):
}
-MSCRT_VALS = ['none', 'md', 'mdd', 'mt', 'mtd']
-
-@dataclass
-class BaseOption(T.Generic[options._T, options._U]):
- opt_type: T.Type[options._U]
- description: str
- default: T.Any = None
- choices: T.Any = None
-
- def init_option(self, name: OptionKey) -> options._U:
- keywords = {'value': self.default}
- if self.choices:
- keywords['choices'] = self.choices
- return self.opt_type(name.name, self.description, **keywords)
-
-BASE_OPTIONS: T.Mapping[OptionKey, BaseOption] = {
- OptionKey('b_pch'): BaseOption(options.UserBooleanOption, 'Use precompiled headers', True),
- OptionKey('b_lto'): BaseOption(options.UserBooleanOption, 'Use link time optimization', False),
- OptionKey('b_lto_threads'): BaseOption(options.UserIntegerOption, 'Use multiple threads for Link Time Optimization', (None, None, 0)),
- OptionKey('b_lto_mode'): BaseOption(options.UserComboOption, 'Select between different LTO modes.', 'default',
- choices=['default', 'thin']),
- OptionKey('b_thinlto_cache'): BaseOption(options.UserBooleanOption, 'Use LLVM ThinLTO caching for faster incremental builds', False),
- OptionKey('b_thinlto_cache_dir'): BaseOption(options.UserStringOption, 'Directory to store ThinLTO cache objects', ''),
- OptionKey('b_sanitize'): BaseOption(options.UserComboOption, 'Code sanitizer to use', 'none',
- choices=['none', 'address', 'thread', 'undefined', 'memory', 'leak', 'address,undefined']),
- OptionKey('b_lundef'): BaseOption(options.UserBooleanOption, 'Use -Wl,--no-undefined when linking', True),
- OptionKey('b_asneeded'): BaseOption(options.UserBooleanOption, 'Use -Wl,--as-needed when linking', True),
- OptionKey('b_pgo'): BaseOption(options.UserComboOption, 'Use profile guided optimization', 'off',
- choices=['off', 'generate', 'use']),
- OptionKey('b_coverage'): BaseOption(options.UserBooleanOption, 'Enable coverage tracking.', False),
- OptionKey('b_colorout'): BaseOption(options.UserComboOption, 'Use colored output', 'always',
- choices=['auto', 'always', 'never']),
- OptionKey('b_ndebug'): BaseOption(options.UserComboOption, 'Disable asserts', 'false', choices=['true', 'false', 'if-release']),
- OptionKey('b_staticpic'): BaseOption(options.UserBooleanOption, 'Build static libraries as position independent', True),
- OptionKey('b_pie'): BaseOption(options.UserBooleanOption, 'Build executables as position independent', False),
- OptionKey('b_bitcode'): BaseOption(options.UserBooleanOption, 'Generate and embed bitcode (only macOS/iOS/tvOS)', False),
- OptionKey('b_vscrt'): BaseOption(options.UserComboOption, 'VS run-time library type to use.', 'from_buildtype',
- choices=MSCRT_VALS + ['from_buildtype', 'static_from_buildtype']),
-}
-
-base_options = {key: base_opt.init_option(key) for key, base_opt in BASE_OPTIONS.items()}
-
-def option_enabled(boptions: T.Set[OptionKey], options: 'KeyedOptionDictType',
- option: OptionKey) -> bool:
+def option_enabled(boptions: T.Set[OptionKey],
+ target: 'BuildTarget',
+ env: 'Environment',
+ option: T.Union[str, OptionKey]) -> bool:
+ if isinstance(option, str):
+ option = OptionKey(option)
try:
if option not in boptions:
return False
- ret = options.get_value(option)
+ ret = env.coredata.get_option_for_target(target, option)
assert isinstance(ret, bool), 'must return bool' # could also be str
return ret
except KeyError:
return False
-def get_option_value(options: 'KeyedOptionDictType', opt: OptionKey, fallback: '_T') -> '_T':
+def get_option_value_for_target(env: 'Environment', target: 'BuildTarget', opt: OptionKey, fallback: '_T') -> '_T':
"""Get the value of an option, or the fallback value."""
try:
- v: '_T' = options.get_value(opt)
+ v = env.coredata.get_option_for_target(target, opt)
except (KeyError, AttributeError):
return fallback
@@ -275,36 +246,58 @@ def get_option_value(options: 'KeyedOptionDictType', opt: OptionKey, fallback: '
return v
-def are_asserts_disabled(options: KeyedOptionDictType) -> bool:
+def are_asserts_disabled(target: 'BuildTarget', env: 'Environment') -> bool:
"""Should debug assertions be disabled
- :param options: OptionDictionary
+ :param target: a target to check for
+ :param env: the environment
:return: whether to disable assertions or not
"""
- return (options.get_value('b_ndebug') == 'true' or
- (options.get_value('b_ndebug') == 'if-release' and
- options.get_value('buildtype') in {'release', 'plain'}))
+ return (env.coredata.get_option_for_target(target, 'b_ndebug') == 'true' or
+ (env.coredata.get_option_for_target(target, 'b_ndebug') == 'if-release' and
+ env.coredata.get_option_for_target(target, 'buildtype') in {'release', 'plain'}))
+
+
+def are_asserts_disabled_for_subproject(subproject: str, env: 'Environment') -> bool:
+ key = OptionKey('b_ndebug', subproject)
+ return (env.coredata.optstore.get_value_for(key) == 'true' or
+ (env.coredata.optstore.get_value_for(key) == 'if-release' and
+ env.coredata.optstore.get_value_for(key.evolve(name='buildtype')) in {'release', 'plain'}))
-def get_base_compile_args(options: 'KeyedOptionDictType', compiler: 'Compiler', env: 'Environment') -> T.List[str]:
+def get_base_compile_args(target: 'BuildTarget', compiler: 'Compiler', env: 'Environment') -> T.List[str]:
args: T.List[str] = []
try:
- if options.get_value(OptionKey('b_lto')):
+ if env.coredata.get_option_for_target(target, 'b_lto'):
+ num_threads = get_option_value_for_target(env, target, OptionKey('b_lto_threads'), 0)
+ ltomode = get_option_value_for_target(env, target, OptionKey('b_lto_mode'), 'default')
args.extend(compiler.get_lto_compile_args(
- threads=get_option_value(options, OptionKey('b_lto_threads'), 0),
- mode=get_option_value(options, OptionKey('b_lto_mode'), 'default')))
+ threads=num_threads,
+ mode=ltomode))
except (KeyError, AttributeError):
pass
try:
- args += compiler.get_colorout_args(options.get_value(OptionKey('b_colorout')))
- except (KeyError, AttributeError):
+ clrout = env.coredata.get_option_for_target(target, 'b_colorout')
+ assert isinstance(clrout, str)
+ args += compiler.get_colorout_args(clrout)
+ except KeyError:
pass
try:
- args += compiler.sanitizer_compile_args(options.get_value(OptionKey('b_sanitize')))
- except (KeyError, AttributeError):
+ sanitize = env.coredata.get_option_for_target(target, 'b_sanitize')
+ assert isinstance(sanitize, list)
+ if sanitize == ['none']:
+ sanitize = []
+ sanitize_args = compiler.sanitizer_compile_args(sanitize)
+ # We consider that if there are no sanitizer arguments returned, then
+ # the language doesn't support them.
+ if sanitize_args:
+ if not compiler.has_multi_arguments(sanitize_args, env)[0]:
+ raise MesonException(f'Compiler {compiler.name_string()} does not support sanitizer arguments {sanitize_args}')
+ args.extend(sanitize_args)
+ except KeyError:
pass
try:
- pgo_val = options.get_value(OptionKey('b_pgo'))
+ pgo_val = env.coredata.get_option_for_target(target, 'b_pgo')
if pgo_val == 'generate':
args.extend(compiler.get_profile_generate_args())
elif pgo_val == 'use':
@@ -312,21 +305,23 @@ def get_base_compile_args(options: 'KeyedOptionDictType', compiler: 'Compiler',
except (KeyError, AttributeError):
pass
try:
- if options.get_value(OptionKey('b_coverage')):
+ if env.coredata.get_option_for_target(target, 'b_coverage'):
args += compiler.get_coverage_args()
except (KeyError, AttributeError):
pass
try:
- args += compiler.get_assert_args(are_asserts_disabled(options), env)
- except (KeyError, AttributeError):
+ args += compiler.get_assert_args(are_asserts_disabled(target, env), env)
+ except KeyError:
pass
# This does not need a try...except
- if option_enabled(compiler.base_options, options, OptionKey('b_bitcode')):
+ if option_enabled(compiler.base_options, target, env, 'b_bitcode'):
args.append('-fembed-bitcode')
try:
+ crt_val = env.coredata.get_option_for_target(target, 'b_vscrt')
+ assert isinstance(crt_val, str)
+ buildtype = env.coredata.get_option_for_target(target, 'buildtype')
+ assert isinstance(buildtype, str)
try:
- crt_val = options.get_value(OptionKey('b_vscrt'))
- buildtype = options.get_value(OptionKey('buildtype'))
args += compiler.get_crt_compile_args(crt_val, buildtype)
except AttributeError:
pass
@@ -334,31 +329,46 @@ def get_base_compile_args(options: 'KeyedOptionDictType', compiler: 'Compiler',
pass
return args
-def get_base_link_args(options: 'KeyedOptionDictType', linker: 'Compiler',
- is_shared_module: bool, build_dir: str) -> T.List[str]:
+def get_base_link_args(target: 'BuildTarget',
+ linker: 'Compiler',
+ env: 'Environment') -> T.List[str]:
args: T.List[str] = []
+ build_dir = env.get_build_dir()
try:
- if options.get_value('b_lto'):
- if options.get_value('werror'):
+ if env.coredata.get_option_for_target(target, 'b_lto'):
+ if env.coredata.get_option_for_target(target, 'werror'):
args.extend(linker.get_werror_args())
thinlto_cache_dir = None
- if get_option_value(options, OptionKey('b_thinlto_cache'), False):
- thinlto_cache_dir = get_option_value(options, OptionKey('b_thinlto_cache_dir'), '')
+ cachedir_key = OptionKey('b_thinlto_cache')
+ if get_option_value_for_target(env, target, cachedir_key, False):
+ thinlto_cache_dir = get_option_value_for_target(env, target, OptionKey('b_thinlto_cache_dir'), '')
if thinlto_cache_dir == '':
thinlto_cache_dir = os.path.join(build_dir, 'meson-private', 'thinlto-cache')
+ num_threads = get_option_value_for_target(env, target, OptionKey('b_lto_threads'), 0)
+ lto_mode = get_option_value_for_target(env, target, OptionKey('b_lto_mode'), 'default')
args.extend(linker.get_lto_link_args(
- threads=get_option_value(options, OptionKey('b_lto_threads'), 0),
- mode=get_option_value(options, OptionKey('b_lto_mode'), 'default'),
+ threads=num_threads,
+ mode=lto_mode,
thinlto_cache_dir=thinlto_cache_dir))
except (KeyError, AttributeError):
pass
try:
- args += linker.sanitizer_link_args(options.get_value('b_sanitize'))
- except (KeyError, AttributeError):
+ sanitizer = env.coredata.get_option_for_target(target, 'b_sanitize')
+ assert isinstance(sanitizer, list)
+ if sanitizer == ['none']:
+ sanitizer = []
+ sanitizer_args = linker.sanitizer_link_args(sanitizer)
+ # We consider that if there are no sanitizer arguments returned, then
+ # the language doesn't support them.
+ if sanitizer_args:
+ if not linker.has_multi_link_arguments(sanitizer_args, env)[0]:
+ raise MesonException(f'Linker {linker.name_string()} does not support sanitizer arguments {sanitizer_args}')
+ args.extend(sanitizer_args)
+ except KeyError:
pass
try:
- pgo_val = options.get_value('b_pgo')
+ pgo_val = env.coredata.get_option_for_target(target, 'b_pgo')
if pgo_val == 'generate':
args.extend(linker.get_profile_generate_args())
elif pgo_val == 'use':
@@ -366,16 +376,16 @@ def get_base_link_args(options: 'KeyedOptionDictType', linker: 'Compiler',
except (KeyError, AttributeError):
pass
try:
- if options.get_value('b_coverage'):
+ if env.coredata.get_option_for_target(target, 'b_coverage'):
args += linker.get_coverage_link_args()
except (KeyError, AttributeError):
pass
- as_needed = option_enabled(linker.base_options, options, OptionKey('b_asneeded'))
- bitcode = option_enabled(linker.base_options, options, OptionKey('b_bitcode'))
+ as_needed = option_enabled(linker.base_options, target, env, 'b_asneeded')
+ bitcode = option_enabled(linker.base_options, target, env, 'b_bitcode')
# Shared modules cannot be built with bitcode_bundle because
# -bitcode_bundle is incompatible with -undefined and -bundle
- if bitcode and not is_shared_module:
+ if bitcode and not target.typename == 'shared module':
args.extend(linker.bitcode_args())
elif as_needed:
# -Wl,-dead_strip_dylibs is incompatible with bitcode
@@ -384,18 +394,23 @@ def get_base_link_args(options: 'KeyedOptionDictType', linker: 'Compiler',
# Apple's ld (the only one that supports bitcode) does not like -undefined
# arguments or -headerpad_max_install_names when bitcode is enabled
if not bitcode:
+ from ..build import SharedModule
args.extend(linker.headerpad_args())
- if (not is_shared_module and
- option_enabled(linker.base_options, options, OptionKey('b_lundef'))):
+ if (not isinstance(target, SharedModule) and
+ option_enabled(linker.base_options, target, env, 'b_lundef')):
args.extend(linker.no_undefined_link_args())
else:
args.extend(linker.get_allow_undefined_link_args())
try:
+ crt_val = env.coredata.get_option_for_target(target, 'b_vscrt')
+ assert isinstance(crt_val, str)
+ buildtype = env.coredata.get_option_for_target(target, 'buildtype')
+ assert isinstance(buildtype, str)
try:
- crt_val = options.get_value(OptionKey('b_vscrt'))
- buildtype = options.get_value(OptionKey('buildtype'))
- args += linker.get_crt_link_args(crt_val, buildtype)
+ crtargs = linker.get_crt_link_args(crt_val, buildtype)
+ assert isinstance(crtargs, list)
+ args += crtargs
except AttributeError:
pass
except KeyError:
@@ -428,8 +443,8 @@ class CompileResult(HoldableObject):
output_name: T.Optional[str] = field(default=None, init=False)
cached: bool = field(default=False, init=False)
-
class Compiler(HoldableObject, metaclass=abc.ABCMeta):
+
# Libraries to ignore in find_library() since they are provided by the
# compiler or the C library. Currently only used for MSVC.
ignore_libs: T.List[str] = []
@@ -483,6 +498,12 @@ def get_id(self) -> str:
def get_modes(self) -> T.List[Compiler]:
return self.modes
+ def get_exe(self) -> str:
+ return self.exelist[0]
+
+ def get_exe_args(self) -> T.List[str]:
+ return self.exelist[1:]
+
def get_linker_id(self) -> str:
# There is not guarantee that we have a dynamic linker instance, as
# some languages don't have separate linkers and compilers. In those
@@ -582,22 +603,25 @@ def gen_import_library_args(self, implibname: str) -> T.List[str]:
"""
return []
- def create_option(self, option_type: T.Type[UserOptionType], option_key: OptionKey, *args: T.Any, **kwargs: T.Any) -> T.Tuple[OptionKey, UserOptionType]:
- return option_key, option_type(f'{self.language}_{option_key.name}', *args, **kwargs)
+ def make_option_name(self, key: OptionKey) -> str:
+ return f'{self.language}_{key.name}'
@staticmethod
- def update_options(options: MutableKeyedOptionDictType, *args: T.Tuple[OptionKey, UserOptionType]) -> MutableKeyedOptionDictType:
+ def update_options(options: MutableKeyedOptionDictType, *args: T.Tuple[OptionKey, options.AnyOptionType]) -> MutableKeyedOptionDictType:
options.update(args)
return options
def get_options(self) -> 'MutableKeyedOptionDictType':
return {}
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_compile_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
+ return []
+
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
return []
- def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
- return self.linker.get_option_args(options)
+ def get_option_link_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
+ return self.linker.get_option_link_args(target, env, subproject)
def check_header(self, hname: str, prefix: str, env: 'Environment', *,
extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]] = None,
@@ -889,8 +913,8 @@ def get_link_debugfile_args(self, targetfile: str) -> T.List[str]:
def get_std_shared_lib_link_args(self) -> T.List[str]:
return self.linker.get_std_shared_lib_args()
- def get_std_shared_module_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
- return self.linker.get_std_shared_module_args(options)
+ def get_std_shared_module_link_args(self, target: 'BuildTarget') -> T.List[str]:
+ return self.linker.get_std_shared_module_args(target)
def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
return self.linker.get_link_whole_for(args)
@@ -1010,10 +1034,10 @@ def get_lto_link_args(self, *, threads: int = 0, mode: str = 'default',
thinlto_cache_dir: T.Optional[str] = None) -> T.List[str]:
return self.linker.get_lto_args()
- def sanitizer_compile_args(self, value: str) -> T.List[str]:
+ def sanitizer_compile_args(self, value: T.List[str]) -> T.List[str]:
return []
- def sanitizer_link_args(self, value: str) -> T.List[str]:
+ def sanitizer_link_args(self, value: T.List[str]) -> T.List[str]:
return self.linker.sanitizer_args(value)
def get_asneeded_args(self) -> T.List[str]:
@@ -1065,7 +1089,7 @@ def get_assert_args(self, disable: bool, env: 'Environment') -> T.List[str]:
return []
def get_crt_val(self, crt_val: str, buildtype: str) -> str:
- if crt_val in MSCRT_VALS:
+ if crt_val in options.MSCRT_VALS:
return crt_val
assert crt_val in {'from_buildtype', 'static_from_buildtype'}
@@ -1323,9 +1347,13 @@ def get_feature_args(self, kwargs: DFeatures, build_to_src: str) -> T.List[str]:
# TODO: using a TypeDict here would improve this
raise EnvironmentException(f'{self.id} does not implement get_feature_args')
- def get_prelink_args(self, prelink_name: str, obj_list: T.List[str]) -> T.List[str]:
+ def get_prelink_args(self, prelink_name: str, obj_list: T.List[str]) -> T.Tuple[T.List[str], T.List[str]]:
raise EnvironmentException(f'{self.id} does not know how to do prelinking.')
+ def get_prelink_append_compile_args(self) -> bool:
+ """Controls whether compile args have to be used for prelinking or not"""
+ return False
+
def rsp_file_syntax(self) -> 'RSPFileSyntax':
"""The format of the RSP file that this compiler supports.
@@ -1349,39 +1377,67 @@ def get_preprocessor(self) -> Compiler:
def form_compileropt_key(self, basename: str) -> OptionKey:
return OptionKey(f'{self.language}_{basename}', machine=self.for_machine)
-def get_global_options(lang: str,
+ def get_compileropt_value(self,
+ key: T.Union[str, OptionKey],
+ env: Environment,
+ target: T.Optional[BuildTarget],
+ subproject: T.Optional[str] = None
+ ) -> options.ElementaryOptionValues:
+ if isinstance(key, str):
+ key = self.form_compileropt_key(key)
+ if target:
+ return env.coredata.get_option_for_target(target, key)
+ else:
+ return env.coredata.optstore.get_value_for(key.evolve(subproject=subproject))
+
+ def _update_language_stds(self, opts: MutableKeyedOptionDictType, value: T.List[str]) -> None:
+ key = self.form_compileropt_key('std')
+ std = opts[key]
+ assert isinstance(std, (options.UserStdOption, options.UserComboOption)), 'for mypy'
+ if 'none' not in value:
+ value = ['none'] + value
+ std.choices = value
+
+
+def add_global_options(lang: str,
comp: T.Type[Compiler],
for_machine: MachineChoice,
- env: 'Environment') -> dict[OptionKey, options.UserOption[T.Any]]:
+ env: 'Environment'):
"""Retrieve options that apply to all compilers for a given language."""
description = f'Extra arguments passed to the {lang}'
argkey = OptionKey(f'{lang}_args', machine=for_machine)
- largkey = argkey.evolve(f'{lang}_link_args')
- envkey = argkey.evolve(f'{lang}_env_args')
+ largkey = OptionKey(f'{lang}_link_args', machine=for_machine)
+
+ comp_args_from_envvar = False
+ comp_options = env.coredata.optstore.get_pending_value(argkey)
+ if comp_options is None:
+ comp_args_from_envvar = True
+ comp_options = env.env_opts.get(argkey, [])
- comp_key = argkey if argkey in env.options else envkey
+ link_options = env.coredata.optstore.get_pending_value(largkey)
+ if link_options is None:
+ link_options = env.env_opts.get(largkey, [])
- comp_options = env.options.get(comp_key, [])
- link_options = env.options.get(largkey, [])
+ assert isinstance(comp_options, (str, list)), 'for mypy'
+ assert isinstance(link_options, (str, list)), 'for mypy'
- cargs = options.UserArrayOption(
- f'{lang}_{argkey.name}',
+ cargs = options.UserStringArrayOption(
+ argkey.name,
description + ' compiler',
comp_options, split_args=True, allow_dups=True)
- largs = options.UserArrayOption(
- f'{lang}_{largkey.name}',
+ largs = options.UserStringArrayOption(
+ largkey.name,
description + ' linker',
link_options, split_args=True, allow_dups=True)
- if comp.INVOKES_LINKER and comp_key == envkey:
+ env.coredata.optstore.add_compiler_option(lang, argkey, cargs)
+ env.coredata.optstore.add_compiler_option(lang, largkey, largs)
+
+ if comp.INVOKES_LINKER and comp_args_from_envvar:
# If the compiler acts as a linker driver, and we're using the
# environment variable flags for both the compiler and linker
# arguments, then put the compiler flags in the linker flags as well.
# This is how autotools works, and the env vars feature is for
# autotools compatibility.
largs.extend_value(comp_options)
-
- opts: dict[OptionKey, options.UserOption[T.Any]] = {argkey: cargs, largkey: largs}
-
- return opts
diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py
index 930e7b7e5bb9..ed8d1cf05a41 100644
--- a/mesonbuild/compilers/cpp.py
+++ b/mesonbuild/compilers/cpp.py
@@ -3,7 +3,6 @@
from __future__ import annotations
-import copy
import functools
import os.path
import typing as T
@@ -19,15 +18,15 @@
CompileCheckMode,
)
from .c_function_attributes import CXX_FUNC_ATTRIBUTES, C_FUNC_ATTRIBUTES
-from .mixins.apple import AppleCompilerMixin
+from .mixins.apple import AppleCompilerMixin, AppleCPPStdsMixin
from .mixins.clike import CLikeCompiler
from .mixins.ccrx import CcrxCompiler
from .mixins.ti import TICompiler
from .mixins.arm import ArmCompiler, ArmclangCompiler
from .mixins.visualstudio import MSVCCompiler, ClangClCompiler
-from .mixins.gnu import GnuCompiler, gnu_common_warning_args, gnu_cpp_warning_args
+from .mixins.gnu import GnuCompiler, GnuCPPStds, gnu_common_warning_args, gnu_cpp_warning_args
from .mixins.intel import IntelGnuLikeCompiler, IntelVisualStudioLikeCompiler
-from .mixins.clang import ClangCompiler
+from .mixins.clang import ClangCompiler, ClangCPPStds
from .mixins.elbrus import ElbrusCompiler
from .mixins.pgi import PGICompiler
from .mixins.emscripten import EmscriptenMixin
@@ -35,19 +34,20 @@
from .mixins.metrowerks import mwccarm_instruction_set_args, mwcceppc_instruction_set_args
if T.TYPE_CHECKING:
- from ..coredata import MutableKeyedOptionDictType, KeyedOptionDictType
+ from ..options import MutableKeyedOptionDictType
from ..dependencies import Dependency
from ..envconfig import MachineInfo
from ..environment import Environment
from ..linkers.linkers import DynamicLinker
from ..mesonlib import MachineChoice
+ from ..build import BuildTarget
CompilerMixinBase = CLikeCompiler
else:
CompilerMixinBase = object
-_ALL_STDS = ['c++98', 'c++0x', 'c++03', 'c++1y', 'c++1z', 'c++11', 'c++14', 'c++17', 'c++2a', 'c++20', 'c++23', 'c++26']
-_ALL_STDS += [f'gnu{std[1:]}' for std in _ALL_STDS]
-_ALL_STDS += ['vc++11', 'vc++14', 'vc++17', 'vc++20', 'vc++latest', 'c++latest']
+ALL_STDS = ['c++98', 'c++0x', 'c++03', 'c++1y', 'c++1z', 'c++11', 'c++14', 'c++17', 'c++2a', 'c++20', 'c++23', 'c++26']
+ALL_STDS += [f'gnu{std[1:]}' for std in ALL_STDS]
+ALL_STDS += ['vc++11', 'vc++14', 'vc++17', 'vc++20', 'vc++latest', 'c++latest']
def non_msvc_eh_options(eh: str, args: T.List[str]) -> None:
@@ -175,7 +175,7 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
opts = super().get_options()
key = self.form_compileropt_key('std')
opts.update({
- key: options.UserStdOption('C++', _ALL_STDS),
+ key: options.UserStdOption('cpp', ALL_STDS),
})
return opts
@@ -218,10 +218,7 @@ def language_stdlib_only_link_flags(self, env: Environment) -> T.List[str]:
raise MesonException('Could not detect either libc++ or libstdc++ as your C++ stdlib implementation.')
-class ClangCPPCompiler(_StdCPPLibMixin, ClangCompiler, CPPCompiler):
-
- _CPP23_VERSION = '>=12.0.0'
- _CPP26_VERSION = '>=17.0.0'
+class ClangCPPCompiler(_StdCPPLibMixin, ClangCPPStds, ClangCompiler, CPPCompiler):
def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
info: 'MachineInfo',
@@ -239,55 +236,49 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
'everything': ['-Weverything']}
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = CPPCompiler.get_options(self)
- self.update_options(
- opts,
- self.create_option(options.UserComboOption,
- self.form_compileropt_key('eh'),
- 'C++ exception handling type.',
- ['none', 'default', 'a', 's', 'sc'],
- 'default'),
- self.create_option(options.UserBooleanOption,
- self.form_compileropt_key('rtti'),
- 'Enable RTTI',
- True),
- self.create_option(options.UserBooleanOption,
- self.form_compileropt_key('debugstl'),
- 'STL debug mode',
- False),
- )
- cppstd_choices = [
- 'c++98', 'c++03', 'c++11', 'c++14', 'c++17', 'c++1z', 'c++2a', 'c++20',
- ]
- if version_compare(self.version, self._CPP23_VERSION):
- cppstd_choices.append('c++23')
- if version_compare(self.version, self._CPP26_VERSION):
- cppstd_choices.append('c++26')
- std_opt = opts[self.form_compileropt_key('std')]
- assert isinstance(std_opt, options.UserStdOption), 'for mypy'
- std_opt.set_versions(cppstd_choices, gnu=True)
+ opts = super().get_options()
+
+ key = self.form_compileropt_key('eh')
+ opts[key] = options.UserComboOption(
+ self.make_option_name(key),
+ 'C++ exception handling type.',
+ 'default',
+ choices=['none', 'default', 'a', 's', 'sc'])
+
+ key = self.form_compileropt_key('rtti')
+ opts[key] = options.UserBooleanOption(
+ self.make_option_name(key),
+ 'Enable RTTI',
+ True)
+
+ key = self.form_compileropt_key('debugstl')
+ opts[key] = options.UserBooleanOption(
+ self.make_option_name(key),
+ 'STL debug mode',
+ False)
+
if self.info.is_windows() or self.info.is_cygwin():
- self.update_options(
- opts,
- self.create_option(options.UserArrayOption,
- self.form_compileropt_key('winlibs'),
- 'Standard Win libraries to link against',
- gnu_winlibs),
- )
+ key = self.form_compileropt_key('winlibs')
+ opts[key] = options.UserStringArrayOption(
+ self.make_option_name(key),
+ 'Standard Win libraries to link against',
+ gnu_winlibs)
return opts
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_compile_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
args: T.List[str] = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
- if std != 'none':
- args.append(self._find_best_cpp_std(std))
- key = self.form_compileropt_key('eh')
- non_msvc_eh_options(options.get_value(key), args)
+ rtti = self.get_compileropt_value('rtti', env, target, subproject)
+ debugstl = self.get_compileropt_value('debugstl', env, target, subproject)
+ eh = self.get_compileropt_value('eh', env, target, subproject)
- key = self.form_compileropt_key('debugstl')
- if options.get_value(key):
+ assert isinstance(rtti, bool)
+ assert isinstance(eh, str)
+ assert isinstance(debugstl, bool)
+
+ non_msvc_eh_options(eh, args)
+
+ if debugstl:
args.append('-D_GLIBCXX_DEBUG=1')
# We can't do _LIBCPP_DEBUG because it's unreliable unless libc++ was built with it too:
@@ -296,23 +287,33 @@ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]
if version_compare(self.version, '>=18'):
args.append('-D_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_DEBUG')
- key = self.form_compileropt_key('rtti')
- if not options.get_value(key):
+ if not rtti:
args.append('-fno-rtti')
return args
- def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
+ args: T.List[str] = []
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
+ if std != 'none':
+ args.append(self._find_best_cpp_std(std))
+ return args
+
+ def get_option_link_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
if self.info.is_windows() or self.info.is_cygwin():
# without a typedict mypy can't understand this.
- key = self.form_compileropt_key('winlibs')
- libs = options.get_value(key).copy()
- assert isinstance(libs, list)
+ retval = self.get_compileropt_value('winlibs', env, target, subproject)
+ assert isinstance(retval, list)
+ libs = retval[:]
for l in libs:
assert isinstance(l, str)
return libs
return []
+ def is_libcpp_enable_assertions_deprecated(self) -> bool:
+ return version_compare(self.version, ">=18")
+
def get_assert_args(self, disable: bool, env: 'Environment') -> T.List[str]:
if disable:
return ['-DNDEBUG']
@@ -325,25 +326,32 @@ def get_assert_args(self, disable: bool, env: 'Environment') -> T.List[str]:
if self.language_stdlib_provider(env) == 'stdc++':
return ['-D_GLIBCXX_ASSERTIONS=1']
else:
- if version_compare(self.version, '>=18'):
+ if self.is_libcpp_enable_assertions_deprecated():
return ['-D_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_FAST']
elif version_compare(self.version, '>=15'):
return ['-D_LIBCPP_ENABLE_ASSERTIONS=1']
return []
+ def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+ args = super().get_pch_use_args(pch_dir, header)
+ if version_compare(self.version, '>=11'):
+ return ['-fpch-instantiate-templates'] + args
+ return args
+
class ArmLtdClangCPPCompiler(ClangCPPCompiler):
id = 'armltdclang'
-class AppleClangCPPCompiler(AppleCompilerMixin, ClangCPPCompiler):
-
- _CPP23_VERSION = '>=13.0.0'
- # TODO: We don't know which XCode version will include LLVM 17 yet, so
- # use something absurd.
- _CPP26_VERSION = '>=99.0.0'
+class AppleClangCPPCompiler(AppleCompilerMixin, AppleCPPStdsMixin, ClangCPPCompiler):
+ def is_libcpp_enable_assertions_deprecated(self) -> bool:
+ # Upstream libc++ deprecated _LIBCPP_ENABLE_ASSERTIONS
+ # in favor of _LIBCPP_HARDENING_MODE from version 18 onwards,
+ # but Apple Clang 17's libc++ has back-ported that change.
+ # See: https://github.com/mesonbuild/meson/issues/14440
+ return version_compare(self.version, ">=17")
class EmscriptenCPPCompiler(EmscriptenMixin, ClangCPPCompiler):
@@ -372,10 +380,10 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
info, linker=linker,
defines=defines, full_version=full_version)
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args: T.List[str] = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
if std != 'none':
args.append(self._find_best_cpp_std(std))
return args
@@ -401,38 +409,39 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
'everything': ['-Weverything']}
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = CPPCompiler.get_options(self)
+ opts = super().get_options()
+
+ key = self.form_compileropt_key('eh')
+ opts[key] = options.UserComboOption(
+ self.make_option_name(key),
+ 'C++ exception handling type.',
+ 'default',
+ choices=['none', 'default', 'a', 's', 'sc'])
+
key = self.form_compileropt_key('std')
- self.update_options(
- opts,
- self.create_option(options.UserComboOption,
- key.evolve('eh'),
- 'C++ exception handling type.',
- ['none', 'default', 'a', 's', 'sc'],
- 'default'),
- )
std_opt = opts[key]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
std_opt.set_versions(['c++98', 'c++03', 'c++11', 'c++14', 'c++17'], gnu=True)
return opts
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args: T.List[str] = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
if std != 'none':
args.append('-std=' + std)
- key = self.form_compileropt_key('eh')
- non_msvc_eh_options(options.get_value(key), args)
+ eh = self.get_compileropt_value('eh', env, target, subproject)
+ assert isinstance(eh, str)
+ non_msvc_eh_options(eh, args)
return args
- def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_link_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
return []
-class GnuCPPCompiler(_StdCPPLibMixin, GnuCompiler, CPPCompiler):
+class GnuCPPCompiler(_StdCPPLibMixin, GnuCPPStds, GnuCompiler, CPPCompiler):
def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
info: 'MachineInfo',
linker: T.Optional['DynamicLinker'] = None,
@@ -451,71 +460,70 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
self.supported_warn_args(gnu_cpp_warning_args))}
def get_options(self) -> 'MutableKeyedOptionDictType':
- key = self.form_compileropt_key('std')
- opts = CPPCompiler.get_options(self)
- self.update_options(
- opts,
- self.create_option(options.UserComboOption,
- self.form_compileropt_key('eh'),
- 'C++ exception handling type.',
- ['none', 'default', 'a', 's', 'sc'],
- 'default'),
- self.create_option(options.UserBooleanOption,
- self.form_compileropt_key('rtti'),
- 'Enable RTTI',
- True),
- self.create_option(options.UserBooleanOption,
- self.form_compileropt_key('debugstl'),
- 'STL debug mode',
- False),
- )
- cppstd_choices = [
- 'c++98', 'c++03', 'c++11', 'c++14', 'c++17', 'c++1z',
- 'c++2a', 'c++20',
- ]
- if version_compare(self.version, '>=11.0.0'):
- cppstd_choices.append('c++23')
- if version_compare(self.version, '>=14.0.0'):
- cppstd_choices.append('c++26')
- std_opt = opts[key]
- assert isinstance(std_opt, options.UserStdOption), 'for mypy'
- std_opt.set_versions(cppstd_choices, gnu=True)
+ opts = super().get_options()
+
+ key = self.form_compileropt_key('eh')
+ opts[key] = options.UserComboOption(
+ self.make_option_name(key),
+ 'C++ exception handling type.',
+ 'default',
+ choices=['none', 'default', 'a', 's', 'sc'])
+
+ key = self.form_compileropt_key('rtti')
+ opts[key] = options.UserBooleanOption(
+ self.make_option_name(key),
+ 'Enable RTTI',
+ True)
+
+ key = self.form_compileropt_key('debugstl')
+ opts[key] = options.UserBooleanOption(
+ self.make_option_name(key),
+ 'STL debug mode',
+ False)
+
if self.info.is_windows() or self.info.is_cygwin():
- self.update_options(
- opts,
- self.create_option(options.UserArrayOption,
- key.evolve('cpp_winlibs'),
- 'Standard Win libraries to link against',
- gnu_winlibs),
- )
+ key = key.evolve(name='cpp_winlibs')
+ opts[key] = options.UserStringArrayOption(
+ self.make_option_name(key),
+ 'Standard Win libraries to link against',
+ gnu_winlibs)
+
return opts
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_compile_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
args: T.List[str] = []
- stdkey = self.form_compileropt_key('std')
- ehkey = self.form_compileropt_key('eh')
- rttikey = self.form_compileropt_key('rtti')
- debugstlkey = self.form_compileropt_key('debugstl')
- std = options.get_value(stdkey)
- if std != 'none':
- args.append(self._find_best_cpp_std(std))
+ rtti = self.get_compileropt_value('rtti', env, target, subproject)
+ debugstl = self.get_compileropt_value('debugstl', env, target, subproject)
+ eh = self.get_compileropt_value('eh', env, target, subproject)
+
+ assert isinstance(rtti, bool)
+ assert isinstance(eh, str)
+ assert isinstance(debugstl, bool)
- non_msvc_eh_options(options.get_value(ehkey), args)
+ non_msvc_eh_options(eh, args)
- if not options.get_value(rttikey):
+ if not rtti:
args.append('-fno-rtti')
- if options.get_value(debugstlkey):
+ if debugstl:
args.append('-D_GLIBCXX_DEBUG=1')
return args
- def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
+ args: T.List[str] = []
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
+ if std != 'none':
+ args.append(self._find_best_cpp_std(std))
+ return args
+
+ def get_option_link_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
if self.info.is_windows() or self.info.is_cygwin():
# without a typedict mypy can't understand this.
- key = self.form_compileropt_key('winlibs')
- libs = options.get_value(key).copy()
- assert isinstance(libs, list)
+ retval = self.get_compileropt_value('winlibs', env, target, subproject)
+ assert isinstance(retval, list)
+ libs: T.List[str] = retval[:]
for l in libs:
assert isinstance(l, str)
return libs
@@ -567,7 +575,7 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
PGICompiler.__init__(self)
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = CPPCompiler.get_options(self)
+ opts = super().get_options()
cppstd_choices = [
'c++98', 'c++03', 'c++11', 'c++14', 'c++17', 'c++20', 'c++23',
'gnu++98', 'gnu++03', 'gnu++11', 'gnu++14', 'gnu++17', 'gnu++20'
@@ -589,7 +597,20 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
ElbrusCompiler.__init__(self)
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = CPPCompiler.get_options(self)
+ opts = super().get_options()
+
+ key = self.form_compileropt_key('eh')
+ opts[key] = options.UserComboOption(
+ self.make_option_name(key),
+ 'C++ exception handling type.',
+ 'default',
+ choices=['none', 'default', 'a', 's', 'sc'])
+
+ key = self.form_compileropt_key('debugstl')
+ opts[key] = options.UserBooleanOption(
+ self.make_option_name(key),
+ 'STL debug mode',
+ False)
cpp_stds = ['c++98']
if version_compare(self.version, '>=1.20.00'):
@@ -608,18 +629,6 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
cpp_stds += ['c++20']
key = self.form_compileropt_key('std')
- self.update_options(
- opts,
- self.create_option(options.UserComboOption,
- self.form_compileropt_key('eh'),
- 'C++ exception handling type.',
- ['none', 'default', 'a', 's', 'sc'],
- 'default'),
- self.create_option(options.UserBooleanOption,
- self.form_compileropt_key('debugstl'),
- 'STL debug mode',
- False),
- )
std_opt = opts[key]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
std_opt.set_versions(cpp_stds, gnu=True)
@@ -638,21 +647,27 @@ def has_function(self, funcname: str, prefix: str, env: 'Environment', *,
dependencies=dependencies)
# Elbrus C++ compiler does not support RTTI, so don't check for it.
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_compile_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
args: T.List[str] = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
- if std != 'none':
- args.append(self._find_best_cpp_std(std))
+ eh = self.get_compileropt_value('eh', env, target, subproject)
+ assert isinstance(eh, str)
- key = self.form_compileropt_key('eh')
- non_msvc_eh_options(options.get_value(key), args)
+ non_msvc_eh_options(eh, args)
- key = self.form_compileropt_key('debugstl')
- if options.get_value(key):
+ debugstl = self.get_compileropt_value('debugstl', env, target, subproject)
+ assert isinstance(debugstl, bool)
+ if debugstl:
args.append('-D_GLIBCXX_DEBUG=1')
return args
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
+ args: T.List[str] = []
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
+ if std != 'none':
+ args.append(self._find_best_cpp_std(std))
+ return args
+
class IntelCPPCompiler(IntelGnuLikeCompiler, CPPCompiler):
def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
@@ -671,7 +686,27 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
'everything': default_warn_args + ['-Wextra']}
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = CPPCompiler.get_options(self)
+ opts = super().get_options()
+
+ key = self.form_compileropt_key('eh')
+ opts[key] = options.UserComboOption(
+ self.make_option_name(key),
+ 'C++ exception handling type.',
+ 'default',
+ choices=['none', 'default', 'a', 's', 'sc'])
+
+ key = self.form_compileropt_key('rtti')
+ opts[key] = options.UserBooleanOption(
+ self.make_option_name(key),
+ 'Enable RTTI',
+ True)
+
+ key = self.form_compileropt_key('debugstl')
+ opts[key] = options.UserBooleanOption(
+ self.make_option_name(key),
+ 'STL debug mode',
+ False)
+
# Every Unix compiler under the sun seems to accept -std=c++03,
# with the exception of ICC. Instead of preventing the user from
# globally requesting C++03, we transparently remap it to C++98
@@ -688,47 +723,42 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
c_stds += ['c++2a']
g_stds += ['gnu++2a']
- key = self.form_compileropt_key('std')
- self.update_options(
- opts,
- self.create_option(options.UserComboOption,
- self.form_compileropt_key('eh'),
- 'C++ exception handling type.',
- ['none', 'default', 'a', 's', 'sc'],
- 'default'),
- self.create_option(options.UserBooleanOption,
- self.form_compileropt_key('rtti'),
- 'Enable RTTI',
- True),
- self.create_option(options.UserBooleanOption,
- self.form_compileropt_key('debugstl'),
- 'STL debug mode',
- False),
- )
- std_opt = opts[key]
- assert isinstance(std_opt, options.UserStdOption), 'for mypy'
- std_opt.set_versions(c_stds + g_stds)
+ self._update_language_stds(opts, c_stds + g_stds)
return opts
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_compile_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
args: T.List[str] = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+
+ rtti = self.get_compileropt_value('rtti', env, target, subproject)
+ debugstl = self.get_compileropt_value('debugstl', env, target, subproject)
+ eh = self.get_compileropt_value('eh', env, target, subproject)
+
+ assert isinstance(rtti, bool)
+ assert isinstance(eh, str)
+ assert isinstance(debugstl, bool)
+
+ if eh == 'none':
+ args.append('-fno-exceptions')
+ if not rtti:
+ args.append('-fno-rtti')
+ if debugstl:
+ args.append('-D_GLIBCXX_DEBUG=1')
+ return args
+
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
+ args: T.List[str] = []
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
if std != 'none':
remap_cpp03 = {
'c++03': 'c++98',
'gnu++03': 'gnu++98'
}
args.append('-std=' + remap_cpp03.get(std, std))
- if options.get_value(key.evolve('eh')) == 'none':
- args.append('-fno-exceptions')
- if not options.get_value(key.evolve('rtti')):
- args.append('-fno-rtti')
- if options.get_value(key.evolve('debugstl')):
- args.append('-D_GLIBCXX_DEBUG=1')
+
return args
- def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_link_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
return []
@@ -755,39 +785,51 @@ class VisualStudioLikeCPPCompilerMixin(CompilerMixinBase):
'c++latest': (False, "latest"),
}
- def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_link_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
# need a typeddict for this
- key = self.form_compileropt_key('winlibs')
- return T.cast('T.List[str]', options.get_value(key)[:])
+ key = self.form_compileropt_key('winlibs').evolve(subproject=subproject)
+ if target:
+ value = env.coredata.get_option_for_target(target, key)
+ else:
+ value = env.coredata.optstore.get_value_for(key)
+ return T.cast('T.List[str]', value)[:]
def _get_options_impl(self, opts: 'MutableKeyedOptionDictType', cpp_stds: T.List[str]) -> 'MutableKeyedOptionDictType':
- key = self.form_compileropt_key('std')
- self.update_options(
- opts,
- self.create_option(options.UserComboOption,
- self.form_compileropt_key('eh'),
- 'C++ exception handling type.',
- ['none', 'default', 'a', 's', 'sc'],
- 'default'),
- self.create_option(options.UserBooleanOption,
- self.form_compileropt_key('rtti'),
- 'Enable RTTI',
- True),
- self.create_option(options.UserArrayOption,
- self.form_compileropt_key('winlibs'),
- 'Windows libs to link against.',
- msvc_winlibs),
- )
- std_opt = opts[key]
+ opts = super().get_options()
+
+ key = self.form_compileropt_key('eh')
+ opts[key] = options.UserComboOption(
+ self.make_option_name(key),
+ 'C++ exception handling type.',
+ 'default',
+ choices=['none', 'default', 'a', 's', 'sc'])
+
+ key = self.form_compileropt_key('rtti')
+ opts[key] = options.UserBooleanOption(
+ self.make_option_name(key),
+ 'Enable RTTI',
+ True)
+
+ key = self.form_compileropt_key('winlibs')
+ opts[key] = options.UserStringArrayOption(
+ self.make_option_name(key),
+ 'Standard Win libraries to link against',
+ msvc_winlibs)
+
+ std_opt = opts[self.form_compileropt_key('std')]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
std_opt.set_versions(cpp_stds)
return opts
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_compile_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
args: T.List[str] = []
- key = self.form_compileropt_key('std')
- eh = options.get_value(self.form_compileropt_key('eh'))
+ eh = self.get_compileropt_value('eh', env, target, subproject)
+ rtti = self.get_compileropt_value('rtti', env, target, subproject)
+
+ assert isinstance(rtti, bool)
+ assert isinstance(eh, str)
+
if eh == 'default':
args.append('/EHsc')
elif eh == 'none':
@@ -795,24 +837,27 @@ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]
else:
args.append('/EH' + eh)
- if not options.get_value(self.form_compileropt_key('rtti')):
+ if not rtti:
args.append('/GR-')
- permissive, ver = self.VC_VERSION_MAP[options.get_value(key)]
+ return args
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
+ args: T.List[str] = []
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
+
+ permissive, ver = self.VC_VERSION_MAP[std]
if ver is not None:
args.append(f'/std:c++{ver}')
-
if not permissive:
args.append('/permissive-')
-
return args
def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]:
# XXX: this is a hack because so much GnuLike stuff is in the base CPPCompiler class.
return Compiler.get_compiler_check_args(self, mode)
-
class CPP11AsCPP14Mixin(CompilerMixinBase):
"""Mixin class for VisualStudio and ClangCl to replace C++11 std with C++14.
@@ -820,25 +865,24 @@ class CPP11AsCPP14Mixin(CompilerMixinBase):
This is a limitation of Clang and MSVC that ICL doesn't share.
"""
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
# Note: there is no explicit flag for supporting C++11; we attempt to do the best we can
# which means setting the C++ standard version to C++14, in compilers that support it
# (i.e., after VS2015U3)
# if one is using anything before that point, one cannot set the standard.
- key = self.form_compileropt_key('std')
- if options.get_value(key) in {'vc++11', 'c++11'}:
+ stdkey = self.form_compileropt_key('std').evolve(subproject=subproject)
+ if target is not None:
+ std = env.coredata.get_option_for_target(target, stdkey)
+ else:
+ std = env.coredata.optstore.get_value_for(stdkey)
+ if std in {'vc++11', 'c++11'}:
mlog.warning(self.id, 'does not support C++11;',
'attempting best effort; setting the standard to C++14',
once=True, fatal=False)
- # Don't mutate anything we're going to change, we need to use
- # deepcopy since we're messing with members, and we can't simply
- # copy the members because the option proxy doesn't support it.
- options = copy.deepcopy(options)
- if options.get_value(key) == 'vc++11':
- options.set_value(key, 'vc++14')
- else:
- options.set_value(key, 'c++14')
- return super().get_option_compile_args(options)
+ original_args = super().get_option_std_args(target, env, subproject)
+ std_mapping = {'/std:c++11': '/std:c++14'}
+ processed_args = [std_mapping.get(x, x) for x in original_args]
+ return processed_args
class VisualStudioCPPCompiler(CPP11AsCPP14Mixin, VisualStudioLikeCPPCompilerMixin, MSVCCompiler, CPPCompiler):
@@ -871,14 +915,12 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
cpp_stds.extend(['c++20', 'vc++20'])
return self._get_options_impl(super().get_options(), cpp_stds)
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
- key = self.form_compileropt_key('std')
- if options.get_value(key) != 'none' and version_compare(self.version, '<19.00.24210'):
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
+ std = self.get_compileropt_value('std', env, target, subproject)
+ if std != 'none' and version_compare(self.version, '<19.00.24210'):
mlog.warning('This version of MSVC does not support cpp_std arguments', fatal=False)
- options = copy.copy(options)
- options.set_value(key, 'none')
- args = super().get_option_compile_args(options)
+ args = super().get_option_std_args(target, env, subproject)
if version_compare(self.version, '<19.11'):
try:
@@ -916,8 +958,13 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic
IntelVisualStudioLikeCompiler.__init__(self, target)
def get_options(self) -> 'MutableKeyedOptionDictType':
- # This has only been tested with version 19.0,
- cpp_stds = ['none', 'c++11', 'vc++11', 'c++14', 'vc++14', 'c++17', 'vc++17', 'c++latest']
+ # This has only been tested with version 19.0, 2021.2.1, 2024.4.2 and 2025.0.1
+ if version_compare(self.version, '<2021.1.0'):
+ cpp_stds = ['none', 'c++11', 'vc++11', 'c++14', 'vc++14', 'c++17', 'vc++17', 'c++latest']
+ else:
+ cpp_stds = ['none', 'c++14', 'c++17', 'c++latest']
+ if version_compare(self.version, '>=2024.1.0'):
+ cpp_stds += ['c++20']
return self._get_options_impl(super().get_options(), cpp_stds)
def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]:
@@ -940,23 +987,23 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
ArmCompiler.__init__(self)
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = CPPCompiler.get_options(self)
+ opts = super().get_options()
std_opt = self.form_compileropt_key('std')
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
std_opt.set_versions(['c++03', 'c++11'])
return opts
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args: T.List[str] = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
if std == 'c++11':
args.append('--cpp11')
elif std == 'c++03':
args.append('--cpp')
return args
- def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_link_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
return []
def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]:
@@ -976,16 +1023,13 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
def get_always_args(self) -> T.List[str]:
return ['-nologo', '-lang=cpp']
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
- return []
-
def get_compile_only_args(self) -> T.List[str]:
return []
def get_output_args(self, outputname: str) -> T.List[str]:
return [f'-output=obj={outputname}']
- def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_link_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
return []
def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]:
@@ -1001,17 +1045,17 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
TICompiler.__init__(self)
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = CPPCompiler.get_options(self)
+ opts = super().get_options()
key = self.form_compileropt_key('std')
std_opt = opts[key]
assert isinstance(std_opt, options.UserStdOption), 'for mypy'
std_opt.set_versions(['c++03'])
return opts
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args: T.List[str] = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
if std != 'none':
args.append('--' + std)
return args
@@ -1019,7 +1063,7 @@ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]
def get_always_args(self) -> T.List[str]:
return []
- def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_link_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
return []
class C2000CPPCompiler(TICPPCompiler):
@@ -1044,15 +1088,14 @@ def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[st
return mwccarm_instruction_set_args.get(instruction_set, None)
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = CPPCompiler.get_options(self)
- key = self.form_compileropt_key('std')
- opts[key].choices = ['none']
+ opts = super().get_options()
+ self._update_language_stds(opts, [])
return opts
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args: T.List[str] = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
if std != 'none':
args.append('-lang')
args.append(std)
@@ -1073,15 +1116,14 @@ def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[st
return mwcceppc_instruction_set_args.get(instruction_set, None)
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = CPPCompiler.get_options(self)
- key = self.form_compileropt_key('std')
- opts[key].choices = ['none']
+ opts = super().get_options()
+ self._update_language_stds(opts, [])
return opts
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args: T.List[str] = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
if std != 'none':
args.append('-lang ' + std)
return args
diff --git a/mesonbuild/compilers/cuda.py b/mesonbuild/compilers/cuda.py
index 38a938f24aff..6cc6f963b1cf 100644
--- a/mesonbuild/compilers/cuda.py
+++ b/mesonbuild/compilers/cuda.py
@@ -1,5 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2012-2017 The Meson development team
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
@@ -8,20 +9,17 @@
import string
import typing as T
-from .. import coredata
from .. import options
from .. import mlog
from ..mesonlib import (
EnvironmentException, Popen_safe,
is_windows, LibType, version_compare
)
-from ..options import OptionKey
-from .compilers import Compiler
+from .compilers import Compiler, CompileCheckMode
if T.TYPE_CHECKING:
- from .compilers import CompileCheckMode
from ..build import BuildTarget
- from ..coredata import MutableKeyedOptionDictType, KeyedOptionDictType
+ from ..options import MutableKeyedOptionDictType
from ..dependencies import Dependency
from ..environment import Environment # noqa: F401
from ..envconfig import MachineInfo
@@ -553,7 +551,7 @@ def sanity_check(self, work_dir: str, env: 'Environment') -> None:
# Use the -ccbin option, if available, even during sanity checking.
# Otherwise, on systems where CUDA does not support the default compiler,
# NVCC becomes unusable.
- flags += self.get_ccbin_args(env.coredata.optstore)
+ flags += self.get_ccbin_args(None, env, '')
# If cross-compiling, we can't run the sanity check, only compile it.
if self.is_cross and not env.has_exe_wrapper():
@@ -646,48 +644,51 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
if version_compare(self.version, self._CPP20_VERSION):
cpp_stds += ['c++20']
- return self.update_options(
- super().get_options(),
- self.create_option(options.UserComboOption,
- self.form_compileropt_key('std'),
- 'C++ language standard to use with CUDA',
- cpp_stds,
- 'none'),
- self.create_option(options.UserStringOption,
- self.form_compileropt_key('ccbindir'),
- 'CUDA non-default toolchain directory to use (-ccbin)',
- ''),
- )
-
- def _to_host_compiler_options(self, master_options: 'KeyedOptionDictType') -> 'KeyedOptionDictType':
- """
- Convert an NVCC Option set to a host compiler's option set.
- """
+ opts = super().get_options()
+
+ key = self.form_compileropt_key('std')
+ opts[key] = options.UserComboOption(
+ self.make_option_name(key),
+ 'C++ language standard to use with CUDA',
+ 'none',
+ choices=cpp_stds)
+
+ key = self.form_compileropt_key('ccbindir')
+ opts[key] = options.UserStringOption(
+ self.make_option_name(key),
+ 'CUDA non-default toolchain directory to use (-ccbin)',
+ '')
+
+ return opts
- # We must strip the -std option from the host compiler option set, as NVCC has
- # its own -std flag that may not agree with the host compiler's.
- host_options = {key: master_options.get(key, opt) for key, opt in self.host_compiler.get_options().items()}
- std_key = OptionKey(f'{self.host_compiler.language}_std', machine=self.for_machine)
- overrides = {std_key: 'none'}
- # To shut up mypy.
- return coredata.OptionsView(host_options, overrides=overrides)
+ def get_option_compile_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
+ args = self.get_ccbin_args(target, env, subproject)
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
- args = self.get_ccbin_args(options)
+ try:
+ host_compiler_args = self.host_compiler.get_option_compile_args(target, env, subproject)
+ except KeyError:
+ host_compiler_args = []
+ return args + self._to_host_flags(host_compiler_args)
+
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
# On Windows, the version of the C++ standard used by nvcc is dictated by
# the combination of CUDA version and MSVC version; the --std= is thus ignored
# and attempting to use it will result in a warning: https://stackoverflow.com/a/51272091/741027
if not is_windows():
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
if std != 'none':
- args.append('--std=' + std)
+ return ['--std=' + std]
- return args + self._to_host_flags(self.host_compiler.get_option_compile_args(self._to_host_compiler_options(options)))
+ try:
+ host_compiler_args = self.host_compiler.get_option_std_args(target, env, subproject)
+ except KeyError:
+ host_compiler_args = []
+ return self._to_host_flags(host_compiler_args)
- def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
- args = self.get_ccbin_args(options)
- return args + self._to_host_flags(self.host_compiler.get_option_link_args(self._to_host_compiler_options(options)), Phase.LINKER)
+ def get_option_link_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
+ args = self.get_ccbin_args(target, env, subproject)
+ return args + self._to_host_flags(self.host_compiler.get_option_link_args(target, env, subproject), Phase.LINKER)
def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
suffix: str, soversion: str,
@@ -707,10 +708,10 @@ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
# return self._to_host_flags(self.host_compiler.get_optimization_args(optimization_level))
return cuda_optimization_args[optimization_level]
- def sanitizer_compile_args(self, value: str) -> T.List[str]:
+ def sanitizer_compile_args(self, value: T.List[str]) -> T.List[str]:
return self._to_host_flags(self.host_compiler.sanitizer_compile_args(value))
- def sanitizer_link_args(self, value: str) -> T.List[str]:
+ def sanitizer_link_args(self, value: T.List[str]) -> T.List[str]:
return self._to_host_flags(self.host_compiler.sanitizer_link_args(value))
def get_debug_args(self, is_debug: bool) -> T.List[str]:
@@ -797,9 +798,15 @@ def get_dependency_compile_args(self, dep: 'Dependency') -> T.List[str]:
def get_dependency_link_args(self, dep: 'Dependency') -> T.List[str]:
return self._to_host_flags(super().get_dependency_link_args(dep), Phase.LINKER)
- def get_ccbin_args(self, ccoptions: 'KeyedOptionDictType') -> T.List[str]:
- key = self.form_compileropt_key('ccbindir')
- ccbindir = ccoptions.get_value(key)
+ def get_ccbin_args(self,
+ target: 'T.Optional[BuildTarget]',
+ env: 'Environment',
+ subproject: T.Optional[str] = None) -> T.List[str]:
+ key = self.form_compileropt_key('ccbindir').evolve(subproject=subproject)
+ if target:
+ ccbindir = env.coredata.get_option_for_target(target, key)
+ else:
+ ccbindir = env.coredata.optstore.get_value_for(key)
if isinstance(ccbindir, str) and ccbindir != '':
return [self._shield_nvcc_list_arg('-ccbin='+ccbindir, False)]
else:
@@ -813,3 +820,12 @@ def get_profile_use_args(self) -> T.List[str]:
def get_assert_args(self, disable: bool, env: 'Environment') -> T.List[str]:
return self.host_compiler.get_assert_args(disable, env)
+
+ def has_multi_arguments(self, args: T.List[str], env: Environment) -> T.Tuple[bool, bool]:
+ args = self._to_host_flags(args)
+ return self.compiles('int main(void) { return 0; }', env, extra_args=args, mode=CompileCheckMode.COMPILE)
+
+ def has_multi_link_arguments(self, args: T.List[str], env: Environment) -> T.Tuple[bool, bool]:
+ args = ['-Xnvlink='+self._shield_nvcc_list_arg(s) for s in self.linker.fatal_warnings()]
+ args += self._to_host_flags(args, phase=Phase.LINKER)
+ return self.compiles('int main(void) { return 0; }', env, extra_args=args, mode=CompileCheckMode.LINK)
diff --git a/mesonbuild/compilers/cython.py b/mesonbuild/compilers/cython.py
index 5cc0200458fa..50bb4652b06a 100644
--- a/mesonbuild/compilers/cython.py
+++ b/mesonbuild/compilers/cython.py
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: Apache-2.0
-# Copyright © 2021 Intel Corporation
+# Copyright © 2021-2025 Intel Corporation
from __future__ import annotations
"""Abstraction for Cython language compilers."""
@@ -11,8 +11,9 @@
from .compilers import Compiler
if T.TYPE_CHECKING:
- from ..coredata import MutableKeyedOptionDictType, KeyedOptionDictType
+ from ..options import MutableKeyedOptionDictType
from ..environment import Environment
+ from ..build import BuildTarget
class CythonCompiler(Compiler):
@@ -67,27 +68,32 @@ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
return new
def get_options(self) -> 'MutableKeyedOptionDictType':
- return self.update_options(
- super().get_options(),
- self.create_option(options.UserComboOption,
- self.form_compileropt_key('version'),
- 'Python version to target',
- ['2', '3'],
- '3'),
- self.create_option(options.UserComboOption,
- self.form_compileropt_key('language'),
- 'Output C or C++ files',
- ['c', 'cpp'],
- 'c'),
- )
-
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
- args: T.List[str] = []
+ opts = super().get_options()
+
key = self.form_compileropt_key('version')
- version = options.get_value(key)
- args.append(f'-{version}')
+ opts[key] = options.UserComboOption(
+ self.make_option_name(key),
+ 'Python version to target',
+ '3',
+ choices=['2', '3'])
+
key = self.form_compileropt_key('language')
- lang = options.get_value(key)
+ opts[key] = options.UserComboOption(
+ self.make_option_name(key),
+ 'Output C or C++ files',
+ 'c',
+ choices=['c', 'cpp'])
+
+ return opts
+
+ def get_option_compile_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
+ args: T.List[str] = []
+ version = self.get_compileropt_value('version', env, target, subproject)
+ assert isinstance(version, str)
+ args.append(f'-{version}')
+
+ lang = self.get_compileropt_value('language', env, target, subproject)
+ assert isinstance(lang, str)
if lang == 'cpp':
args.append('--cplus')
return args
diff --git a/mesonbuild/compilers/detect.py b/mesonbuild/compilers/detect.py
index 0507a7f81320..53bdd85131d9 100644
--- a/mesonbuild/compilers/detect.py
+++ b/mesonbuild/compilers/detect.py
@@ -98,6 +98,7 @@ def compiler_from_language(env: 'Environment', lang: str, for_machine: MachineCh
'cython': detect_cython_compiler,
'nasm': detect_nasm_compiler,
'masm': detect_masm_compiler,
+ 'linearasm': detect_linearasm_compiler,
}
return lang_map[lang](env, for_machine) if lang in lang_map else None
@@ -240,6 +241,8 @@ def detect_static_linker(env: 'Environment', compiler: Compiler) -> StaticLinker
return linkers.MetrowerksStaticLinkerARM(linker)
else:
return linkers.MetrowerksStaticLinkerEmbeddedPowerPC(linker)
+ if 'TASKING VX-toolset' in err:
+ return linkers.TaskingStaticLinker(linker)
if p.returncode == 0:
return linkers.ArLinker(compiler.for_machine, linker)
if p.returncode == 1 and err.startswith('usage'): # OSX
@@ -605,6 +608,23 @@ def sanitize(p: T.Optional[str]) -> T.Optional[str]:
return cls(
ccache, compiler, compiler_version, for_machine, is_cross, info,
full_version=full_version, linker=linker)
+ if 'TASKING VX-toolset' in err:
+ cls = c.TaskingCCompiler
+ lnk = linkers.TaskingLinker
+
+ tasking_ver_match = re.search(r'v([0-9]+)\.([0-9]+)r([0-9]+) Build ([0-9]+)', err)
+ assert tasking_ver_match is not None, 'for mypy'
+ tasking_version = '.'.join(x for x in tasking_ver_match.groups() if x is not None)
+
+ env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ ld = env.lookup_binary_entry(for_machine, cls.language + '_ld')
+ if ld is None:
+ raise MesonException(f'{cls.language}_ld was not properly defined in your cross file')
+
+ linker = lnk(ld, for_machine, version=tasking_version)
+ return cls(
+ ccache, compiler, tasking_version, for_machine, is_cross, info,
+ full_version=full_version, linker=linker)
_handle_exceptions(popen_exceptions, compilers)
raise EnvironmentException(f'Unknown compiler {compilers}')
@@ -877,9 +897,13 @@ def _detect_objc_or_objcpp_compiler(env: 'Environment', lang: str, for_machine:
version = _get_gnu_version_from_defines(defines)
comp = objc.GnuObjCCompiler if lang == 'objc' else objcpp.GnuObjCPPCompiler
linker = guess_nix_linker(env, compiler, comp, version, for_machine)
- return comp(
+ c = comp(
ccache, compiler, version, for_machine, is_cross, info,
defines, linker=linker)
+ if not c.compiles('int main(void) { return 0; }', env)[0]:
+ popen_exceptions[join_args(compiler)] = f'GCC was not built with support for {"objective-c" if lang == "objc" else "objective-c++"}'
+ continue
+ return c
if 'clang' in out:
linker = None
defines = _get_clang_compiler_defines(compiler, lang)
@@ -1023,7 +1047,11 @@ def detect_rust_compiler(env: 'Environment', for_machine: MachineChoice) -> Rust
popen_exceptions[join_args(compiler + arg)] = e
continue
- version = search_version(out)
+ # Full version contains the "-nightly" or "-beta" suffixes, but version
+ # should just be X.Y.Z
+ full_version = search_version(out)
+ version = full_version.split('-', 1)[0]
+
cls: T.Type[RustCompiler] = rust.RustCompiler
# Clippy is a wrapper around rustc, but it doesn't have rustc in its
@@ -1039,9 +1067,14 @@ def detect_rust_compiler(env: 'Environment', for_machine: MachineChoice) -> Rust
except OSError as e:
popen_exceptions[join_args(compiler + arg)] = e
continue
- version = search_version(out)
+ full_version = search_version(out)
+ version = full_version.split('-', 1)[0]
cls = rust.ClippyRustCompiler
+ mlog.deprecation(
+ 'clippy-driver is not intended as a general purpose compiler. '
+ 'You can use "ninja clippy" in order to run clippy on a '
+ 'meson project.')
if 'rustc' in out:
# On Linux and mac rustc will invoke gcc (clang for mac
@@ -1066,7 +1099,7 @@ def detect_rust_compiler(env: 'Environment', for_machine: MachineChoice) -> Rust
extra_args: T.Dict[str, T.Union[str, bool]] = {}
always_args: T.List[str] = []
if is_link_exe:
- compiler.extend(cls.use_linker_args(cc.linker.exelist[0], ''))
+ compiler.extend(cls.use_linker_args(cc.linker.get_exe(), ''))
extra_args['direct'] = True
extra_args['machine'] = cc.linker.machine
else:
@@ -1098,7 +1131,7 @@ def detect_rust_compiler(env: 'Environment', for_machine: MachineChoice) -> Rust
# inserts the correct prefix itself.
assert isinstance(linker, linkers.VisualStudioLikeLinkerMixin)
linker.direct = True
- compiler.extend(cls.use_linker_args(linker.exelist[0], ''))
+ compiler.extend(cls.use_linker_args(linker.get_exe(), ''))
else:
# On linux and macos rust will invoke the c compiler for
# linking, on windows it will use lld-link or link.exe.
@@ -1115,7 +1148,7 @@ def detect_rust_compiler(env: 'Environment', for_machine: MachineChoice) -> Rust
env.coredata.add_lang_args(cls.language, cls, for_machine, env)
return cls(
compiler, version, for_machine, is_cross, info,
- linker=linker)
+ linker=linker, full_version=full_version)
_handle_exceptions(popen_exceptions, compilers)
raise EnvironmentException('Unreachable code (exception to make mypy happy)')
@@ -1158,7 +1191,11 @@ def detect_d_compiler(env: 'Environment', for_machine: MachineChoice) -> Compile
version = search_version(out)
full_version = out.split('\n', 1)[0]
- if 'LLVM D compiler' in out:
+ # The OpenD fork should stay close enough to upstream D (in
+ # the areas that interest us) to allow supporting them both
+ # without much hassle.
+ # See: https://github.com/orgs/opendlang/discussions/56
+ if 'LLVM D compiler' in out or 'LLVM Open D compiler' in out:
cls = d.LLVMDCompiler
# LDC seems to require a file
# We cannot use NamedTemporaryFile on windows, its documented
@@ -1353,6 +1390,26 @@ def detect_masm_compiler(env: 'Environment', for_machine: MachineChoice) -> Comp
_handle_exceptions(popen_exceptions, [comp])
raise EnvironmentException('Unreachable code (exception to make mypy happy)')
+def detect_linearasm_compiler(env: Environment, for_machine: MachineChoice) -> Compiler:
+ from .asm import TILinearAsmCompiler
+ comp = ['cl6x']
+ comp_class: T.Type[Compiler] = TILinearAsmCompiler
+ arg = '-h'
+ info = env.machines[for_machine]
+ cc = detect_c_compiler(env, for_machine)
+ is_cross = env.is_cross_build(for_machine)
+
+ popen_exceptions: T.Dict[str, Exception] = {}
+ try:
+ output = Popen_safe(comp + [arg])[2]
+ version = search_version(output)
+ env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ return comp_class([], comp, version, for_machine, info, cc.linker, is_cross=is_cross)
+ except OSError as e:
+ popen_exceptions[' '.join(comp + [arg])] = e
+ _handle_exceptions(popen_exceptions, [comp])
+ raise EnvironmentException('Unreachable code (exception to make mypy happy)')
+
# GNU/Clang defines and version
# =============================
diff --git a/mesonbuild/compilers/fortran.py b/mesonbuild/compilers/fortran.py
index 5012fba074a0..a25778cf931d 100644
--- a/mesonbuild/compilers/fortran.py
+++ b/mesonbuild/compilers/fortran.py
@@ -4,9 +4,11 @@
from __future__ import annotations
import typing as T
+import functools
import os
from .. import options
+from .. import mesonlib
from .compilers import (
clike_debug_args,
Compiler,
@@ -25,12 +27,13 @@
)
if T.TYPE_CHECKING:
- from ..coredata import MutableKeyedOptionDictType, KeyedOptionDictType
+ from ..options import MutableKeyedOptionDictType
from ..dependencies import Dependency
from ..envconfig import MachineInfo
from ..environment import Environment
from ..linkers.linkers import DynamicLinker
from ..mesonlib import MachineChoice
+ from ..build import BuildTarget
class FortranCompiler(CLikeCompiler, Compiler):
@@ -112,14 +115,146 @@ def has_multi_link_arguments(self, args: T.List[str], env: 'Environment') -> T.T
return self._has_multi_link_arguments(args, env, 'stop; end program')
def get_options(self) -> 'MutableKeyedOptionDictType':
- return self.update_options(
- super().get_options(),
- self.create_option(options.UserComboOption,
- self.form_compileropt_key('std'),
- 'Fortran language standard to use',
- ['none'],
- 'none'),
- )
+ opts = super().get_options()
+
+ key = self.form_compileropt_key('std')
+ opts[key] = options.UserComboOption(
+ self.make_option_name(key),
+ 'Fortran language standard to use',
+ 'none',
+ choices=['none'])
+
+ return opts
+
+ def _compile_int(self, expression: str, prefix: str, env: 'Environment',
+ extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]],
+ dependencies: T.Optional[T.List['Dependency']]) -> bool:
+ # Use a trick for emulating a static assert
+ # Taken from https://github.com/j3-fortran/fortran_proposals/issues/70
+ t = f'''program test
+ {prefix}
+ real(merge(kind(1.),-1,({expression}))), parameter :: fail = 1.
+ end program test'''
+ return self.compiles(t, env, extra_args=extra_args,
+ dependencies=dependencies)[0]
+
+ def cross_compute_int(self, expression: str, low: T.Optional[int], high: T.Optional[int],
+ guess: T.Optional[int], prefix: str, env: 'Environment',
+ extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> int:
+        # The only difference between this implementation and that of CLikeCompiler
+        # is a change in logical conjunction operator (.and. instead of &&)
+
+ # Try user's guess first
+ if isinstance(guess, int):
+ if self._compile_int(f'{expression} == {guess}', prefix, env, extra_args, dependencies):
+ return guess
+
+ # If no bounds are given, compute them in the limit of int32
+ maxint = 0x7fffffff
+ minint = -0x80000000
+ if not isinstance(low, int) or not isinstance(high, int):
+ if self._compile_int(f'{expression} >= 0', prefix, env, extra_args, dependencies):
+ low = cur = 0
+ while self._compile_int(f'{expression} > {cur}', prefix, env, extra_args, dependencies):
+ low = cur + 1
+ if low > maxint:
+ raise mesonlib.EnvironmentException('Cross-compile check overflowed')
+ cur = min(cur * 2 + 1, maxint)
+ high = cur
+ else:
+ high = cur = -1
+ while self._compile_int(f'{expression} < {cur}', prefix, env, extra_args, dependencies):
+ high = cur - 1
+ if high < minint:
+ raise mesonlib.EnvironmentException('Cross-compile check overflowed')
+ cur = max(cur * 2, minint)
+ low = cur
+ else:
+ # Sanity check limits given by user
+ if high < low:
+ raise mesonlib.EnvironmentException('high limit smaller than low limit')
+ condition = f'{expression} <= {high} .and. {expression} >= {low}'
+ if not self._compile_int(condition, prefix, env, extra_args, dependencies):
+ raise mesonlib.EnvironmentException('Value out of given range')
+
+ # Binary search
+ while low != high:
+ cur = low + int((high - low) / 2)
+ if self._compile_int(f'{expression} <= {cur}', prefix, env, extra_args, dependencies):
+ high = cur
+ else:
+ low = cur + 1
+
+ return low
+
+ def compute_int(self, expression: str, low: T.Optional[int], high: T.Optional[int],
+ guess: T.Optional[int], prefix: str, env: 'Environment', *,
+ extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]],
+ dependencies: T.Optional[T.List['Dependency']] = None) -> int:
+ if extra_args is None:
+ extra_args = []
+ if self.is_cross:
+ return self.cross_compute_int(expression, low, high, guess, prefix, env, extra_args, dependencies)
+ t = f'''program test
+ {prefix}
+ print '(i0)', {expression}
+ end program test
+ '''
+ res = self.run(t, env, extra_args=extra_args,
+ dependencies=dependencies)
+ if not res.compiled:
+ return -1
+ if res.returncode != 0:
+ raise mesonlib.EnvironmentException('Could not run compute_int test binary.')
+ return int(res.stdout)
+
+ def cross_sizeof(self, typename: str, prefix: str, env: 'Environment', *,
+ extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> int:
+ if extra_args is None:
+ extra_args = []
+ t = f'''program test
+ use iso_c_binding
+ {prefix}
+ {typename} :: something
+ end program test
+ '''
+ if not self.compiles(t, env, extra_args=extra_args,
+ dependencies=dependencies)[0]:
+ return -1
+ return self.cross_compute_int('c_sizeof(x)', None, None, None, prefix + '\nuse iso_c_binding\n' + typename + ' :: x', env, extra_args, dependencies)
+
+ def sizeof(self, typename: str, prefix: str, env: 'Environment', *,
+ extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[int, bool]:
+ if extra_args is None:
+ extra_args = []
+ if self.is_cross:
+ r = self.cross_sizeof(typename, prefix, env, extra_args=extra_args,
+ dependencies=dependencies)
+ return r, False
+ t = f'''program test
+ use iso_c_binding
+ {prefix}
+ {typename} :: x
+ print '(i0)', c_sizeof(x)
+ end program test
+ '''
+ res = self.cached_run(t, env, extra_args=extra_args,
+ dependencies=dependencies)
+ if not res.compiled:
+ return -1, False
+ if res.returncode != 0:
+ raise mesonlib.EnvironmentException('Could not run sizeof test binary.')
+ return int(res.stdout), res.cached
+
+ @functools.lru_cache()
+ def output_is_64bit(self, env: 'Environment') -> bool:
+ '''
+ returns true if the output produced is 64-bit, false if 32-bit
+ '''
+ return self.sizeof('type(c_ptr)', '', env)[0] == 8
class GnuFortranCompiler(GnuCompiler, FortranCompiler):
@@ -141,20 +276,19 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic
'everything': default_warn_args + ['-Wextra', '-Wpedantic', '-fimplicit-none']}
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = FortranCompiler.get_options(self)
+ opts = super().get_options()
fortran_stds = ['legacy', 'f95', 'f2003']
if version_compare(self.version, '>=4.4.0'):
fortran_stds += ['f2008']
if version_compare(self.version, '>=8.0.0'):
fortran_stds += ['f2018']
- key = self.form_compileropt_key('std')
- opts[key].choices = ['none'] + fortran_stds
+ self._update_language_stds(opts, fortran_stds)
return opts
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args: T.List[str] = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
if std != 'none':
args.append('-std=' + std)
return args
@@ -203,10 +337,8 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic
ElbrusCompiler.__init__(self)
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = FortranCompiler.get_options(self)
- fortran_stds = ['f95', 'f2003', 'f2008', 'gnu', 'legacy', 'f2008ts']
- key = self.form_compileropt_key('std')
- opts[key].choices = ['none'] + fortran_stds
+ opts = super().get_options()
+ self._update_language_stds(opts, ['f95', 'f2003', 'f2008', 'gnu', 'legacy', 'f2008ts'])
return opts
def get_module_outdir_args(self, path: str) -> T.List[str]:
@@ -283,16 +415,15 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic
'everything': ['-warn', 'all']}
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = FortranCompiler.get_options(self)
- key = self.form_compileropt_key('std')
- opts[key].choices = ['none', 'legacy', 'f95', 'f2003', 'f2008', 'f2018']
+ opts = super().get_options()
+ self._update_language_stds(opts, ['none', 'legacy', 'f95', 'f2003', 'f2008', 'f2018'])
return opts
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args: T.List[str] = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ std = self.get_compileropt_value('std', env, target, subproject)
stds = {'legacy': 'none', 'f95': 'f95', 'f2003': 'f03', 'f2008': 'f08', 'f2018': 'f18'}
+ assert isinstance(std, str)
if std != 'none':
args.append('-stand=' + stds[std])
return args
@@ -338,16 +469,15 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic
'everything': ['/warn:all']}
def get_options(self) -> 'MutableKeyedOptionDictType':
- opts = FortranCompiler.get_options(self)
- key = self.form_compileropt_key('std')
- opts[key].choices = ['none', 'legacy', 'f95', 'f2003', 'f2008', 'f2018']
+ opts = super().get_options()
+ self._update_language_stds(opts, ['none', 'legacy', 'f95', 'f2003', 'f2008', 'f2018'])
return opts
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args: T.List[str] = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ std = self.get_compileropt_value('std', env, target, subproject)
stds = {'legacy': 'none', 'f95': 'f95', 'f2003': 'f03', 'f2008': 'f08', 'f2018': 'f18'}
+ assert isinstance(std, str)
if std != 'none':
args.append('/stand:' + stds[std])
return args
@@ -513,7 +643,11 @@ def language_stdlib_only_link_flags(self, env: 'Environment') -> T.List[str]:
# https://github.com/llvm/llvm-project/commit/8d5386669ed63548daf1bee415596582d6d78d7d;
# it seems flang 18 doesn't work if something accidentally includes a program unit, see
# https://github.com/llvm/llvm-project/issues/92496
- return search_dirs + ['-lFortranRuntime', '-lFortranDecimal']
+ # Only link FortranRuntime and FortranDecimal for flang < 19, see
+ # https://github.com/scipy/scipy/issues/21562#issuecomment-2942938509
+ if version_compare(self.version, '<19'):
+ search_dirs += ['-lFortranRuntime', '-lFortranDecimal']
+ return search_dirs
class Open64FortranCompiler(FortranCompiler):
diff --git a/mesonbuild/compilers/mixins/apple.py b/mesonbuild/compilers/mixins/apple.py
index fc93d38a5673..2a0939334f85 100644
--- a/mesonbuild/compilers/mixins/apple.py
+++ b/mesonbuild/compilers/mixins/apple.py
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: Apache-2.0
-# Copyright © 2024 Intel Corporation
+# Copyright © 2024-2025 Intel Corporation
"""Provides mixins for Apple compilers."""
@@ -56,6 +56,23 @@ def openmp_link_flags(self, env: Environment) -> T.List[str]:
raise MesonException("Couldn't find libomp")
return self.__BASE_OMP_FLAGS + link
- def get_prelink_args(self, prelink_name: str, obj_list: T.List[str]) -> T.List[str]:
+ def get_prelink_args(self, prelink_name: str, obj_list: T.List[str]) -> T.Tuple[T.List[str], T.List[str]]:
# The objects are prelinked through the compiler, which injects -lSystem
- return ['-nostdlib', '-r', '-o', prelink_name] + obj_list
+ return [prelink_name], ['-nostdlib', '-r', '-o', prelink_name] + obj_list
+
+
+class AppleCStdsMixin(Compiler):
+
+ """Provide version overrides for the Apple Compilers."""
+
+ _C17_VERSION = '>=10.0.0'
+ _C18_VERSION = '>=11.0.0'
+ _C2X_VERSION = '>=11.0.0'
+
+
+class AppleCPPStdsMixin(Compiler):
+
+ """Provide version overrides for the Apple C++ Compilers."""
+
+ _CPP23_VERSION = '>=13.0.0'
+ _CPP26_VERSION = '>=16.0.0'
diff --git a/mesonbuild/compilers/mixins/clang.py b/mesonbuild/compilers/mixins/clang.py
index a0d3d5ffb069..ae5ab631bdca 100644
--- a/mesonbuild/compilers/mixins/clang.py
+++ b/mesonbuild/compilers/mixins/clang.py
@@ -10,6 +10,7 @@
import typing as T
from ... import mesonlib
+from ... import options
from ...linkers.linkers import AppleDynamicLinker, ClangClDynamicLinker, LLVMDynamicLinker, GnuGoldDynamicLinker, \
MoldDynamicLinker, MSVCDynamicLinker
from ...options import OptionKey
@@ -17,8 +18,14 @@
from .gnu import GnuLikeCompiler
if T.TYPE_CHECKING:
+ from ...options import MutableKeyedOptionDictType
from ...environment import Environment
from ...dependencies import Dependency # noqa: F401
+ from ..compilers import Compiler
+
+ CompilerMixinBase = Compiler
+else:
+ CompilerMixinBase = object
clang_color_args: T.Dict[str, T.List[str]] = {
'auto': ['-fdiagnostics-color=auto'],
@@ -135,7 +142,7 @@ def openmp_flags(self, env: Environment) -> T.List[str]:
return []
def gen_vs_module_defs_args(self, defsfile: str) -> T.List[str]:
- if isinstance(self.linker, (MSVCDynamicLinker)):
+ if isinstance(self.linker, (ClangClDynamicLinker, MSVCDynamicLinker)):
# With MSVC, DLLs only export symbols that are explicitly exported,
# so if a module defs file is specified, we use that to export symbols
return ['-Wl,/DEF:' + defsfile]
@@ -187,7 +194,7 @@ def get_lto_compile_args(self, *, threads: int = 0, mode: str = 'default') -> T.
def linker_to_compiler_args(self, args: T.List[str]) -> T.List[str]:
if isinstance(self.linker, (ClangClDynamicLinker, MSVCDynamicLinker)):
- return [flag if flag.startswith('-Wl,') else f'-Wl,{flag}' for flag in args]
+ return [flag if flag.startswith('-Wl,') or flag.startswith('-fuse-ld=') else f'-Wl,{flag}' for flag in args]
else:
return args
@@ -204,3 +211,66 @@ def get_lto_link_args(self, *, threads: int = 0, mode: str = 'default',
raise mesonlib.MesonException('clang support for LTO threads requires clang >=4.0')
args.append(f'-flto-jobs={threads}')
return args
+
+
+class ClangCStds(CompilerMixinBase):
+
+ """Mixin class for clang based compilers for setting C standards.
+
+ This is used by both ClangCCompiler and ClangClCompiler, as they share
+ the same versions
+ """
+
+ _C17_VERSION = '>=6.0.0'
+ _C18_VERSION = '>=8.0.0'
+ _C2X_VERSION = '>=9.0.0'
+ _C23_VERSION = '>=18.0.0'
+ _C2Y_VERSION = '>=19.0.0'
+
+ def get_options(self) -> MutableKeyedOptionDictType:
+ opts = super().get_options()
+ stds = ['c89', 'c99', 'c11']
+ # https://releases.llvm.org/6.0.0/tools/clang/docs/ReleaseNotes.html
+ # https://en.wikipedia.org/wiki/Xcode#Latest_versions
+ if mesonlib.version_compare(self.version, self._C17_VERSION):
+ stds += ['c17']
+ if mesonlib.version_compare(self.version, self._C18_VERSION):
+ stds += ['c18']
+ if mesonlib.version_compare(self.version, self._C2X_VERSION):
+ stds += ['c2x']
+ if mesonlib.version_compare(self.version, self._C23_VERSION):
+ stds += ['c23']
+ if mesonlib.version_compare(self.version, self._C2Y_VERSION):
+ stds += ['c2y']
+ key = self.form_compileropt_key('std')
+ std_opt = opts[key]
+ assert isinstance(std_opt, options.UserStdOption), 'for mypy'
+ std_opt.set_versions(stds, gnu=True)
+ return opts
+
+
+class ClangCPPStds(CompilerMixinBase):
+
+ """Mixin class for clang based compilers for setting C++ standards.
+
+ This is used by the ClangCPPCompiler
+ """
+
+ _CPP23_VERSION = '>=12.0.0'
+ _CPP26_VERSION = '>=17.0.0'
+
+ def get_options(self) -> MutableKeyedOptionDictType:
+ opts = super().get_options()
+ stds = [
+ 'c++98', 'c++03', 'c++11', 'c++14', 'c++17', 'c++1z', 'c++2a',
+ 'c++20',
+ ]
+ if mesonlib.version_compare(self.version, self._CPP23_VERSION):
+ stds.append('c++23')
+ if mesonlib.version_compare(self.version, self._CPP26_VERSION):
+ stds.append('c++26')
+ key = self.form_compileropt_key('std')
+ std_opt = opts[key]
+ assert isinstance(std_opt, options.UserStdOption), 'for mypy'
+ std_opt.set_versions(stds, gnu=True)
+ return opts
diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py
index f0515a9bdcb4..e9125d548325 100644
--- a/mesonbuild/compilers/mixins/clike.py
+++ b/mesonbuild/compilers/mixins/clike.py
@@ -53,9 +53,9 @@ class CLikeCompilerArgs(arglist.CompilerArgs):
# NOTE: not thorough. A list of potential corner cases can be found in
# https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038
- dedup1_prefixes = ('-l', '-Wl,-l', '-Wl,--export-dynamic')
+ dedup1_prefixes = ('-l', '-Wl,-l', '-Wl,-rpath,', '-Wl,-rpath-link,')
dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a')
- dedup1_args = ('-c', '-S', '-E', '-pipe', '-pthread')
+ dedup1_args = ('-c', '-S', '-E', '-pipe', '-pthread', '-Wl,--export-dynamic')
def to_native(self, copy: bool = False) -> T.List[str]:
# This seems to be allowed, but could never work?
@@ -378,7 +378,7 @@ def _get_basic_compiler_args(self, env: 'Environment', mode: CompileCheckMode) -
try:
crt_val = env.coredata.optstore.get_value('b_vscrt')
buildtype = env.coredata.optstore.get_value('buildtype')
- cargs += self.get_crt_compile_args(crt_val, buildtype)
+ cargs += self.get_crt_compile_args(crt_val, buildtype) # type: ignore[arg-type]
except (KeyError, AttributeError):
pass
@@ -476,6 +476,21 @@ def cross_compute_int(self, expression: str, low: T.Optional[int], high: T.Optio
if self._compile_int(f'{expression} == {guess}', prefix, env, extra_args, dependencies):
return guess
+ # Try to expand the expression and evaluate it on the build machines compiler
+ if self.language in env.coredata.compilers.build:
+ try:
+ expanded, _ = self.get_define(expression, prefix, env, extra_args, dependencies, False)
+ evaluate_expanded = f'''
+            #include <stdio.h>
+            #include <stdint.h>
+ int main(void) {{ int expression = {expanded}; printf("%d", expression); return 0; }}'''
+ run = env.coredata.compilers.build[self.language].run(evaluate_expanded, env)
+ if run and run.compiled and run.returncode == 0:
+ if self._compile_int(f'{expression} == {run.stdout}', prefix, env, extra_args, dependencies):
+ return int(run.stdout)
+ except mesonlib.EnvironmentException:
+ pass
+
# If no bounds are given, compute them in the limit of int32
maxint = 0x7fffffff
minint = -0x80000000
@@ -1034,8 +1049,8 @@ def get_library_naming(self, env: 'Environment', libtype: LibType, strict: bool
elif env.machines[self.for_machine].is_cygwin():
shlibext = ['dll', 'dll.a']
prefixes = ['cyg'] + prefixes
- elif self.id.lower() == 'c6000' or self.id.lower() == 'ti':
- # TI C6000 compiler can use both extensions for static or dynamic libs.
+ elif self.id.lower() in {'c6000', 'c2000', 'ti'}:
+ # TI C28x compilers can use both extensions for static or dynamic libs.
stlibext = ['a', 'lib']
shlibext = ['dll', 'so']
else:
@@ -1075,17 +1090,17 @@ def tuple_key(x: str) -> T.Tuple[int, ...]:
return sorted(filtered, key=tuple_key, reverse=True)
@classmethod
- def _get_trials_from_pattern(cls, pattern: str, directory: str, libname: str) -> T.List[Path]:
- f = Path(directory) / pattern.format(libname)
+ def _get_trials_from_pattern(cls, pattern: str, directory: str, libname: str) -> T.List[str]:
+ f = os.path.join(directory, pattern.format(libname))
# Globbing for OpenBSD
if '*' in pattern:
# NOTE: globbing matches directories and broken symlinks
# so we have to do an isfile test on it later
- return [Path(x) for x in cls._sort_shlibs_openbsd(glob.glob(str(f)))]
+ return cls._sort_shlibs_openbsd(glob.glob(f))
return [f]
@staticmethod
- def _get_file_from_list(env: Environment, paths: T.List[Path]) -> T.Optional[Path]:
+ def _get_file_from_list(env: Environment, paths: T.List[str]) -> T.Optional[Path]:
'''
We just check whether the library exists. We can't do a link check
because the library might have unresolved symbols that require other
@@ -1093,16 +1108,16 @@ def _get_file_from_list(env: Environment, paths: T.List[Path]) -> T.Optional[Pat
architecture.
'''
for p in paths:
- if p.is_file():
+ if os.path.isfile(p):
if env.machines.host.is_darwin() and env.machines.build.is_darwin():
# Run `lipo` and check if the library supports the arch we want
- archs = mesonlib.darwin_get_object_archs(str(p))
+ archs = mesonlib.darwin_get_object_archs(p)
if not archs or env.machines.host.cpu_family not in archs:
mlog.debug(f'Rejected {p}, supports {archs} but need {env.machines.host.cpu_family}')
continue
- return p
+ return Path(p)
return None
@@ -1270,10 +1285,25 @@ def _has_multi_arguments(self, args: T.List[str], env: 'Environment', code: str)
# some compilers, e.g. GCC, don't warn for unsupported warning-disable
# flags, so when we are testing a flag like "-Wno-forgotten-towel", also
# check the equivalent enable flag too "-Wforgotten-towel".
- # Make an exception for -Wno-attributes=x as -Wattributes=x is invalid
- # for GCC at least.
- if arg.startswith('-Wno-') and not arg.startswith('-Wno-attributes='):
- new_args.append('-W' + arg[5:])
+ if arg.startswith('-Wno-'):
+ # Make an exception for -Wno-attributes=x as -Wattributes=x is invalid
+ # for GCC at least. Also, the positive form of some flags require a
+ # value to be specified, i.e. we need to pass -Wfoo=N rather than just
+ # -Wfoo.
+ if arg.startswith('-Wno-attributes='):
+ pass
+ elif arg in {
+ '-Wno-alloc-size-larger-than',
+ '-Wno-alloca-larger-than',
+ '-Wno-frame-larger-than',
+ '-Wno-stack-usage',
+ '-Wno-vla-larger-than',
+ }:
+ # Pass an arbitrary value to the enabling flag; since the test program
+ # is trivial, it is unlikely to provoke any of these warnings.
+ new_args.append('-W' + arg[5:] + '=1000')
+ else:
+ new_args.append('-W' + arg[5:])
if arg.startswith('-Wl,'):
mlog.warning(f'{arg} looks like a linker argument, '
'but has_argument and other similar methods only '
diff --git a/mesonbuild/compilers/mixins/elbrus.py b/mesonbuild/compilers/mixins/elbrus.py
index 66f419cf02d8..7037db23260e 100644
--- a/mesonbuild/compilers/mixins/elbrus.py
+++ b/mesonbuild/compilers/mixins/elbrus.py
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: Apache-2.0
-# Copyright © 2023-2024 Intel Corporation
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
@@ -18,7 +18,7 @@
if T.TYPE_CHECKING:
from ...environment import Environment
- from ...coredata import KeyedOptionDictType
+ from ...build import BuildTarget
class ElbrusCompiler(GnuLikeCompiler):
@@ -76,16 +76,21 @@ def get_default_include_dirs(self) -> T.List[str]:
def get_optimization_args(self, optimization_level: str) -> T.List[str]:
return gnu_optimization_args[optimization_level]
- def get_prelink_args(self, prelink_name: str, obj_list: T.List[str]) -> T.List[str]:
- return ['-r', '-nodefaultlibs', '-nostartfiles', '-o', prelink_name] + obj_list
+ def get_prelink_args(self, prelink_name: str, obj_list: T.List[str]) -> T.Tuple[T.List[str], T.List[str]]:
+ return [prelink_name], ['-r', '-nodefaultlibs', '-nostartfiles', '-o', prelink_name] + obj_list
def get_pch_suffix(self) -> str:
# Actually it's not supported for now, but probably will be supported in future
return 'pch'
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args: T.List[str] = []
- std = options.get_value(OptionKey(f'{self.language}_std', machine=self.for_machine))
+ key = OptionKey(f'{self.language}_std', subproject=subproject, machine=self.for_machine)
+ if target:
+ std = env.coredata.get_option_for_target(target, key)
+ else:
+ std = env.coredata.optstore.get_value_for(key)
+ assert isinstance(std, str)
if std != 'none':
args.append('-std=' + std)
return args
diff --git a/mesonbuild/compilers/mixins/emscripten.py b/mesonbuild/compilers/mixins/emscripten.py
index 64315ae96797..91b25e8f7971 100644
--- a/mesonbuild/compilers/mixins/emscripten.py
+++ b/mesonbuild/compilers/mixins/emscripten.py
@@ -15,7 +15,6 @@
from mesonbuild.compilers.compilers import CompileCheckMode
if T.TYPE_CHECKING:
- from ... import coredata
from ...environment import Environment
from ...compilers.compilers import Compiler
from ...dependencies import Dependency
@@ -51,21 +50,23 @@ def _get_compile_output(self, dirname: str, mode: CompileCheckMode) -> str:
def thread_link_flags(self, env: 'Environment') -> T.List[str]:
args = ['-pthread']
- count: int = env.coredata.optstore.get_value(OptionKey(f'{self.language}_thread_count', machine=self.for_machine))
+ count = env.coredata.optstore.get_value(OptionKey(f'{self.language}_thread_count', machine=self.for_machine))
+ assert isinstance(count, int)
if count:
args.append(f'-sPTHREAD_POOL_SIZE={count}')
return args
- def get_options(self) -> coredata.MutableKeyedOptionDictType:
- return self.update_options(
- super().get_options(),
- self.create_option(
- options.UserIntegerOption,
- OptionKey(f'{self.language}_thread_count', machine=self.for_machine),
- 'Number of threads to use in web assembly, set to 0 to disable',
- (0, None, 4), # Default was picked at random
- ),
- )
+ def get_options(self) -> options.MutableKeyedOptionDictType:
+ opts = super().get_options()
+
+ key = OptionKey(f'{self.language}_thread_count', machine=self.for_machine)
+ opts[key] = options.UserIntegerOption(
+ self.make_option_name(key),
+ 'Number of threads to use in web assembly, set to 0 to disable',
+ 4, # Default was picked at random
+ min_value=0)
+
+ return opts
@classmethod
def native_args_to_unix(cls, args: T.List[str]) -> T.List[str]:
diff --git a/mesonbuild/compilers/mixins/gnu.py b/mesonbuild/compilers/mixins/gnu.py
index 62f55543a0a7..9ea591e04aad 100644
--- a/mesonbuild/compilers/mixins/gnu.py
+++ b/mesonbuild/compilers/mixins/gnu.py
@@ -1,5 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2019-2022 The meson development team
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
@@ -8,7 +9,6 @@
import abc
import functools
import os
-import multiprocessing
import pathlib
import re
import subprocess
@@ -16,11 +16,12 @@
from ... import mesonlib
from ... import mlog
-from ...options import OptionKey
+from ...options import OptionKey, UserStdOption
from mesonbuild.compilers.compilers import CompileCheckMode
if T.TYPE_CHECKING:
from ..._typing import ImmutableListProtocol
+ from ...options import MutableKeyedOptionDictType
from ...environment import Environment
from ..compilers import Compiler
else:
@@ -204,6 +205,7 @@
# -Wdeclaration-after-statement
# -Wtraditional
# -Wtraditional-conversion
+# -Wunsuffixed-float-constants
gnu_c_warning_args: T.Dict[str, T.List[str]] = {
"0.0.0": [
"-Wbad-function-cast",
@@ -218,9 +220,6 @@
"4.1.0": [
"-Wc++-compat",
],
- "4.5.0": [
- "-Wunsuffixed-float-constants",
- ],
}
# GCC warnings for C++
@@ -497,11 +496,11 @@ def get_lto_compile_args(self, *, threads: int = 0, mode: str = 'default') -> T.
# for their specific arguments
return ['-flto']
- def sanitizer_compile_args(self, value: str) -> T.List[str]:
- if value == 'none':
- return []
- args = ['-fsanitize=' + value]
- if 'address' in value: # for -fsanitize=address,undefined
+ def sanitizer_compile_args(self, value: T.List[str]) -> T.List[str]:
+ if not value:
+ return value
+ args = ['-fsanitize=' + ','.join(value)]
+ if 'address' in value:
args.append('-fno-omit-frame-pointer')
return args
@@ -550,16 +549,19 @@ def __init__(self, defines: T.Optional[T.Dict[str, str]]):
super().__init__()
self.defines = defines or {}
self.base_options.update({OptionKey('b_colorout'), OptionKey('b_lto_threads')})
+ self._has_color_support = mesonlib.version_compare(self.version, '>=4.9.0')
+ self._has_wpedantic_support = mesonlib.version_compare(self.version, '>=4.8.0')
+ self._has_lto_auto_support = mesonlib.version_compare(self.version, '>=10.0')
def get_colorout_args(self, colortype: str) -> T.List[str]:
- if mesonlib.version_compare(self.version, '>=4.9.0'):
+ if self._has_color_support:
return gnu_color_args[colortype][:]
return []
def get_warn_args(self, level: str) -> T.List[str]:
# Mypy doesn't understand cooperative inheritance
args = super().get_warn_args(level)
- if mesonlib.version_compare(self.version, '<4.8.0') and '-Wpedantic' in args:
+ if not self._has_wpedantic_support and '-Wpedantic' in args:
# -Wpedantic was added in 4.8.0
# https://gcc.gnu.org/gcc-4.8/changes.html
args[args.index('-Wpedantic')] = '-pedantic'
@@ -607,15 +609,16 @@ def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]:
# error.
return ['-Werror=attributes']
- def get_prelink_args(self, prelink_name: str, obj_list: T.List[str]) -> T.List[str]:
- return ['-r', '-o', prelink_name] + obj_list
+ def get_prelink_args(self, prelink_name: str, obj_list: T.List[str]) -> T.Tuple[T.List[str], T.List[str]]:
+ return [prelink_name], ['-r', '-o', prelink_name] + obj_list
def get_lto_compile_args(self, *, threads: int = 0, mode: str = 'default') -> T.List[str]:
if threads == 0:
- if mesonlib.version_compare(self.version, '>= 10.0'):
+ if self._has_lto_auto_support:
return ['-flto=auto']
- # This matches clang's behavior of using the number of cpus
- return [f'-flto={multiprocessing.cpu_count()}']
+ # This matches clang's behavior of using the number of cpus, but
+ # obeying meson's MESON_NUM_PROCESSES convention.
+ return [f'-flto={mesonlib.determine_worker_count()}']
elif threads > 0:
return [f'-flto={threads}']
return super().get_lto_compile_args(threads=threads)
@@ -628,3 +631,55 @@ def use_linker_args(cls, linker: str, version: str) -> T.List[str]:
def get_profile_use_args(self) -> T.List[str]:
return super().get_profile_use_args() + ['-fprofile-correction']
+
+
+class GnuCStds(Compiler):
+
+ """Mixin class for gcc based compilers for setting C standards."""
+
+ _C18_VERSION = '>=8.0.0'
+ _C2X_VERSION = '>=9.0.0'
+ _C23_VERSION = '>=14.0.0'
+ _C2Y_VERSION = '>=15.0.0'
+
+ def get_options(self) -> MutableKeyedOptionDictType:
+ opts = super().get_options()
+ stds = ['c89', 'c99', 'c11']
+ if mesonlib.version_compare(self.version, self._C18_VERSION):
+ stds += ['c17', 'c18']
+ if mesonlib.version_compare(self.version, self._C2X_VERSION):
+ stds += ['c2x']
+ if mesonlib.version_compare(self.version, self._C23_VERSION):
+ stds += ['c23']
+ if mesonlib.version_compare(self.version, self._C2Y_VERSION):
+ stds += ['c2y']
+ key = self.form_compileropt_key('std')
+ std_opt = opts[key]
+ assert isinstance(std_opt, UserStdOption), 'for mypy'
+ std_opt.set_versions(stds, gnu=True)
+ return opts
+
+
+class GnuCPPStds(Compiler):
+
+ """Mixin class for GNU based compilers for setting CPP standards."""
+
+ _CPP23_VERSION = '>=11.0.0'
+ _CPP26_VERSION = '>=14.0.0'
+
+ def get_options(self) -> MutableKeyedOptionDictType:
+ opts = super().get_options()
+
+ stds = [
+ 'c++98', 'c++03', 'c++11', 'c++14', 'c++17', 'c++1z',
+ 'c++2a', 'c++20',
+ ]
+ if mesonlib.version_compare(self.version, self._CPP23_VERSION):
+ stds.append('c++23')
+ if mesonlib.version_compare(self.version, self._CPP26_VERSION):
+ stds.append('c++26')
+ key = self.form_compileropt_key('std')
+ std_opt = opts[key]
+ assert isinstance(std_opt, UserStdOption), 'for mypy'
+ std_opt.set_versions(stds, gnu=True)
+ return opts
diff --git a/mesonbuild/compilers/mixins/islinker.py b/mesonbuild/compilers/mixins/islinker.py
index 8d17a94b2d16..3f3561972188 100644
--- a/mesonbuild/compilers/mixins/islinker.py
+++ b/mesonbuild/compilers/mixins/islinker.py
@@ -1,5 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2019 The Meson development team
+# Copyright © 2023 Intel Corporation
from __future__ import annotations
@@ -16,9 +17,10 @@
from ...mesonlib import EnvironmentException, MesonException, is_windows
if T.TYPE_CHECKING:
- from ...coredata import KeyedOptionDictType
from ...environment import Environment
from ...compilers.compilers import Compiler
+ from ...build import BuildTarget
+ from ...options import OptionStore
else:
# This is a bit clever, for mypy we pretend that these mixins descend from
# Compiler, so we get all of the methods and attributes defined for us, but
@@ -36,7 +38,7 @@ class BasicLinkerIsCompilerMixin(Compiler):
functionality itself.
"""
- def sanitizer_link_args(self, value: str) -> T.List[str]:
+ def sanitizer_link_args(self, value: T.List[str]) -> T.List[str]:
return []
def get_lto_link_args(self, *, threads: int = 0, mode: str = 'default',
@@ -58,7 +60,7 @@ def get_linker_always_args(self) -> T.List[str]:
def get_linker_lib_prefix(self) -> str:
return ''
- def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_link_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
return []
def has_multi_link_args(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]:
@@ -70,7 +72,7 @@ def get_link_debugfile_args(self, targetfile: str) -> T.List[str]:
def get_std_shared_lib_link_args(self) -> T.List[str]:
return []
- def get_std_shared_module_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_std_shared_module_args(self, options: OptionStore) -> T.List[str]:
return self.get_std_shared_lib_link_args()
def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
diff --git a/mesonbuild/compilers/mixins/pgi.py b/mesonbuild/compilers/mixins/pgi.py
index 50335c895cc5..fddc8378f636 100644
--- a/mesonbuild/compilers/mixins/pgi.py
+++ b/mesonbuild/compilers/mixins/pgi.py
@@ -54,6 +54,12 @@ def get_pic_args(self) -> T.List[str]:
def openmp_flags(self, env: Environment) -> T.List[str]:
return ['-mp']
+ def get_preprocess_only_args(self) -> T.List[str]:
+ return ['-E', '-P', '-o', '-']
+
+ def get_preprocess_to_file_args(self) -> T.List[str]:
+ return ['-E', '-P']
+
def get_optimization_args(self, optimization_level: str) -> T.List[str]:
return clike_optimization_args[optimization_level]
diff --git a/mesonbuild/compilers/mixins/tasking.py b/mesonbuild/compilers/mixins/tasking.py
new file mode 100644
index 000000000000..082cff073908
--- /dev/null
+++ b/mesonbuild/compilers/mixins/tasking.py
@@ -0,0 +1,138 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2012-2023 The Meson development team
+from __future__ import annotations
+
+"""Representations specific to the TASKING embedded C/C++ compiler family."""
+
+import os
+import typing as T
+
+from ...mesonlib import EnvironmentException
+from ...options import OptionKey
+
+if T.TYPE_CHECKING:
+ from ...compilers.compilers import Compiler
+else:
+ # This is a bit clever, for mypy we pretend that these mixins descend from
+ # Compiler, so we get all of the methods and attributes defined for us, but
+ # for runtime we make them descend from object (which all classes normally
+ # do). This gives us DRYer type checking, with no runtime impact
+ Compiler = object
+
+tasking_buildtype_args: T.Mapping[str, T.List[str]] = {
+ 'plain': [],
+ 'debug': [],
+ 'debugoptimized': [],
+ 'release': [],
+ 'minsize': [],
+ 'custom': []
+}
+
+tasking_optimization_args: T.Mapping[str, T.List[str]] = {
+ 'plain': [],
+ '0': ['-O0'],
+ 'g': ['-O1'], # There is no debug specific level, O1 is recommended by the compiler
+ '1': ['-O1'],
+ '2': ['-O2'],
+ '3': ['-O3'],
+ 's': ['-Os']
+}
+
+tasking_debug_args: T.Mapping[bool, T.List[str]] = {
+ False: [],
+ True: ['-g3']
+}
+
+class TaskingCompiler(Compiler):
+ '''
+ Functionality that is common to all TASKING family compilers.
+ '''
+
+ LINKER_PREFIX = '-Wl'
+
+ def __init__(self) -> None:
+ if not self.is_cross:
+ raise EnvironmentException(f'{self.id} supports only cross-compilation.')
+
+ self.base_options = {
+ OptionKey(o) for o in [
+ 'b_lto',
+ 'b_staticpic',
+ 'b_ndebug'
+ ]
+ }
+
+ default_warn_args = [] # type: T.List[str]
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + [],
+ '3': default_warn_args + [],
+ 'everything': default_warn_args + []} # type: T.Dict[str, T.List[str]]
+ # TODO: add additional compilable files so that meson can detect it
+ self.can_compile_suffixes.add('asm')
+
+ def get_pic_args(self) -> T.List[str]:
+ return ['--pic']
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ return tasking_buildtype_args[buildtype]
+
+ def get_debug_args(self, is_debug: bool) -> T.List[str]:
+ return tasking_debug_args[is_debug]
+
+ def get_compile_only_args(self) -> T.List[str]:
+ return ['-c']
+
+ def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
+ return [f'--dep-file={outfile}']
+
+ def get_depfile_suffix(self) -> str:
+ return 'dep'
+
+ def get_no_stdinc_args(self) -> T.List[str]:
+ return ['--no-stdinc']
+
+ def get_werror_args(self) -> T.List[str]:
+ return ['--warnings-as-errors']
+
+ def get_no_stdlib_link_args(self) -> T.List[str]:
+ return ['--no-default-libraries']
+
+ def get_output_args(self, outputname: str) -> T.List[str]:
+ return ['-o', outputname]
+
+ def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
+ if path == '':
+ path = '.'
+ return ['-I' + path]
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ return tasking_optimization_args[optimization_level]
+
+ def get_no_optimization_args(self) -> T.List[str]:
+ return ['-O0']
+
+ def get_prelink_args(self, prelink_name: str, obj_list: T.List[str]) -> T.Tuple[T.List[str], T.List[str]]:
+ mil_link_list = []
+ obj_file_list = []
+ for obj in obj_list:
+ if obj.endswith('.mil'):
+ mil_link_list.append(obj)
+ else:
+ obj_file_list.append(obj)
+ obj_file_list.append(prelink_name)
+
+ return obj_file_list, ['--mil-link', '-o', prelink_name, '-c'] + mil_link_list
+
+ def get_prelink_append_compile_args(self) -> bool:
+ return True
+
+ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
+ for idx, i in enumerate(parameter_list):
+ if i[:2] == '-I' or i[:2] == '-L':
+ parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+ return parameter_list
+
+ def get_preprocess_only_args(self) -> T.List[str]:
+ return ['-E']
diff --git a/mesonbuild/compilers/mixins/visualstudio.py b/mesonbuild/compilers/mixins/visualstudio.py
index b4677f4172ba..275e7ab0a3c9 100644
--- a/mesonbuild/compilers/mixins/visualstudio.py
+++ b/mesonbuild/compilers/mixins/visualstudio.py
@@ -1,5 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2019 The meson development team
+# Copyright © 2023 Intel Corporation
from __future__ import annotations
@@ -166,12 +167,10 @@ def get_compile_only_args(self) -> T.List[str]:
def get_no_optimization_args(self) -> T.List[str]:
return ['/Od', '/Oi-']
- def sanitizer_compile_args(self, value: str) -> T.List[str]:
- if value == 'none':
- return []
- if value != 'address':
- raise mesonlib.MesonException('VS only supports address sanitizer at the moment.')
- return ['/fsanitize=address']
+ def sanitizer_compile_args(self, value: T.List[str]) -> T.List[str]:
+ if not value:
+ return value
+ return [f'/fsanitize={",".join(value)}']
def get_output_args(self, outputname: str) -> T.List[str]:
if self.mode == 'PREPROCESSOR':
@@ -338,6 +337,8 @@ def _calculate_toolset_version(self, version: int) -> T.Optional[str]:
return '14.2' # (Visual Studio 2019)
elif version < 1940:
return '14.3' # (Visual Studio 2022)
+ elif version < 1950:
+ return '14.4' # (Visual Studio current preview version, might not be final)
mlog.warning(f'Could not find toolset for version {self.version!r}')
return None
diff --git a/mesonbuild/compilers/objc.py b/mesonbuild/compilers/objc.py
index 97550c2ea251..d013417fccd3 100644
--- a/mesonbuild/compilers/objc.py
+++ b/mesonbuild/compilers/objc.py
@@ -5,20 +5,22 @@
import typing as T
-from .. import options
-from ..options import OptionKey
+from ..options import OptionKey, UserStdOption
+from .c import ALL_STDS
from .compilers import Compiler
+from .mixins.apple import AppleCStdsMixin
+from .mixins.clang import ClangCompiler, ClangCStds
from .mixins.clike import CLikeCompiler
-from .mixins.gnu import GnuCompiler, gnu_common_warning_args, gnu_objc_warning_args
-from .mixins.clang import ClangCompiler
+from .mixins.gnu import GnuCompiler, GnuCStds, gnu_common_warning_args, gnu_objc_warning_args
if T.TYPE_CHECKING:
- from .. import coredata
from ..envconfig import MachineInfo
from ..environment import Environment
from ..linkers.linkers import DynamicLinker
from ..mesonlib import MachineChoice
+ from ..build import BuildTarget
+ from ..options import MutableKeyedOptionDictType
class ObjCCompiler(CLikeCompiler, Compiler):
@@ -34,6 +36,14 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
linker=linker)
CLikeCompiler.__init__(self)
+ def get_options(self) -> MutableKeyedOptionDictType:
+ opts = super().get_options()
+ key = self.form_compileropt_key('std')
+ opts.update({
+ key: UserStdOption('c', ALL_STDS),
+ })
+ return opts
+
@staticmethod
def get_display_language() -> str:
return 'Objective-C'
@@ -42,8 +52,13 @@ def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
code = '#import\nint main(void) { return 0; }\n'
return self._sanity_check_impl(work_dir, environment, 'sanitycheckobjc.m', code)
+ def form_compileropt_key(self, basename: str) -> OptionKey:
+ if basename == 'std':
+ return OptionKey(f'c_{basename}', machine=self.for_machine)
+ return super().form_compileropt_key(basename)
+
-class GnuObjCCompiler(GnuCompiler, ObjCCompiler):
+class GnuObjCCompiler(GnuCStds, GnuCompiler, ObjCCompiler):
def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice,
is_cross: bool, info: 'MachineInfo',
defines: T.Optional[T.Dict[str, str]] = None,
@@ -61,8 +76,19 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
self.supported_warn_args(gnu_common_warning_args) +
self.supported_warn_args(gnu_objc_warning_args))}
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
+ args: T.List[str] = []
+ key = OptionKey('c_std', subproject=subproject, machine=self.for_machine)
+ if target:
+ std = env.coredata.get_option_for_target(target, key)
+ else:
+ std = env.coredata.optstore.get_value_for(key)
+ assert isinstance(std, str)
+ if std != 'none':
+ args.append('-std=' + std)
+ return args
-class ClangObjCCompiler(ClangCompiler, ObjCCompiler):
+class ClangObjCCompiler(ClangCStds, ClangCompiler, ObjCCompiler):
def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice,
is_cross: bool, info: 'MachineInfo',
defines: T.Optional[T.Dict[str, str]] = None,
@@ -78,23 +104,25 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
'3': default_warn_args + ['-Wextra', '-Wpedantic'],
'everything': ['-Weverything']}
- def get_options(self) -> 'coredata.MutableKeyedOptionDictType':
- return self.update_options(
- super().get_options(),
- self.create_option(options.UserComboOption,
- OptionKey('c_std', machine=self.for_machine),
- 'C language standard to use',
- ['none', 'c89', 'c99', 'c11', 'c17', 'gnu89', 'gnu99', 'gnu11', 'gnu17'],
- 'none'),
- )
-
- def get_option_compile_args(self, options: 'coredata.KeyedOptionDictType') -> T.List[str]:
+ def form_compileropt_key(self, basename: str) -> OptionKey:
+ if basename == 'std':
+ return OptionKey('c_std', machine=self.for_machine)
+ return super().form_compileropt_key(basename)
+
+ def make_option_name(self, key: OptionKey) -> str:
+ if key.name == 'std':
+ return 'c_std'
+ return super().make_option_name(key)
+
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args = []
- std = options.get_value(OptionKey('c_std', machine=self.for_machine))
+ key = OptionKey('c_std', machine=self.for_machine)
+ std = self.get_compileropt_value(key, env, target, subproject)
+ assert isinstance(std, str)
if std != 'none':
args.append('-std=' + std)
return args
-class AppleClangObjCCompiler(ClangObjCCompiler):
+class AppleClangObjCCompiler(AppleCStdsMixin, ClangObjCCompiler):
"""Handle the differences between Apple's clang and vanilla clang."""
diff --git a/mesonbuild/compilers/objcpp.py b/mesonbuild/compilers/objcpp.py
index 973d7bb0cfb8..441428b2fa5a 100644
--- a/mesonbuild/compilers/objcpp.py
+++ b/mesonbuild/compilers/objcpp.py
@@ -5,20 +5,23 @@
import typing as T
-from .. import options
-from ..options import OptionKey
+from ..options import OptionKey, UserStdOption
-from .mixins.clike import CLikeCompiler
+from .cpp import ALL_STDS
from .compilers import Compiler
-from .mixins.gnu import GnuCompiler, gnu_common_warning_args, gnu_objc_warning_args
-from .mixins.clang import ClangCompiler
+from .mixins.apple import AppleCPPStdsMixin
+from .mixins.gnu import GnuCompiler, GnuCPPStds, gnu_common_warning_args, gnu_objc_warning_args
+from .mixins.clang import ClangCompiler, ClangCPPStds
+from .mixins.clike import CLikeCompiler
if T.TYPE_CHECKING:
- from .. import coredata
from ..envconfig import MachineInfo
from ..environment import Environment
from ..linkers.linkers import DynamicLinker
from ..mesonlib import MachineChoice
+ from ..build import BuildTarget
+ from ..options import MutableKeyedOptionDictType
+
class ObjCPPCompiler(CLikeCompiler, Compiler):
@@ -33,6 +36,16 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
linker=linker)
CLikeCompiler.__init__(self)
+ def form_compileropt_key(self, basename: str) -> OptionKey:
+ if basename == 'std':
+ return OptionKey('cpp_std', machine=self.for_machine)
+ return super().form_compileropt_key(basename)
+
+ def make_option_name(self, key: OptionKey) -> str:
+ if key.name == 'std':
+ return 'cpp_std'
+ return super().make_option_name(key)
+
@staticmethod
def get_display_language() -> str:
return 'Objective-C++'
@@ -41,8 +54,16 @@ def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
code = '#import\nclass MyClass;int main(void) { return 0; }\n'
return self._sanity_check_impl(work_dir, environment, 'sanitycheckobjcpp.mm', code)
+ def get_options(self) -> MutableKeyedOptionDictType:
+ opts = super().get_options()
+ key = self.form_compileropt_key('std')
+ opts.update({
+ key: UserStdOption('cpp', ALL_STDS),
+ })
+ return opts
-class GnuObjCPPCompiler(GnuCompiler, ObjCPPCompiler):
+
+class GnuObjCPPCompiler(GnuCPPStds, GnuCompiler, ObjCPPCompiler):
def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice,
is_cross: bool, info: 'MachineInfo',
defines: T.Optional[T.Dict[str, str]] = None,
@@ -60,8 +81,19 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
self.supported_warn_args(gnu_common_warning_args) +
self.supported_warn_args(gnu_objc_warning_args))}
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
+ args: T.List[str] = []
+ key = OptionKey('cpp_std', subproject=subproject, machine=self.for_machine)
+ if target:
+ std = env.coredata.get_option_for_target(target, key)
+ else:
+ std = env.coredata.optstore.get_value_for(key)
+ assert isinstance(std, str)
+ if std != 'none':
+ args.append('-std=' + std)
+ return args
-class ClangObjCPPCompiler(ClangCompiler, ObjCPPCompiler):
+class ClangObjCPPCompiler(ClangCPPStds, ClangCompiler, ObjCPPCompiler):
def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice,
is_cross: bool, info: 'MachineInfo',
@@ -78,26 +110,16 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_
'3': default_warn_args + ['-Wextra', '-Wpedantic'],
'everything': ['-Weverything']}
- def get_options(self) -> coredata.MutableKeyedOptionDictType:
- return self.update_options(
- super().get_options(),
- self.create_option(options.UserComboOption,
- OptionKey('cpp_std', machine=self.for_machine),
- 'C++ language standard to use',
- ['none', 'c++98', 'c++11', 'c++14', 'c++17', 'c++20', 'c++2b',
- 'gnu++98', 'gnu++11', 'gnu++14', 'gnu++17', 'gnu++20',
- 'gnu++2b'],
- 'none'),
- )
-
- def get_option_compile_args(self, options: 'coredata.KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args = []
- std = options.get_value(OptionKey('cpp_std', machine=self.for_machine))
+ key = OptionKey('cpp_std', machine=self.for_machine)
+ std = self.get_compileropt_value(key, env, target, subproject)
+ assert isinstance(std, str)
if std != 'none':
args.append('-std=' + std)
return args
-class AppleClangObjCPPCompiler(ClangObjCPPCompiler):
+class AppleClangObjCPPCompiler(AppleCPPStdsMixin, ClangObjCPPCompiler):
"""Handle the differences between Apple's clang and vanilla clang."""
diff --git a/mesonbuild/compilers/rust.py b/mesonbuild/compilers/rust.py
index 02ac593842ad..6f9d642bc16c 100644
--- a/mesonbuild/compilers/rust.py
+++ b/mesonbuild/compilers/rust.py
@@ -1,5 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2012-2022 The Meson development team
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
@@ -12,15 +13,16 @@
from .. import options
from ..mesonlib import EnvironmentException, MesonException, Popen_safe_logged
from ..options import OptionKey
-from .compilers import Compiler, clike_debug_args
+from .compilers import Compiler, CompileCheckMode, clike_debug_args
if T.TYPE_CHECKING:
- from ..coredata import MutableKeyedOptionDictType, KeyedOptionDictType
+ from ..options import MutableKeyedOptionDictType
from ..envconfig import MachineInfo
from ..environment import Environment # noqa: F401
from ..linkers.linkers import DynamicLinker
from ..mesonlib import MachineChoice
from ..dependencies import Dependency
+ from ..build import BuildTarget
rust_optimization_args: T.Dict[str, T.List[str]] = {
@@ -33,6 +35,35 @@
's': ['-C', 'opt-level=s'],
}
+def get_rustup_run_and_args(exelist: T.List[str]) -> T.Optional[T.Tuple[T.List[str], T.List[str]]]:
+ """Given the command for a rustc executable, check if it is invoked via
+ "rustup run" and if so separate the "rustup [OPTIONS] run TOOLCHAIN"
+ part from the arguments to rustc. If the returned value is not None,
+ other tools (for example clippy-driver or rustdoc) can be run by placing
+ the name of the tool between the two elements of the tuple."""
+ e = iter(exelist)
+ try:
+ if os.path.basename(next(e)) != 'rustup':
+ return None
+ # minimum three strings: "rustup run TOOLCHAIN"
+ n = 3
+ opt = next(e)
+
+ # options come first
+ while opt.startswith('-'):
+ n += 1
+ opt = next(e)
+
+ # then "run TOOLCHAIN"
+ if opt != 'run':
+ return None
+
+ next(e)
+ next(e)
+ return exelist[:n], list(e)
+ except StopIteration:
+ return None
+
class RustCompiler(Compiler):
# rustc doesn't invoke the compiler itself, it doesn't need a LINKER_PREFIX
@@ -40,7 +71,7 @@ class RustCompiler(Compiler):
id = 'rustc'
_WARNING_LEVELS: T.Dict[str, T.List[str]] = {
- '0': ['-A', 'warnings'],
+ '0': ['--cap-lints', 'allow'],
'1': [],
'2': [],
'3': ['-W', 'warnings'],
@@ -65,17 +96,20 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic
super().__init__([], exelist, version, for_machine, info,
is_cross=is_cross, full_version=full_version,
linker=linker)
+ self.rustup_run_and_args: T.Optional[T.Tuple[T.List[str], T.List[str]]] = get_rustup_run_and_args(exelist)
self.base_options.update({OptionKey(o) for o in ['b_colorout', 'b_ndebug']})
if 'link' in self.linker.id:
self.base_options.add(OptionKey('b_vscrt'))
self.native_static_libs: T.List[str] = []
+ self.is_beta = '-beta' in full_version
+ self.is_nightly = '-nightly' in full_version
def needs_static_linker(self) -> bool:
return False
- def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+ def sanity_check(self, work_dir: str, environment: Environment) -> None:
source_name = os.path.join(work_dir, 'sanity.rs')
- output_name = os.path.join(work_dir, 'rusttest')
+ output_name = os.path.join(work_dir, 'rusttest.exe')
cmdlist = self.exelist.copy()
with open(source_name, 'w', encoding='utf-8') as ofile:
@@ -136,11 +170,14 @@ def _native_static_libs(self, work_dir: str, source_name: str) -> None:
# are always part of C/C++ linkers. Rustc probably should not print
# them, pkg-config for example never specify them.
# FIXME: https://github.com/rust-lang/rust/issues/55120
- exclude = {'-lc', '-lgcc_s', '-lkernel32', '-ladvapi32'}
+ exclude = {'-lc', '-lgcc_s', '-lkernel32', '-ladvapi32', '/defaultlib:msvcrt'}
self.native_static_libs = [i for i in match.group(1).split() if i not in exclude]
def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
- return ['--dep-info', outfile]
+ return ['--emit', f'dep-info={outfile}']
+
+ def get_output_args(self, outputname: str) -> T.List[str]:
+ return ['--emit', f'link={outputname}']
@functools.lru_cache(maxsize=None)
def get_sysroot(self) -> str:
@@ -166,6 +203,20 @@ def get_debug_args(self, is_debug: bool) -> T.List[str]:
def get_optimization_args(self, optimization_level: str) -> T.List[str]:
return rust_optimization_args[optimization_level]
+ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+ rpath_paths: T.Tuple[str, ...], build_rpath: str,
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ args, to_remove = super().build_rpath_args(env, build_dir, from_dir, rpath_paths,
+ build_rpath, install_rpath)
+
+ # ... but then add rustc's sysroot to account for rustup
+ # installations
+ rustc_rpath_args = []
+ for arg in args:
+ rustc_rpath_args.append('-C')
+ rustc_rpath_args.append(f'link-arg={arg}:{self.get_target_libdir()}')
+ return rustc_rpath_args, to_remove
+
def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
build_dir: str) -> T.List[str]:
for idx, i in enumerate(parameter_list):
@@ -178,9 +229,6 @@ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
return parameter_list
- def get_output_args(self, outputname: str) -> T.List[str]:
- return ['-o', outputname]
-
@classmethod
def use_linker_args(cls, linker: str, version: str) -> T.List[str]:
return ['-C', f'linker={linker}']
@@ -190,11 +238,16 @@ def use_linker_args(cls, linker: str, version: str) -> T.List[str]:
# use_linker_args method instead.
def get_options(self) -> MutableKeyedOptionDictType:
- return dict((self.create_option(options.UserComboOption,
- self.form_compileropt_key('std'),
- 'Rust edition to use',
- ['none', '2015', '2018', '2021'],
- 'none'),))
+ opts = super().get_options()
+
+ key = self.form_compileropt_key('std')
+ opts[key] = options.UserComboOption(
+ self.make_option_name(key),
+ 'Rust edition to use',
+ 'none',
+ choices=['none', '2015', '2018', '2021', '2024'])
+
+ return opts
def get_dependency_compile_args(self, dep: 'Dependency') -> T.List[str]:
# Rust doesn't have dependency compile arguments so simply return
@@ -202,10 +255,10 @@ def get_dependency_compile_args(self, dep: 'Dependency') -> T.List[str]:
# provided by the linker flags.
return []
- def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args = []
- key = self.form_compileropt_key('std')
- std = options.get_value(key)
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
if std != 'none':
args.append('--edition=' + std)
return args
@@ -226,6 +279,8 @@ def get_colorout_args(self, colortype: str) -> T.List[str]:
def get_linker_always_args(self) -> T.List[str]:
args: T.List[str] = []
+ # Rust is super annoying, calling -C link-arg foo does not work, it has
+ # to be -C link-arg=foo
for a in super().get_linker_always_args():
args.extend(['-C', f'link-arg={a}'])
return args
@@ -252,6 +307,41 @@ def get_assert_args(self, disable: bool, env: 'Environment') -> T.List[str]:
action = "no" if disable else "yes"
return ['-C', f'debug-assertions={action}', '-C', 'overflow-checks=no']
+ def get_rust_tool(self, name: str, env: Environment) -> T.List[str]:
+ if self.rustup_run_and_args:
+ rustup_exelist, args = self.rustup_run_and_args
+ # do not use extend so that exelist is copied
+ exelist = rustup_exelist + [name]
+ else:
+ exelist = [name]
+ args = self.get_exe_args()
+
+ from ..programs import find_external_program
+ for prog in find_external_program(env, self.for_machine, exelist[0], exelist[0],
+ [exelist[0]], allow_default_for_cross=False):
+ exelist[0] = prog.path
+ break
+ else:
+ return []
+
+ return exelist + args
+
+ def has_multi_arguments(self, args: T.List[str], env: Environment) -> T.Tuple[bool, bool]:
+ return self.compiles('fn main() { std::process::exit(0) }\n', env, extra_args=args, mode=CompileCheckMode.COMPILE)
+
+ def has_multi_link_arguments(self, args: T.List[str], env: Environment) -> T.Tuple[bool, bool]:
+ args = self.linker.fatal_warnings() + args
+ return self.compiles('fn main() { std::process::exit(0) }\n', env, extra_args=args, mode=CompileCheckMode.LINK)
+
+ @functools.lru_cache(maxsize=None)
+ def get_rustdoc(self, env: 'Environment') -> T.Optional[RustdocTestCompiler]:
+ exelist = self.get_rust_tool('rustdoc', env)
+ if not exelist:
+ return None
+
+ return RustdocTestCompiler(exelist, self.version, self.for_machine,
+ self.is_cross, self.info, full_version=self.full_version,
+ linker=self.linker)
class ClippyRustCompiler(RustCompiler):
@@ -261,3 +351,21 @@ class ClippyRustCompiler(RustCompiler):
"""
id = 'clippy-driver rustc'
+
+
+class RustdocTestCompiler(RustCompiler):
+
+ """We invoke Rustdoc to run doctests. Some of the flags
+ are different from rustc and some (e.g. --emit link) are
+ ignored."""
+
+ id = 'rustdoc --test'
+
+ def get_debug_args(self, is_debug: bool) -> T.List[str]:
+ return []
+
+ def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
+ return []
+
+ def get_output_args(self, outputname: str) -> T.List[str]:
+ return []
diff --git a/mesonbuild/compilers/swift.py b/mesonbuild/compilers/swift.py
index a2525f927f28..8410fbbda77e 100644
--- a/mesonbuild/compilers/swift.py
+++ b/mesonbuild/compilers/swift.py
@@ -3,14 +3,19 @@
from __future__ import annotations
+import re
import subprocess, os.path
import typing as T
-from ..mesonlib import EnvironmentException
-
+from .. import mlog, options
+from ..mesonlib import EnvironmentException, MesonException, version_compare
from .compilers import Compiler, clike_debug_args
+
if T.TYPE_CHECKING:
+ from .. import build
+ from ..options import MutableKeyedOptionDictType
+ from ..dependencies import Dependency
from ..envconfig import MachineInfo
from ..environment import Environment
from ..linkers.linkers import DynamicLinker
@@ -39,6 +44,17 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic
is_cross=is_cross, full_version=full_version,
linker=linker)
self.version = version
+ if self.info.is_darwin():
+ try:
+ self.sdk_path = subprocess.check_output(['xcrun', '--show-sdk-path'],
+ universal_newlines=True,
+ encoding='utf-8', stderr=subprocess.STDOUT).strip()
+ except subprocess.CalledProcessError as e:
+ mlog.error("Failed to get Xcode SDK path: " + e.output)
+ raise MesonException('Xcode license not accepted yet. Run `sudo xcodebuild -license`.')
+ except FileNotFoundError:
+ mlog.error('xcrun not found. Install Xcode to compile Swift code.')
+ raise MesonException('Could not detect Xcode. Please install it to compile Swift code.')
def get_pic_args(self) -> T.List[str]:
return []
@@ -55,6 +71,22 @@ def get_werror_args(self) -> T.List[str]:
def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
return ['-emit-dependencies']
+ def get_dependency_compile_args(self, dep: Dependency) -> T.List[str]:
+ args = dep.get_compile_args()
+ # Some deps might sneak in a hardcoded path to an older macOS SDK, which can
+ # cause compilation errors. Let's replace all .sdk paths with the current one.
+ # SwiftPM does it this way: https://github.com/swiftlang/swift-package-manager/pull/6772
+        # Not tested on anything other than macOS for now.
+ if not self.info.is_darwin():
+ return args
+ pattern = re.compile(r'.*\/MacOSX[^\/]*\.sdk(\/.*|$)')
+ for i, arg in enumerate(args):
+ if arg.startswith('-I'):
+ match = pattern.match(arg)
+ if match:
+ args[i] = '-I' + self.sdk_path + match.group(1)
+ return args
+
def depfile_for_object(self, objfile: str) -> T.Optional[str]:
return os.path.splitext(objfile)[0] + '.' + self.get_depfile_suffix()
@@ -85,6 +117,36 @@ def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
def get_compile_only_args(self) -> T.List[str]:
return ['-c']
+ def get_options(self) -> MutableKeyedOptionDictType:
+ opts = super().get_options()
+
+ key = self.form_compileropt_key('std')
+ opts[key] = options.UserComboOption(
+ self.make_option_name(key),
+ 'Swift language version.',
+ 'none',
+ # List them with swiftc -frontend -swift-version ''
+ choices=['none', '4', '4.2', '5', '6'])
+
+ return opts
+
+ def get_option_std_args(self, target: build.BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
+ args: T.List[str] = []
+
+ std = self.get_compileropt_value('std', env, target, subproject)
+ assert isinstance(std, str)
+
+ if std != 'none':
+ args += ['-swift-version', std]
+
+ return args
+
+ def get_working_directory_args(self, path: str) -> T.Optional[T.List[str]]:
+ if version_compare(self.version, '<4.2'):
+ return None
+
+ return ['-working-directory', path]
+
def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
build_dir: str) -> T.List[str]:
for idx, i in enumerate(parameter_list):
diff --git a/mesonbuild/compilers/vala.py b/mesonbuild/compilers/vala.py
index a1d57b38cb8e..28861a60d348 100644
--- a/mesonbuild/compilers/vala.py
+++ b/mesonbuild/compilers/vala.py
@@ -14,11 +14,11 @@
if T.TYPE_CHECKING:
from ..arglist import CompilerArgs
- from ..coredata import KeyedOptionDictType
from ..envconfig import MachineInfo
from ..environment import Environment
from ..mesonlib import MachineChoice
from ..dependencies import Dependency
+ from ..build import BuildTarget
class ValaCompiler(Compiler):
@@ -31,6 +31,7 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic
self.version = version
self.base_options = {OptionKey('b_colorout')}
self.force_link = False
+ self._has_color_support = version_compare(self.version, '>=0.37.1')
def needs_static_linker(self) -> bool:
return False # Because compiles into C.
@@ -80,7 +81,7 @@ def get_werror_args(self) -> T.List[str]:
return ['--fatal-warnings']
def get_colorout_args(self, colortype: str) -> T.List[str]:
- if version_compare(self.version, '>=0.37.1'):
+ if self._has_color_support:
return ['--color=' + colortype]
return []
@@ -140,7 +141,7 @@ def thread_flags(self, env: 'Environment') -> T.List[str]:
def thread_link_flags(self, env: 'Environment') -> T.List[str]:
return []
- def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_link_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
return []
def build_wrapper_args(self, env: 'Environment',
diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py
index 84c352199bfd..3b17e74cee62 100644
--- a/mesonbuild/coredata.py
+++ b/mesonbuild/coredata.py
@@ -1,6 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
-# Copyright 2013-2024 The Meson development team
-# Copyright © 2023-2024 Intel Corporation
+# Copyright 2013-2025 The Meson development team
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
@@ -9,16 +9,15 @@
from . import mlog, options
import pickle, os, uuid
import sys
+from functools import lru_cache
from itertools import chain
-from pathlib import PurePath
-from collections import OrderedDict, abc
-import dataclasses
+from collections import OrderedDict
+import textwrap
from .mesonlib import (
- MesonBugException,
MesonException, MachineChoice, PerMachine,
PerMachineDefaultable,
- stringlistify,
+ default_prefix,
pickle_load
)
@@ -34,7 +33,6 @@
if T.TYPE_CHECKING:
import argparse
from typing_extensions import Protocol
- from typing import Any
from . import dependencies
from .compilers.compilers import Compiler, CompileResult, RunResult, CompileCheckMode
@@ -43,7 +41,8 @@
from .mesonlib import FileOrString
from .cmake.traceparser import CMakeCacheEntry
from .interpreterbase import SubProject
- from .options import UserOption
+ from .options import ElementaryOptionValues, MutableKeyedOptionDictType
+ from .build import BuildTarget
class SharedCMDOptions(Protocol):
@@ -60,9 +59,7 @@ class SharedCMDOptions(Protocol):
cross_file: T.List[str]
native_file: T.List[str]
- OptionDictType = T.Union[T.Dict[str, 'options.UserOption[T.Any]'], 'OptionsView']
- MutableKeyedOptionDictType = T.Dict['OptionKey', 'options.UserOption[T.Any]']
- KeyedOptionDictType = T.Union['options.OptionStore', 'OptionsView']
+ OptionDictType = T.Dict[str, options.AnyOptionType]
CompilerCheckCacheKey = T.Tuple[T.Tuple[str, ...], str, FileOrString, T.Tuple[str, ...], CompileCheckMode]
# code, args
RunCheckCacheKey = T.Tuple[str, T.Tuple[str, ...]]
@@ -74,7 +71,7 @@ class SharedCMDOptions(Protocol):
#
# Pip requires that RCs are named like this: '0.1.0.rc1'
# But the corresponding Git tag needs to be '0.1.0rc1'
-version = '1.6.1'
+version = '1.8.3'
# The next stable version when we are in dev. This is used to allow projects to
# require meson version >=1.2.0 when using 1.1.99. FeatureNew won't warn when
@@ -146,16 +143,16 @@ class DependencyCache:
successfully lookup by providing a simple get/put interface.
"""
- def __init__(self, builtins: 'KeyedOptionDictType', for_machine: MachineChoice):
+ def __init__(self, builtins: options.OptionStore, for_machine: MachineChoice):
self.__cache: T.MutableMapping[TV_DepID, DependencySubCache] = OrderedDict()
self.__builtins = builtins
- self.__pkg_conf_key = OptionKey('pkg_config_path', machine=for_machine)
- self.__cmake_key = OptionKey('cmake_prefix_path', machine=for_machine)
+ self.__pkg_conf_key = options.OptionKey('pkg_config_path', machine=for_machine)
+ self.__cmake_key = options.OptionKey('cmake_prefix_path', machine=for_machine)
def __calculate_subkey(self, type_: DependencyCacheType) -> T.Tuple[str, ...]:
data: T.Dict[DependencyCacheType, T.List[str]] = {
- DependencyCacheType.PKG_CONFIG: stringlistify(self.__builtins.get_value(self.__pkg_conf_key)),
- DependencyCacheType.CMAKE: stringlistify(self.__builtins.get_value(self.__cmake_key)),
+ DependencyCacheType.PKG_CONFIG: T.cast('T.List[str]', self.__builtins.get_value_for(self.__pkg_conf_key)),
+ DependencyCacheType.CMAKE: T.cast('T.List[str]', self.__builtins.get_value_for(self.__cmake_key)),
DependencyCacheType.OTHER: [],
}
assert type_ in data, 'Someone forgot to update subkey calculations for a new type'
@@ -259,9 +256,9 @@ def __init__(self, cmd_options: SharedCMDOptions, scratch_dir: str, meson_comman
self.meson_command = meson_command
self.target_guids = {}
self.version = version
- self.optstore = options.OptionStore()
self.cross_files = self.__load_config_files(cmd_options, scratch_dir, 'cross')
self.compilers: PerMachine[T.Dict[str, Compiler]] = PerMachine(OrderedDict(), OrderedDict())
+ self.optstore = options.OptionStore(self.is_cross_build())
# Stores the (name, hash) of the options file, The name will be either
# "meson_options.txt" or "meson.options".
@@ -288,7 +285,7 @@ def __init__(self, cmd_options: SharedCMDOptions, scratch_dir: str, meson_comman
# Only to print a warning if it changes between Meson invocations.
self.config_files = self.__load_config_files(cmd_options, scratch_dir, 'native')
self.builtin_options_libdir_cross_fixup()
- self.init_builtins('')
+ self.init_builtins()
@staticmethod
def __load_config_files(cmd_options: SharedCMDOptions, scratch_dir: str, ftype: str) -> T.List[str]:
@@ -317,15 +314,15 @@ def __load_config_files(cmd_options: SharedCMDOptions, scratch_dir: str, ftype:
# in this case we've been passed some kind of pipe, copy
# the contents of that file into the meson private (scratch)
# directory so that it can be re-read when wiping/reconfiguring
- copy = os.path.join(scratch_dir, f'{uuid.uuid4()}.{ftype}.ini')
+ fcopy = os.path.join(scratch_dir, f'{uuid.uuid4()}.{ftype}.ini')
with open(f, encoding='utf-8') as rf:
- with open(copy, 'w', encoding='utf-8') as wf:
+ with open(fcopy, 'w', encoding='utf-8') as wf:
wf.write(rf.read())
- real.append(copy)
+ real.append(fcopy)
# Also replace the command line argument, as the pipe
# probably won't exist on reconfigure
- filenames[i] = copy
+ filenames[i] = fcopy
continue
if sys.platform != 'win32':
paths = [
@@ -355,78 +352,33 @@ def builtin_options_libdir_cross_fixup(self) -> None:
if self.cross_files:
options.BUILTIN_OPTIONS[OptionKey('libdir')].default = 'lib'
- def sanitize_prefix(self, prefix: str) -> str:
- prefix = os.path.expanduser(prefix)
- if not os.path.isabs(prefix):
- raise MesonException(f'prefix value {prefix!r} must be an absolute path')
- if prefix.endswith('/') or prefix.endswith('\\'):
- # On Windows we need to preserve the trailing slash if the
- # string is of type 'C:\' because 'C:' is not an absolute path.
- if len(prefix) == 3 and prefix[1] == ':':
- pass
- # If prefix is a single character, preserve it since it is
- # the root directory.
- elif len(prefix) == 1:
- pass
- else:
- prefix = prefix[:-1]
- return prefix
-
- def sanitize_dir_option_value(self, prefix: str, option: OptionKey, value: T.Any) -> T.Any:
- '''
- If the option is an installation directory option, the value is an
- absolute path and resides within prefix, return the value
- as a path relative to the prefix. Otherwise, return it as is.
-
- This way everyone can do f.ex, get_option('libdir') and usually get
- the library directory relative to prefix, even though it really
- should not be relied upon.
- '''
- try:
- value = PurePath(value)
- except TypeError:
- return value
- if option.name.endswith('dir') and value.is_absolute() and \
- option not in options.BUILTIN_DIR_NOPREFIX_OPTIONS:
- try:
- # Try to relativize the path.
- value = value.relative_to(prefix)
- except ValueError:
- # Path is not relative, let’s keep it as is.
- pass
- if '..' in value.parts:
- raise MesonException(
- f'The value of the \'{option}\' option is \'{value}\' but '
- 'directory options are not allowed to contain \'..\'.\n'
- f'If you need a path outside of the {prefix!r} prefix, '
- 'please use an absolute path.'
- )
- # .as_posix() keeps the posix-like file separators Meson uses.
- return value.as_posix()
-
- def init_builtins(self, subproject: str) -> None:
+ def init_builtins(self) -> None:
# Create builtin options with default values
for key, opt in options.BUILTIN_OPTIONS.items():
- self.add_builtin_option(self.optstore, key.evolve(subproject=subproject), opt)
+ self.add_builtin_option(self.optstore, key, opt)
for for_machine in iter(MachineChoice):
for key, opt in options.BUILTIN_OPTIONS_PER_MACHINE.items():
- self.add_builtin_option(self.optstore, key.evolve(subproject=subproject, machine=for_machine), opt)
+ self.add_builtin_option(self.optstore, key.evolve(machine=for_machine), opt)
@staticmethod
- def add_builtin_option(opts_map: 'MutableKeyedOptionDictType', key: OptionKey,
- opt: 'options.BuiltinOption') -> None:
+ def add_builtin_option(optstore: options.OptionStore, key: OptionKey,
+ opt: options.AnyOptionType) -> None:
+ # Create a copy of the object, as we're going to mutate it
+ opt = copy.copy(opt)
if key.subproject:
if opt.yielding:
# This option is global and not per-subproject
return
- value = opts_map.get_value(key.as_root())
else:
- value = None
- if key.has_module_prefix():
- modulename = key.get_module_prefix()
- opts_map.add_module_option(modulename, key, opt.init_option(key, value, options.default_prefix()))
+ new_value = options.argparse_prefixed_default(
+ opt, key, default_prefix())
+ opt.set_value(new_value)
+
+ modulename = key.get_module_prefix()
+ if modulename:
+ optstore.add_module_option(modulename, key, opt)
else:
- opts_map.add_system_option(key, opt.init_option(key, value, options.default_prefix()))
+ optstore.add_system_option(key, opt)
def init_backend_options(self, backend_name: str) -> None:
if backend_name == 'ninja':
@@ -434,75 +386,44 @@ def init_backend_options(self, backend_name: str) -> None:
'backend_max_links',
'Maximum number of linker processes to run or 0 for no '
'limit',
- (0, None, 0)))
+ 0,
+ min_value=0))
elif backend_name.startswith('vs'):
self.optstore.add_system_option('backend_startup_project', options.UserStringOption(
'backend_startup_project',
'Default project to execute in Visual Studio',
''))
- def get_option(self, key: OptionKey) -> T.Union[T.List[str], str, int, bool]:
- try:
- v = self.optstore.get_value(key)
- return v
- except KeyError:
- pass
-
- try:
- v = self.optstore.get_value_object(key.as_root())
- if v.yielding:
- return v.value
- except KeyError:
- pass
-
- raise MesonException(f'Tried to get unknown builtin option {str(key)}')
+ def get_option_for_target(self, target: 'BuildTarget', key: T.Union[str, OptionKey]) -> ElementaryOptionValues:
+ if isinstance(key, str):
+ assert ':' not in key
+ newkey = OptionKey(key, target.subproject)
+ else:
+ newkey = key
+ if newkey.subproject != target.subproject:
+ # FIXME: this should be an error. The caller needs to ensure that
+ # key and target have the same subproject for consistency.
+            # For now, just do this to get things going.
+ newkey = newkey.evolve(subproject=target.subproject)
+ (option_object, value) = self.optstore.get_value_object_and_value_for(newkey)
+ override = target.get_override(newkey.name)
+ if override is not None:
+ return option_object.validate_value(override)
+ return value
+
+ def set_from_configure_command(self, options: SharedCMDOptions) -> bool:
+ unset_opts = getattr(options, 'unset_opts', [])
+ all_D = options.projectoptions[:]
+ for keystr, valstr in options.cmd_line_options.items():
+ all_D.append(f'{keystr}={valstr}')
+ return self.optstore.set_from_configure_command(all_D, unset_opts)
def set_option(self, key: OptionKey, value, first_invocation: bool = False) -> bool:
dirty = False
- if self.optstore.is_builtin_option(key):
- if key.name == 'prefix':
- value = self.sanitize_prefix(value)
- else:
- prefix = self.optstore.get_value('prefix')
- value = self.sanitize_dir_option_value(prefix, key, value)
-
try:
- opt = self.optstore.get_value_object(key)
+ changed = self.optstore.set_option(key, value, first_invocation)
except KeyError:
raise MesonException(f'Tried to set unknown builtin option {str(key)}')
-
- if opt.deprecated is True:
- mlog.deprecation(f'Option {key.name!r} is deprecated')
- elif isinstance(opt.deprecated, list):
- for v in opt.listify(value):
- if v in opt.deprecated:
- mlog.deprecation(f'Option {key.name!r} value {v!r} is deprecated')
- elif isinstance(opt.deprecated, dict):
- def replace(v):
- newvalue = opt.deprecated.get(v)
- if newvalue is not None:
- mlog.deprecation(f'Option {key.name!r} value {v!r} is replaced by {newvalue!r}')
- return newvalue
- return v
- newvalue = [replace(v) for v in opt.listify(value)]
- value = ','.join(newvalue)
- elif isinstance(opt.deprecated, str):
- # Option is deprecated and replaced by another. Note that a project
- # option could be replaced by a built-in or module option, which is
- # why we use OptionKey.from_string(newname) instead of
- # key.evolve(newname). We set the value on both the old and new names,
- # assuming they accept the same value. That could for example be
- # achieved by adding the values from old option as deprecated on the
- # new option, for example in the case of boolean option is replaced
- # by a feature option with a different name.
- newname = opt.deprecated
- newkey = OptionKey.from_string(newname).evolve(subproject=key.subproject)
- mlog.deprecation(f'Option {key.name!r} is replaced by {newname!r}')
- dirty |= self.set_option(newkey, value, first_invocation)
-
- changed = opt.set_value(value)
- if changed and opt.readonly and not first_invocation:
- raise MesonException(f'Tried modify read only option {str(key)!r}')
dirty |= changed
if key.name == 'buildtype':
@@ -518,7 +439,7 @@ def clear_cache(self) -> None:
def get_nondefault_buildtype_args(self) -> T.List[T.Union[T.Tuple[str, str, str], T.Tuple[str, bool, bool]]]:
result: T.List[T.Union[T.Tuple[str, str, str], T.Tuple[str, bool, bool]]] = []
- value = self.optstore.get_value('buildtype')
+ value = self.optstore.get_value_for('buildtype')
if value == 'plain':
opt = 'plain'
debug = False
@@ -537,8 +458,8 @@ def get_nondefault_buildtype_args(self) -> T.List[T.Union[T.Tuple[str, str, str]
else:
assert value == 'custom'
return []
- actual_opt = self.optstore.get_value('optimization')
- actual_debug = self.optstore.get_value('debug')
+ actual_opt = self.optstore.get_value_for('optimization')
+ actual_debug = self.optstore.get_value_for('debug')
if actual_opt != opt:
result.append(('optimization', actual_opt, opt))
if actual_debug != debug:
@@ -567,64 +488,36 @@ def _set_others_from_buildtype(self, value: str) -> bool:
assert value == 'custom'
return False
- dirty |= self.optstore.set_value('optimization', opt)
- dirty |= self.optstore.set_value('debug', debug)
+ dirty |= self.optstore.set_option(OptionKey('optimization'), opt)
+ dirty |= self.optstore.set_option(OptionKey('debug'), debug)
return dirty
- def is_per_machine_option(self, optname: OptionKey) -> bool:
- if optname.as_host() in options.BUILTIN_OPTIONS_PER_MACHINE:
- return True
- return self.optstore.is_compiler_option(optname)
-
def get_external_args(self, for_machine: MachineChoice, lang: str) -> T.List[str]:
# mypy cannot analyze type of OptionKey
key = OptionKey(f'{lang}_args', machine=for_machine)
return T.cast('T.List[str]', self.optstore.get_value(key))
+ @lru_cache(maxsize=None)
def get_external_link_args(self, for_machine: MachineChoice, lang: str) -> T.List[str]:
# mypy cannot analyze type of OptionKey
- key = OptionKey(f'{lang}_link_args', machine=for_machine)
- return T.cast('T.List[str]', self.optstore.get_value(key))
-
- def update_project_options(self, project_options: 'MutableKeyedOptionDictType', subproject: SubProject) -> None:
- for key, value in project_options.items():
- if key not in self.optstore:
- self.optstore.add_project_option(key, value)
- continue
- if key.subproject != subproject:
- raise MesonBugException(f'Tried to set an option for subproject {key.subproject} from {subproject}!')
-
- oldval = self.optstore.get_value_object(key)
- if type(oldval) is not type(value):
- self.optstore.set_value(key, value.value)
- elif oldval.choices != value.choices:
- # If the choices have changed, use the new value, but attempt
- # to keep the old options. If they are not valid keep the new
- # defaults but warn.
- self.optstore.set_value_object(key, value)
- try:
- value.set_value(oldval.value)
- except MesonException:
- mlog.warning(f'Old value(s) of {key} are no longer valid, resetting to default ({value.value}).',
- fatal=False)
-
- # Find any extranious keys for this project and remove them
- for key in self.optstore.keys() - project_options.keys():
- if self.optstore.is_project_option(key) and key.subproject == subproject:
- self.optstore.remove(key)
+ linkkey = OptionKey(f'{lang}_link_args', machine=for_machine)
+ return T.cast('T.List[str]', self.optstore.get_value_for(linkkey))
def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) -> bool:
if when_building_for == MachineChoice.BUILD:
return False
return len(self.cross_files) > 0
- def copy_build_options_from_regular_ones(self) -> bool:
+ def copy_build_options_from_regular_ones(self, shut_up_pylint: bool = True) -> bool:
+ # FIXME, needs cross compilation support.
+ if shut_up_pylint:
+ return False
dirty = False
assert not self.is_cross_build()
for k in options.BUILTIN_OPTIONS_PER_MACHINE:
- o = self.optstore.get_value_object(k)
- dirty |= self.optstore.set_value(k.as_build(), o.value)
+ o = self.optstore.get_value_object_for(k.name)
+ dirty |= self.optstore.set_option(k, o.value, True)
for bk, bv in self.optstore.items():
if bk.machine is MachineChoice.BUILD:
hk = bk.as_host()
@@ -643,17 +536,17 @@ def set_options(self, opts_to_set: T.Dict[OptionKey, T.Any], subproject: str = '
# Set prefix first because it's needed to sanitize other options
pfk = OptionKey('prefix')
if pfk in opts_to_set:
- prefix = self.sanitize_prefix(opts_to_set[pfk])
- dirty |= self.optstore.set_value('prefix', prefix)
+ prefix = self.optstore.sanitize_prefix(opts_to_set[pfk])
for key in options.BUILTIN_DIR_NOPREFIX_OPTIONS:
if key not in opts_to_set:
- dirty |= self.optstore.set_value(key, options.BUILTIN_OPTIONS[key].prefixed_default(key, prefix))
+ val = options.BUILTIN_OPTIONS[key].prefixed_default(key, prefix)
+ dirty |= self.optstore.set_option(key, val)
unknown_options: T.List[OptionKey] = []
for k, v in opts_to_set.items():
if k == pfk:
continue
- elif k in self.optstore:
+ elif k.evolve(subproject=None) in self.optstore:
dirty |= self.set_option(k, v, first_invocation)
elif k.machine != MachineChoice.BUILD and not self.optstore.is_compiler_option(k):
unknown_options.append(k)
@@ -664,7 +557,7 @@ def set_options(self, opts_to_set: T.Dict[OptionKey, T.Any], subproject: str = '
# refactor they will get per-subproject values.
really_unknown = []
for uo in unknown_options:
- topkey = uo.evolve(subproject='')
+ topkey = uo.as_root()
if topkey not in self.optstore:
really_unknown.append(uo)
unknown_options = really_unknown
@@ -678,108 +571,44 @@ def set_options(self, opts_to_set: T.Dict[OptionKey, T.Any], subproject: str = '
return dirty
- def set_default_options(self, default_options: T.MutableMapping[OptionKey, str], subproject: str, env: 'Environment') -> None:
- from .compilers import base_options
-
- # Main project can set default options on subprojects, but subprojects
- # can only set default options on themselves.
- # Preserve order: if env.options has 'buildtype' it must come after
- # 'optimization' if it is in default_options.
- options: T.MutableMapping[OptionKey, T.Any] = OrderedDict()
- for k, v in default_options.items():
- if not subproject or k.subproject == subproject:
- options[k] = v
- options.update(env.options)
- env.options = options
-
- # Create a subset of options, keeping only project and builtin
- # options for this subproject.
- # Language and backend specific options will be set later when adding
- # languages and setting the backend (builtin options must be set first
- # to know which backend we'll use).
- options = OrderedDict()
-
- for k, v in env.options.items():
- # If this is a subproject, don't use other subproject options
- if k.subproject and k.subproject != subproject:
- continue
- # If the option is a builtin and is yielding then it's not allowed per subproject.
- #
- # Always test this using the HOST machine, as many builtin options
- # are not valid for the BUILD machine, but the yielding value does
- # not differ between them even when they are valid for both.
- if subproject and self.optstore.is_builtin_option(k) and self.optstore.get_value_object(k.evolve(subproject='', machine=MachineChoice.HOST)).yielding:
- continue
- # Skip base, compiler, and backend options, they are handled when
- # adding languages and setting backend.
- if self.optstore.is_compiler_option(k) or self.optstore.is_backend_option(k):
- continue
- if self.optstore.is_base_option(k) and k.as_root() in base_options:
- # set_options will report unknown base options
- continue
- options[k] = v
-
- self.set_options(options, subproject=subproject, first_invocation=env.first_invocation)
-
def add_compiler_options(self, c_options: MutableKeyedOptionDictType, lang: str, for_machine: MachineChoice,
env: Environment, subproject: str) -> None:
for k, o in c_options.items():
- value = env.options.get(k)
- if value is not None:
- o.set_value(value)
- if not subproject:
- self.optstore.set_value_object(k, o) # override compiler option on reconfigure
- self.optstore.setdefault(k, o)
-
- if subproject:
- sk = k.evolve(subproject=subproject)
- value = env.options.get(sk) or value
- if value is not None:
- o.set_value(value)
- self.optstore.set_value_object(sk, o) # override compiler option on reconfigure
- self.optstore.setdefault(sk, o)
+ comp_key = OptionKey(f'{k.name}', None, for_machine)
+ if lang == 'objc' and k.name == 'c_std':
+ # For objective C, always fall back to c_std.
+ self.optstore.add_compiler_option('c', comp_key, o)
+ elif lang == 'objcpp' and k.name == 'cpp_std':
+ self.optstore.add_compiler_option('cpp', comp_key, o)
+ else:
+ self.optstore.add_compiler_option(lang, comp_key, o)
def add_lang_args(self, lang: str, comp: T.Type['Compiler'],
for_machine: MachineChoice, env: 'Environment') -> None:
"""Add global language arguments that are needed before compiler/linker detection."""
from .compilers import compilers
- # These options are all new at this point, because the compiler is
- # responsible for adding its own options, thus calling
- # `self.optstore.update()`` is perfectly safe.
- for gopt_key, gopt_valobj in compilers.get_global_options(lang, comp, for_machine, env).items():
- self.optstore.add_compiler_option(lang, gopt_key, gopt_valobj)
+ compilers.add_global_options(lang, comp, for_machine, env)
def process_compiler_options(self, lang: str, comp: Compiler, env: Environment, subproject: str) -> None:
- from . import compilers
-
self.add_compiler_options(comp.get_options(), lang, comp.for_machine, env, subproject)
- enabled_opts: T.List[OptionKey] = []
for key in comp.base_options:
if subproject:
skey = key.evolve(subproject=subproject)
else:
skey = key
if skey not in self.optstore:
- self.optstore.add_system_option(skey, copy.deepcopy(compilers.base_options[key]))
- if skey in env.options:
- self.optstore.set_value(skey, env.options[skey])
- enabled_opts.append(skey)
- elif subproject and key in env.options:
- self.optstore.set_value(skey, env.options[key])
- enabled_opts.append(skey)
- if subproject and key not in self.optstore:
- self.optstore.add_system_option(key, copy.deepcopy(self.optstore.get_value_object(skey)))
- elif skey in env.options:
- self.optstore.set_value(skey, env.options[skey])
- elif subproject and key in env.options:
- self.optstore.set_value(skey, env.options[key])
- self.emit_base_options_warnings(enabled_opts)
-
- def emit_base_options_warnings(self, enabled_opts: T.List[OptionKey]) -> None:
- if OptionKey('b_bitcode') in enabled_opts:
- mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. Incompatible options such as \'b_asneeded\' have been disabled.', fatal=False)
- mlog.warning('Please see https://mesonbuild.com/Builtin-options.html#Notes_about_Apple_Bitcode_support for more details.', fatal=False)
+ self.optstore.add_system_option(skey, copy.deepcopy(options.COMPILER_BASE_OPTIONS[key]))
+
+ self.emit_base_options_warnings()
+
+ def emit_base_options_warnings(self) -> None:
+ bcodekey = OptionKey('b_bitcode')
+ if bcodekey in self.optstore and self.optstore.get_value(bcodekey):
+ msg = textwrap.dedent('''Base option 'b_bitcode' is enabled, which is incompatible with many linker options.
+ Incompatible options such as 'b_asneeded' have been disabled.
+ Please see https://mesonbuild.com/Builtin-options.html#Notes_about_Apple_Bitcode_support for more details.''')
+ mlog.warning(msg, once=True, fatal=False)
def get_cmd_line_file(build_dir: str) -> str:
return os.path.join(build_dir, 'meson-private', 'cmd_line.txt')
@@ -867,24 +696,21 @@ def save(obj: CoreData, build_dir: str) -> str:
def register_builtin_arguments(parser: argparse.ArgumentParser) -> None:
for n, b in options.BUILTIN_OPTIONS.items():
- b.add_to_argparse(str(n), parser, '')
+ options.option_to_argparse(b, n, parser, '')
for n, b in options.BUILTIN_OPTIONS_PER_MACHINE.items():
- b.add_to_argparse(str(n), parser, ' (just for host machine)')
- b.add_to_argparse(str(n.as_build()), parser, ' (just for build machine)')
+ options.option_to_argparse(b, n, parser, ' (just for host machine)')
+ options.option_to_argparse(b, n.as_build(), parser, ' (just for build machine)')
parser.add_argument('-D', action='append', dest='projectoptions', default=[], metavar="option",
help='Set the value of an option, can be used several times to set multiple options.')
-def create_options_dict(options: T.List[str], subproject: str = '') -> T.Dict[OptionKey, str]:
+def create_options_dict(options: T.List[str], subproject: str = '') -> T.Dict[str, str]:
result: T.OrderedDict[OptionKey, str] = OrderedDict()
for o in options:
try:
(key, value) = o.split('=', 1)
except ValueError:
raise MesonException(f'Option {o!r} must have a value separated by equals sign.')
- k = OptionKey.from_string(key)
- if subproject:
- k = k.evolve(subproject=subproject)
- result[k] = value
+ result[key] = value
return result
def parse_cmd_line_options(args: SharedCMDOptions) -> None:
@@ -900,75 +726,12 @@ def parse_cmd_line_options(args: SharedCMDOptions) -> None:
value = getattr(args, name, None)
if value is not None:
if key in args.cmd_line_options:
- cmdline_name = options.BuiltinOption.argparse_name_to_arg(name)
+ cmdline_name = options.argparse_name_to_arg(name)
raise MesonException(
f'Got argument {name} as both -D{name} and {cmdline_name}. Pick one.')
- args.cmd_line_options[key] = value
+ args.cmd_line_options[key.name] = value
delattr(args, name)
-@dataclasses.dataclass
-class OptionsView(abc.Mapping):
- '''A view on an options dictionary for a given subproject and with overrides.
- '''
-
- # TODO: the typing here could be made more explicit using a TypeDict from
- # python 3.8 or typing_extensions
- original_options: T.Union[KeyedOptionDictType, 'dict[OptionKey, UserOption[Any]]']
- subproject: T.Optional[str] = None
- overrides: T.Optional[T.Mapping[OptionKey, T.Union[str, int, bool, T.List[str]]]] = dataclasses.field(default_factory=dict)
-
- def __getitem__(self, key: OptionKey) -> options.UserOption:
- # FIXME: This is fundamentally the same algorithm than interpreter.get_option_internal().
- # We should try to share the code somehow.
- key = key.evolve(subproject=self.subproject)
- if not isinstance(self.original_options, options.OptionStore):
- # This is only used by CUDA currently.
- # This entire class gets removed when option refactor
- # is finished.
- if '_' in key.name or key.lang is not None:
- is_project_option = False
- else:
- sys.exit(f'FAIL {key}.')
- else:
- is_project_option = self.original_options.is_project_option(key)
- if not is_project_option:
- opt = self.original_options.get(key)
- if opt is None or opt.yielding:
- key2 = key.as_root()
- # This hack goes away once wi start using OptionStore
- # to hold overrides.
- if isinstance(self.original_options, options.OptionStore):
- if key2 not in self.original_options:
- raise KeyError(f'{key} {key2}')
- opt = self.original_options.get_value_object(key2)
- else:
- opt = self.original_options[key2]
- else:
- opt = self.original_options[key]
- if opt.yielding:
- opt = self.original_options.get(key.as_root(), opt)
- if self.overrides:
- override_value = self.overrides.get(key.as_root())
- if override_value is not None:
- opt = copy.copy(opt)
- opt.set_value(override_value)
- return opt
-
- def get_value(self, key: T.Union[str, OptionKey]):
- if isinstance(key, str):
- key = OptionKey(key)
- return self[key].value
-
- def set_value(self, key: T.Union[str, OptionKey], value: T.Union[str, int, bool, T.List[str]]):
- if isinstance(key, str):
- key = OptionKey(key)
- self.overrides[key] = value
-
- def __iter__(self) -> T.Iterator[OptionKey]:
- return iter(self.original_options)
-
- def __len__(self) -> int:
- return len(self.original_options)
FORBIDDEN_TARGET_NAMES = frozenset({
'clean',
diff --git a/mesonbuild/dependencies/__init__.py b/mesonbuild/dependencies/__init__.py
index 4552987898c0..7262bc1bdaec 100644
--- a/mesonbuild/dependencies/__init__.py
+++ b/mesonbuild/dependencies/__init__.py
@@ -7,7 +7,6 @@
ExternalLibrary, DependencyException, DependencyMethods,
BuiltinDependency, SystemDependency, get_leaf_external_dependencies)
from .detect import find_external_dependency, get_dep_identifier, packages, _packages_accept_language
-from .blas_lapack import openblas_factory
__all__ = [
@@ -222,6 +221,7 @@ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.
'shaderc': 'misc',
'iconv': 'misc',
'intl': 'misc',
+ 'atomic': 'misc',
'dl': 'misc',
'openssl': 'misc',
'libcrypto': 'misc',
diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py
index ed6138a7ee0b..38bfc0822731 100644
--- a/mesonbuild/dependencies/base.py
+++ b/mesonbuild/dependencies/base.py
@@ -1,6 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2013-2018 The Meson development team
-# Copyright © 2024 Intel Corporation
+# Copyright © 2024-2025 Intel Corporation
# This file contains the detection logic for external dependencies.
# Custom logic for several other packages are in separate files.
@@ -111,7 +111,7 @@ def __init__(self, type_name: DependencyTypeName, kwargs: T.Dict[str, T.Any]) ->
# This allows two Dependencies to be compared even after being copied.
# The purpose is to allow the name to be changed, but still have a proper comparison
self._id = uuid.uuid4().int
- self.name = f'dep{id(self)}'
+ self.name = f'dep{self._id}'
self.version: T.Optional[str] = None
self.language: T.Optional[str] = None # None means C-like
self.is_found = False
@@ -278,7 +278,8 @@ def __init__(self, version: str, incdirs: T.List['IncludeDirs'], compile_args: T
extra_files: T.Sequence[mesonlib.File],
ext_deps: T.List[Dependency], variables: T.Dict[str, str],
d_module_versions: T.List[T.Union[str, int]], d_import_dirs: T.List['IncludeDirs'],
- objects: T.List['ExtractedObjects']):
+ objects: T.List['ExtractedObjects'],
+ name: T.Optional[str] = None):
super().__init__(DependencyTypeName('internal'), {})
self.version = version
self.is_found = True
@@ -296,6 +297,8 @@ def __init__(self, version: str, incdirs: T.List['IncludeDirs'], compile_args: T
self.d_features['versions'] = d_module_versions
if d_import_dirs:
self.d_features['import_dirs'] = d_import_dirs
+ if name:
+ self.name = name
def __deepcopy__(self, memo: T.Dict[int, 'InternalDependency']) -> 'InternalDependency':
result = self.__class__.__new__(self.__class__)
@@ -335,7 +338,7 @@ def get_partial_dependency(self, *, compile_args: bool = False,
return InternalDependency(
self.version, final_includes, final_compile_args,
final_link_args, final_libraries, final_whole_libraries,
- final_sources, final_extra_files, final_deps, self.variables, [], [], [])
+ final_sources, final_extra_files, final_deps, self.variables, [], [], [], self.name)
def get_include_dirs(self) -> T.List['IncludeDirs']:
return self.include_directories
@@ -368,14 +371,14 @@ def generate_link_whole_dependency(self) -> Dependency:
def get_as_static(self, recursive: bool) -> InternalDependency:
new_dep = copy.copy(self)
- new_dep.libraries = [lib.get('static') for lib in self.libraries]
+ new_dep.libraries = [lib.get('static', recursive) for lib in self.libraries]
if recursive:
new_dep.ext_deps = [dep.get_as_static(True) for dep in self.ext_deps]
return new_dep
def get_as_shared(self, recursive: bool) -> InternalDependency:
new_dep = copy.copy(self)
- new_dep.libraries = [lib.get('shared') for lib in self.libraries]
+ new_dep.libraries = [lib.get('shared', recursive) for lib in self.libraries]
if recursive:
new_dep.ext_deps = [dep.get_as_shared(True) for dep in self.ext_deps]
return new_dep
@@ -400,7 +403,7 @@ def __init__(self, type_name: DependencyTypeName, environment: 'Environment', kw
self.version_reqs: T.Optional[T.List[str]] = version_reqs
self.required = kwargs.get('required', True)
self.silent = kwargs.get('silent', False)
- self.static = kwargs.get('static', self.env.coredata.get_option(OptionKey('prefer_static')))
+ self.static = kwargs.get('static', self.env.coredata.optstore.get_value_for(OptionKey('prefer_static')))
self.libtype = LibType.STATIC if self.static else LibType.PREFER_SHARED
if not isinstance(self.static, bool):
raise DependencyException('Static keyword must be boolean')
diff --git a/mesonbuild/dependencies/blas_lapack.py b/mesonbuild/dependencies/blas_lapack.py
index d86f4d6ffa63..efdedd2b5147 100644
--- a/mesonbuild/dependencies/blas_lapack.py
+++ b/mesonbuild/dependencies/blas_lapack.py
@@ -26,7 +26,7 @@
from ..mesonlib import MachineChoice
from ..options import OptionKey
-from .base import DependencyMethods, SystemDependency
+from .base import DependencyMethods, SystemDependency, DependencyException
from .cmake import CMakeDependency
from .detect import packages
from .factory import DependencyFactory, factory_methods
@@ -34,6 +34,7 @@
if T.TYPE_CHECKING:
from ..environment import Environment
+ from . factory import DependencyGenerator
"""
TODO: how to select BLAS interface layer (LP64, ILP64)?
@@ -303,7 +304,7 @@
"""
-def check_blas_machine_file(self, name: str, props: dict) -> T.Tuple[bool, T.List[str]]:
+def check_blas_machine_file(name: str, props: dict) -> T.Tuple[bool, T.List[str]]:
# TBD: do we need to support multiple extra dirs?
incdir = props.get(f'{name}_includedir')
assert incdir is None or isinstance(incdir, str)
@@ -364,11 +365,11 @@ def check_symbols(self, compile_args, suffix=None, check_cblas=True,
prototypes = "".join(f"void {symbol}{suffix}();\n" for symbol in symbols)
calls = " ".join(f"{symbol}{suffix}();\n" for symbol in symbols)
code = (f"{prototypes}"
- "int main(int argc, const char *argv[])\n"
- "{\n"
+ "int main(int argc, const char *argv[])\n"
+ "{\n"
f" {calls}"
- " return 0;\n"
- "}"
+ " return 0;\n"
+ "}"
)
code = '''#ifdef __cplusplus
extern "C" {
@@ -720,7 +721,6 @@ def detect_lapack_machine_file(self, props: dict) -> None:
self.detect([libdir], [incdir])
-
class AccelerateSystemDependency(BLASLAPACKMixin, SystemDependency):
"""
Accelerate is always installed on macOS, and not available on other OSes.
@@ -781,7 +781,6 @@ def detect(self, kwargs: T.Dict[str, T.Any]) -> None:
# We won't check symbols here, because Accelerate is built in a consistent fashion
# with known symbol mangling, unlike OpenBLAS or Netlib BLAS/LAPACK.
- return None
def get_symbol_suffix(self) -> str:
return '$NEWLAPACK' if self.interface == 'lp64' else '$NEWLAPACK$ILP64'
@@ -804,7 +803,7 @@ def parse_mkl_options(self, kwargs: T.Dict[str, T.Any]) -> None:
if not threading_module:
self.threading = 'iomp'
elif len(threading_module) > 1:
- raise mesonlib.MesonException(f'Multiple threading arguments: {threading_modules}')
+ raise mesonlib.MesonException(f'Multiple threading arguments: {threading_module}')
else:
# We have a single threading option specified - validate and process it
opt = threading_module[0]
@@ -818,7 +817,7 @@ def parse_mkl_options(self, kwargs: T.Dict[str, T.Any]) -> None:
if not sdl_module:
self.use_sdl = 'auto'
elif len(sdl_module) > 1:
- raise mesonlib.MesonException(f'Multiple sdl arguments: {threading_modules}')
+ raise mesonlib.MesonException(f'Multiple sdl arguments: {sdl_module}')
else:
# We have a single sdl option specified - validate and process it
opt = sdl_module[0]
@@ -845,8 +844,6 @@ def parse_mkl_options(self, kwargs: T.Dict[str, T.Any]) -> None:
raise mesonlib.MesonException(f'Linking SDL implies using LP64 and Intel OpenMP, found '
f'conflicting options: {self.interface}, {self.threading}')
- return None
-
class MKLPkgConfigDependency(BLASLAPACKMixin, MKLMixin, PkgConfigDependency):
"""
@@ -874,7 +871,7 @@ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]) ->
# available before the .pc file for SDL
self.use_sdl = False
- static_opt = kwargs.get('static', env.coredata.get_option(OptionKey('prefer_static')))
+ static_opt = kwargs.get('static', env.coredata.optstore.get_value_for(OptionKey('prefer_static')))
libtype = 'static' if static_opt else 'dynamic'
if self.use_sdl:
@@ -896,7 +893,6 @@ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.
if self.use_sdl:
self.detect_sdl()
- return None
def detect_sdl(self) -> None:
# Use MKLROOT in addition to standard libdir(s)
@@ -926,7 +922,7 @@ def detect_sdl(self) -> None:
self.is_found = True
self.compile_args += incdir_args
self.link_args += link_arg
- if not sys.platform == 'win32':
+ if sys.platform != 'win32':
self.link_args += ['-lpthread', '-lm', '-ldl']
# Determine MKL version
diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py
index 870c0b16b2c3..662f9851b18c 100644
--- a/mesonbuild/dependencies/boost.py
+++ b/mesonbuild/dependencies/boost.py
@@ -341,7 +341,7 @@ def get_link_args(self) -> T.List[str]:
class BoostDependency(SystemDependency):
def __init__(self, environment: Environment, kwargs: T.Dict[str, T.Any]) -> None:
super().__init__('boost', environment, kwargs, language='cpp')
- buildtype = environment.coredata.get_option(OptionKey('buildtype'))
+ buildtype = environment.coredata.optstore.get_value_for(OptionKey('buildtype'))
assert isinstance(buildtype, str)
self.debug = buildtype.startswith('debug')
self.multithreading = kwargs.get('threading', 'multi') == 'multi'
@@ -582,7 +582,9 @@ def filter_libraries(self, libs: T.List[BoostLibraryFile], lib_vers: str) -> T.L
vscrt = ''
try:
crt_val = self.env.coredata.optstore.get_value('b_vscrt')
+ assert isinstance(crt_val, str)
buildtype = self.env.coredata.optstore.get_value('buildtype')
+ assert isinstance(buildtype, str)
vscrt = self.clib_compiler.get_crt_compile_args(crt_val, buildtype)[0]
except (KeyError, IndexError, AttributeError):
pass
diff --git a/mesonbuild/dependencies/cmake.py b/mesonbuild/dependencies/cmake.py
index 4a722157ff56..4e449816d344 100644
--- a/mesonbuild/dependencies/cmake.py
+++ b/mesonbuild/dependencies/cmake.py
@@ -414,7 +414,16 @@ def _detect_dep(self, name: str, package_version: str, modules: T.List[T.Tuple[s
# Whether the package is found or not is always stored in PACKAGE_FOUND
self.is_found = self.traceparser.var_to_bool('PACKAGE_FOUND')
if not self.is_found:
- return
+ not_found_message = self.traceparser.get_cmake_var('PACKAGE_NOT_FOUND_MESSAGE')
+ if len(not_found_message) > 0:
+ mlog.notice(
+ 'CMake reported that the package {} was not found with the following reason:\n'
+ '{}'.format(name, not_found_message[0]), fatal=False)
+ else:
+ mlog.debug(
+ 'CMake reported that the package {} was not found, '
+ 'even though Meson\'s preliminary check succeeded.'.format(name))
+ raise self._gen_exception('PACKAGE_FOUND is false')
# Try to detect the version
vers_raw = self.traceparser.get_cmake_var('PACKAGE_VERSION')
@@ -546,7 +555,7 @@ def _detect_dep(self, name: str, package_version: str, modules: T.List[T.Tuple[s
# Make sure all elements in the lists are unique and sorted
incDirs = sorted(set(incDirs))
compileOptions = sorted(set(compileOptions))
- libraries = sorted(set(libraries))
+ libraries = sort_link_args(libraries)
mlog.debug(f'Include Dirs: {incDirs}')
mlog.debug(f'Compiler Options: {compileOptions}')
@@ -654,3 +663,27 @@ def __call__(self, name: str, env: Environment, kwargs: T.Dict[str, T.Any], lang
@staticmethod
def log_tried() -> str:
return CMakeDependency.log_tried()
+
+
+def sort_link_args(args: T.List[str]) -> T.List[str]:
+ itr = iter(args)
+ result: T.Set[T.Union[T.Tuple[str], T.Tuple[str, str]]] = set()
+
+ while True:
+ try:
+ arg = next(itr)
+ except StopIteration:
+ break
+
+ if arg == '-framework':
+ # Frameworks '-framework ...' are two arguments that need to stay together
+ try:
+ arg2 = next(itr)
+ except StopIteration:
+ raise MesonException(f'Linker arguments contain \'-framework\' with no argument value: {args}')
+
+ result.add((arg, arg2))
+ else:
+ result.add((arg,))
+
+ return [x for xs in sorted(result) for x in xs]
diff --git a/mesonbuild/dependencies/data/CMakeLists.txt b/mesonbuild/dependencies/data/CMakeLists.txt
index d682cb8246dc..4e7838ebbaee 100644
--- a/mesonbuild/dependencies/data/CMakeLists.txt
+++ b/mesonbuild/dependencies/data/CMakeLists.txt
@@ -100,3 +100,9 @@ if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND)
set(PACKAGE_DEFINITIONS "${${definitions}}")
set(PACKAGE_LIBRARIES "${${libs}}")
endif()
+
+if(${_packageName}_NOT_FOUND_MESSAGE)
+ set(PACKAGE_NOT_FOUND_MESSAGE "${${_packageName}_NOT_FOUND_MESSAGE}")
+elseif(${PACKAGE_NAME}_NOT_FOUND_MESSAGE)
+ set(PACKAGE_NOT_FOUND_MESSAGE "${${PACKAGE_NAME}_NOT_FOUND_MESSAGE}")
+endif()
diff --git a/mesonbuild/dependencies/detect.py b/mesonbuild/dependencies/detect.py
index faf024de9c31..aa62c661df21 100644
--- a/mesonbuild/dependencies/detect.py
+++ b/mesonbuild/dependencies/detect.py
@@ -59,10 +59,10 @@ def get_dep_identifier(name: str, kwargs: T.Dict[str, T.Any]) -> 'TV_DepID':
# All keyword arguments are strings, ints, or lists (or lists of lists)
if isinstance(value, list):
for i in value:
- assert isinstance(i, str)
+ assert isinstance(i, str), i
value = tuple(frozenset(listify(value)))
else:
- assert isinstance(value, (str, bool, int))
+ assert isinstance(value, (str, bool, int)), value
identifier = (*identifier, (key, value),)
return identifier
diff --git a/mesonbuild/dependencies/dev.py b/mesonbuild/dependencies/dev.py
index 94f51ff69b12..8f0f1baae323 100644
--- a/mesonbuild/dependencies/dev.py
+++ b/mesonbuild/dependencies/dev.py
@@ -56,7 +56,9 @@ class GTestDependencySystem(SystemDependency):
def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
super().__init__(name, environment, kwargs, language='cpp')
self.main = kwargs.get('main', False)
- self.src_dirs = ['/usr/src/gtest/src', '/usr/src/googletest/googletest/src']
+
+ sysroot = environment.properties[self.for_machine].get_sys_root() or ''
+ self.src_dirs = [sysroot + '/usr/src/gtest/src', sysroot + '/usr/src/googletest/googletest/src']
if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})):
self.is_found = False
return
@@ -584,14 +586,14 @@ def __init__(self, environment: 'Environment', kwargs: JNISystemDependencyKW):
self.is_found = False
return
- if 'version' in kwargs and not version_compare(self.version, kwargs['version']):
+ if 'version' in kwargs and not version_compare_many(self.version, kwargs['version'])[0]:
mlog.error(f'Incorrect JDK version found ({self.version}), wanted {kwargs["version"]}')
self.is_found = False
return
self.java_home = environment.properties[self.for_machine].get_java_home()
if not self.java_home:
- self.java_home = pathlib.Path(shutil.which(self.javac.exelist[0])).resolve().parents[1]
+ self.java_home = pathlib.Path(shutil.which(self.javac.get_exe())).resolve().parents[1]
if m.is_darwin():
problem_java_prefix = pathlib.Path('/System/Library/Frameworks/JavaVM.framework/Versions')
if problem_java_prefix in self.java_home.parents:
diff --git a/mesonbuild/dependencies/dub.py b/mesonbuild/dependencies/dub.py
index 1c904ab2a5af..ac137e399efd 100644
--- a/mesonbuild/dependencies/dub.py
+++ b/mesonbuild/dependencies/dub.py
@@ -5,10 +5,11 @@
from .base import ExternalDependency, DependencyException, DependencyTypeName
from .pkgconfig import PkgConfigDependency
-from ..mesonlib import (Popen_safe, join_args, version_compare)
+from ..mesonlib import (Popen_safe, join_args, version_compare, version_compare_many)
from ..options import OptionKey
from ..programs import ExternalProgram
from .. import mlog
+from enum import Enum
import re
import os
import json
@@ -56,6 +57,10 @@ class FindTargetEntry(TypedDict):
search: str
artifactPath: str
+class DubDescriptionSource(Enum):
+ Local = 'local'
+ External = 'external'
+
class DubDependency(ExternalDependency):
# dub program and version
class_dubbin: T.Optional[T.Tuple[ExternalProgram, str]] = None
@@ -87,14 +92,13 @@ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.
if DubDependency.class_dubbin is None:
if self.required:
raise DependencyException('DUB not found.')
- self.is_found = False
return
(self.dubbin, dubver) = DubDependency.class_dubbin # pylint: disable=unpacking-non-sequence
assert isinstance(self.dubbin, ExternalProgram)
- # Check if Dub's compatibility with Meson
+ # Check Dub's compatibility with Meson
self._search_in_cache = version_compare(dubver, '<=1.31.1')
self._use_cache_describe = version_compare(dubver, '>=1.35.0')
self._dub_has_build_deep = version_compare(dubver, '>=1.35.0')
@@ -108,25 +112,16 @@ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.
mlog.warning(f'DUB dependency {name} not found because Dub {dubver} '
"is not compatible with Meson. (Can't locate artifacts in DUB's cache)."
' Upgrade to Dub >= 1.35')
- self.is_found = False
return
mlog.debug('Determining dependency {!r} with DUB executable '
'{!r}'.format(name, self.dubbin.get_path()))
- # if an explicit version spec was stated, use this when querying Dub
- main_pack_spec = name
- if 'version' in kwargs:
- version_spec = kwargs['version']
- if isinstance(version_spec, list):
- version_spec = " ".join(version_spec)
- main_pack_spec = f'{name}@{version_spec}'
-
# we need to know the target architecture
dub_arch = self.compiler.arch
# we need to know the build type as well
- dub_buildtype = str(environment.coredata.get_option(OptionKey('buildtype')))
+ dub_buildtype = str(environment.coredata.optstore.get_value_for(OptionKey('buildtype')))
# MESON types: choices=['plain', 'debug', 'debugoptimized', 'release', 'minsize', 'custom'])),
# DUB types: debug (default), plain, release, release-debug, release-nobounds, unittest, profile, profile-gc,
# docs, ddox, cov, unittest-cov, syntax and custom
@@ -135,37 +130,11 @@ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.
elif dub_buildtype == 'minsize':
dub_buildtype = 'release'
- # A command that might be useful in case of missing DUB package
- def dub_build_deep_command() -> str:
- if self._dub_has_build_deep:
- cmd = ['dub', 'build', '--deep']
- else:
- cmd = ['dub', 'run', '--yes', 'dub-build-deep', '--']
-
- return join_args(cmd + [
- main_pack_spec,
- '--arch=' + dub_arch,
- '--compiler=' + self.compiler.get_exelist()[-1],
- '--build=' + dub_buildtype
- ])
-
- # Ask dub for the package
- describe_cmd = [
- 'describe', main_pack_spec, '--arch=' + dub_arch,
- '--build=' + dub_buildtype, '--compiler=' + self.compiler.get_exelist()[-1]
- ]
- ret, res, err = self._call_dubbin(describe_cmd)
-
- if ret != 0:
- mlog.debug('DUB describe failed: ' + err)
- if 'locally' in err:
- mlog.error(mlog.bold(main_pack_spec), 'is not present locally. You may try the following command:')
- mlog.log(mlog.bold(dub_build_deep_command()))
- self.is_found = False
+ result = self._get_dub_description(dub_arch, dub_buildtype)
+ if result is None:
return
-
+ description, build_cmd, description_source = result
dub_comp_id = self._ID_MAP[self.compiler.get_id()]
- description: DubDescription = json.loads(res)
self.compile_args = []
self.link_args = self.raw_link_args = []
@@ -204,7 +173,7 @@ def find_package_target(pkg: DubPackDesc) -> bool:
mlog.error(mlog.bold(pack_id), 'not found')
mlog.log('You may try the following command to install the necessary DUB libraries:')
- mlog.log(mlog.bold(dub_build_deep_command()))
+ mlog.log(mlog.bold(build_cmd))
return False
@@ -223,33 +192,44 @@ def find_package_target(pkg: DubPackDesc) -> bool:
# 4. Add other build settings (imports, versions etc.)
# 1
- self.is_found = False
packages: T.Dict[str, DubPackDesc] = {}
+ found_it = False
for pkg in description['packages']:
packages[pkg['name']] = pkg
if not pkg['active']:
continue
- if pkg['targetType'] == 'dynamicLibrary':
- mlog.error('DUB dynamic library dependencies are not supported.')
- self.is_found = False
- return
-
# check that the main dependency is indeed a library
if pkg['name'] == name:
- self.is_found = True
-
if pkg['targetType'] not in ['library', 'sourceLibrary', 'staticLibrary']:
- mlog.error(mlog.bold(name), "found but it isn't a library")
- self.is_found = False
+ mlog.error(mlog.bold(name), "found but it isn't a static library, it is:",
+ pkg['targetType'])
return
+ if self.version_reqs is not None:
+ ver = pkg['version']
+ if not version_compare_many(ver, self.version_reqs)[0]:
+ mlog.error(mlog.bold(f'{name}@{ver}'),
+ 'does not satisfy all version requirements of:',
+ ' '.join(self.version_reqs))
+ return
+
+ found_it = True
self.version = pkg['version']
self.pkg = pkg
+ if not found_it:
+ mlog.error(f'Could not find {name} in DUB description.')
+ if description_source is DubDescriptionSource.Local:
+ mlog.log('Make sure that the dependency is registered for your dub project by running:')
+ mlog.log(mlog.bold(f'dub add {name}'))
+ elif description_source is DubDescriptionSource.External:
+ # `dub describe pkg` did not contain the pkg
+ raise RuntimeError(f'`dub describe` succeeded but it does not contain {name}')
+ return
+
if name not in targets:
- self.is_found = False
if self.pkg['targetType'] == 'sourceLibrary':
# source libraries have no associated targets,
# but some build settings like import folders must be found from the package object.
@@ -258,10 +238,7 @@ def find_package_target(pkg: DubPackDesc) -> bool:
# (See openssl DUB package for example of sourceLibrary)
mlog.error('DUB targets of type', mlog.bold('sourceLibrary'), 'are not supported.')
else:
- mlog.error('Could not find target description for', mlog.bold(main_pack_spec))
-
- if not self.is_found:
- mlog.error(f'Could not find {name} in DUB description')
+ mlog.error('Could not find target description for', mlog.bold(self.name))
return
# Current impl only supports static libraries
@@ -269,19 +246,17 @@ def find_package_target(pkg: DubPackDesc) -> bool:
# 2
if not find_package_target(self.pkg):
- self.is_found = False
return
# 3
for link_dep in targets[name]['linkDependencies']:
pkg = packages[link_dep]
if not find_package_target(pkg):
- self.is_found = False
return
if show_buildtype_warning:
mlog.log('If it is not suitable, try the following command and reconfigure Meson with', mlog.bold('--clearcache'))
- mlog.log(mlog.bold(dub_build_deep_command()))
+ mlog.log(mlog.bold(build_cmd))
# 4
bs = targets[name]['buildSettings']
@@ -328,7 +303,7 @@ def find_package_target(pkg: DubPackDesc) -> bool:
for lib in bs['libs']:
if os.name != 'nt':
# trying to add system libraries by pkg-config
- pkgdep = PkgConfigDependency(lib, environment, {'required': 'true', 'silent': 'true'})
+ pkgdep = PkgConfigDependency(lib, environment, {'required': True, 'silent': True})
if pkgdep.is_found:
for arg in pkgdep.get_compile_args():
self.compile_args.append(arg)
@@ -345,6 +320,60 @@ def find_package_target(pkg: DubPackDesc) -> bool:
# fallback
self.link_args.append('-l'+lib)
+ self.is_found = True
+
+ # Get the dub description needed to resolve the dependency and a
+ # build command that can be used to build the dependency in case it is
+ # not present.
+ def _get_dub_description(self, dub_arch: str, dub_buildtype: str) -> T.Optional[T.Tuple[DubDescription, str, DubDescriptionSource]]:
+ def get_build_command() -> T.List[str]:
+ if self._dub_has_build_deep:
+ cmd = ['dub', 'build', '--deep']
+ else:
+ cmd = ['dub', 'run', '--yes', 'dub-build-deep', '--']
+
+ return cmd + [
+ '--arch=' + dub_arch,
+ '--compiler=' + self.compiler.get_exelist()[-1],
+ '--build=' + dub_buildtype,
+ ]
+
+ # Ask dub for the package
+ describe_cmd = [
+ 'describe', '--arch=' + dub_arch,
+ '--build=' + dub_buildtype, '--compiler=' + self.compiler.get_exelist()[-1]
+ ]
+ helper_build = join_args(get_build_command())
+ source = DubDescriptionSource.Local
+ ret, res, err = self._call_dubbin(describe_cmd)
+ if ret == 0:
+ return (json.loads(res), helper_build, source)
+
+ pack_spec = self.name
+ if self.version_reqs is not None:
+ if len(self.version_reqs) > 1:
+ mlog.error('Multiple version requirements are not supported for raw dub dependencies.')
+ mlog.error("Please specify only an exact version like '1.2.3'")
+ raise DependencyException('Multiple version requirements are not solvable for raw dub dependencies')
+ elif len(self.version_reqs) == 1:
+ pack_spec += '@' + self.version_reqs[0]
+
+ describe_cmd = [
+ 'describe', pack_spec, '--arch=' + dub_arch,
+ '--build=' + dub_buildtype, '--compiler=' + self.compiler.get_exelist()[-1]
+ ]
+ helper_build = join_args(get_build_command() + [pack_spec])
+ source = DubDescriptionSource.External
+ ret, res, err = self._call_dubbin(describe_cmd)
+ if ret == 0:
+ return (json.loads(res), helper_build, source)
+
+ mlog.debug('DUB describe failed: ' + err)
+ if 'locally' in err:
+ mlog.error(mlog.bold(pack_spec), 'is not present locally. You may try the following command:')
+ mlog.log(mlog.bold(helper_build))
+ return None
+
# This function finds the target of the provided JSON package, built for the right
# compiler, architecture, configuration...
# It returns (target|None, {compatibilities})
@@ -469,7 +498,7 @@ def _get_comp_versions_to_find(self, dub_comp_id: str) -> T.List[str]:
def _call_dubbin(self, args: T.List[str], env: T.Optional[T.Dict[str, str]] = None) -> T.Tuple[int, str, str]:
assert isinstance(self.dubbin, ExternalProgram)
- p, out, err = Popen_safe(self.dubbin.get_command() + args, env=env)
+ p, out, err = Popen_safe(self.dubbin.get_command() + args, env=env, cwd=self.env.get_source_dir())
return p.returncode, out.strip(), err.strip()
def _call_compbin(self, args: T.List[str], env: T.Optional[T.Dict[str, str]] = None) -> T.Tuple[int, str, str]:
diff --git a/mesonbuild/dependencies/hdf5.py b/mesonbuild/dependencies/hdf5.py
index 62265302564c..7595c7cc61fb 100644
--- a/mesonbuild/dependencies/hdf5.py
+++ b/mesonbuild/dependencies/hdf5.py
@@ -153,10 +153,14 @@ def hdf5_factory(env: 'Environment', for_machine: 'MachineChoice',
pkgconfig_files = OrderedSet(['hdf5', 'hdf5-serial'])
pkg = PkgConfigInterface.instance(env, for_machine, silent=False)
if pkg:
- # some distros put hdf5-1.2.3.pc with version number in .pc filename.
- for mod in pkg.list_all():
- if mod.startswith('hdf5'):
- pkgconfig_files.add(mod)
+ try:
+ # old hdf5 versions put version number in .pc filename, e.g., hdf5-1.2.3.pc.
+ for mod in pkg.list_all():
+ if mod.startswith('hdf5'):
+ pkgconfig_files.add(mod)
+ except DependencyException:
+ # use just the standard files if pkg-config --list-all fails
+ pass
for mod in pkgconfig_files:
candidates.append(functools.partial(HDF5PkgConfigDependency, mod, env, kwargs, language))
diff --git a/mesonbuild/dependencies/misc.py b/mesonbuild/dependencies/misc.py
index 4815e1c18cd9..3ab2194e31ce 100644
--- a/mesonbuild/dependencies/misc.py
+++ b/mesonbuild/dependencies/misc.py
@@ -29,7 +29,9 @@ def netcdf_factory(env: 'Environment',
for_machine: 'mesonlib.MachineChoice',
kwargs: T.Dict[str, T.Any],
methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
- language = kwargs.get('language', 'c')
+ language = kwargs.get('language')
+ if language is None:
+ language = 'c'
if language not in ('c', 'cpp', 'fortran'):
raise DependencyException(f'Language {language} is not supported with NetCDF.')
@@ -51,6 +53,27 @@ def netcdf_factory(env: 'Environment',
packages['netcdf'] = netcdf_factory
+class AtomicBuiltinDependency(BuiltinDependency):
+ def __init__(self, name: str, env: Environment, kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, env, kwargs)
+ self.feature_since = ('1.7.0', "consider checking for `atomic_flag_clear` with and without `find_library('atomic')`")
+
+ if self.clib_compiler.has_function('atomic_flag_clear', '#include <stdatomic.h>', env)[0]:
+ self.is_found = True
+
+
+class AtomicSystemDependency(SystemDependency):
+ def __init__(self, name: str, env: Environment, kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, env, kwargs)
+ self.feature_since = ('1.7.0', "consider checking for `atomic_flag_clear` with and without `find_library('atomic')`")
+
+ h = self.clib_compiler.has_header('stdatomic.h', '', env)
+ self.link_args = self.clib_compiler.find_library('atomic', env, [], self.libtype)
+
+ if h[0] and self.link_args:
+ self.is_found = True
+
+
class DlBuiltinDependency(BuiltinDependency):
def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
super().__init__(name, env, kwargs)
@@ -369,7 +392,7 @@ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
req = kwargs.get('version')
if req:
if self.version:
- self.is_found = mesonlib.version_compare(self.version, req)
+ self.is_found, *_ = mesonlib.version_compare_many(self.version, req)
else:
mlog.warning('Cannot determine version of curses to compare against.')
@@ -549,7 +572,7 @@ def shaderc_factory(env: 'Environment',
shared_libs = ['shaderc']
static_libs = ['shaderc_combined', 'shaderc_static']
- if kwargs.get('static', env.coredata.get_option(OptionKey('prefer_static'))):
+ if kwargs.get('static', env.coredata.optstore.get_value_for(OptionKey('prefer_static'))):
c = [functools.partial(PkgConfigDependency, name, env, kwargs)
for name in static_libs + shared_libs]
else:
@@ -564,6 +587,13 @@ def shaderc_factory(env: 'Environment',
packages['shaderc'] = shaderc_factory
+packages['atomic'] = atomic_factory = DependencyFactory(
+ 'atomic',
+ [DependencyMethods.SYSTEM, DependencyMethods.BUILTIN],
+ system_class=AtomicSystemDependency,
+ builtin_class=AtomicBuiltinDependency,
+)
+
packages['cups'] = cups_factory = DependencyFactory(
'cups',
[DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK, DependencyMethods.CMAKE],
diff --git a/mesonbuild/dependencies/mpi.py b/mesonbuild/dependencies/mpi.py
index 2d9e992f6f5a..a259972b0586 100644
--- a/mesonbuild/dependencies/mpi.py
+++ b/mesonbuild/dependencies/mpi.py
@@ -27,7 +27,9 @@ def mpi_factory(env: 'Environment',
for_machine: 'MachineChoice',
kwargs: T.Dict[str, T.Any],
methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
- language = kwargs.get('language', 'c')
+ language = kwargs.get('language')
+ if language is None:
+ language = 'c'
if language not in {'c', 'cpp', 'fortran'}:
# OpenMPI doesn't work without any other languages
return []
diff --git a/mesonbuild/dependencies/pkgconfig.py b/mesonbuild/dependencies/pkgconfig.py
index bc24f760fd0d..94e0893563a9 100644
--- a/mesonbuild/dependencies/pkgconfig.py
+++ b/mesonbuild/dependencies/pkgconfig.py
@@ -256,11 +256,16 @@ def _check_pkgconfig(self, pkgbin: ExternalProgram) -> T.Optional[str]:
def _get_env(self, uninstalled: bool = False) -> EnvironmentVariables:
env = EnvironmentVariables()
key = OptionKey('pkg_config_path', machine=self.for_machine)
- extra_paths: T.List[str] = self.env.coredata.optstore.get_value(key)[:]
+ pathlist = self.env.coredata.optstore.get_value_for(key)
+ assert isinstance(pathlist, list)
+ extra_paths: T.List[str] = pathlist[:]
if uninstalled:
- uninstalled_path = Path(self.env.get_build_dir(), 'meson-uninstalled').as_posix()
- if uninstalled_path not in extra_paths:
- extra_paths.append(uninstalled_path)
+ bpath = self.env.get_build_dir()
+ if bpath is not None:
+ # uninstalled can only be used if a build dir exists.
+ uninstalled_path = Path(bpath, 'meson-uninstalled').as_posix()
+ if uninstalled_path not in extra_paths:
+ extra_paths.insert(0, uninstalled_path)
env.set('PKG_CONFIG_PATH', extra_paths)
sysroot = self.env.properties[self.for_machine].get_sys_root()
if sysroot:
@@ -416,7 +421,7 @@ def _search_libs(self, libs_in: ImmutableListProtocol[str], raw_libs_in: Immutab
#
# Only prefix_libpaths are reordered here because there should not be
# too many system_libpaths to cause library version issues.
- pkg_config_path: T.List[str] = self.env.coredata.optstore.get_value(OptionKey('pkg_config_path', machine=self.for_machine))
+ pkg_config_path: T.List[str] = self.env.coredata.optstore.get_value(OptionKey('pkg_config_path', machine=self.for_machine)) # type: ignore[assignment]
pkg_config_path = self._convert_mingw_paths(pkg_config_path)
prefix_libpaths = OrderedSet(sort_libpaths(list(prefix_libpaths), pkg_config_path))
system_libpaths: OrderedSet[str] = OrderedSet()
diff --git a/mesonbuild/dependencies/python.py b/mesonbuild/dependencies/python.py
index be8e62ba1e78..7f9bd20696a3 100644
--- a/mesonbuild/dependencies/python.py
+++ b/mesonbuild/dependencies/python.py
@@ -250,9 +250,9 @@ def get_windows_link_args(self, limited_api: bool) -> T.Optional[T.List[str]]:
# Python itself (except with pybind11, which has an ugly
# hack to work around this) - so emit a warning to explain
# the cause of the expected link error.
- buildtype = self.env.coredata.get_option(OptionKey('buildtype'))
+ buildtype = self.env.coredata.optstore.get_value_for(OptionKey('buildtype'))
assert isinstance(buildtype, str)
- debug = self.env.coredata.get_option(OptionKey('debug'))
+ debug = self.env.coredata.optstore.get_value_for(OptionKey('debug'))
# `debugoptimized` buildtype may not set debug=True currently, see gh-11645
is_debug_build = debug or buildtype == 'debug'
vscrt_debug = False
@@ -333,6 +333,15 @@ def __init__(self, name: str, environment: 'Environment',
if not self.link_libpython and mesonlib.version_compare(self.version, '< 3.8'):
self.link_args = []
+ # But not Apple, because it's a framework
+ if self.env.machines.host.is_darwin() and 'PYTHONFRAMEWORKPREFIX' in self.variables:
+ framework_prefix = self.variables['PYTHONFRAMEWORKPREFIX']
+ # Add rpath, will be de-duplicated if necessary
+ if framework_prefix.startswith('/Applications/Xcode.app/'):
+ self.link_args += ['-Wl,-rpath,' + framework_prefix]
+ if self.raw_link_args is not None:
+ # When None, self.link_args is used
+ self.raw_link_args += ['-Wl,-rpath,' + framework_prefix]
class PythonFrameworkDependency(ExtraFrameworkDependency, _PythonDependencyBase):
@@ -429,6 +438,9 @@ def set_env(name: str, value: str) -> None:
set_env('PKG_CONFIG_LIBDIR', old_pkg_libdir)
set_env('PKG_CONFIG_PATH', old_pkg_path)
+ # Otherwise this doesn't fulfill the interface requirements
+ wrap_in_pythons_pc_dir.log_tried = PythonPkgConfigDependency.log_tried # type: ignore[attr-defined]
+
candidates.append(functools.partial(wrap_in_pythons_pc_dir, pkg_name, env, kwargs, installation))
# We only need to check both, if a python install has a LIBPC. It might point to the wrong location,
# e.g. relocated / cross compilation, but the presence of LIBPC indicates we should definitely look for something.
diff --git a/mesonbuild/dependencies/qt.py b/mesonbuild/dependencies/qt.py
index 1b60deb8afd2..8bb269e83d3f 100644
--- a/mesonbuild/dependencies/qt.py
+++ b/mesonbuild/dependencies/qt.py
@@ -9,6 +9,7 @@
import abc
import re
import os
+from pathlib import Path
import typing as T
from .base import DependencyException, DependencyMethods
@@ -19,7 +20,6 @@
from .factory import DependencyFactory
from .. import mlog
from .. import mesonlib
-from ..options import OptionKey
if T.TYPE_CHECKING:
from ..compilers import Compiler
@@ -51,7 +51,7 @@ def _qt_get_private_includes(mod_inc_dir: str, module: str, mod_version: str) ->
if len(dirname.split('.')) == 3:
private_dir = dirname
break
- return [private_dir, os.path.join(private_dir, 'Qt' + module)]
+ return [private_dir, Path(private_dir, f'Qt{module}').as_posix()]
def get_qmake_host_bins(qvars: T.Dict[str, str]) -> str:
@@ -297,14 +297,14 @@ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
# Use the buildtype by default, but look at the b_vscrt option if the
# compiler supports it.
- is_debug = self.env.coredata.get_option(OptionKey('buildtype')) == 'debug'
- if OptionKey('b_vscrt') in self.env.coredata.optstore:
- if self.env.coredata.optstore.get_value('b_vscrt') in {'mdd', 'mtd'}:
+ is_debug = self.env.coredata.optstore.get_value_for('buildtype') == 'debug'
+ if 'b_vscrt' in self.env.coredata.optstore:
+ if self.env.coredata.optstore.get_value_for('b_vscrt') in {'mdd', 'mtd'}:
is_debug = True
modules_lib_suffix = _get_modules_lib_suffix(self.version, self.env.machines[self.for_machine], is_debug)
for module in self.requested_modules:
- mincdir = os.path.join(incdir, 'Qt' + module)
+ mincdir = Path(incdir, f'Qt{module}').as_posix()
self.compile_args.append('-I' + mincdir)
if module == 'QuickTest':
diff --git a/mesonbuild/dependencies/scalapack.py b/mesonbuild/dependencies/scalapack.py
index e50338710dd3..f34692c25615 100644
--- a/mesonbuild/dependencies/scalapack.py
+++ b/mesonbuild/dependencies/scalapack.py
@@ -9,7 +9,7 @@
import typing as T
from ..options import OptionKey
-from .base import DependencyMethods
+from .base import DependencyException, DependencyMethods
from .cmake import CMakeDependency
from .detect import packages
from .pkgconfig import PkgConfigDependency
@@ -28,7 +28,7 @@ def scalapack_factory(env: 'Environment', for_machine: 'MachineChoice',
candidates: T.List['DependencyGenerator'] = []
if DependencyMethods.PKGCONFIG in methods:
- static_opt = kwargs.get('static', env.coredata.get_option(OptionKey('prefer_static')))
+ static_opt = kwargs.get('static', env.coredata.optstore.get_value_for(OptionKey('prefer_static')))
mkl = 'mkl-static-lp64-iomp' if static_opt else 'mkl-dynamic-lp64-iomp'
candidates.append(functools.partial(
MKLPkgConfigDependency, mkl, env, kwargs))
@@ -65,8 +65,7 @@ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
super().__init__(name, env, kwargs, language=language)
# Doesn't work with gcc on windows, but does on Linux
- if (not self.__mklroot or (env.machines[self.for_machine].is_windows()
- and self.clib_compiler.id == 'gcc')):
+ if env.machines[self.for_machine].is_windows() and self.clib_compiler.id == 'gcc':
self.is_found = False
# This can happen either because we're using GCC, we couldn't find the
@@ -96,6 +95,9 @@ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
self.version = v
def _set_libs(self) -> None:
+ if self.__mklroot is None:
+ raise DependencyException('MKLROOT not set')
+
super()._set_libs()
if self.env.machines[self.for_machine].is_windows():
@@ -133,6 +135,9 @@ def _set_libs(self) -> None:
self.link_args.insert(i + 1, '-lmkl_blacs_intelmpi_lp64')
def _set_cargs(self) -> None:
+ if self.__mklroot is None:
+ raise DependencyException('MKLROOT not set')
+
allow_system = False
if self.language == 'fortran':
# gfortran doesn't appear to look in system paths for INCLUDE files,
diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py
index 7adac5e75723..fc44037f119b 100644
--- a/mesonbuild/dependencies/ui.py
+++ b/mesonbuild/dependencies/ui.py
@@ -12,7 +12,6 @@
from .. import mlog
from .. import mesonlib
-from ..compilers.compilers import CrossNoRunException
from ..mesonlib import (
Popen_safe, extract_as_list, version_compare_many
)
@@ -235,31 +234,28 @@ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.
self.link_args.append(lib)
if self.is_found:
- get_version = '''\
-#include <vulkan/vulkan.h>
-#include <stdio.h>
-
-int main() {
- printf("%i.%i.%i", VK_VERSION_MAJOR(VK_HEADER_VERSION_COMPLETE),
- VK_VERSION_MINOR(VK_HEADER_VERSION_COMPLETE),
- VK_VERSION_PATCH(VK_HEADER_VERSION_COMPLETE));
- return 0;
-}
-'''
try:
- run = self.clib_compiler.run(get_version, environment, extra_args=self.compile_args)
- except CrossNoRunException:
- run = None
- if run and run.compiled and run.returncode == 0:
- self.version = run.stdout
- elif self.vulkan_sdk:
- # fall back to heuristics: detect version number in path
- # matches the default install path on Windows
- match = re.search(rf'VulkanSDK{re.escape(os.path.sep)}([0-9]+(?:\.[0-9]+)+)', self.vulkan_sdk)
- if match:
- self.version = match.group(1)
- else:
- mlog.warning(f'Environment variable VULKAN_SDK={self.vulkan_sdk} is present, but Vulkan version could not be extracted.')
+ # VK_VERSION_* is deprecated and replaced by VK_API_VERSION_*. We'll continue to use the old one in
+ # order to support older Vulkan versions that don't have the new one yet, but we might have to update
+ # this code to also check VK_API_VERSION in the future if they decide to drop the old one at some point.
+ components = [str(self.clib_compiler.compute_int(f'VK_VERSION_{c}(VK_HEADER_VERSION_COMPLETE)',
+ low=0, high=None, guess=e,
+ prefix='#include ',
+ env=environment,
+ extra_args=None,
+ dependencies=None))
+ # list containing vulkan version components and their expected value
+ for c, e in [('MAJOR', 1), ('MINOR', 3), ('PATCH', None)]]
+ self.version = '.'.join(components)
+ except mesonlib.EnvironmentException:
+ if self.vulkan_sdk:
+ # fall back to heuristics: detect version number in path
+ # matches the default install path on Windows
+ match = re.search(rf'VulkanSDK{re.escape(os.path.sep)}([0-9]+(?:\.[0-9]+)+)', self.vulkan_sdk)
+ if match:
+ self.version = match.group(1)
+ else:
+ mlog.warning(f'Environment variable VULKAN_SDK={self.vulkan_sdk} is present, but Vulkan version could not be extracted.')
packages['gl'] = gl_factory = DependencyFactory(
'gl',
diff --git a/mesonbuild/envconfig.py b/mesonbuild/envconfig.py
index ffd7a51c250b..43fad0cd2ac4 100644
--- a/mesonbuild/envconfig.py
+++ b/mesonbuild/envconfig.py
@@ -13,6 +13,9 @@
from . import mlog
from pathlib import Path
+if T.TYPE_CHECKING:
+ from .options import ElementaryOptionValues
+
# These classes contains all the data pulled from configuration files (native
# and cross file currently), and also assists with the reading environment
@@ -64,6 +67,7 @@
'wasm64',
'x86',
'x86_64',
+ 'tricore'
)
# It would feel more natural to call this "64_BIT_CPU_FAMILIES", but
@@ -152,7 +156,7 @@ class CMakeSkipCompilerTest(Enum):
class Properties:
def __init__(
self,
- properties: T.Optional[T.Dict[str, T.Optional[T.Union[str, bool, int, T.List[str]]]]] = None,
+ properties: T.Optional[T.Dict[str, ElementaryOptionValues]] = None,
):
self.properties = properties or {}
@@ -269,9 +273,15 @@ def __repr__(self) -> str:
return f'<MachineInfo: {self.system} {self.cpu_family} ({self.cpu})>'
@classmethod
- def from_literal(cls, literal: T.Dict[str, str]) -> 'MachineInfo':
+ def from_literal(cls, raw: T.Dict[str, ElementaryOptionValues]) -> 'MachineInfo':
+ # We don't have enough type information to be sure of what we loaded
+ # So we need to accept that this might have ElementaryOptionValues, but
+ # then ensure that it's actually strings, since that's what the
+ # [*_machine] section should have.
+ assert all(isinstance(v, str) for v in raw.values()), 'for mypy'
+ literal = T.cast('T.Dict[str, str]', raw)
minimum_literal = {'cpu', 'cpu_family', 'endian', 'system'}
- if set(literal) < minimum_literal:
+ if minimum_literal - set(literal):
raise EnvironmentException(
f'Machine info is currently {literal}\n' +
'but is missing {}.'.format(minimum_literal - set(literal)))
@@ -388,7 +398,7 @@ class BinaryTable:
def __init__(
self,
- binaries: T.Optional[T.Dict[str, T.Union[str, T.List[str]]]] = None,
+ binaries: T.Optional[T.Mapping[str, ElementaryOptionValues]] = None,
):
self.binaries: T.Dict[str, T.List[str]] = {}
if binaries:
@@ -438,16 +448,19 @@ def detect_compiler_cache() -> T.List[str]:
@classmethod
def parse_entry(cls, entry: T.Union[str, T.List[str]]) -> T.Tuple[T.List[str], T.List[str]]:
- compiler = mesonlib.stringlistify(entry)
+ parts = mesonlib.stringlistify(entry)
# Ensure ccache exists and remove it if it doesn't
- if compiler[0] == 'ccache':
- compiler = compiler[1:]
+ if parts[0] == 'ccache':
+ compiler = parts[1:]
ccache = cls.detect_ccache()
- elif compiler[0] == 'sccache':
- compiler = compiler[1:]
+ elif parts[0] == 'sccache':
+ compiler = parts[1:]
ccache = cls.detect_sccache()
else:
+ compiler = parts
ccache = []
+ if not compiler:
+ raise EnvironmentException(f'Compiler cache specified without compiler: {parts[0]}')
# Return value has to be a list of compiler 'choices'
return compiler, ccache
diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py
index 8f3096668836..c20390fccb92 100644
--- a/mesonbuild/environment.py
+++ b/mesonbuild/environment.py
@@ -1,6 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2012-2020 The Meson development team
-# Copyright © 2023 Intel Corporation
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
@@ -41,17 +41,30 @@
from mesonbuild import envconfig
if T.TYPE_CHECKING:
- from configparser import ConfigParser
-
from .compilers import Compiler
+ from .compilers.mixins.visualstudio import VisualStudioLikeCompiler
+ from .options import ElementaryOptionValues
from .wrap.wrap import Resolver
+ from . import cargo
CompilersDict = T.Dict[str, Compiler]
+NON_LANG_ENV_OPTIONS = [
+ ('PKG_CONFIG_PATH', 'pkg_config_path'),
+ ('CMAKE_PREFIX_PATH', 'cmake_prefix_path'),
+ ('LDFLAGS', 'ldflags'),
+ ('CPPFLAGS', 'cppflags'),
+]
+
build_filename = 'meson.build'
+def _as_str(val: object) -> str:
+ assert isinstance(val, str), 'for mypy'
+ return val
+
+
def _get_env_var(for_machine: MachineChoice, is_cross: bool, var_name: str) -> T.Optional[str]:
"""
Returns the exact env var and the value.
@@ -77,7 +90,8 @@ def _get_env_var(for_machine: MachineChoice, is_cross: bool, var_name: str) -> T
return value
-def detect_gcovr(gcovr_exe: str = 'gcovr', min_version: str = '3.3', log: bool = False):
+def detect_gcovr(gcovr_exe: str = 'gcovr', min_version: str = '3.3', log: bool = False) \
+ -> T.Union[T.Tuple[None, None], T.Tuple[str, str]]:
try:
p, found = Popen_safe([gcovr_exe, '--version'])[0:2]
except (FileNotFoundError, PermissionError):
@@ -90,7 +104,8 @@ def detect_gcovr(gcovr_exe: str = 'gcovr', min_version: str = '3.3', log: bool =
return gcovr_exe, found
return None, None
-def detect_lcov(lcov_exe: str = 'lcov', log: bool = False):
+def detect_lcov(lcov_exe: str = 'lcov', log: bool = False) \
+ -> T.Union[T.Tuple[None, None], T.Tuple[str, str]]:
try:
p, found = Popen_safe([lcov_exe, '--version'])[0:2]
except (FileNotFoundError, PermissionError):
@@ -103,7 +118,7 @@ def detect_lcov(lcov_exe: str = 'lcov', log: bool = False):
return lcov_exe, found
return None, None
-def detect_llvm_cov(suffix: T.Optional[str] = None):
+def detect_llvm_cov(suffix: T.Optional[str] = None) -> T.Optional[str]:
# If there's a known suffix or forced lack of suffix, use that
if suffix is not None:
if suffix == '':
@@ -120,7 +135,7 @@ def detect_llvm_cov(suffix: T.Optional[str] = None):
return tool
return None
-def compute_llvm_suffix(coredata: coredata.CoreData):
+def compute_llvm_suffix(coredata: coredata.CoreData) -> T.Optional[str]:
# Check to see if the user is trying to do coverage for either a C or C++ project
compilers = coredata.compilers[MachineChoice.BUILD]
cpp_compiler_is_clang = 'cpp' in compilers and compilers['cpp'].id == 'clang'
@@ -138,7 +153,8 @@ def compute_llvm_suffix(coredata: coredata.CoreData):
# Neither compiler is a Clang, or no compilers are for C or C++
return None
-def detect_lcov_genhtml(lcov_exe: str = 'lcov', genhtml_exe: str = 'genhtml'):
+def detect_lcov_genhtml(lcov_exe: str = 'lcov', genhtml_exe: str = 'genhtml') \
+ -> T.Tuple[str, T.Optional[str], str]:
lcov_exe, lcov_version = detect_lcov(lcov_exe)
if shutil.which(genhtml_exe) is None:
genhtml_exe = None
@@ -161,7 +177,7 @@ def detect_ninja(version: str = '1.8.2', log: bool = False) -> T.Optional[T.List
r = detect_ninja_command_and_version(version, log)
return r[0] if r else None
-def detect_ninja_command_and_version(version: str = '1.8.2', log: bool = False) -> T.Tuple[T.List[str], str]:
+def detect_ninja_command_and_version(version: str = '1.8.2', log: bool = False) -> T.Optional[T.Tuple[T.List[str], str]]:
env_ninja = os.environ.get('NINJA', None)
for n in [env_ninja] if env_ninja else ['ninja', 'ninja-build', 'samu']:
prog = ExternalProgram(n, silent=True)
@@ -187,6 +203,7 @@ def detect_ninja_command_and_version(version: str = '1.8.2', log: bool = False)
mlog.log('Found {}-{} at {}'.format(name, found,
' '.join([quote_arg(x) for x in prog.command])))
return (prog.command, found)
+ return None
def get_llvm_tool_names(tool: str) -> T.List[str]:
# Ordered list of possible suffixes of LLVM executables to try. Start with
@@ -196,6 +213,8 @@ def get_llvm_tool_names(tool: str) -> T.List[str]:
# unless it becomes a stable release.
suffixes = [
'', # base (no suffix)
+ '-20.1', '20.1',
+ '-20', '20',
'-19.1', '19.1',
'-19', '19',
'-18.1', '18.1',
@@ -333,6 +352,7 @@ def detect_windows_arch(compilers: CompilersDict) -> str:
# 32-bit and pretend like we're running under WOW64. Else, return the
# actual Windows architecture that we deduced above.
for compiler in compilers.values():
+ compiler = T.cast('VisualStudioLikeCompiler', compiler)
if compiler.id == 'msvc' and (compiler.target in {'x86', '80x86'}):
return 'x86'
if compiler.id == 'clang-cl' and (compiler.target in {'x86', 'i686'}):
@@ -535,7 +555,7 @@ def detect_machine_info(compilers: T.Optional[CompilersDict] = None) -> MachineI
# TODO make this compare two `MachineInfo`s purely. How important is the
# `detect_cpu_family({})` distinction? It is the one impediment to that.
-def machine_info_can_run(machine_info: MachineInfo):
+def machine_info_can_run(machine_info: MachineInfo) -> bool:
"""Whether we can run binaries for this machine on the current machine.
Can almost always run 32-bit binaries on 64-bit natively if the host
@@ -557,12 +577,12 @@ class Environment:
log_dir = 'meson-logs'
info_dir = 'meson-info'
- def __init__(self, source_dir: str, build_dir: str, cmd_options: coredata.SharedCMDOptions) -> None:
+ def __init__(self, source_dir: str, build_dir: T.Optional[str], cmd_options: coredata.SharedCMDOptions) -> None:
self.source_dir = source_dir
- self.build_dir = build_dir
# Do not try to create build directories when build_dir is none.
# This reduced mode is used by the --buildoptions introspector
if build_dir is not None:
+ self.build_dir = build_dir
self.scratch_dir = os.path.join(build_dir, Environment.private_dir)
self.log_dir = os.path.join(build_dir, Environment.log_dir)
self.info_dir = os.path.join(build_dir, Environment.info_dir)
@@ -591,6 +611,7 @@ def __init__(self, source_dir: str, build_dir: str, cmd_options: coredata.Shared
raise MesonException(f'{str(e)} Try regenerating using "meson setup --wipe".')
else:
# Just create a fresh coredata in this case
+ self.build_dir = ''
self.scratch_dir = ''
self.create_new_coredata(cmd_options)
@@ -625,7 +646,14 @@ def __init__(self, source_dir: str, build_dir: str, cmd_options: coredata.Shared
#
# Note that order matters because of 'buildtype', if it is after
# 'optimization' and 'debug' keys, it override them.
- self.options: T.MutableMapping[OptionKey, T.Union[str, T.List[str]]] = collections.OrderedDict()
+ self.options: T.MutableMapping[OptionKey, ElementaryOptionValues] = collections.OrderedDict()
+
+ # Environment variables with the name converted into an OptionKey type.
+ # These have subtly different behavior compared to machine files, so do
+ # not store them in self.options. See _set_default_options_from_env.
+ self.env_opts: T.MutableMapping[OptionKey, ElementaryOptionValues] = {}
+
+ self.machinestore = machinefile.MachineFileStore(self.coredata.config_files, self.coredata.cross_files, self.source_dir)
## Read in native file(s) to override build machine configuration
@@ -652,7 +680,7 @@ def __init__(self, source_dir: str, build_dir: str, cmd_options: coredata.Shared
# Keep only per machine options from the native file. The cross
# file takes precedence over all other options.
for key, value in list(self.options.items()):
- if self.coredata.is_per_machine_option(key):
+ if self.coredata.optstore.is_per_machine_option(key):
self.options[key.as_build()] = value
self._load_machine_file_options(config, properties.host, MachineChoice.HOST)
@@ -663,9 +691,6 @@ def __init__(self, source_dir: str, build_dir: str, cmd_options: coredata.Shared
self.properties = properties.default_missing()
self.cmakevars = cmakevars.default_missing()
- # Command line options override those from cross/native files
- self.options.update(cmd_options.cmd_line_options)
-
# Take default value from env if not set in cross/native files or command line.
self._set_default_options_from_env()
self._set_default_binaries_from_env()
@@ -682,6 +707,13 @@ def __init__(self, source_dir: str, build_dir: str, cmd_options: coredata.Shared
'See: https://mesonbuild.com/Builtin-options.html#build-type-options',
fatal=False)
+ # Filter out build machine options that are not valid per-project.
+ # We allow this in the file because it makes the machine files more
+ # useful (ie, the same file can be used for host == build configuration
+ # a host != build configuration)
+ self.options = {k: v for k, v in self.options.items()
+ if k.machine is MachineChoice.HOST or self.coredata.optstore.is_per_machine_option(k)}
+
exe_wrapper = self.lookup_binary_entry(MachineChoice.HOST, 'exe_wrapper')
if exe_wrapper is not None:
self.exe_wrapper = ExternalProgram.from_bin_list(self, MachineChoice.HOST, 'exe_wrapper')
@@ -691,8 +723,24 @@ def __init__(self, source_dir: str, build_dir: str, cmd_options: coredata.Shared
self.default_cmake = ['cmake']
self.default_pkgconfig = ['pkg-config']
self.wrap_resolver: T.Optional['Resolver'] = None
-
- def _load_machine_file_options(self, config: 'ConfigParser', properties: Properties, machine: MachineChoice) -> None:
+ # Store a global state of Cargo dependencies
+ self.cargo: T.Optional[cargo.Interpreter] = None
+
+ def mfilestr2key(self, machine_file_string: str, section: T.Optional[str], section_subproject: T.Optional[str], machine: MachineChoice) -> OptionKey:
+ key = OptionKey.from_string(machine_file_string)
+ if key.subproject:
+ suggestion = section if section == 'project options' else 'built-in options'
+ raise MesonException(f'Do not set subproject options in [{section}] section, use [subproject:{suggestion}] instead.')
+ if section_subproject:
+ key = key.evolve(subproject=section_subproject)
+ if machine == MachineChoice.BUILD:
+ if key.machine == MachineChoice.BUILD:
+ mlog.deprecation('Setting build machine options in the native file does not need the "build." prefix', once=True)
+ return key.evolve(machine=machine)
+ return key
+
+ def _load_machine_file_options(self, config: T.Mapping[str, T.Mapping[str, ElementaryOptionValues]],
+ properties: Properties, machine: MachineChoice) -> None:
"""Read the contents of a Machine file and put it in the options store."""
# Look for any options in the deprecated paths section, warn about
@@ -701,8 +749,9 @@ def _load_machine_file_options(self, config: 'ConfigParser', properties: Propert
paths = config.get('paths')
if paths:
mlog.deprecation('The [paths] section is deprecated, use the [built-in options] section instead.')
- for k, v in paths.items():
- self.options[OptionKey.from_string(k).evolve(machine=machine)] = v
+ for strk, v in paths.items():
+ k = self.mfilestr2key(strk, 'paths', None, machine)
+ self.options[k] = v
# Next look for compiler options in the "properties" section, this is
# also deprecated, and these will also be overwritten by the "built-in
@@ -711,45 +760,37 @@ def _load_machine_file_options(self, config: 'ConfigParser', properties: Propert
for lang in compilers.all_languages:
deprecated_properties.add(lang + '_args')
deprecated_properties.add(lang + '_link_args')
- for k, v in properties.properties.copy().items():
- if k in deprecated_properties:
- mlog.deprecation(f'{k} in the [properties] section of the machine file is deprecated, use the [built-in options] section.')
- self.options[OptionKey.from_string(k).evolve(machine=machine)] = v
- del properties.properties[k]
+ for strk, v in properties.properties.copy().items():
+ if strk in deprecated_properties:
+ mlog.deprecation(f'{strk} in the [properties] section of the machine file is deprecated, use the [built-in options] section.')
+ k = self.mfilestr2key(strk, 'properties', None, machine)
+ self.options[k] = v
+ del properties.properties[strk]
for section, values in config.items():
if ':' in section:
- subproject, section = section.split(':')
+ section_subproject, section = section.split(':')
else:
- subproject = ''
+ section_subproject = ''
if section == 'built-in options':
- for k, v in values.items():
- key = OptionKey.from_string(k)
+ for strk, v in values.items():
+ key = self.mfilestr2key(strk, section, section_subproject, machine)
# If we're in the cross file, and there is a `build.foo` warn about that. Later we'll remove it.
if machine is MachineChoice.HOST and key.machine is not machine:
mlog.deprecation('Setting build machine options in cross files, please use a native file instead, this will be removed in meson 2.0', once=True)
- if key.subproject:
- raise MesonException('Do not set subproject options in [built-in options] section, use [subproject:built-in options] instead.')
- self.options[key.evolve(subproject=subproject, machine=machine)] = v
+ self.options[key] = v
elif section == 'project options' and machine is MachineChoice.HOST:
# Project options are only for the host machine, we don't want
# to read these from the native file
- for k, v in values.items():
+ for strk, v in values.items():
# Project options are always for the host machine
- key = OptionKey.from_string(k)
- if key.subproject:
- raise MesonException('Do not set subproject options in [built-in options] section, use [subproject:built-in options] instead.')
- self.options[key.evolve(subproject=subproject)] = v
+ key = self.mfilestr2key(strk, section, section_subproject, machine)
+ self.options[key] = v
def _set_default_options_from_env(self) -> None:
opts: T.List[T.Tuple[str, str]] = (
[(v, f'{k}_args') for k, v in compilers.compilers.CFLAGS_MAPPING.items()] +
- [
- ('PKG_CONFIG_PATH', 'pkg_config_path'),
- ('CMAKE_PREFIX_PATH', 'cmake_prefix_path'),
- ('LDFLAGS', 'ldflags'),
- ('CPPFLAGS', 'cppflags'),
- ]
+ NON_LANG_ENV_OPTIONS
)
env_opts: T.DefaultDict[OptionKey, T.List[str]] = collections.defaultdict(list)
@@ -784,35 +825,35 @@ def _set_default_options_from_env(self) -> None:
env_opts[key].extend(p_list)
elif keyname == 'cppflags':
for lang in compilers.compilers.LANGUAGES_USING_CPPFLAGS:
- key = OptionKey(f'{lang}_env_args', machine=for_machine)
+ key = OptionKey(f'{lang}_args', machine=for_machine)
env_opts[key].extend(p_list)
else:
key = OptionKey.from_string(keyname).evolve(machine=for_machine)
if evar in compilers.compilers.CFLAGS_MAPPING.values():
- # If this is an environment variable, we have to
- # store it separately until the compiler is
- # instantiated, as we don't know whether the
- # compiler will want to use these arguments at link
- # time and compile time (instead of just at compile
- # time) until we're instantiating that `Compiler`
- # object. This is required so that passing
- # `-Dc_args=` on the command line and `$CFLAGS`
- # have subtly different behavior. `$CFLAGS` will be
- # added to the linker command line if the compiler
- # acts as a linker driver, `-Dc_args` will not.
- #
- # We still use the original key as the base here, as
- # we want to inherit the machine and the compiler
- # language
lang = key.name.split('_', 1)[0]
- key = key.evolve(f'{lang}_env_args')
+ key = key.evolve(f'{lang}_args')
env_opts[key].extend(p_list)
- # Only store options that are not already in self.options,
- # otherwise we'd override the machine files
- for k, v in env_opts.items():
- if k not in self.options:
- self.options[k] = v
+ # If this is an environment variable, we have to
+ # store it separately until the compiler is
+ # instantiated, as we don't know whether the
+ # compiler will want to use these arguments at link
+ # time and compile time (instead of just at compile
+ # time) until we're instantiating that `Compiler`
+ # object. This is required so that passing
+ # `-Dc_args=` on the command line and `$CFLAGS`
+ # have subtly different behavior. `$CFLAGS` will be
+ # added to the linker command line if the compiler
+ # acts as a linker driver, `-Dc_args` will not.
+ for (_, keyname), for_machine in itertools.product(NON_LANG_ENV_OPTIONS, MachineChoice):
+ key = OptionKey.from_string(keyname).evolve(machine=for_machine)
+ # Only store options that are not already in self.options,
+ # otherwise we'd override the machine files
+ if key in env_opts and key not in self.options:
+ self.options[key] = env_opts[key]
+ del env_opts[key]
+
+ self.env_opts.update(env_opts)
def _set_default_binaries_from_env(self) -> None:
"""Set default binaries from the environment.
@@ -856,7 +897,12 @@ def create_new_coredata(self, options: coredata.SharedCMDOptions) -> None:
# re-initialized with project options by the interpreter during
# build file parsing.
# meson_command is used by the regenchecker script, which runs meson
- self.coredata = coredata.CoreData(options, self.scratch_dir, mesonlib.get_meson_command())
+ meson_command = mesonlib.get_meson_command()
+ if meson_command is None:
+ meson_command = []
+ else:
+ meson_command = meson_command.copy()
+ self.coredata = coredata.CoreData(options, self.scratch_dir, meson_command)
self.first_invocation = True
def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) -> bool:
@@ -897,7 +943,7 @@ def is_object(self, fname: 'mesonlib.FileOrString') -> bool:
return is_object(fname)
@lru_cache(maxsize=None)
- def is_library(self, fname: mesonlib.FileOrString):
+ def is_library(self, fname: mesonlib.FileOrString) -> bool:
return is_library(fname)
def lookup_binary_entry(self, for_machine: MachineChoice, name: str) -> T.Optional[T.List[str]]:
@@ -937,25 +983,25 @@ def get_static_lib_dir(self) -> str:
return self.get_libdir()
def get_prefix(self) -> str:
- return self.coredata.get_option(OptionKey('prefix'))
+ return _as_str(self.coredata.optstore.get_value_for(OptionKey('prefix')))
def get_libdir(self) -> str:
- return self.coredata.get_option(OptionKey('libdir'))
+ return _as_str(self.coredata.optstore.get_value_for(OptionKey('libdir')))
def get_libexecdir(self) -> str:
- return self.coredata.get_option(OptionKey('libexecdir'))
+ return _as_str(self.coredata.optstore.get_value_for(OptionKey('libexecdir')))
def get_bindir(self) -> str:
- return self.coredata.get_option(OptionKey('bindir'))
+ return _as_str(self.coredata.optstore.get_value_for(OptionKey('bindir')))
def get_includedir(self) -> str:
- return self.coredata.get_option(OptionKey('includedir'))
+ return _as_str(self.coredata.optstore.get_value_for(OptionKey('includedir')))
def get_mandir(self) -> str:
- return self.coredata.get_option(OptionKey('mandir'))
+ return _as_str(self.coredata.optstore.get_value_for(OptionKey('mandir')))
def get_datadir(self) -> str:
- return self.coredata.get_option(OptionKey('datadir'))
+ return _as_str(self.coredata.optstore.get_value_for(OptionKey('datadir')))
def get_compiler_system_lib_dirs(self, for_machine: MachineChoice) -> T.List[str]:
for comp in self.coredata.compilers[for_machine].values():
@@ -973,8 +1019,8 @@ def get_compiler_system_lib_dirs(self, for_machine: MachineChoice) -> T.List[str
p, out, _ = Popen_safe(comp.get_exelist() + ['-print-search-dirs'])
if p.returncode != 0:
raise mesonlib.MesonException('Could not calculate system search dirs')
- out = out.split('\n')[index].lstrip('libraries: =').split(':')
- return [os.path.normpath(p) for p in out]
+ split = out.split('\n')[index].lstrip('libraries: =').split(':')
+ return [os.path.normpath(p) for p in split]
def get_compiler_system_include_dirs(self, for_machine: MachineChoice) -> T.List[str]:
for comp in self.coredata.compilers[for_machine].values():
@@ -988,9 +1034,10 @@ def get_compiler_system_include_dirs(self, for_machine: MachineChoice) -> T.List
return []
return comp.get_default_include_dirs()
- def need_exe_wrapper(self, for_machine: MachineChoice = MachineChoice.HOST):
+ def need_exe_wrapper(self, for_machine: MachineChoice = MachineChoice.HOST) -> bool:
value = self.properties[for_machine].get('needs_exe_wrapper', None)
if value is not None:
+ assert isinstance(value, bool), 'for mypy'
return value
if not self.is_cross_build():
return False
@@ -1002,4 +1049,26 @@ def get_exe_wrapper(self) -> T.Optional[ExternalProgram]:
return self.exe_wrapper
def has_exe_wrapper(self) -> bool:
- return self.exe_wrapper and self.exe_wrapper.found()
+ return self.exe_wrapper is not None and self.exe_wrapper.found()
+
+ def get_env_for_paths(self, library_paths: T.Set[str], extra_paths: T.Set[str]) -> mesonlib.EnvironmentVariables:
+ env = mesonlib.EnvironmentVariables()
+ need_wine = not self.machines.build.is_windows() and self.machines.host.is_windows()
+ if need_wine:
+ # Executable paths should be in both PATH and WINEPATH.
+            # - Having them in PATH makes bash completion find it,
+            #   and makes running "foo.exe" find it when wine-binfmt is installed.
+ # - Having them in WINEPATH makes "wine foo.exe" find it.
+ library_paths.update(extra_paths)
+ if library_paths:
+ if need_wine:
+ env.prepend('WINEPATH', list(library_paths), separator=';')
+ elif self.machines.host.is_windows() or self.machines.host.is_cygwin():
+ extra_paths.update(library_paths)
+ elif self.machines.host.is_darwin():
+ env.prepend('DYLD_LIBRARY_PATH', list(library_paths))
+ else:
+ env.prepend('LD_LIBRARY_PATH', list(library_paths))
+ if extra_paths:
+ env.prepend('PATH', list(extra_paths))
+ return env
diff --git a/mesonbuild/interpreter/compiler.py b/mesonbuild/interpreter/compiler.py
index 90514446bb12..8aeac8ac6b2e 100644
--- a/mesonbuild/interpreter/compiler.py
+++ b/mesonbuild/interpreter/compiler.py
@@ -1,6 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2012-2021 The Meson development team
-# Copyright © 2021-2024 Intel Corporation
+# Copyright © 2021-2025 Intel Corporation
from __future__ import annotations
import collections
@@ -11,7 +11,6 @@
import typing as T
from .. import build
-from .. import coredata
from .. import dependencies
from .. import options
from .. import mesonlib
@@ -270,10 +269,10 @@ def _determine_args(self, kwargs: BaseCompileKW,
for idir in i.to_string_list(self.environment.get_source_dir(), self.environment.get_build_dir()):
args.extend(self.compiler.get_include_args(idir, False))
if not kwargs['no_builtin_args']:
- opts = coredata.OptionsView(self.environment.coredata.optstore, self.subproject)
- args += self.compiler.get_option_compile_args(opts)
+ args += self.compiler.get_option_compile_args(None, self.interpreter.environment, self.subproject)
+ args += self.compiler.get_option_std_args(None, self.interpreter.environment, self.subproject)
if mode is CompileCheckMode.LINK:
- args.extend(self.compiler.get_option_link_args(opts))
+ args.extend(self.compiler.get_option_link_args(None, self.interpreter.environment, self.subproject))
if kwargs.get('werror', False):
args.extend(self.compiler.get_werror_args())
args.extend(kwargs['args'])
@@ -587,7 +586,7 @@ def links_method(self, args: T.Tuple['mesonlib.FileOrString'], kwargs: 'CompileK
compiler = clist[SUFFIX_TO_LANG[suffix]]
extra_args = functools.partial(self._determine_args, kwargs)
- deps, msg = self._determine_dependencies(kwargs['dependencies'], compile_only=False)
+ deps, msg = self._determine_dependencies(kwargs['dependencies'], compile_only=False, endl=None)
result, cached = self.compiler.links(code, self.environment,
compiler=compiler,
extra_args=extra_args,
@@ -718,7 +717,7 @@ def find_library_method(self, args: T.Tuple[str], kwargs: 'FindLibraryKW') -> 'd
search_dirs = extract_search_dirs(kwargs)
- prefer_static = self.environment.coredata.get_option(OptionKey('prefer_static'))
+ prefer_static = self.environment.coredata.optstore.get_value_for(OptionKey('prefer_static'))
if kwargs['static'] is True:
libtype = mesonlib.LibType.STATIC
elif kwargs['static'] is False:
diff --git a/mesonbuild/interpreter/dependencyfallbacks.py b/mesonbuild/interpreter/dependencyfallbacks.py
index fd8a025ea220..53eeb2911cba 100644
--- a/mesonbuild/interpreter/dependencyfallbacks.py
+++ b/mesonbuild/interpreter/dependencyfallbacks.py
@@ -1,3 +1,7 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2021-2024 The Meson Developers
+# Copyright © 2021-2025 Intel Corporation
+
from __future__ import annotations
from .interpreterobjects import extract_required_kwarg
@@ -5,7 +9,7 @@
from .. import dependencies
from .. import build
from ..wrap import WrapMode
-from ..mesonlib import extract_as_list, stringlistify, version_compare_many, listify
+from ..mesonlib import extract_as_list, stringlistify, version_compare_many
from ..options import OptionKey
from ..dependencies import Dependency, DependencyException, NotFoundDependency
from ..interpreterbase import (MesonInterpreterObject, FeatureNew,
@@ -19,8 +23,11 @@
class DependencyFallbacksHolder(MesonInterpreterObject):
- def __init__(self, interpreter: 'Interpreter', names: T.List[str], allow_fallback: T.Optional[bool] = None,
- default_options: T.Optional[T.Dict[OptionKey, str]] = None) -> None:
+ def __init__(self,
+ interpreter: 'Interpreter',
+ names: T.List[str],
+ allow_fallback: T.Optional[bool] = None,
+ default_options: T.Optional[T.Dict[str, str]] = None) -> None:
super().__init__(subproject=interpreter.subproject)
self.interpreter = interpreter
self.subproject = interpreter.subproject
@@ -115,20 +122,17 @@ def _do_subproject(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs
# dependency('foo', static: true) should implicitly add
# default_options: ['default_library=static']
static = kwargs.get('static')
- default_options = func_kwargs.get('default_options', {})
- if static is not None and 'default_library' not in default_options:
+ extra_default_options = {}
+ if static is not None:
default_library = 'static' if static else 'shared'
mlog.log(f'Building fallback subproject with default_library={default_library}')
- default_options[OptionKey('default_library')] = default_library
- func_kwargs['default_options'] = default_options
+ extra_default_options['default_library'] = default_library
# Configure the subproject
subp_name = self.subproject_name
varname = self.subproject_varname
func_kwargs.setdefault('version', [])
- if 'default_options' in kwargs and isinstance(kwargs['default_options'], str):
- func_kwargs['default_options'] = listify(kwargs['default_options'])
- self.interpreter.do_subproject(subp_name, func_kwargs)
+ self.interpreter.do_subproject(subp_name, func_kwargs, extra_default_options=extra_default_options)
return self._get_subproject_dep(subp_name, varname, kwargs)
def _get_subproject(self, subp_name: str) -> T.Optional[SubprojectHolder]:
@@ -316,8 +320,8 @@ def lookup(self, kwargs: TYPE_nkwargs, force_fallback: bool = False) -> Dependen
return self._notfound_dependency()
# Check if usage of the subproject fallback is forced
- wrap_mode = WrapMode.from_string(self.coredata.get_option(OptionKey('wrap_mode')))
- force_fallback_for = self.coredata.get_option(OptionKey('force_fallback_for'))
+ wrap_mode = WrapMode.from_string(self.coredata.optstore.get_value_for(OptionKey('wrap_mode')))
+ force_fallback_for = self.coredata.optstore.get_value_for(OptionKey('force_fallback_for'))
assert isinstance(force_fallback_for, list), 'for mypy'
self.nofallback = wrap_mode == WrapMode.nofallback
self.forcefallback = (force_fallback or
diff --git a/mesonbuild/interpreter/interpreter.py b/mesonbuild/interpreter/interpreter.py
index 3fa5c25d06c9..7da5436c61bd 100644
--- a/mesonbuild/interpreter/interpreter.py
+++ b/mesonbuild/interpreter/interpreter.py
@@ -1,10 +1,10 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2012-2021 The Meson development team
-# Copyright © 2023-2024 Intel Corporation
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
-import hashlib
+import io, sys, traceback
from .. import mparser
from .. import environment
@@ -13,13 +13,12 @@
from .. import mlog
from .. import options
from .. import build
-from .. import optinterpreter
from .. import compilers
from .. import envconfig
from ..wrap import wrap, WrapMode
from .. import mesonlib
from ..mesonlib import (EnvironmentVariables, ExecutableSerialisation, MesonBugException, MesonException, HoldableObject,
- FileMode, MachineChoice, listify,
+ FileMode, MachineChoice, is_parent_path, listify,
extract_as_list, has_path_sep, path_is_in_root, PerMachine)
from ..options import OptionKey
from ..programs import ExternalProgram, NonExistingExternalProgram
@@ -55,6 +54,7 @@
CT_BUILD_BY_DEFAULT,
CT_INPUT_KW,
CT_INSTALL_DIR_KW,
+ _EXCLUSIVE_EXECUTABLE_KWS,
EXECUTABLE_KWS,
JAR_KWS,
LIBRARY_KWS,
@@ -110,6 +110,8 @@
import copy
if T.TYPE_CHECKING:
+ from typing_extensions import Literal
+
from . import kwargs as kwtypes
from ..backend.backends import Backend
from ..interpreterbase.baseobjects import InterpreterObject, TYPE_var, TYPE_kwargs
@@ -129,6 +131,7 @@
ProgramVersionFunc = T.Callable[[T.Union[ExternalProgram, build.Executable, OverrideProgram]], str]
+ TestClass = T.TypeVar('TestClass', bound=Test)
def _project_version_validator(value: T.Union[T.List, str, mesonlib.File, None]) -> T.Optional[str]:
if isinstance(value, list):
@@ -269,22 +272,15 @@ def __init__(
subproject_dir: str = 'subprojects',
default_project_options: T.Optional[T.Dict[OptionKey, str]] = None,
ast: T.Optional[mparser.CodeBlockNode] = None,
- is_translated: bool = False,
relaxations: T.Optional[T.Set[InterpreterRuleRelaxation]] = None,
user_defined_options: T.Optional[coredata.SharedCMDOptions] = None,
) -> None:
- super().__init__(_build.environment.get_source_dir(), subdir, subproject)
+ super().__init__(_build.environment.get_source_dir(), subdir, subproject, subproject_dir, _build.environment)
self.active_projectname = ''
self.build = _build
- self.environment = self.build.environment
- self.coredata = self.environment.get_coredata()
self.backend = backend
self.summary: T.Dict[str, 'Summary'] = {}
self.modules: T.Dict[str, NewExtensionModule] = {}
- # Subproject directory is usually the name of the subproject, but can
- # be different for dependencies provided by wrap files.
- self.subproject_directory_name = subdir.split(os.path.sep)[-1]
- self.subproject_dir = subproject_dir
self.relaxations = relaxations or set()
if ast is None:
self.load_root_meson_file()
@@ -292,8 +288,7 @@ def __init__(
self.ast = ast
self.sanity_check_ast()
self.builtin.update({'meson': MesonMain(self.build, self)})
- self.generators: T.List[build.Generator] = []
- self.processed_buildfiles: T.Set[str] = set()
+ self.validated_cache: T.Set[str] = set()
self.project_args_frozen = False
self.global_args_frozen = False # implies self.project_args_frozen
self.subprojects: T.Dict[str, SubprojectHolder] = {}
@@ -301,24 +296,15 @@ def __init__(
self.configure_file_outputs: T.Dict[str, int] = {}
# Passed from the outside, only used in subprojects.
if default_project_options:
- self.default_project_options = default_project_options.copy()
+ assert isinstance(default_project_options, dict)
+ self.default_project_options = default_project_options
else:
self.default_project_options = {}
- self.project_default_options: T.Dict[OptionKey, str] = {}
+ self.project_default_options: T.List[str] = []
self.build_func_dict()
self.build_holder_map()
self.user_defined_options = user_defined_options
self.compilers: PerMachine[T.Dict[str, 'compilers.Compiler']] = PerMachine({}, {})
-
- # build_def_files needs to be defined before parse_project is called
- #
- # For non-meson subprojects, we'll be using the ast. Even if it does
- # exist we don't want to add a dependency on it, it's autogenerated
- # from the actual build files, and is just for reference.
- self.build_def_files: mesonlib.OrderedSet[str] = mesonlib.OrderedSet()
- build_filename = os.path.join(self.subdir, environment.build_filename)
- if not is_translated:
- self.build_def_files.add(build_filename)
self.parse_project()
self._redetect_machines()
@@ -629,15 +615,25 @@ def func_import(self, node: mparser.BaseNode, args: T.Tuple[str],
if real_modname in self.modules:
return self.modules[real_modname]
try:
- module = importlib.import_module(f'mesonbuild.modules.{real_modname}')
- except ImportError:
+ full_module_path = f'mesonbuild.modules.{real_modname}'
+ module = importlib.import_module(full_module_path)
+ except ImportError as e:
+ if e.name != full_module_path:
+ if required:
+ raise e
+
+ mlog.warning(f'Module "{modname}" exists but failed to import.')
+
+                for line in traceback.format_exception(type(e), e, e.__traceback__):
+ mlog.debug(line)
+
if required:
raise InvalidArguments(f'Module "{modname}" does not exist')
ext_module = NotFoundExtensionModule(real_modname)
else:
ext_module = module.initialize(self)
assert isinstance(ext_module, (ExtensionModule, NewExtensionModule)), 'for mypy'
- self.build.modules.append(real_modname)
+ self.build.modules.add(real_modname)
if ext_module.INFO.added:
FeatureNew.single_use(f'module {ext_module.INFO.name}', ext_module.INFO.added, self.subproject, location=node)
if ext_module.INFO.deprecated:
@@ -707,20 +703,18 @@ def func_declare_dependency(self, node: mparser.BaseNode, args: T.List[TYPE_var]
version = self.project_version
d_module_versions = kwargs['d_module_versions']
d_import_dirs = self.extract_incdirs(kwargs, 'd_import_dirs')
- srcdir = Path(self.environment.source_dir)
+ srcdir = self.environment.source_dir
+ subproject_dir = os.path.abspath(os.path.join(srcdir, self.subproject_dir))
+ project_root = os.path.abspath(os.path.join(srcdir, self.root_subdir))
# convert variables which refer to an -uninstalled.pc style datadir
for k, v in variables.items():
if not v:
FeatureNew.single_use('empty variable value in declare_dependency', '1.4.0', self.subproject, location=node)
- try:
- p = Path(v)
- except ValueError:
- continue
- else:
- if not self.is_subproject() and srcdir / self.subproject_dir in p.parents:
- continue
- if p.is_absolute() and p.is_dir() and srcdir / self.root_subdir in [p] + list(Path(os.path.abspath(p)).parents):
- variables[k] = P_OBJ.DependencyVariableString(v)
+ if os.path.isabs(v) \
+ and (self.is_subproject() or not is_parent_path(subproject_dir, v)) \
+ and is_parent_path(project_root, v) \
+ and os.path.isdir(v):
+ variables[k] = P_OBJ.DependencyVariableString(v)
dep = dependencies.InternalDependency(version, incs, compile_args,
link_args, libs, libs_whole, sources, extra_files,
@@ -803,13 +797,12 @@ def run_command_impl(self,
if not cmd.found():
raise InterpreterException(f'command {cmd.get_name()!r} not found or not executable')
elif isinstance(cmd, compilers.Compiler):
- exelist = cmd.get_exelist()
- cmd = exelist[0]
+ expanded_args = cmd.get_exe_args()
+ cmd = cmd.get_exe()
prog = ExternalProgram(cmd, silent=True)
if not prog.found():
raise InterpreterException(f'Program {cmd!r} not found or not executable')
cmd = prog
- expanded_args = exelist[1:]
else:
if isinstance(cmd, mesonlib.File):
cmd = cmd.absolute_path(srcdir, builddir)
@@ -828,7 +821,7 @@ def run_command_impl(self,
expanded_args.append(a.get_path())
elif isinstance(a, compilers.Compiler):
FeatureNew.single_use('Compiler object as a variadic argument to `run_command`', '0.61.0', self.subproject, location=self.current_node)
- prog = ExternalProgram(a.exelist[0], silent=True)
+ prog = ExternalProgram(a.get_exe(), silent=True)
if not prog.found():
raise InterpreterException(f'Program {cmd!r} not found or not executable')
expanded_args.append(prog.get_path())
@@ -874,14 +867,23 @@ def disabled_subproject(self, subp_name: str, disabled_feature: T.Optional[str]
self.subprojects[subp_name] = sub
return sub
- def do_subproject(self, subp_name: str, kwargs: kwtypes.DoSubproject, force_method: T.Optional[wrap.Method] = None) -> SubprojectHolder:
+ def do_subproject(self, subp_name: str, kwargs: kwtypes.DoSubproject, force_method: T.Optional[wrap.Method] = None,
+ extra_default_options: T.Optional[T.Dict[str, options.ElementaryOptionValues]] = None) -> SubprojectHolder:
+        # NOTE(review): removed leftover debug hook — a no-op
+        # `if subp_name == 'sub_static': pass` served no purpose.
disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
if disabled:
assert feature, 'for mypy'
mlog.log('Subproject', mlog.bold(subp_name), ':', 'skipped: feature', mlog.bold(feature), 'disabled')
return self.disabled_subproject(subp_name, disabled_feature=feature)
- default_options = {k.evolve(subproject=subp_name): v for k, v in kwargs['default_options'].items()}
+ default_options = kwargs['default_options']
+ if isinstance(default_options, str):
+ default_options = [default_options]
+ if isinstance(default_options, list):
+ default_options = dict((x.split('=', 1) for x in default_options))
+ if extra_default_options:
+ default_options = {**extra_default_options, **default_options}
if subp_name == '':
raise InterpreterException('Subproject name must not be empty.')
@@ -913,10 +915,16 @@ def do_subproject(self, subp_name: str, kwargs: kwtypes.DoSubproject, force_meth
try:
subdir, method = r.resolve(subp_name, force_method)
except wrap.WrapException as e:
+ if force_method is not None:
+ prefix = force_method.title() + ' subproject'
+ else:
+ prefix = 'Subproject'
+ msg = [prefix, mlog.bold(subp_name), 'is buildable:', mlog.red('NO')]
if not required:
mlog.log(e)
- mlog.log('Subproject ', mlog.bold(subp_name), 'is buildable:', mlog.red('NO'), '(disabling)')
+ mlog.log(*msg, '(disabling)')
return self.disabled_subproject(subp_name, exception=e)
+ mlog.error(*msg)
raise e
os.makedirs(os.path.join(self.build.environment.get_build_dir(), subdir), exist_ok=True)
@@ -950,7 +958,7 @@ def do_subproject(self, subp_name: str, kwargs: kwtypes.DoSubproject, force_meth
raise e
def _do_subproject_meson(self, subp_name: str, subdir: str,
- default_options: T.Dict[OptionKey, str],
+ default_options: T.Dict[str, options.ElementaryOptionValues],
kwargs: kwtypes.DoSubproject,
ast: T.Optional[mparser.CodeBlockNode] = None,
build_def_files: T.Optional[T.List[str]] = None,
@@ -971,8 +979,7 @@ def _do_subproject_meson(self, subp_name: str, subdir: str,
new_build = self.build.copy()
subi = Interpreter(new_build, self.backend, subp_name, subdir, self.subproject_dir,
- default_options, ast=ast, is_translated=(ast is not None),
- relaxations=relaxations,
+ default_options, ast=ast, relaxations=relaxations,
user_defined_options=self.user_defined_options)
# Those lists are shared by all interpreters. That means that
# even if the subproject fails, any modification that the subproject
@@ -1011,21 +1018,21 @@ def _do_subproject_meson(self, subp_name: str, subdir: str,
return self.subprojects[subp_name]
def _do_subproject_cmake(self, subp_name: str, subdir: str,
- default_options: T.Dict[OptionKey, str],
+ default_options: T.Dict[str, options.ElementaryOptionValues],
kwargs: kwtypes.DoSubproject) -> SubprojectHolder:
from ..cmake import CMakeInterpreter
with mlog.nested(subp_name):
- prefix = self.coredata.optstore.get_value('prefix')
+ prefix = self.coredata.optstore.get_value_for('prefix')
from ..modules.cmake import CMakeSubprojectOptions
- options = kwargs.get('options') or CMakeSubprojectOptions()
- cmake_options = kwargs.get('cmake_options', []) + options.cmake_options
+ kw_opts = kwargs.get('options') or CMakeSubprojectOptions()
+ cmake_options = kwargs.get('cmake_options', []) + kw_opts.cmake_options
cm_int = CMakeInterpreter(Path(subdir), Path(prefix), self.build.environment, self.backend)
cm_int.initialise(cmake_options)
cm_int.analyse()
# Generate a meson ast and execute it with the normal do_subproject_meson
- ast = cm_int.pretend_to_be_meson(options.target_options)
+ ast = cm_int.pretend_to_be_meson(kw_opts.target_options)
result = self._do_subproject_meson(
subp_name, subdir, default_options,
kwargs, ast,
@@ -1038,77 +1045,74 @@ def _do_subproject_cmake(self, subp_name: str, subdir: str,
return result
def _do_subproject_cargo(self, subp_name: str, subdir: str,
- default_options: T.Dict[OptionKey, str],
+ default_options: T.Dict[str, options.ElementaryOptionValues],
kwargs: kwtypes.DoSubproject) -> SubprojectHolder:
from .. import cargo
FeatureNew.single_use('Cargo subproject', '1.3.0', self.subproject, location=self.current_node)
mlog.warning('Cargo subproject is an experimental feature and has no backwards compatibility guarantees.',
once=True, location=self.current_node)
+ if self.environment.cargo is None:
+ self.environment.cargo = cargo.Interpreter(self.environment)
with mlog.nested(subp_name):
- ast, options = cargo.interpret(subp_name, subdir, self.environment)
- self.coredata.update_project_options(options, subp_name)
+ ast = self.environment.cargo.interpret(subdir)
return self._do_subproject_meson(
subp_name, subdir, default_options, kwargs, ast,
# FIXME: Are there other files used by cargo interpreter?
[os.path.join(subdir, 'Cargo.toml')])
- def get_option_internal(self, optname: str) -> options.UserOption:
- key = OptionKey.from_string(optname).evolve(subproject=self.subproject)
-
- if not self.environment.coredata.optstore.is_project_option(key):
- for opts in [self.coredata.optstore, compilers.base_options]:
- v = opts.get(key)
- if v is None or v.yielding:
- v = opts.get(key.as_root())
- if v is not None:
- assert isinstance(v, options.UserOption), 'for mypy'
- return v
-
- try:
- opt = self.coredata.optstore.get_value_object(key)
- if opt.yielding and key.subproject and key.as_root() in self.coredata.optstore:
- popt = self.coredata.optstore.get_value_object(key.as_root())
- if type(opt) is type(popt):
- opt = popt
- else:
- # Get class name, then option type as a string
- opt_type = opt.__class__.__name__[4:][:-6].lower()
- popt_type = popt.__class__.__name__[4:][:-6].lower()
- # This is not a hard error to avoid dependency hell, the workaround
- # when this happens is to simply set the subproject's option directly.
- mlog.warning('Option {0!r} of type {1!r} in subproject {2!r} cannot yield '
- 'to parent option of type {3!r}, ignoring parent value. '
- 'Use -D{2}:{0}=value to set the value for this option manually'
- '.'.format(optname, opt_type, self.subproject, popt_type),
- location=self.current_node)
- return opt
- except KeyError:
- pass
-
- raise InterpreterException(f'Tried to access unknown option {optname!r}.')
-
@typed_pos_args('get_option', str)
@noKwargs
- def func_get_option(self, nodes: mparser.BaseNode, args: T.Tuple[str],
- kwargs: 'TYPE_kwargs') -> T.Union[options.UserOption, 'TYPE_var']:
+ def func_get_option(self, node: mparser.BaseNode, args: T.Tuple[str],
+ kwargs: TYPE_kwargs) -> T.Union[options.UserOption, 'TYPE_var']:
optname = args[0]
+
if ':' in optname:
raise InterpreterException('Having a colon in option name is forbidden, '
'projects are not allowed to directly access '
'options of other subprojects.')
-
if optname_regex.search(optname.split('.', maxsplit=1)[-1]) is not None:
raise InterpreterException(f'Invalid option name {optname!r}')
- opt = self.get_option_internal(optname)
- if isinstance(opt, options.UserFeatureOption):
- opt.name = optname
- return opt
- elif isinstance(opt, options.UserOption):
- if isinstance(opt.value, str):
- return P_OBJ.OptionString(opt.value, f'{{{optname}}}')
- return opt.value
- return opt
+ # Will be None only if the value comes from the default
+ value_object: T.Optional[options.AnyOptionType]
+
+ try:
+ optkey = options.OptionKey.from_string(optname).evolve(subproject=self.subproject)
+ value_object, value = self.coredata.optstore.get_value_object_and_value_for(optkey)
+ except KeyError:
+ if self.coredata.optstore.is_base_option(optkey):
+ # Due to backwards compatibility return the default
+ # option for base options instead of erroring out.
+ #
+ # TODO: This will have issues if we expect to return a user FeatureOption
+ # Of course, there's a bit of a layering violation here in
+ # that we return a UserFeatureOption, but otherwise the held value
+ # We probably need a lower level feature thing, or an enum
+ # instead of strings
+ value = self.coredata.optstore.get_default_for_b_option(optkey)
+ value_object = None
+ else:
+ if self.subproject:
+ raise MesonException(f'Option {optname} does not exist for subproject {self.subproject}.')
+ raise MesonException(f'Option {optname} does not exist.')
+ if isinstance(value_object, options.UserFeatureOption):
+ ocopy = copy.copy(value_object)
+ ocopy.name = optname
+ ocopy.value = value
+ return ocopy
+ elif optname == 'b_sanitize':
+ assert value_object is None or isinstance(value_object, options.UserStringArrayOption)
+ # To ensure backwards compatibility this always returns a string.
+ # We may eventually want to introduce a new "format" kwarg that
+ # allows the user to modify this behaviour, but for now this is
+ # likely good enough for most usecases.
+ if not value:
+ return 'none'
+ return ','.join(sorted(value))
+
+ if isinstance(value, str):
+ return P_OBJ.OptionString(value, f'{{{optname}}}')
+ return value
@typed_pos_args('configuration_data', optargs=[dict])
@noKwargs
@@ -1129,13 +1133,13 @@ def set_backend(self) -> None:
return
from ..backend import backends
- if self.user_defined_options and OptionKey('genvslite') in self.user_defined_options.cmd_line_options.keys():
+ if OptionKey('genvslite') in self.user_defined_options.cmd_line_options:
# Use of the '--genvslite vsxxxx' option ultimately overrides any '--backend xxx'
# option the user may specify.
- backend_name = self.coredata.get_option(OptionKey('genvslite'))
+ backend_name = self.coredata.optstore.get_value_for(OptionKey('genvslite'))
self.backend = backends.get_genvslite_backend(backend_name, self.build, self)
else:
- backend_name = self.coredata.get_option(OptionKey('backend'))
+ backend_name = self.coredata.optstore.get_value_for(OptionKey('backend'))
self.backend = backends.get_backend_from_name(backend_name, self.build, self)
if self.backend is None:
@@ -1183,58 +1187,40 @@ def func_project(self, node: mparser.FunctionNode, args: T.Tuple[str, T.List[str
else:
mesonlib.project_meson_versions[self.subproject] = mesonlib.NoProjectVersion()
- # Load "meson.options" before "meson_options.txt", and produce a warning if
- # it is being used with an old version. I have added check that if both
- # exist the warning isn't raised
- option_file = os.path.join(self.source_root, self.subdir, 'meson.options')
- old_option_file = os.path.join(self.source_root, self.subdir, 'meson_options.txt')
+ self._load_option_file()
- if os.path.exists(option_file):
- if os.path.exists(old_option_file):
- if os.path.samefile(option_file, old_option_file):
- mlog.debug("Not warning about meson.options with version minimum < 1.1 because meson_options.txt also exists")
- else:
- raise MesonException("meson.options and meson_options.txt both exist, but are not the same file.")
+ self.project_default_options = kwargs['default_options']
+ if isinstance(self.project_default_options, str):
+ self.project_default_options = [self.project_default_options]
+ assert isinstance(self.project_default_options, (list, dict))
+ if self.environment.first_invocation or (self.subproject != '' and self.subproject not in self.coredata.initialized_subprojects):
+ if self.subproject == '':
+ self.coredata.optstore.initialize_from_top_level_project_call(self.project_default_options,
+ self.user_defined_options.cmd_line_options,
+ self.environment.options)
else:
- FeatureNew.single_use('meson.options file', '1.1', self.subproject, 'Use meson_options.txt instead')
- else:
- option_file = old_option_file
- if os.path.exists(option_file):
- with open(option_file, 'rb') as f:
- # We want fast not cryptographically secure, this is just to
- # see if the option file has changed
- self.coredata.options_files[self.subproject] = (option_file, hashlib.sha1(f.read()).hexdigest())
- oi = optinterpreter.OptionInterpreter(self.environment.coredata.optstore, self.subproject)
- oi.process(option_file)
- self.coredata.update_project_options(oi.options, self.subproject)
- self.add_build_def_file(option_file)
- else:
- self.coredata.options_files[self.subproject] = None
+ invoker_method_default_options = self.default_project_options
+ self.coredata.optstore.initialize_from_subproject_call(self.subproject,
+ invoker_method_default_options,
+ self.project_default_options,
+ self.user_defined_options.cmd_line_options)
+ self.coredata.initialized_subprojects.add(self.subproject)
- if self.subproject:
- self.project_default_options = {k.evolve(subproject=self.subproject): v
- for k, v in kwargs['default_options'].items()}
- else:
- self.project_default_options = kwargs['default_options']
-
- # Do not set default_options on reconfigure otherwise it would override
- # values previously set from command line. That means that changing
- # default_options in a project will trigger a reconfigure but won't
- # have any effect.
- #
- # If this is the first invocation we always need to initialize
- # builtins, if this is a subproject that is new in a re-invocation we
- # need to initialize builtins for that
- if self.environment.first_invocation or (self.subproject != '' and self.subproject not in self.coredata.initialized_subprojects):
- default_options = self.project_default_options.copy()
- default_options.update(self.default_project_options)
- self.coredata.init_builtins(self.subproject)
- self.coredata.initialized_subprojects.add(self.subproject)
- else:
- default_options = {}
- self.coredata.set_default_options(default_options, self.subproject, self.environment)
+ if not self.is_subproject():
+ # We have to activate VS before adding languages and before calling
+ # self.set_backend() otherwise it wouldn't be able to detect which
+ # vs backend version we need. But after setting default_options in case
+ # the project sets vs backend by default.
+ backend = self.coredata.optstore.get_value_for(OptionKey('backend'))
+ assert backend is None or isinstance(backend, str), 'for mypy'
+ vsenv = self.coredata.optstore.get_value_for(OptionKey('vsenv'))
+ assert isinstance(vsenv, bool), 'for mypy'
+ force_vsenv = vsenv or backend.startswith('vs')
+ mesonlib.setup_vsenv(force_vsenv)
+ self.set_backend()
if not self.is_subproject():
+ self.coredata.optstore.validate_cmd_line_options(self.user_defined_options.cmd_line_options)
self.build.project_name = proj_name
self.active_projectname = proj_name
@@ -1292,36 +1278,21 @@ def func_project(self, node: mparser.FunctionNode, args: T.Tuple[str, T.List[str
self.build.subproject_dir = self.subproject_dir
# Load wrap files from this (sub)project.
- wrap_mode = WrapMode.from_string(self.coredata.get_option(OptionKey('wrap_mode')))
- if not self.is_subproject() or wrap_mode != WrapMode.nopromote:
- subdir = os.path.join(self.subdir, spdirname)
- r = wrap.Resolver(self.environment.get_source_dir(), subdir, self.subproject, wrap_mode)
- if self.is_subproject():
- assert self.environment.wrap_resolver is not None, 'for mypy'
- self.environment.wrap_resolver.merge_wraps(r)
- else:
- self.environment.wrap_resolver = r
+ subprojects_dir = os.path.join(self.subdir, spdirname)
+ if not self.is_subproject():
+ wrap_mode = WrapMode.from_string(self.coredata.optstore.get_value_for(OptionKey('wrap_mode')))
+ self.environment.wrap_resolver = wrap.Resolver(self.environment.get_source_dir(), subprojects_dir, self.subproject, wrap_mode)
+ else:
+ assert self.environment.wrap_resolver is not None, 'for mypy'
+ self.environment.wrap_resolver.load_and_merge(subprojects_dir, self.subproject)
self.build.projects[self.subproject] = proj_name
mlog.log('Project name:', mlog.bold(proj_name))
mlog.log('Project version:', mlog.bold(self.project_version))
- if not self.is_subproject():
- # We have to activate VS before adding languages and before calling
- # self.set_backend() otherwise it wouldn't be able to detect which
- # vs backend version we need. But after setting default_options in case
- # the project sets vs backend by default.
- backend = self.coredata.get_option(OptionKey('backend'))
- assert backend is None or isinstance(backend, str), 'for mypy'
- vsenv = self.coredata.get_option(OptionKey('vsenv'))
- assert isinstance(vsenv, bool), 'for mypy'
- force_vsenv = vsenv or backend.startswith('vs')
- mesonlib.setup_vsenv(force_vsenv)
-
self.add_languages(proj_langs, True, MachineChoice.HOST)
self.add_languages(proj_langs, False, MachineChoice.BUILD)
- self.set_backend()
if not self.is_subproject():
self.check_stdlibs()
@@ -1346,8 +1317,8 @@ def func_add_languages(self, node: mparser.FunctionNode, args: T.Tuple[T.List[st
mlog.warning('add_languages is missing native:, assuming languages are wanted for both host and build.',
location=node)
- success = self.add_languages(langs, False, MachineChoice.BUILD)
- success &= self.add_languages(langs, required, MachineChoice.HOST)
+ success = self.add_languages(langs, required, MachineChoice.HOST)
+ success &= self.add_languages(langs, False, MachineChoice.BUILD)
return success
def _stringify_user_arguments(self, args: T.List[TYPE_var], func_name: str) -> T.List[str]:
@@ -1421,7 +1392,14 @@ def _print_summary(self) -> None:
values['Cross files'] = self.user_defined_options.cross_file
if self.user_defined_options.native_file:
values['Native files'] = self.user_defined_options.native_file
- sorted_options = sorted(self.user_defined_options.cmd_line_options.items())
+
+ def compatibility_sort_helper(s):
+ if isinstance(s, tuple):
+ s = s[0]
+ if isinstance(s, str):
+ return s
+ return s.name
+ sorted_options = sorted(self.user_defined_options.cmd_line_options.items(), key=compatibility_sort_helper)
values.update({str(k): v for k, v in sorted_options})
if values:
self.summary_impl('User defined options', values, {'bool_yn': False, 'list_sep': None})
@@ -1472,10 +1450,19 @@ def func_expect_error(self, node: mparser.BaseNode, args: T.Tuple[str], kwargs:
class ExpectErrorObject(ContextManagerObject):
def __init__(self, msg: str, how: str, subproject: str) -> None:
super().__init__(subproject)
+ self.old_stdout = sys.stdout
+ sys.stdout = self.new_stdout = io.StringIO()
+ sys.stdout.colorize_console = getattr(self.old_stdout, 'colorize_console', None)
self.msg = msg
self.how = how
def __exit__(self, exc_type, exc_val, exc_tb):
+ sys.stdout = self.old_stdout
+ for l in self.new_stdout.getvalue().splitlines():
+ if 'ERROR:' in l:
+ print(l.replace('ERROR', 'ERROR (msbuild proof)'))
+ else:
+ print(l)
if exc_val is None:
raise InterpreterException('Expecting an error but code block succeeded')
if isinstance(exc_val, mesonlib.MesonException):
@@ -1550,15 +1537,6 @@ def add_languages_for(self, args: T.List[str], required: bool, for_machine: Mach
# update new values from commandline, if it applies
self.coredata.process_compiler_options(lang, comp, self.environment, self.subproject)
- # Add per-subproject compiler options. They inherit value from main project.
- if self.subproject:
- options = {}
- for k in comp.get_options():
- v = copy.copy(self.coredata.optstore.get_value_object(k))
- k = k.evolve(subproject=self.subproject)
- options[k] = v
- self.coredata.add_compiler_options(options, lang, for_machine, self.environment, self.subproject)
-
if for_machine == MachineChoice.HOST or self.environment.is_cross_build():
logger_fun = mlog.log
else:
@@ -1652,7 +1630,7 @@ def notfound_program(self, args: T.List[mesonlib.FileOrString]) -> ExternalProgr
# the host machine.
def find_program_impl(self, args: T.List[mesonlib.FileOrString],
for_machine: MachineChoice = MachineChoice.HOST,
- default_options: T.Optional[T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]]] = None,
+ default_options: T.Optional[T.Dict[OptionKey, options.ElementaryOptionValues]] = None,
required: bool = True, silent: bool = True,
wanted: T.Union[str, T.List[str]] = '',
search_dirs: T.Optional[T.List[str]] = None,
@@ -1683,7 +1661,7 @@ def find_program_impl(self, args: T.List[mesonlib.FileOrString],
return progobj
def program_lookup(self, args: T.List[mesonlib.FileOrString], for_machine: MachineChoice,
- default_options: T.Optional[T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]]],
+ default_options: T.Optional[T.Dict[OptionKey, options.ElementaryOptionValues]],
required: bool,
search_dirs: T.Optional[T.List[str]],
wanted: T.Union[str, T.List[str]],
@@ -1700,7 +1678,7 @@ def program_lookup(self, args: T.List[mesonlib.FileOrString], for_machine: Machi
return ExternalProgram('meson', self.environment.get_build_command(), silent=True)
fallback = None
- wrap_mode = WrapMode.from_string(self.coredata.get_option(OptionKey('wrap_mode')))
+ wrap_mode = WrapMode.from_string(self.coredata.optstore.get_value_for(OptionKey('wrap_mode')))
if wrap_mode != WrapMode.nofallback and self.environment.wrap_resolver:
fallback = self.environment.wrap_resolver.find_program_provider(args)
if fallback and wrap_mode == WrapMode.forcefallback:
@@ -1751,7 +1729,7 @@ def check_program_version(self, progobj: T.Union[ExternalProgram, build.Executab
return True
def find_program_fallback(self, fallback: str, args: T.List[mesonlib.FileOrString],
- default_options: T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]],
+ default_options: T.Dict[OptionKey, options.ElementaryOptionValues],
required: bool, extra_info: T.List[mlog.TV_Loggable]
) -> T.Optional[T.Union[ExternalProgram, build.Executable, OverrideProgram]]:
mlog.log('Fallback to subproject', mlog.bold(fallback), 'which provides program',
@@ -1851,12 +1829,24 @@ def func_dependency(self, node: mparser.BaseNode, args: T.Tuple[T.List[str]], kw
def func_disabler(self, node, args, kwargs):
return Disabler()
+ def _strip_exe_specific_kwargs(self, kwargs: kwtypes.Executable) -> kwtypes._BuildTarget:
+ kwargs = kwargs.copy()
+ for exe_kwarg in _EXCLUSIVE_EXECUTABLE_KWS:
+ del kwargs[exe_kwarg.name]
+ return kwargs
+
@permittedKwargs(build.known_exe_kwargs)
@typed_pos_args('executable', str, varargs=SOURCES_VARARGS)
@typed_kwargs('executable', *EXECUTABLE_KWS, allow_unknown=True)
def func_executable(self, node: mparser.BaseNode,
args: T.Tuple[str, SourcesVarargsType],
kwargs: kwtypes.Executable) -> build.Executable:
+ for_machine = kwargs['native']
+ m = self.environment.machines[for_machine]
+ if m.is_android() and kwargs.get('android_exe_type') == 'application':
+ holder = self.build_target(node, args, self._strip_exe_specific_kwargs(kwargs), build.SharedLibrary)
+ holder.shared_library_only = True
+ return holder
return self.build_target(node, args, kwargs, build.Executable)
@permittedKwargs(build.known_stlib_kwargs)
@@ -1951,6 +1941,10 @@ def func_build_target(self, node: mparser.BaseNode,
),
KwargInfo('fallback', (str, NoneType)),
KwargInfo('replace_string', str, default='@VCS_TAG@'),
+ INSTALL_KW.evolve(since='1.7.0'),
+ INSTALL_DIR_KW.evolve(since='1.7.0'),
+ INSTALL_TAG_KW.evolve(since='1.7.0'),
+ INSTALL_MODE_KW.evolve(since='1.7.0'),
)
def func_vcs_tag(self, node: mparser.BaseNode, args: T.List['TYPE_var'], kwargs: 'kwtypes.VcsTag') -> build.CustomTarget:
if kwargs['fallback'] is None:
@@ -1972,9 +1966,9 @@ def func_vcs_tag(self, node: mparser.BaseNode, args: T.List['TYPE_var'], kwargs:
else:
vcs = mesonlib.detect_vcs(source_dir)
if vcs:
- mlog.log('Found {} repository at {}'.format(vcs['name'], vcs['wc_dir']))
- vcs_cmd = vcs['get_rev'].split()
- regex_selector = vcs['rev_regex']
+ mlog.log('Found {} repository at {}'.format(vcs.name, vcs.wc_dir))
+ vcs_cmd = vcs.get_rev
+ regex_selector = vcs.rev_regex
else:
vcs_cmd = [' '] # executing this cmd will fail in vcstagger.py and force to use the fallback string
# vcstagger.py parameters: infile, outfile, fallback, source_dir, replace_string, regex_selector, command...
@@ -1991,6 +1985,13 @@ def func_vcs_tag(self, node: mparser.BaseNode, args: T.List['TYPE_var'], kwargs:
replace_string,
regex_selector] + vcs_cmd
+ install = kwargs['install']
+ install_mode = self._warn_kwarg_install_mode_sticky(kwargs['install_mode'])
+ install_dir: T.List[T.Union[str, Literal[False]]] = [] if kwargs['install_dir'] is None else [kwargs['install_dir']]
+ install_tag: T.List[T.Optional[str]] = [] if kwargs['install_tag'] is None else [kwargs['install_tag']]
+ if install and not install_dir:
+ raise InvalidArguments('vcs_tag: "install_dir" keyword argument must be set when "install" is true.')
+
tg = build.CustomTarget(
kwargs['output'][0],
self.subdir,
@@ -2001,6 +2002,10 @@ def func_vcs_tag(self, node: mparser.BaseNode, args: T.List['TYPE_var'], kwargs:
kwargs['output'],
build_by_default=True,
build_always_stale=True,
+ install=install,
+ install_dir=install_dir,
+ install_mode=install_mode,
+ install_tag=install_tag,
)
self.add_target(tg.name, tg)
return tg
@@ -2206,9 +2211,7 @@ def func_generator(self, node: mparser.FunctionNode,
if '@OUTPUT@' in o:
raise InvalidArguments('Tried to use @OUTPUT@ in a rule with more than one output.')
- gen = build.Generator(args[0], **kwargs)
- self.generators.append(gen)
- return gen
+ return build.Generator(args[0], **kwargs)
@typed_pos_args('benchmark', str, (build.Executable, build.Jar, ExternalProgram, mesonlib.File, build.CustomTarget, build.CustomTargetIndex))
@typed_kwargs('benchmark', *TEST_KWS)
@@ -2235,7 +2238,8 @@ def unpack_env_kwarg(self, kwargs: T.Union[EnvironmentVariables, T.Dict[str, 'TY
def make_test(self, node: mparser.BaseNode,
args: T.Tuple[str, T.Union[build.Executable, build.Jar, ExternalProgram, mesonlib.File, build.CustomTarget, build.CustomTargetIndex]],
- kwargs: 'kwtypes.BaseTest') -> Test:
+ kwargs: 'kwtypes.BaseTest',
+ klass: T.Type[TestClass] = Test) -> TestClass:
name = args[0]
if ':' in name:
mlog.deprecation(f'":" is not allowed in test name "{name}", it has been replaced with "_"',
@@ -2265,20 +2269,20 @@ def make_test(self, node: mparser.BaseNode,
s = ':' + s
suite.append(prj.replace(' ', '_').replace(':', '_') + s)
- return Test(name,
- prj,
- suite,
- exe,
- kwargs['depends'],
- kwargs.get('is_parallel', False),
- kwargs['args'],
- env,
- kwargs['should_fail'],
- kwargs['timeout'],
- kwargs['workdir'],
- kwargs['protocol'],
- kwargs['priority'],
- kwargs['verbose'])
+ return klass(name,
+ prj,
+ suite,
+ exe,
+ kwargs['depends'],
+ kwargs.get('is_parallel', False),
+ kwargs['args'],
+ env,
+ kwargs['should_fail'],
+ kwargs['timeout'],
+ kwargs['workdir'],
+ kwargs['protocol'],
+ kwargs['priority'],
+ kwargs['verbose'])
def add_test(self, node: mparser.BaseNode,
args: T.Tuple[str, T.Union[build.Executable, build.Jar, ExternalProgram, mesonlib.File, build.CustomTarget, build.CustomTargetIndex]],
@@ -2446,39 +2450,21 @@ def func_subdir(self, node: mparser.BaseNode, args: T.Tuple[str], kwargs: 'kwtyp
raise InvalidArguments('The "meson-" prefix is reserved and cannot be used for top-level subdir().')
if args[0] == '':
raise InvalidArguments("The argument given to subdir() is the empty string ''. This is prohibited.")
+ if os.path.isabs(args[0]):
+ raise InvalidArguments('Subdir argument must be a relative path.')
for i in kwargs['if_found']:
if not i.found():
return
- prev_subdir = self.subdir
- subdir = os.path.join(prev_subdir, args[0])
- if os.path.isabs(subdir):
- raise InvalidArguments('Subdir argument must be a relative path.')
- absdir = os.path.join(self.environment.get_source_dir(), subdir)
- symlinkless_dir = os.path.realpath(absdir)
- build_file = os.path.join(symlinkless_dir, 'meson.build')
- if build_file in self.processed_buildfiles:
+ subdir, is_new = self._resolve_subdir(self.environment.get_source_dir(), args[0])
+ if not is_new:
raise InvalidArguments(f'Tried to enter directory "{subdir}", which has already been visited.')
- self.processed_buildfiles.add(build_file)
- self.subdir = subdir
+
os.makedirs(os.path.join(self.environment.build_dir, subdir), exist_ok=True)
- buildfilename = os.path.join(self.subdir, environment.build_filename)
- self.build_def_files.add(buildfilename)
- absname = os.path.join(self.environment.get_source_dir(), buildfilename)
- if not os.path.isfile(absname):
- self.subdir = prev_subdir
+
+ if not self._evaluate_subdir(self.environment.get_source_dir(), subdir):
+ buildfilename = os.path.join(subdir, environment.build_filename)
raise InterpreterException(f"Nonexistent build file '{buildfilename!s}'")
- code = self.read_buildfile(absname, buildfilename)
- try:
- codeblock = mparser.Parser(code, absname).parse()
- except mesonlib.MesonException as me:
- me.file = absname
- raise me
- try:
- self.evaluate_codeblock(codeblock)
- except SubdirDoneRequest:
- pass
- self.subdir = prev_subdir
# This is either ignored on basically any OS nowadays, or silently gets
# ignored (Solaris) or triggers an "illegal operation" error (FreeBSD).
@@ -2535,7 +2521,7 @@ def func_install_data(self, node: mparser.BaseNode,
follow_symlinks=kwargs['follow_symlinks'])
def install_data_impl(self, sources: T.List[mesonlib.File], install_dir: str,
- install_mode: FileMode, rename: T.Optional[str],
+ install_mode: FileMode, rename: T.Optional[T.List[str]],
tag: T.Optional[str],
install_data_type: T.Optional[str] = None,
preserve_path: bool = False,
@@ -2689,11 +2675,11 @@ def func_configure_file(self, node: mparser.BaseNode, args: T.List[TYPE_var],
ofile_rpath = os.path.join(self.subdir, output)
if ofile_rpath in self.configure_file_outputs:
mesonbuildfile = os.path.join(self.subdir, 'meson.build')
- current_call = f"{mesonbuildfile}:{self.current_lineno}"
+ current_call = f"{mesonbuildfile}:{self.current_node.lineno}"
first_call = "{}:{}".format(mesonbuildfile, self.configure_file_outputs[ofile_rpath])
mlog.warning('Output file', mlog.bold(ofile_rpath, True), 'for configure_file() at', current_call, 'overwrites configure_file() output at', first_call)
else:
- self.configure_file_outputs[ofile_rpath] = self.current_lineno
+ self.configure_file_outputs[ofile_rpath] = self.current_node.lineno
(ofile_path, ofile_fname) = os.path.split(os.path.join(self.subdir, output))
ofile_abs = os.path.join(self.environment.build_dir, ofile_path, ofile_fname)
@@ -3084,7 +3070,7 @@ def check_clang_asan_lundef(self) -> None:
if OptionKey('b_sanitize') not in self.coredata.optstore:
return
if (self.coredata.optstore.get_value('b_lundef') and
- self.coredata.optstore.get_value('b_sanitize') != 'none'):
+ self.coredata.optstore.get_value('b_sanitize')):
value = self.coredata.optstore.get_value('b_sanitize')
mlog.warning(textwrap.dedent(f'''\
Trying to use {value} sanitizer on Clang with b_lundef.
@@ -3103,8 +3089,8 @@ def check_clang_asan_lundef(self) -> None:
# subproject than it is defined in (due to e.g. a
# declare_dependency).
def validate_within_subproject(self, subdir, fname):
- srcdir = Path(self.environment.source_dir)
- builddir = Path(self.environment.build_dir)
+ srcdir = self.environment.source_dir
+ builddir = self.environment.build_dir
if isinstance(fname, P_OBJ.DependencyVariableString):
def validate_installable_file(fpath: Path) -> bool:
installablefiles: T.Set[Path] = set()
@@ -3124,27 +3110,39 @@ def validate_installable_file(fpath: Path) -> bool:
# subproject files, as long as they are scheduled to be installed.
if validate_installable_file(norm):
return
- norm = Path(os.path.abspath(Path(srcdir, subdir, fname)))
- if os.path.isdir(norm):
- inputtype = 'directory'
- else:
- inputtype = 'file'
- if InterpreterRuleRelaxation.ALLOW_BUILD_DIR_FILE_REFERENCES in self.relaxations and builddir in norm.parents:
- return
- if srcdir not in norm.parents:
- # Grabbing files outside the source tree is ok.
- # This is for vendor stuff like:
- #
- # /opt/vendorsdk/src/file_with_license_restrictions.c
- return
- project_root = Path(srcdir, self.root_subdir)
- subproject_dir = project_root / self.subproject_dir
- if norm == project_root:
+
+ def do_validate_within_subproject(norm: str) -> None:
+ if os.path.isdir(norm):
+ inputtype = 'directory'
+ else:
+ inputtype = 'file'
+ if InterpreterRuleRelaxation.ALLOW_BUILD_DIR_FILE_REFERENCES in self.relaxations and is_parent_path(builddir, norm):
+ return
+
+ if not is_parent_path(srcdir, norm):
+ # Grabbing files outside the source tree is ok.
+ # This is for vendor stuff like:
+ #
+ # /opt/vendorsdk/src/file_with_license_restrictions.c
+ return
+
+ project_root = os.path.join(srcdir, self.root_subdir)
+ if not is_parent_path(project_root, norm):
+ name = os.path.basename(norm)
+ raise InterpreterException(f'Sandbox violation: Tried to grab {inputtype} {name} outside current (sub)project.')
+
+ subproject_dir = os.path.join(project_root, self.subproject_dir)
+ if is_parent_path(subproject_dir, norm):
+ name = os.path.basename(norm)
+ raise InterpreterException(f'Sandbox violation: Tried to grab {inputtype} {name} from a nested subproject.')
+
+ fname = os.path.join(subdir, fname)
+ if fname in self.validated_cache:
return
- if project_root not in norm.parents:
- raise InterpreterException(f'Sandbox violation: Tried to grab {inputtype} {norm.name} outside current (sub)project.')
- if subproject_dir == norm or subproject_dir in norm.parents:
- raise InterpreterException(f'Sandbox violation: Tried to grab {inputtype} {norm.name} from a nested subproject.')
+
+ norm = os.path.abspath(os.path.join(srcdir, fname))
+ do_validate_within_subproject(norm)
+ self.validated_cache.add(fname)
@T.overload
def source_strings_to_files(self, sources: T.List['mesonlib.FileOrString'], strict: bool = True) -> T.List['mesonlib.File']: ...
@@ -3175,6 +3173,8 @@ def source_strings_to_files(self, sources: T.List['SourceInputs'], strict: bool
results: T.List['SourceOutputs'] = []
for s in sources:
if isinstance(s, str):
+ if s.endswith(' '):
+ raise MesonException(f'{s!r} ends with a space. This is probably an error.')
if not strict and s.startswith(self.environment.get_build_dir()):
results.append(s)
mlog.warning(f'Source item {s!r} cannot be converted to File object, because it is a generated file. '
@@ -3250,9 +3250,9 @@ def add_target(self, name: str, tobj: build.Target) -> None:
def build_both_libraries(self, node: mparser.BaseNode, args: T.Tuple[str, SourcesVarargsType], kwargs: kwtypes.Library) -> build.BothLibraries:
shared_lib = self.build_target(node, args, kwargs, build.SharedLibrary)
static_lib = self.build_target(node, args, kwargs, build.StaticLibrary)
- preferred_library = self.coredata.get_option(OptionKey('default_both_libraries'))
+ preferred_library = self.coredata.optstore.get_value_for(OptionKey('default_both_libraries'))
if preferred_library == 'auto':
- preferred_library = self.coredata.get_option(OptionKey('default_library'))
+ preferred_library = self.coredata.optstore.get_value_for(OptionKey('default_library'))
if preferred_library == 'both':
preferred_library = 'shared'
@@ -3293,12 +3293,12 @@ def build_both_libraries(self, node: mparser.BaseNode, args: T.Tuple[str, Source
return build.BothLibraries(shared_lib, static_lib, preferred_library)
def build_library(self, node: mparser.BaseNode, args: T.Tuple[str, SourcesVarargsType], kwargs: kwtypes.Library):
- default_library = self.coredata.get_option(OptionKey('default_library', subproject=self.subproject))
+ default_library = self.coredata.optstore.get_value_for(OptionKey('default_library', subproject=self.subproject))
assert isinstance(default_library, str), 'for mypy'
if default_library == 'shared':
- return self.build_target(node, args, T.cast('kwtypes.StaticLibrary', kwargs), build.SharedLibrary)
+ return self.build_target(node, args, T.cast('kwtypes.SharedLibrary', kwargs), build.SharedLibrary)
elif default_library == 'static':
- return self.build_target(node, args, T.cast('kwtypes.SharedLibrary', kwargs), build.StaticLibrary)
+ return self.build_target(node, args, T.cast('kwtypes.StaticLibrary', kwargs), build.StaticLibrary)
elif default_library == 'both':
return self.build_both_libraries(node, args, kwargs)
else:
@@ -3415,6 +3415,7 @@ def build_target(self, node: mparser.BaseNode, args: T.Tuple[str, SourcesVarargs
kwargs['language_args'][lang].extend(args)
kwargs['depend_files'].extend(deps)
if targetclass is not build.Jar:
+ self.check_for_jar_sources(sources, targetclass)
kwargs['d_import_dirs'] = self.extract_incdirs(kwargs, 'd_import_dirs')
# Filter out kwargs from other target types. For example 'soversion'
@@ -3483,6 +3484,8 @@ def build_target(self, node: mparser.BaseNode, args: T.Tuple[str, SourcesVarargs
target = targetclass(name, self.subdir, self.subproject, for_machine, srcs, struct, objs,
self.environment, self.compilers[for_machine], kwargs)
+ if objs and target.uses_rust():
+ FeatureNew.single_use('objects in Rust targets', '1.8.0', self.subproject)
self.add_target(name, target)
self.project_args_frozen = True
@@ -3502,6 +3505,13 @@ def check_sources_exist(self, subdir, sources):
if not os.path.isfile(fname):
raise InterpreterException(f'Tried to add non-existing source file {s}.')
+ def check_for_jar_sources(self, sources, targetclass):
+ for s in sources:
+ if isinstance(s, (str, mesonlib.File)) and compilers.is_java(s):
+ raise InvalidArguments(f'Build target of type "{targetclass.typename}" cannot build java source: "{s}". Use "{build.Jar.typename}" instead.')
+ elif isinstance(s, build.StructuredSources):
+ self.check_for_jar_sources(s.as_list(), targetclass)
+
# Only permit object extraction from the same subproject
def validate_extraction(self, buildtarget: mesonlib.HoldableObject) -> None:
if self.subproject != buildtarget.subproject:
@@ -3516,6 +3526,8 @@ def is_subproject(self) -> bool:
@noSecondLevelHolderResolving
def func_set_variable(self, node: mparser.BaseNode, args: T.Tuple[str, object], kwargs: 'TYPE_kwargs') -> None:
varname, value = args
+ if mparser.IDENT_RE.fullmatch(varname) is None:
+ raise InvalidCode('Invalid variable name: ' + varname)
self.set_variable(varname, value, holderify=True)
@typed_pos_args('get_variable', (str, Disabler), optargs=[object])
diff --git a/mesonbuild/interpreter/interpreterobjects.py b/mesonbuild/interpreter/interpreterobjects.py
index f4a2b4107ed3..a2fadbefc2f2 100644
--- a/mesonbuild/interpreter/interpreterobjects.py
+++ b/mesonbuild/interpreter/interpreterobjects.py
@@ -94,7 +94,7 @@ def __init__(self, option: options.UserFeatureOption, interpreter: 'Interpreter'
super().__init__(option, interpreter)
if option and option.is_auto():
# TODO: we need to cast here because options is not a TypedDict
- auto = T.cast('options.UserFeatureOption', self.env.coredata.optstore.get_value_object('auto_features'))
+ auto = T.cast('options.UserFeatureOption', self.env.coredata.optstore.get_value_object_for('auto_features'))
self.held_object = copy.copy(auto)
self.held_object.name = option.name
self.methods.update({'enabled': self.enabled_method,
@@ -809,6 +809,11 @@ def get_exe(self) -> T.Union[ExternalProgram, build.Executable, build.CustomTarg
def get_name(self) -> str:
return self.name
+
+class Doctest(Test):
+ target: T.Optional[build.BuildTarget] = None
+
+
class NullSubprojectInterpreter(HoldableObject):
pass
@@ -958,7 +963,10 @@ def outdir_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
@noKwargs
@typed_pos_args('extract_objects', varargs=(mesonlib.File, str, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList))
def extract_objects_method(self, args: T.Tuple[T.List[T.Union[mesonlib.FileOrString, 'build.GeneratedTypes']]], kwargs: TYPE_nkwargs) -> build.ExtractedObjects:
- return self._target_object.extract_objects(args[0])
+ tobj = self._target_object
+ unity_value = self.interpreter.coredata.get_option_for_target(tobj, "unity")
+ is_unity = (unity_value == 'on' or (unity_value == 'subprojects' and tobj.subproject != ''))
+ return tobj.extract_objects(args[0], is_unity)
@noPosargs
@typed_kwargs(
diff --git a/mesonbuild/interpreter/kwargs.py b/mesonbuild/interpreter/kwargs.py
index ae4866a88ad8..d741aabc5583 100644
--- a/mesonbuild/interpreter/kwargs.py
+++ b/mesonbuild/interpreter/kwargs.py
@@ -1,6 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
-# Copyright © 2021 The Meson Developers
-# Copyright © 2021 Intel Corporation
+# Copyright © 2021 The Meson Developers
+# Copyright © 2021-2025 Intel Corporation
from __future__ import annotations
"""Keyword Argument type annotations."""
@@ -19,6 +19,9 @@
from ..programs import ExternalProgram
from .type_checking import PkgConfigDefineType, SourcesVarargsType
+if T.TYPE_CHECKING:
+ TestArgs = T.Union[str, File, build.Target, ExternalProgram]
+
class FuncAddProjectArgs(TypedDict):
"""Keyword Arguments for the add_*_arguments family of arguments.
@@ -38,7 +41,6 @@ class BaseTest(TypedDict):
"""Shared base for the Rust module."""
- args: T.List[T.Union[str, File, build.Target, ExternalProgram]]
should_fail: bool
timeout: int
workdir: T.Optional[str]
@@ -52,6 +54,7 @@ class FuncBenchmark(BaseTest):
"""Keyword Arguments shared between `test` and `benchmark`."""
+ args: T.List[TestArgs]
protocol: Literal['exitcode', 'tap', 'gtest', 'rust']
@@ -209,7 +212,7 @@ class Project(TypedDict):
version: T.Optional[FileOrString]
meson_version: T.Optional[str]
- default_options: T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]]
+ default_options: T.List[str]
license: T.List[str]
license_files: T.List[str]
subproject_dir: str
@@ -239,7 +242,7 @@ class Summary(TypedDict):
class FindProgram(ExtractRequired, ExtractSearchDirs):
- default_options: T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]]
+ default_options: T.Dict[OptionKey, options.ElementaryOptionValues]
native: MachineChoice
version: T.List[str]
@@ -286,6 +289,10 @@ class VcsTag(TypedDict):
build.ExtractedObjects, build.GeneratedList, ExternalProgram, File]]
output: T.List[str]
replace_string: str
+ install: bool
+ install_tag: T.Optional[str]
+ install_dir: T.Optional[str]
+ install_mode: FileMode
class ConfigureFile(TypedDict):
@@ -308,13 +315,13 @@ class ConfigureFile(TypedDict):
class Subproject(ExtractRequired):
- default_options: T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]]
+ default_options: T.Dict[OptionKey, options.ElementaryOptionValues]
version: T.List[str]
class DoSubproject(ExtractRequired):
- default_options: T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]]
+ default_options: T.Union[T.List[str], T.Dict[str, options.ElementaryOptionValues], str]
version: T.List[str]
cmake_options: T.List[str]
options: T.Optional[CMakeSubprojectOptions]
@@ -342,7 +349,7 @@ class _BaseBuildTarget(TypedDict):
name_suffix: T.Optional[str]
native: MachineChoice
objects: T.List[build.ObjectTypes]
- override_options: T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]]
+ override_options: T.Dict[OptionKey, options.ElementaryOptionValues]
depend_files: NotRequired[T.List[File]]
resources: T.List[str]
@@ -386,6 +393,7 @@ class Executable(_BuildTarget):
pie: T.Optional[bool]
vs_module_defs: T.Optional[T.Union[str, File, build.CustomTarget, build.CustomTargetIndex]]
win_subsystem: T.Optional[str]
+ android_exe_type: T.Optional[Literal['application', 'executable']]
class _StaticLibMixin(TypedDict):
diff --git a/mesonbuild/interpreter/mesonmain.py b/mesonbuild/interpreter/mesonmain.py
index c82f933450c1..8ede6916abcd 100644
--- a/mesonbuild/interpreter/mesonmain.py
+++ b/mesonbuild/interpreter/mesonmain.py
@@ -1,6 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2012-2021 The Meson development team
-# Copyright © 2021-2024 Intel Corporation
+# Copyright © 2021-2025 Intel Corporation
from __future__ import annotations
import copy
@@ -311,7 +311,7 @@ def get_compiler_method(self, args: T.Tuple[str], kwargs: 'NativeKW') -> 'Compil
@noPosargs
@noKwargs
def is_unity_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
- optval = self.interpreter.environment.coredata.get_option(OptionKey('unity'))
+ optval = self.interpreter.environment.coredata.optstore.get_value_for(OptionKey('unity'))
return optval == 'on' or (optval == 'subprojects' and self.interpreter.is_subproject())
@noPosargs
@@ -360,7 +360,7 @@ def override_dependency_method(self, args: T.Tuple[str, dependencies.Dependency]
dep.name = name
optkey = OptionKey('default_library', subproject=self.interpreter.subproject)
- default_library = self.interpreter.coredata.get_option(optkey)
+ default_library = self.interpreter.coredata.optstore.get_value_for(optkey)
assert isinstance(default_library, str), 'for mypy'
static = kwargs['static']
if static is None:
diff --git a/mesonbuild/interpreter/primitives/string.py b/mesonbuild/interpreter/primitives/string.py
index 7cb492da7efc..a224dfac8946 100644
--- a/mesonbuild/interpreter/primitives/string.py
+++ b/mesonbuild/interpreter/primitives/string.py
@@ -7,7 +7,7 @@
import typing as T
-from ...mesonlib import version_compare
+from ...mesonlib import version_compare, version_compare_many
from ...interpreterbase import (
ObjectHolder,
MesonOperator,
@@ -169,9 +169,11 @@ def underscorify_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> st
return re.sub(r'[^a-zA-Z0-9]', '_', self.held_object)
@noKwargs
- @typed_pos_args('str.version_compare', str)
- def version_compare_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
- return version_compare(self.held_object, args[0])
+ @typed_pos_args('str.version_compare', varargs=str, min_varargs=1)
+ def version_compare_method(self, args: T.Tuple[T.List[str]], kwargs: TYPE_kwargs) -> bool:
+ if len(args[0]) > 1:
+ FeatureNew.single_use('version_compare() with multiple arguments', '1.8.0', self.subproject, location=self.current_node)
+ return version_compare_many(self.held_object, args[0])[0]
@staticmethod
def _op_div(this: str, other: str) -> str:
diff --git a/mesonbuild/interpreter/type_checking.py b/mesonbuild/interpreter/type_checking.py
index ed34be950065..78938ba9cd0c 100644
--- a/mesonbuild/interpreter/type_checking.py
+++ b/mesonbuild/interpreter/type_checking.py
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: Apache-2.0
-# Copyright © 2021 Intel Corporation
+# Copyright © 2021-2025 Intel Corporation
"""Helpers for strict type checking."""
@@ -16,7 +16,6 @@
from ..interpreterbase.decorators import KwargInfo, ContainerTypeInfo
from ..mesonlib import (File, FileMode, MachineChoice, listify, has_path_sep,
EnvironmentVariables)
-from ..options import OptionKey
from ..programs import ExternalProgram
# Helper definition for type checks that are `Optional[T]`
@@ -27,6 +26,7 @@
from ..build import ObjectTypes
from ..interpreterbase import TYPE_var
+ from ..options import ElementaryOptionValues
from ..mesonlib import EnvInitValueType
_FullEnvInitValueType = T.Union[EnvironmentVariables, T.List[str], T.List[T.List[str]], EnvInitValueType, str, None]
@@ -292,24 +292,12 @@ def _env_convertor(value: _FullEnvInitValueType) -> EnvironmentVariables:
default=[],
)
-def _override_options_convertor(raw: T.Union[str, T.List[str], T.Dict[str, T.Union[str, int, bool, T.List[str]]]]) -> T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]]:
- if isinstance(raw, str):
- raw = [raw]
- if isinstance(raw, list):
- output: T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]] = {}
- for each in raw:
- k, v = split_equal_string(each)
- output[OptionKey.from_string(k)] = v
- return output
- return {OptionKey.from_string(k): v for k, v in raw.items()}
-
-OVERRIDE_OPTIONS_KW: KwargInfo[T.Union[str, T.Dict[str, T.Union[str, int, bool, T.List[str]]], T.List[str]]] = KwargInfo(
+OVERRIDE_OPTIONS_KW: KwargInfo[T.Union[str, T.Dict[str, ElementaryOptionValues], T.List[str]]] = KwargInfo(
'override_options',
(str, ContainerTypeInfo(list, str), ContainerTypeInfo(dict, (str, int, bool, list))),
default={},
validator=_options_validator,
- convertor=_override_options_convertor,
since_values={dict: '1.2.0'},
)
@@ -484,9 +472,7 @@ def link_whole_validator(values: T.List[T.Union[StaticLibrary, CustomTarget, Cus
PRESERVE_PATH_KW: KwargInfo[bool] = KwargInfo('preserve_path', bool, default=False, since='0.63.0')
-TEST_KWS: T.List[KwargInfo] = [
- KwargInfo('args', ContainerTypeInfo(list, (str, File, BuildTarget, CustomTarget, CustomTargetIndex, ExternalProgram)),
- listify=True, default=[]),
+TEST_KWS_NO_ARGS: T.List[KwargInfo] = [
KwargInfo('should_fail', bool, default=False),
KwargInfo('timeout', int, default=30),
KwargInfo('workdir', (str, NoneType), default=None,
@@ -503,6 +489,11 @@ def link_whole_validator(values: T.List[T.Union[StaticLibrary, CustomTarget, Cus
KwargInfo('verbose', bool, default=False, since='0.62.0'),
]
+TEST_KWS: T.List[KwargInfo] = TEST_KWS_NO_ARGS + [
+ KwargInfo('args', ContainerTypeInfo(list, (str, File, BuildTarget, CustomTarget, CustomTargetIndex, ExternalProgram)),
+ listify=True, default=[]),
+]
+
# Cannot have a default value because we need to check that rust_crate_type and
# rust_abi are mutually exclusive.
RUST_CRATE_TYPE_KW: KwargInfo[T.Union[str, None]] = KwargInfo(
@@ -710,6 +701,12 @@ def _convert_darwin_versions(val: T.List[T.Union[str, int]]) -> T.Optional[T.Tup
convertor=lambda x: x.lower() if isinstance(x, str) else None,
validator=_validate_win_subsystem,
),
+ KwargInfo(
+ 'android_exe_type',
+ (str, NoneType),
+ validator=in_set_validator({'application', 'executable'}),
+ since='1.8.0'
+ ),
]
# The total list of arguments used by Executable
diff --git a/mesonbuild/interpreterbase/interpreterbase.py b/mesonbuild/interpreterbase/interpreterbase.py
index 525d5d6c54a0..b13bbae1a5a9 100644
--- a/mesonbuild/interpreterbase/interpreterbase.py
+++ b/mesonbuild/interpreterbase/interpreterbase.py
@@ -27,18 +27,20 @@
SubdirDoneRequest,
)
+from .. import mlog
from .decorators import FeatureNew
from .disabler import Disabler, is_disabled
from .helpers import default_resolve_key, flatten, resolve_second_level_holders, stringifyUserArguments
from .operator import MesonOperator
from ._unholder import _unholder
-import os, copy, re, pathlib
+import os, copy, hashlib, re, pathlib
import typing as T
import textwrap
if T.TYPE_CHECKING:
from .baseobjects import InterpreterObjectTypeVar, SubProject, TYPE_kwargs, TYPE_var
+ from ..ast import AstVisitor
from ..interpreter import Interpreter
HolderMapType = T.Dict[
@@ -67,22 +69,27 @@ def __init__(self, op_type: str) -> None:
class InterpreterBase:
- def __init__(self, source_root: str, subdir: str, subproject: 'SubProject'):
+ def __init__(self, source_root: str, subdir: str, subproject: SubProject, subproject_dir: str, env: environment.Environment):
self.source_root = source_root
self.funcs: FunctionType = {}
self.builtin: T.Dict[str, InterpreterObject] = {}
# Holder maps store a mapping from an HoldableObject to a class ObjectHolder
self.holder_map: HolderMapType = {}
self.bound_holder_map: HolderMapType = {}
+ self.build_def_files: mesonlib.OrderedSet[str] = mesonlib.OrderedSet()
+ self.processed_buildfiles: T.Set[str] = set()
self.subdir = subdir
self.root_subdir = subdir
self.subproject = subproject
+ self.subproject_dir = subproject_dir
+ self.environment = env
+ self.coredata = env.get_coredata()
self.variables: T.Dict[str, InterpreterObject] = {}
self.argument_depth = 0
self.current_lineno = -1
# Current node set during a function call. This can be used as location
# when printing a warning message during a method call.
- self.current_node: mparser.BaseNode = None
+ self.current_node = mparser.BaseNode(-1, -1, 'sentinel')
# This is set to `version_string` when this statement is evaluated:
# meson.version().compare_version(version_string)
# If it was part of a if-clause, it is used to temporally override the
@@ -102,7 +109,9 @@ def read_buildfile(self, fname: str, errname: str) -> str:
raise InvalidCode.from_node(f'Build file failed to parse as unicode: {e}', node=node)
def load_root_meson_file(self) -> None:
- mesonfile = os.path.join(self.source_root, self.subdir, environment.build_filename)
+ build_filename = os.path.join(self.subdir, environment.build_filename)
+ self.build_def_files.add(build_filename)
+ mesonfile = os.path.join(self.source_root, build_filename)
if not os.path.isfile(mesonfile):
raise InvalidArguments(f'Missing Meson file in {mesonfile}')
code = self.read_buildfile(mesonfile, mesonfile)
@@ -183,7 +192,6 @@ def evaluate_codeblock(self, node: mparser.CodeBlockNode, start: int = 0, end: T
while i < len(statements):
cur = statements[i]
try:
- self.current_lineno = cur.lineno
self.evaluate_statement(cur)
except Exception as e:
if getattr(e, 'lineno', None) is None:
@@ -656,8 +664,6 @@ def set_variable(self, varname: str, variable: T.Union[TYPE_var, InterpreterObje
raise mesonlib.MesonBugException(f'set_variable in InterpreterBase called with a non InterpreterObject {variable} of type {type(variable).__name__}')
if not isinstance(varname, str):
raise InvalidCode('First argument to set_variable must be a string.')
- if re.match('[_a-zA-Z][_0-9a-zA-Z]*$', varname) is None:
- raise InvalidCode('Invalid variable name: ' + varname)
if varname in self.builtin:
raise InvalidCode(f'Tried to overwrite internal variable "{varname}"')
self.variables[varname] = variable
@@ -671,3 +677,71 @@ def get_variable(self, varname: str) -> InterpreterObject:
def validate_extraction(self, buildtarget: mesonlib.HoldableObject) -> None:
raise InterpreterException('validate_extraction is not implemented in this context (please file a bug)')
+
+ def _load_option_file(self) -> None:
+ from .. import optinterpreter # prevent circular import
+
+ # Load "meson.options" in preference to "meson_options.txt", and warn if
+ # it is used with an old minimum version. If both files exist, the
+ # warning is not raised.
+ option_file = os.path.join(self.source_root, self.subdir, 'meson.options')
+ old_option_file = os.path.join(self.source_root, self.subdir, 'meson_options.txt')
+
+ if os.path.exists(option_file):
+ if os.path.exists(old_option_file):
+ if os.path.samefile(option_file, old_option_file):
+ mlog.debug("Not warning about meson.options with version minimum < 1.1 because meson_options.txt also exists")
+ else:
+ raise mesonlib.MesonException("meson.options and meson_options.txt both exist, but are not the same file.")
+ else:
+ FeatureNew.single_use('meson.options file', '1.1', self.subproject, 'Use meson_options.txt instead')
+ else:
+ option_file = old_option_file
+ if os.path.exists(option_file):
+ with open(option_file, 'rb') as f:
+ # We want a fast hash, not a cryptographically secure one; this is
+ # only used to detect whether the option file has changed.
+ self.coredata.options_files[self.subproject] = (option_file, hashlib.sha1(f.read()).hexdigest())
+ oi = optinterpreter.OptionInterpreter(self.environment.coredata.optstore, self.subproject)
+ oi.process(option_file)
+ self.coredata.optstore.update_project_options(oi.options, self.subproject)
+ self.build_def_files.add(option_file)
+ else:
+ self.coredata.options_files[self.subproject] = None
+
+ def _resolve_subdir(self, rootdir: str, new_subdir: str) -> T.Tuple[str, bool]:
+ subdir = os.path.join(self.subdir, new_subdir)
+ absdir = os.path.join(rootdir, subdir)
+ symlinkless_dir = os.path.realpath(absdir)
+ build_file = os.path.join(symlinkless_dir, environment.build_filename)
+ if build_file in self.processed_buildfiles:
+ return subdir, False
+ self.processed_buildfiles.add(build_file)
+ return subdir, True
+
+ def _evaluate_subdir(self, rootdir: str, subdir: str, visitors: T.Optional[T.Iterable[AstVisitor]] = None) -> bool:
+ buildfilename = os.path.join(subdir, environment.build_filename)
+ self.build_def_files.add(buildfilename)
+
+ absname = os.path.join(rootdir, buildfilename)
+ if not os.path.isfile(absname):
+ return False
+
+ code = self.read_buildfile(absname, buildfilename)
+ try:
+ codeblock = mparser.Parser(code, absname).parse()
+ except mesonlib.MesonException as me:
+ me.file = absname
+ raise me
+ try:
+ prev_subdir = self.subdir
+ self.subdir = subdir
+ if visitors:
+ for visitor in visitors:
+ codeblock.accept(visitor)
+ self.evaluate_codeblock(codeblock)
+ except SubdirDoneRequest:
+ pass
+ finally:
+ self.subdir = prev_subdir
+ return True
diff --git a/mesonbuild/linkers/base.py b/mesonbuild/linkers/base.py
index c8efc9d6d82c..68fdb2ea3a70 100644
--- a/mesonbuild/linkers/base.py
+++ b/mesonbuild/linkers/base.py
@@ -18,6 +18,7 @@ class RSPFileSyntax(enum.Enum):
MSVC = enum.auto()
GCC = enum.auto()
+ TASKING = enum.auto()
class ArLikeLinker:
diff --git a/mesonbuild/linkers/linkers.py b/mesonbuild/linkers/linkers.py
index be241c547d3a..59f60e03a19c 100644
--- a/mesonbuild/linkers/linkers.py
+++ b/mesonbuild/linkers/linkers.py
@@ -1,5 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2012-2022 The Meson development team
+# Copyright © 2023 Intel Corporation
from __future__ import annotations
@@ -14,9 +15,10 @@
from ..arglist import CompilerArgs
if T.TYPE_CHECKING:
- from ..coredata import KeyedOptionDictType
from ..environment import Environment
from ..mesonlib import MachineChoice
+ from ..build import BuildTarget
+ from ..compilers import Compiler
class StaticLinker:
@@ -26,6 +28,12 @@ class StaticLinker:
def __init__(self, exelist: T.List[str]):
self.exelist = exelist
+ def get_id(self) -> str:
+ return self.id
+
+ def get_exe(self) -> str:
+ return self.exelist[0]
+
def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CompilerArgs:
return CompilerArgs(self, args)
@@ -35,7 +43,10 @@ def can_linker_accept_rsp(self) -> bool:
"""
return mesonlib.is_windows()
- def get_base_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_base_link_args(self,
+ target: 'BuildTarget',
+ linker: 'Compiler',
+ env: 'Environment') -> T.List[str]:
"""Like compilers.get_base_link_args, but for the static linker."""
return []
@@ -65,7 +76,7 @@ def thread_link_flags(self, env: 'Environment') -> T.List[str]:
def openmp_flags(self, env: Environment) -> T.List[str]:
return []
- def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_link_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
return []
@classmethod
@@ -146,6 +157,9 @@ def __repr__(self) -> str:
def get_id(self) -> str:
return self.id
+ def get_exe(self) -> str:
+ return self.exelist[0]
+
def get_version_string(self) -> str:
return f'({self.id} {self.version})'
@@ -173,7 +187,10 @@ def get_lib_prefix(self) -> str:
# XXX: is use_ldflags a compiler or a linker attribute?
- def get_option_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_option_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
+ return []
+
+ def get_option_link_args(self, target: 'BuildTarget', env: 'Environment', subproject: T.Optional[str] = None) -> T.List[str]:
return []
def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]:
@@ -200,7 +217,7 @@ def get_optimization_link_args(self, optimization_level: str) -> T.List[str]:
def get_std_shared_lib_args(self) -> T.List[str]:
return []
- def get_std_shared_module_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ def get_std_shared_module_args(self, Target: 'BuildTarget') -> T.List[str]:
return self.get_std_shared_lib_args()
def get_pie_args(self) -> T.List[str]:
@@ -215,7 +232,7 @@ def get_lto_args(self) -> T.List[str]:
def get_thinlto_cache_args(self, path: str) -> T.List[str]:
return []
- def sanitizer_args(self, value: str) -> T.List[str]:
+ def sanitizer_args(self, value: T.List[str]) -> T.List[str]:
return []
def get_asneeded_args(self) -> T.List[str]:
@@ -525,6 +542,24 @@ class MetrowerksStaticLinkerARM(MetrowerksStaticLinker):
class MetrowerksStaticLinkerEmbeddedPowerPC(MetrowerksStaticLinker):
id = 'mwldeppc'
+class TaskingStaticLinker(StaticLinker):
+ id = 'tasking'
+
+ def __init__(self, exelist: T.List[str]):
+ super().__init__(exelist)
+
+ def can_linker_accept_rsp(self) -> bool:
+ return True
+
+ def rsp_file_syntax(self) -> RSPFileSyntax:
+ return RSPFileSyntax.TASKING
+
+ def get_output_args(self, target: str) -> T.List[str]:
+ return ['-n', target]
+
+ def get_linker_always_args(self) -> T.List[str]:
+ return ['-r']
+
def prepare_rpaths(raw_rpaths: T.Tuple[str, ...], build_dir: str, from_dir: str) -> T.List[str]:
# The rpaths we write must be relative if they point to the build dir,
# because otherwise they have different length depending on the build
@@ -573,6 +608,9 @@ def get_std_shared_lib_args(self) -> T.List[str]:
def get_search_args(self, dirname: str) -> T.List[str]:
return ['-L' + dirname]
+ def sanitizer_args(self, value: T.List[str]) -> T.List[str]:
+ return []
+
class GnuLikeDynamicLinkerMixin(DynamicLinkerBase):
@@ -628,10 +666,10 @@ def get_allow_undefined_args(self) -> T.List[str]:
def get_lto_args(self) -> T.List[str]:
return ['-flto']
- def sanitizer_args(self, value: str) -> T.List[str]:
- if value == 'none':
- return []
- return ['-fsanitize=' + value]
+ def sanitizer_args(self, value: T.List[str]) -> T.List[str]:
+ if not value:
+ return value
+ return [f'-fsanitize={",".join(value)}']
def get_coverage_args(self) -> T.List[str]:
return ['--coverage']
@@ -701,8 +739,10 @@ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
# In order to avoid relinking for RPATH removal, the binary needs to contain just
# enough space in the ELF header to hold the final installation RPATH.
paths = ':'.join(all_paths)
- if len(paths) < len(install_rpath):
- padding = 'X' * (len(install_rpath) - len(paths))
+ paths_length = len(paths.encode('utf-8'))
+ install_rpath_length = len(install_rpath.encode('utf-8'))
+ if paths_length < install_rpath_length:
+ padding = 'X' * (install_rpath_length - paths_length)
if not paths:
paths = padding
else:
@@ -716,11 +756,16 @@ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
return (args, rpath_dirs_to_remove)
# Rpaths to use while linking must be absolute. These are not
- # written to the binary. Needed only with GNU ld:
+ # written to the binary. Needed only with GNU ld, and only for
+ # versions before 2.28:
+ # https://sourceware.org/bugzilla/show_bug.cgi?id=20535
# https://sourceware.org/bugzilla/show_bug.cgi?id=16936
# Not needed on Windows or other platforms that don't use RPATH
# https://github.com/mesonbuild/meson/issues/1897
#
+ # In 2.28 and on, $ORIGIN tokens inside of -rpath are respected,
+ # so we do not need to duplicate it in -rpath-link.
+ #
# In addition, this linker option tends to be quite long and some
# compilers have trouble dealing with it. That's why we will include
# one option per folder, like this:
@@ -730,8 +775,9 @@ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
# ...instead of just one single looooong option, like this:
#
# -Wl,-rpath-link,/path/to/folder1:/path/to/folder2:...
- for p in rpath_paths:
- args.extend(self._apply_prefix('-rpath-link,' + os.path.join(build_dir, p)))
+ if self.id in {'ld.bfd', 'ld.gold'} and mesonlib.version_compare(self.version, '<2.28'):
+ for p in rpath_paths:
+ args.extend(self._apply_prefix('-rpath-link,' + os.path.join(build_dir, p)))
return (args, rpath_dirs_to_remove)
@@ -790,10 +836,10 @@ def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
def get_coverage_args(self) -> T.List[str]:
return ['--coverage']
- def sanitizer_args(self, value: str) -> T.List[str]:
- if value == 'none':
- return []
- return ['-fsanitize=' + value]
+ def sanitizer_args(self, value: T.List[str]) -> T.List[str]:
+ if not value:
+ return value
+ return [f'-fsanitize={",".join(value)}']
def no_undefined_args(self) -> T.List[str]:
# We used to emit -undefined,error, but starting with Xcode 15 /
@@ -820,7 +866,7 @@ def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
install_name = ['@rpath/', prefix, shlib_name]
if soversion is not None:
install_name.append('.' + soversion)
- install_name.append('.dylib')
+ install_name.append('.' + suffix)
args = ['-install_name', ''.join(install_name)]
if darwin_versions:
args.extend(['-compatibility_version', darwin_versions[0],
@@ -1350,9 +1396,6 @@ def rsp_file_syntax(self) -> RSPFileSyntax:
def get_pie_args(self) -> T.List[str]:
return []
- def get_pie_args(self) -> T.List[str]:
- return []
-
class MSVCDynamicLinker(VisualStudioLikeLinkerMixin, DynamicLinker):
@@ -1480,8 +1523,10 @@ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
# In order to avoid relinking for RPATH removal, the binary needs to contain just
# enough space in the ELF header to hold the final installation RPATH.
paths = ':'.join(all_paths)
- if len(paths) < len(install_rpath):
- padding = 'X' * (len(install_rpath) - len(paths))
+ paths_length = len(paths.encode('utf-8'))
+ install_rpath_length = len(install_rpath.encode('utf-8'))
+ if paths_length < install_rpath_length:
+ padding = 'X' * (install_rpath_length - paths_length)
if not paths:
paths = padding
else:
@@ -1521,7 +1566,7 @@ def get_archive_name(self, filename: str) -> str:
def get_command_to_archive_shlib(self) -> T.List[str]:
# Archive shared library object and remove the shared library object,
# since it already exists in the archive.
- command = ['ar', '-q', '-v', '$out', '$in', '&&', 'rm', '-f', '$in']
+ command = ['ar', '-r', '-s', '-v', '$out', '$in', '&&', 'rm', '-f', '$in']
return command
def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
@@ -1673,3 +1718,56 @@ class MetrowerksLinkerARM(MetrowerksLinker):
class MetrowerksLinkerEmbeddedPowerPC(MetrowerksLinker):
id = 'mwldeppc'
+
+class TaskingLinker(DynamicLinker):
+ id = 'tasking'
+
+ _OPTIMIZATION_ARGS: T.Dict[str, T.List[str]] = {
+ 'plain': [],
+ '0': ['-O0'],
+ 'g': ['-O1'], # There is no debug-specific level; O1 is recommended by the compiler
+ '1': ['-O1'],
+ '2': ['-O2'],
+ '3': ['-O2'], # There is no 3rd level optimization for the linker
+ 's': ['-Os'],
+ }
+
+ def __init__(self, exelist: T.List[str], for_machine: mesonlib.MachineChoice,
+ *, version: str = 'unknown version'):
+ super().__init__(exelist, for_machine, '', [],
+ version=version)
+
+ def get_accepts_rsp(self) -> bool:
+ return True
+
+ def get_lib_prefix(self) -> str:
+ return ""
+
+ def get_allow_undefined_args(self) -> T.List[str]:
+ return []
+
+ def invoked_by_compiler(self) -> bool:
+ return True
+
+ def get_search_args(self, dirname: str) -> T.List[str]:
+ return self._apply_prefix('-L' + dirname)
+
+ def get_output_args(self, outputname: str) -> T.List[str]:
+ return ['-o', outputname]
+
+ def get_lto_args(self) -> T.List[str]:
+ return ['--mil-link']
+
+ def rsp_file_syntax(self) -> RSPFileSyntax:
+ return RSPFileSyntax.TASKING
+
+ def fatal_warnings(self) -> T.List[str]:
+ """Arguments to make all warnings errors."""
+ return self._apply_prefix('--warnings-as-errors')
+
+ def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+ args = mesonlib.listify(args)
+ l: T.List[str] = []
+ for a in args:
+ l.extend(self._apply_prefix('-Wl--whole-archive=' + a))
+ return l
diff --git a/mesonbuild/machinefile.py b/mesonbuild/machinefile.py
index a3aeae522713..b39a47217ce2 100644
--- a/mesonbuild/machinefile.py
+++ b/mesonbuild/machinefile.py
@@ -12,12 +12,8 @@
from .mesonlib import MesonException
if T.TYPE_CHECKING:
- from typing_extensions import TypeAlias
-
from .coredata import StrOrBytesPath
-
- SectionT: TypeAlias = T.Union[str, int, bool, T.List[str], T.List['SectionT']]
-
+ from .options import ElementaryOptionValues
class CmdLineFileParser(configparser.ConfigParser):
def __init__(self) -> None:
@@ -36,8 +32,8 @@ def optionxform(self, optionstr: str) -> str:
class MachineFileParser():
def __init__(self, filenames: T.List[str], sourcedir: str) -> None:
self.parser = CmdLineFileParser()
- self.constants: T.Dict[str, SectionT] = {'True': True, 'False': False}
- self.sections: T.Dict[str, T.Dict[str, SectionT]] = {}
+ self.constants: T.Dict[str, ElementaryOptionValues] = {'True': True, 'False': False}
+ self.sections: T.Dict[str, T.Dict[str, ElementaryOptionValues]] = {}
for fname in filenames:
try:
@@ -62,9 +58,9 @@ def __init__(self, filenames: T.List[str], sourcedir: str) -> None:
continue
self.sections[s] = self._parse_section(s)
- def _parse_section(self, s: str) -> T.Dict[str, SectionT]:
+ def _parse_section(self, s: str) -> T.Dict[str, ElementaryOptionValues]:
self.scope = self.constants.copy()
- section: T.Dict[str, SectionT] = {}
+ section: T.Dict[str, ElementaryOptionValues] = {}
for entry, value in self.parser.items(s):
if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry:
raise MesonException(f'Malformed variable name {entry!r} in machine file.')
@@ -83,7 +79,7 @@ def _parse_section(self, s: str) -> T.Dict[str, SectionT]:
self.scope[entry] = res
return section
- def _evaluate_statement(self, node: mparser.BaseNode) -> SectionT:
+ def _evaluate_statement(self, node: mparser.BaseNode) -> ElementaryOptionValues:
if isinstance(node, (mparser.StringNode)):
return node.value
elif isinstance(node, mparser.BooleanNode):
@@ -93,7 +89,9 @@ def _evaluate_statement(self, node: mparser.BaseNode) -> SectionT:
elif isinstance(node, mparser.ParenthesizedNode):
return self._evaluate_statement(node.inner)
elif isinstance(node, mparser.ArrayNode):
- return [self._evaluate_statement(arg) for arg in node.args.arguments]
+ a = [self._evaluate_statement(arg) for arg in node.args.arguments]
+ assert all(isinstance(s, str) for s in a), 'for mypy'
+ return T.cast('T.List[str]', a)
elif isinstance(node, mparser.IdNode):
return self.scope[node.value]
elif isinstance(node, mparser.ArithmeticNode):
@@ -109,7 +107,7 @@ def _evaluate_statement(self, node: mparser.BaseNode) -> SectionT:
return os.path.join(l, r)
raise MesonException('Unsupported node type')
-def parse_machine_files(filenames: T.List[str], sourcedir: str) -> T.Dict[str, T.Dict[str, SectionT]]:
+def parse_machine_files(filenames: T.List[str], sourcedir: str) -> T.Dict[str, T.Dict[str, ElementaryOptionValues]]:
parser = MachineFileParser(filenames, sourcedir)
return parser.sections
diff --git a/mesonbuild/mcompile.py b/mesonbuild/mcompile.py
index 2f5708c86521..cfaeb7960b9b 100644
--- a/mesonbuild/mcompile.py
+++ b/mesonbuild/mcompile.py
@@ -355,14 +355,14 @@ def run(options: 'argparse.Namespace') -> int:
b = build.load(options.wd)
cdata = b.environment.coredata
- need_vsenv = T.cast('bool', cdata.get_option(OptionKey('vsenv')))
+ need_vsenv = T.cast('bool', cdata.optstore.get_value_for(OptionKey('vsenv')))
if setup_vsenv(need_vsenv):
mlog.log(mlog.green('INFO:'), 'automatically activated MSVC compiler environment')
cmd: T.List[str] = []
env: T.Optional[T.Dict[str, str]] = None
- backend = cdata.get_option(OptionKey('backend'))
+ backend = cdata.optstore.get_value_for(OptionKey('backend'))
assert isinstance(backend, str)
mlog.log(mlog.green('INFO:'), 'autodetecting backend as', backend)
if backend == 'ninja':
diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py
index 3a6343ba1233..933fa6554fa3 100644
--- a/mesonbuild/mconf.py
+++ b/mesonbuild/mconf.py
@@ -1,6 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2014-2016 The Meson development team
-# Copyright © 2023-2024 Intel Corporation
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
@@ -26,8 +26,6 @@
if T.TYPE_CHECKING:
from typing_extensions import Protocol
- from typing import Any
- from .options import UserOption
import argparse
class CMDOptions(coredata.SharedCMDOptions, Protocol):
@@ -35,6 +33,7 @@ class CMDOptions(coredata.SharedCMDOptions, Protocol):
builddir: str
clearcache: bool
pager: bool
+ unset_opts: T.List[str]
# cannot be TV_Loggable, because non-ansidecorators do direct string concat
LOGLINE = T.Union[str, mlog.AnsiDecorator]
@@ -48,6 +47,8 @@ def add_arguments(parser: 'argparse.ArgumentParser') -> None:
help='Clear cached state (e.g. found dependencies)')
parser.add_argument('--no-pager', action='store_false', dest='pager',
help='Do not redirect output to a pager')
+ parser.add_argument('-U', action='append', dest='unset_opts', default=[],
+ help='Remove a subproject option.')
def stringify(val: T.Any) -> str:
if isinstance(val, bool):
@@ -72,6 +73,7 @@ def __init__(self, build_dir: str):
self.build_dir = os.path.dirname(self.build_dir)
self.build = None
self.max_choices_line_length = 60
+ self.pending_section: T.Optional[str] = None
self.name_col: T.List[LOGLINE] = []
self.value_col: T.List[LOGLINE] = []
self.choices_col: T.List[LOGLINE] = []
@@ -95,7 +97,7 @@ def __init__(self, build_dir: str):
if ophash != conf_options[1]:
oi = OptionInterpreter(self.coredata.optstore, sub)
oi.process(opfile)
- self.coredata.update_project_options(oi.options, sub)
+ self.coredata.optstore.update_project_options(oi.options, sub)
self.coredata.options_files[sub] = (opfile, ophash)
else:
opfile = os.path.join(self.source_dir, 'meson.options')
@@ -104,12 +106,12 @@ def __init__(self, build_dir: str):
if os.path.exists(opfile):
oi = OptionInterpreter(self.coredata.optstore, sub)
oi.process(opfile)
- self.coredata.update_project_options(oi.options, sub)
+ self.coredata.optstore.update_project_options(oi.options, sub)
with open(opfile, 'rb') as f:
ophash = hashlib.sha1(f.read()).hexdigest()
self.coredata.options_files[sub] = (opfile, ophash)
else:
- self.coredata.update_project_options({}, sub)
+ self.coredata.optstore.update_project_options({}, sub)
elif os.path.isfile(os.path.join(self.build_dir, environment.build_filename)):
# Make sure that log entries in other parts of meson don't interfere with the JSON output
with mlog.no_logging():
@@ -124,9 +126,6 @@ def __init__(self, build_dir: str):
def clear_cache(self) -> None:
self.coredata.clear_cache()
- def set_options(self, options: T.Dict[OptionKey, str]) -> bool:
- return self.coredata.set_options(options)
-
def save(self) -> None:
# Do nothing when using introspection
if self.default_values_only:
@@ -148,7 +147,7 @@ def print_aligned(self) -> None:
Each column will have a specific width, and will be line wrapped.
"""
total_width = shutil.get_terminal_size(fallback=(160, 0))[0]
- _col = max(total_width // 5, 20)
+ _col = max(total_width // 5, 24)
last_column = total_width - (3 * _col) - 3
four_column = (_col, _col, _col, last_column if last_column > 1 else _col)
@@ -189,10 +188,11 @@ def wrap_text(text: LOGLINE, width: int) -> mlog.TV_LoggableList:
items = [l[i] if l[i] else ' ' * four_column[i] for i in range(4)]
mlog.log(*items)
- def split_options_per_subproject(self, options: 'T.Union[dict[OptionKey, UserOption[Any]], coredata.KeyedOptionDictType]') -> T.Dict[str, 'coredata.MutableKeyedOptionDictType']:
- result: T.Dict[str, 'coredata.MutableKeyedOptionDictType'] = {}
- for k, o in options.items():
- if k.subproject:
+ def split_options_per_subproject(self, opts: T.Union[options.MutableKeyedOptionDictType, options.OptionStore]
+ ) -> T.Dict[str, options.MutableKeyedOptionDictType]:
+ result: T.Dict[str, options.MutableKeyedOptionDictType] = {}
+ for k, o in opts.items():
+ if k.subproject is not None:
self.all_subprojects.add(k.subproject)
result.setdefault(k.subproject, {})[k] = o
return result
@@ -207,12 +207,15 @@ def _add_line(self, name: LOGLINE, value: LOGLINE, choices: LOGLINE, descr: LOGL
self.choices_col.append(choices)
self.descr_col.append(descr)
- def add_option(self, name: str, descr: str, value: T.Any, choices: T.Any) -> None:
+ def add_option(self, key: OptionKey, descr: str, value: T.Any, choices: T.Any) -> None:
+ self._add_section()
value = stringify(value)
choices = stringify(choices)
- self._add_line(mlog.green(name), mlog.yellow(value), mlog.blue(choices), descr)
+ self._add_line(mlog.green(str(key.evolve(subproject=None))), mlog.yellow(value),
+ mlog.blue(choices), descr)
def add_title(self, title: str) -> None:
+ self._add_section()
newtitle = mlog.cyan(title)
descr = mlog.cyan('Description')
value = mlog.cyan('Default Value' if self.default_values_only else 'Current Value')
@@ -221,26 +224,32 @@ def add_title(self, title: str) -> None:
self._add_line(newtitle, value, choices, descr)
self._add_line('-' * len(newtitle), '-' * len(value), '-' * len(choices), '-' * len(descr))
- def add_section(self, section: str) -> None:
+ def _add_section(self) -> None:
+ if not self.pending_section:
+ return
self.print_margin = 0
self._add_line('', '', '', '')
- self._add_line(mlog.normal_yellow(section + ':'), '', '', '')
+ self._add_line(mlog.normal_yellow(self.pending_section + ':'), '', '', '')
self.print_margin = 2
+ self.pending_section = None
+
+ def add_section(self, section: str) -> None:
+ self.pending_section = section
- def print_options(self, title: str, opts: 'T.Union[dict[OptionKey, UserOption[Any]], coredata.KeyedOptionDictType]') -> None:
+ def print_options(self, title: str, opts: T.Union[options.MutableKeyedOptionDictType, options.OptionStore]) -> None:
if not opts:
return
if title:
self.add_title(title)
- auto = T.cast('options.UserFeatureOption', self.coredata.optstore.get_value_object('auto_features'))
+ #auto = T.cast('options.UserFeatureOption', self.coredata.optstore.get_value_for('auto_features'))
for k, o in sorted(opts.items()):
printable_value = o.printable_value()
- root = k.as_root()
- if o.yielding and k.subproject and root in self.coredata.optstore:
- printable_value = ''
- if isinstance(o, options.UserFeatureOption) and o.is_auto():
- printable_value = auto.printable_value()
- self.add_option(str(root), o.description, printable_value, o.choices)
+ #root = k.as_root()
+ #if o.yielding and k.subproject and root in self.coredata.options:
+ # printable_value = ''
+ #if isinstance(o, options.UserFeatureOption) and o.is_auto():
+ # printable_value = auto.printable_value()
+ self.add_option(k, o.description, printable_value, o.printable_choices())
def print_conf(self, pager: bool) -> None:
if pager:
@@ -263,11 +272,11 @@ def print_default_values_warning() -> None:
test_option_names = {OptionKey('errorlogs'),
OptionKey('stdsplit')}
- dir_options: 'coredata.MutableKeyedOptionDictType' = {}
- test_options: 'coredata.MutableKeyedOptionDictType' = {}
- core_options: 'coredata.MutableKeyedOptionDictType' = {}
- module_options: T.Dict[str, 'coredata.MutableKeyedOptionDictType'] = collections.defaultdict(dict)
- for k, v in self.coredata.optstore.items():
+ dir_options: options.MutableKeyedOptionDictType = {}
+ test_options: options.MutableKeyedOptionDictType = {}
+ core_options: options.MutableKeyedOptionDictType = {}
+ module_options: T.Dict[str, options.MutableKeyedOptionDictType] = collections.defaultdict(dict)
+ for k, v in self.coredata.optstore.options.items():
if k in dir_option_names:
dir_options[k] = v
elif k in test_option_names:
@@ -289,15 +298,15 @@ def print_default_values_warning() -> None:
project_options = self.split_options_per_subproject({k: v for k, v in self.coredata.optstore.items() if self.coredata.optstore.is_project_option(k)})
show_build_options = self.default_values_only or self.build.environment.is_cross_build()
- self.add_section('Main project options')
- self.print_options('Core options', host_core_options[''])
- if show_build_options:
- self.print_options('', build_core_options[''])
+ self.add_section('Global build options')
+ self.print_options('Core options', host_core_options[None])
+ if show_build_options and build_core_options:
+ self.print_options('', build_core_options[None])
self.print_options('Backend options', {k: v for k, v in self.coredata.optstore.items() if self.coredata.optstore.is_backend_option(k)})
self.print_options('Base options', {k: v for k, v in self.coredata.optstore.items() if self.coredata.optstore.is_base_option(k)})
- self.print_options('Compiler options', host_compiler_options.get('', {}))
+ self.print_options('Compiler options', host_compiler_options.get(None, {}))
if show_build_options:
- self.print_options('', build_compiler_options.get('', {}))
+ self.print_options('', build_compiler_options.get(None, {}))
for mod, mod_options in module_options.items():
self.print_options(f'{mod} module options', mod_options)
self.print_options('Directories', dir_options)
@@ -305,8 +314,9 @@ def print_default_values_warning() -> None:
self.print_options('Project options', project_options.get('', {}))
for subproject in sorted(self.all_subprojects):
if subproject == '':
- continue
- self.add_section('Subproject ' + subproject)
+ self.add_section('Main project')
+ else:
+ self.add_section('Subproject ' + subproject)
if subproject in host_core_options:
self.print_options('Core options', host_core_options[subproject])
if subproject in build_core_options and show_build_options:
@@ -315,7 +325,7 @@ def print_default_values_warning() -> None:
self.print_options('Compiler options', host_compiler_options[subproject])
if subproject in build_compiler_options and show_build_options:
self.print_options('', build_compiler_options[subproject])
- if subproject in project_options:
+ if subproject != '' and subproject in project_options:
self.print_options('Project options', project_options[subproject])
self.print_aligned()
@@ -325,6 +335,7 @@ def print_default_values_warning() -> None:
print_default_values_warning()
self.print_nondefault_buildtype_options()
+ self.print_augments()
def print_nondefault_buildtype_options(self) -> None:
mismatching = self.coredata.get_nondefault_buildtype_args()
@@ -335,8 +346,30 @@ def print_nondefault_buildtype_options(self) -> None:
for m in mismatching:
mlog.log(f'{m[0]:21}{m[1]:10}{m[2]:10}')
+ def print_augments(self) -> None:
+ if self.coredata.optstore.augments:
+ mlog.log('\nCurrently set option augments:')
+ for k, v in self.coredata.optstore.augments.items():
+ mlog.log(f'{k:21}{v:10}')
+ else:
+ mlog.log('\nThere are no option augments.')
+
+def has_option_flags(options: CMDOptions) -> bool:
+ if options.cmd_line_options:
+ return True
+ if options.unset_opts:
+ return True
+ return False
+
+def is_print_only(options: CMDOptions) -> bool:
+ if has_option_flags(options):
+ return False
+ if options.clearcache:
+ return False
+ return True
+
def run_impl(options: CMDOptions, builddir: str) -> int:
- print_only = not options.cmd_line_options and not options.clearcache
+ print_only = is_print_only(options)
c = None
try:
c = Conf(builddir)
@@ -347,8 +380,8 @@ def run_impl(options: CMDOptions, builddir: str) -> int:
return 0
save = False
- if options.cmd_line_options:
- save = c.set_options(options.cmd_line_options)
+ if has_option_flags(options):
+ save |= c.coredata.set_from_configure_command(options)
coredata.update_cmd_line_file(builddir, options)
if options.clearcache:
c.clear_cache()
diff --git a/mesonbuild/mdevenv.py b/mesonbuild/mdevenv.py
index 8c6ce2031d45..4962d96c6bf9 100644
--- a/mesonbuild/mdevenv.py
+++ b/mesonbuild/mdevenv.py
@@ -84,9 +84,9 @@ def bash_completion_files(b: build.Build, install_data: 'InstallData') -> T.List
dep = PkgConfigDependency('bash-completion', b.environment,
{'required': False, 'silent': True, 'version': '>=2.10'})
if dep.found():
- prefix = b.environment.coredata.get_option(OptionKey('prefix'))
+ prefix = b.environment.coredata.optstore.get_value_for(OptionKey('prefix'))
assert isinstance(prefix, str), 'for mypy'
- datadir = b.environment.coredata.get_option(OptionKey('datadir'))
+ datadir = b.environment.coredata.optstore.get_value_for(OptionKey('datadir'))
assert isinstance(datadir, str), 'for mypy'
datadir_abs = os.path.join(prefix, datadir)
completionsdir = dep.get_variable(pkgconfig='completionsdir', pkgconfig_define=(('datadir', datadir_abs),))
@@ -164,7 +164,7 @@ def run(options: argparse.Namespace) -> int:
b = build.load(options.builddir)
workdir = options.workdir or options.builddir
- need_vsenv = T.cast('bool', b.environment.coredata.get_option(OptionKey('vsenv')))
+ need_vsenv = T.cast('bool', b.environment.coredata.optstore.get_value_for(OptionKey('vsenv')))
setup_vsenv(need_vsenv) # Call it before get_env to get vsenv vars as well
dump_fmt = options.dump_format if options.dump else None
devenv, varnames = get_env(b, dump_fmt)
diff --git a/mesonbuild/mdist.py b/mesonbuild/mdist.py
index 17329009b40a..0361606a5cf5 100644
--- a/mesonbuild/mdist.py
+++ b/mesonbuild/mdist.py
@@ -1,6 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2017 The Meson development team
-# Copyright © 2023-2024 Intel Corporation
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
@@ -374,7 +374,7 @@ def run(options: argparse.Namespace) -> int:
if not buildfile.is_file():
raise MesonException(f'Directory {options.wd!r} does not seem to be a Meson build directory.')
b = build.load(options.wd)
- need_vsenv = T.cast('bool', b.environment.coredata.get_option(OptionKey('vsenv')))
+ need_vsenv = T.cast('bool', b.environment.coredata.optstore.get_value_for(OptionKey('vsenv')))
setup_vsenv(need_vsenv)
src_root = b.environment.source_dir
bld_root = b.environment.build_dir
diff --git a/mesonbuild/mesonmain.py b/mesonbuild/mesonmain.py
index 2c1ca97a386f..dd265c41b74c 100644
--- a/mesonbuild/mesonmain.py
+++ b/mesonbuild/mesonmain.py
@@ -234,6 +234,25 @@ def set_meson_command(mainfile: str) -> None:
from . import mesonlib
mesonlib.set_meson_command(mainfile)
+def validate_original_args(args):
+ import mesonbuild.options
+ import itertools
+
+ def has_startswith(coll, target):
+ for entry in coll:
+ if entry.startswith(target):
+ return True
+ return False
+ #ds = [x for x in args if x.startswith('-D')]
+ #longs = [x for x in args if x.startswith('--')]
+ for optionkey in itertools.chain(mesonbuild.options.BUILTIN_DIR_OPTIONS, mesonbuild.options.BUILTIN_CORE_OPTIONS):
+ longarg = mesonbuild.options.argparse_name_to_arg(optionkey.name)
+ shortarg = f'-D{optionkey.name}='
+ if has_startswith(args, longarg) and has_startswith(args, shortarg):
+ sys.exit(
+ f'Got argument {optionkey.name} as both {shortarg} and {longarg}. Pick one.')
+
+
def run(original_args: T.List[str], mainfile: str) -> int:
if os.environ.get('MESON_SHOW_DEPRECATIONS'):
# workaround for https://bugs.python.org/issue34624
@@ -281,11 +300,12 @@ def run(original_args: T.List[str], mainfile: str) -> int:
return run_script_command(args[1], args[2:])
set_meson_command(mainfile)
+ validate_original_args(args)
return CommandLineParser().run(args)
def main() -> int:
# Always resolve the command path so Ninja can find it for regen, tests, etc.
- if 'meson.exe' in sys.executable:
+ if getattr(sys, 'frozen', False):
assert os.path.isabs(sys.executable)
launcher = sys.executable
else:
diff --git a/mesonbuild/mformat.py b/mesonbuild/mformat.py
index 119c89351ec4..92729a02cb91 100644
--- a/mesonbuild/mformat.py
+++ b/mesonbuild/mformat.py
@@ -9,6 +9,7 @@
from copy import deepcopy
from dataclasses import dataclass, field, fields, asdict
from pathlib import Path
+import sys
from . import mparser
from .mesonlib import MesonException
@@ -960,17 +961,31 @@ def add_arguments(parser: argparse.ArgumentParser) -> None:
help='meson source files'
)
+def get_meson_format(sources: T.List[Path]) -> T.Optional[Path]:
+ for src_file in sources:
+ for parent in src_file.resolve().parents:
+ target = parent / 'meson.format'
+ if target.is_file():
+ return target
+ return None
+
def run(options: argparse.Namespace) -> int:
if options.output and len(options.sources) != 1:
raise MesonException('--output argument implies having exactly one source file')
if options.recursive and not (options.inplace or options.check_only):
raise MesonException('--recursive argument requires either --inplace or --check-only option')
+ from_stdin = len(options.sources) == 1 and options.sources[0].name == '-' and options.sources[0].parent == Path()
+ if options.recursive and from_stdin:
+ raise MesonException('--recursive argument is not compatible with stdin input')
+ if options.inplace and from_stdin:
+ raise MesonException('--inplace argument is not compatible with stdin input')
+
sources: T.List[Path] = options.sources.copy() or [Path(build_filename)]
+
if not options.configuration:
- default_config_path = sources[0].parent / 'meson.format'
- if default_config_path.exists():
- options.configuration = default_config_path
+ options.configuration = get_meson_format(sources)
+
formatter = Formatter(options.configuration, options.editor_config, options.recursive)
while sources:
@@ -979,7 +994,11 @@ def run(options: argparse.Namespace) -> int:
src_file = src_file / build_filename
try:
- code = src_file.read_text(encoding='utf-8')
+ if from_stdin:
+ src_file = Path('STDIN') # used for error messages and introspection
+ code = sys.stdin.read()
+ else:
+ code = src_file.read_text(encoding='utf-8')
except IOError as e:
raise MesonException(f'Unable to read from {src_file}') from e
@@ -1002,7 +1021,7 @@ def run(options: argparse.Namespace) -> int:
with options.output.open('w', encoding='utf-8', newline=formatter.current_config.newline) as of:
of.write(formatted)
except IOError as e:
- raise MesonException(f'Unable to write to {src_file}') from e
+ raise MesonException(f'Unable to write to {options.output}') from e
else:
print(formatted, end='')
diff --git a/mesonbuild/minit.py b/mesonbuild/minit.py
index 70f184d02ebd..192c75a68739 100644
--- a/mesonbuild/minit.py
+++ b/mesonbuild/minit.py
@@ -193,7 +193,7 @@ def run(options: Arguments) -> int:
raise SystemExit
b = build.load(options.builddir)
- need_vsenv = T.cast('bool', b.environment.coredata.get_option(OptionKey('vsenv')))
+ need_vsenv = T.cast('bool', b.environment.coredata.optstore.get_value_for(OptionKey('vsenv')))
vsenv_active = mesonlib.setup_vsenv(need_vsenv)
if vsenv_active:
mlog.log(mlog.green('INFO:'), 'automatically activated MSVC compiler environment')
diff --git a/mesonbuild/minstall.py b/mesonbuild/minstall.py
index 9921295fda8e..f65087c66194 100644
--- a/mesonbuild/minstall.py
+++ b/mesonbuild/minstall.py
@@ -74,11 +74,11 @@ def add_arguments(parser: argparse.ArgumentParser) -> None:
help='Do not rebuild before installing.')
parser.add_argument('--only-changed', default=False, action='store_true',
help='Only overwrite files that are older than the copied file.')
- parser.add_argument('--quiet', default=False, action='store_true',
+ parser.add_argument('-q', '--quiet', default=False, action='store_true',
help='Do not print every file that was installed.')
parser.add_argument('--destdir', default=None,
help='Sets or overrides DESTDIR environment. (Since 0.57.0)')
- parser.add_argument('--dry-run', '-n', action='store_true',
+ parser.add_argument('-n', '--dry-run', action='store_true',
help='Doesn\'t actually install, but print logs. (Since 0.57.0)')
parser.add_argument('--skip-subprojects', nargs='?', const='*', default='',
help='Do not install files from given subprojects. (Since 0.58.0)')
@@ -139,7 +139,6 @@ def append_to_log(lf: T.TextIO, line: str) -> None:
lf.write('\n')
lf.flush()
-
def set_chown(path: str, user: T.Union[str, int, None] = None,
group: T.Union[str, int, None] = None,
dir_fd: T.Optional[int] = None, follow_symlinks: bool = True) -> None:
@@ -150,10 +149,23 @@ def set_chown(path: str, user: T.Union[str, int, None] = None,
# Not nice, but better than actually rewriting shutil.chown until
# this python bug is fixed: https://bugs.python.org/issue18108
+ # This is running into a problem where this may not match any of signatures
+ # of `shutil.chown`, which (simplified) are:
+ # chown(path: int | AnyPath, user: int | str, group: None = None)
+ # chown(path: int | AnyPath, user: None, group: int | str)
+ # We cannot, through easy coercion of the type system, force it to say:
+ # - user is non null and group is null
+ # - user is null and group is non null
+ # - user is non null and group is non null
+ #
+ # This is checked by the only (current) caller, but let's be sure that the
+ # call we're making to `shutil.chown` is actually valid.
+ assert user is not None or group is not None, 'ensure that calls to chown are valid'
+
if sys.version_info >= (3, 13):
# pylint: disable=unexpected-keyword-arg
# cannot handle sys.version_info, https://github.com/pylint-dev/pylint/issues/9622
- shutil.chown(path, user, group, dir_fd=dir_fd, follow_symlinks=follow_symlinks)
+ shutil.chown(path, user, group, dir_fd=dir_fd, follow_symlinks=follow_symlinks) # type: ignore[call-overload]
else:
real_os_chown = os.chown
@@ -495,6 +507,9 @@ def do_copydir(self, data: InstallData, src_dir: str, dst_dir: str,
abs_src = os.path.join(root, d)
filepart = os.path.relpath(abs_src, start=src_dir)
abs_dst = os.path.join(dst_dir, filepart)
+ if os.path.islink(abs_src):
+ files.append(d)
+ continue
# Remove these so they aren't visited by os.walk at all.
if filepart in exclude_dirs:
dirs.remove(d)
@@ -564,7 +579,12 @@ def do_install(self, datafilename: str) -> None:
if is_windows() or destdir != '' or not os.isatty(sys.stdout.fileno()) or not os.isatty(sys.stderr.fileno()):
# can't elevate to root except in an interactive unix environment *and* when not doing a destdir install
raise
- rootcmd = os.environ.get('MESON_ROOT_CMD') or shutil.which('sudo') or shutil.which('doas')
+ rootcmd = (
+ os.environ.get('MESON_ROOT_CMD')
+ or shutil.which('sudo')
+ or shutil.which('doas')
+ or shutil.which('run0')
+ )
pkexec = shutil.which('pkexec')
if rootcmd is None and pkexec is not None and 'PKEXEC_UID' not in os.environ:
rootcmd = pkexec
@@ -852,9 +872,9 @@ def run(opts: 'ArgumentType') -> int:
sys.exit('Install data not found. Run this command in build directory root.')
if not opts.no_rebuild:
b = build.load(opts.wd)
- need_vsenv = T.cast('bool', b.environment.coredata.get_option(OptionKey('vsenv')))
+ need_vsenv = T.cast('bool', b.environment.coredata.optstore.get_value_for(OptionKey('vsenv')))
setup_vsenv(need_vsenv)
- backend = T.cast('str', b.environment.coredata.get_option(OptionKey('backend')))
+ backend = T.cast('str', b.environment.coredata.optstore.get_value_for(OptionKey('backend')))
if not rebuild_all(opts.wd, backend):
sys.exit(-1)
os.chdir(opts.wd)
diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py
index 810a2b674b40..462ee2fb4673 100644
--- a/mesonbuild/mintro.py
+++ b/mesonbuild/mintro.py
@@ -30,8 +30,6 @@
if T.TYPE_CHECKING:
import argparse
- from typing import Any
- from .options import UserOption
from .interpreter import Interpreter
from .mparser import BaseNode
@@ -166,7 +164,7 @@ def list_install_plan(installdata: backends.InstallData) -> T.Dict[str, T.Dict[s
return plan
def get_target_dir(coredata: cdata.CoreData, subdir: str) -> str:
- if coredata.get_option(OptionKey('layout')) == 'flat':
+ if coredata.optstore.get_value_for(OptionKey('layout')) == 'flat':
return 'meson-out'
else:
return subdir
@@ -211,6 +209,7 @@ def nodes_to_paths(node_list: T.List[BaseNode]) -> T.List[Path]:
'build_by_default': i['build_by_default'],
'target_sources': [{
'language': 'unknown',
+ 'machine': i['machine'],
'compiler': [],
'parameters': [],
'sources': [str(x) for x in sources],
@@ -291,9 +290,9 @@ def list_buildoptions(coredata: cdata.CoreData, subprojects: T.Optional[T.List[s
test_option_names = {OptionKey('errorlogs'),
OptionKey('stdsplit')}
- dir_options: 'cdata.MutableKeyedOptionDictType' = {}
- test_options: 'cdata.MutableKeyedOptionDictType' = {}
- core_options: 'cdata.MutableKeyedOptionDictType' = {}
+ dir_options: options.MutableKeyedOptionDictType = {}
+ test_options: options.MutableKeyedOptionDictType = {}
+ core_options: options.MutableKeyedOptionDictType = {}
for k, v in coredata.optstore.items():
if k in dir_option_names:
dir_options[k] = v
@@ -305,25 +304,26 @@ def list_buildoptions(coredata: cdata.CoreData, subprojects: T.Optional[T.List[s
for s in subprojects:
core_options[k.evolve(subproject=s)] = v
- def add_keys(opts: 'T.Union[dict[OptionKey, UserOption[Any]], cdata.KeyedOptionDictType]', section: str) -> None:
+ def add_keys(opts: T.Union[options.MutableKeyedOptionDictType, options.OptionStore], section: str) -> None:
for key, opt in sorted(opts.items()):
optdict = {'name': str(key), 'value': opt.value, 'section': section,
- 'machine': key.machine.get_lower_case_name() if coredata.is_per_machine_option(key) else 'any'}
+ 'machine': key.machine.get_lower_case_name() if coredata.optstore.is_per_machine_option(key) else 'any'}
if isinstance(opt, options.UserStringOption):
typestr = 'string'
elif isinstance(opt, options.UserBooleanOption):
typestr = 'boolean'
elif isinstance(opt, options.UserComboOption):
- optdict['choices'] = opt.choices
+ optdict['choices'] = opt.printable_choices()
typestr = 'combo'
- elif isinstance(opt, options.UserIntegerOption):
+ elif isinstance(opt, (options.UserIntegerOption, options.UserUmaskOption)):
typestr = 'integer'
- elif isinstance(opt, options.UserArrayOption):
+ elif isinstance(opt, options.UserStringArrayOption):
typestr = 'array'
- if opt.choices:
- optdict['choices'] = opt.choices
+ c = opt.printable_choices()
+ if c:
+ optdict['choices'] = c
else:
- raise RuntimeError("Unknown option type")
+ raise RuntimeError('Unknown option type: ', repr(type(opt)))
optdict['type'] = typestr
optdict['description'] = opt.description
optlist.append(optdict)
@@ -336,7 +336,15 @@ def add_keys(opts: 'T.Union[dict[OptionKey, UserOption[Any]], cdata.KeyedOptionD
'compiler',
)
add_keys(dir_options, 'directory')
- add_keys({k: v for k, v in coredata.optstore.items() if coredata.optstore.is_project_option(k)}, 'user')
+
+ def project_option_key_to_introname(key: OptionKey) -> OptionKey:
+ assert key.subproject is not None
+ if key.subproject == '':
+ return key.evolve(subproject=None)
+ return key
+
+ add_keys({project_option_key_to_introname(k): v
+ for k, v in coredata.optstore.items() if coredata.optstore.is_project_option(k)}, 'user')
add_keys(test_options, 'test')
return optlist
diff --git a/mesonbuild/modules/__init__.py b/mesonbuild/modules/__init__.py
index 86dc8762e65a..67d16661a90f 100644
--- a/mesonbuild/modules/__init__.py
+++ b/mesonbuild/modules/__init__.py
@@ -20,6 +20,7 @@
from ..interpreterbase import TYPE_var, TYPE_kwargs
from ..programs import OverrideProgram
from ..dependencies import Dependency
+ from ..options import ElementaryOptionValues
class ModuleState:
"""Object passed to all module methods.
@@ -38,7 +39,7 @@ def __init__(self, interpreter: 'Interpreter') -> None:
self.subproject = interpreter.subproject
self.subdir = interpreter.subdir
self.root_subdir = interpreter.root_subdir
- self.current_lineno = interpreter.current_lineno
+ self.current_lineno = interpreter.current_node.lineno
self.environment = interpreter.environment
self.project_name = interpreter.build.project_name
self.project_version = interpreter.build.dep_manifest[interpreter.active_projectname].version
@@ -132,8 +133,8 @@ def test(self, args: T.Tuple[str, T.Union[build.Executable, build.Jar, 'External
self._interpreter.func_test(self.current_node, real_args, kwargs)
def get_option(self, name: str, subproject: str = '',
- machine: MachineChoice = MachineChoice.HOST) -> T.Union[T.List[str], str, int, bool]:
- return self.environment.coredata.get_option(OptionKey(name, subproject, machine))
+ machine: MachineChoice = MachineChoice.HOST) -> ElementaryOptionValues:
+ return self.environment.coredata.optstore.get_value_for(OptionKey(name, subproject, machine))
def is_user_defined_option(self, name: str, subproject: str = '',
machine: MachineChoice = MachineChoice.HOST,
diff --git a/mesonbuild/modules/_qt.py b/mesonbuild/modules/_qt.py
index 9f10c58266a5..7d52842f9dd4 100644
--- a/mesonbuild/modules/_qt.py
+++ b/mesonbuild/modules/_qt.py
@@ -8,16 +8,17 @@
import shutil
import typing as T
import xml.etree.ElementTree as ET
+import re
from . import ModuleReturnValue, ExtensionModule
from .. import build
from .. import options
from .. import mlog
from ..dependencies import find_external_dependency, Dependency, ExternalLibrary, InternalDependency
-from ..mesonlib import MesonException, File, version_compare, Popen_safe
+from ..mesonlib import MesonException, File, FileMode, version_compare, Popen_safe
from ..interpreter import extract_required_kwarg
from ..interpreter.type_checking import INSTALL_DIR_KW, INSTALL_KW, NoneType
-from ..interpreterbase import ContainerTypeInfo, FeatureDeprecated, KwargInfo, noPosargs, FeatureNew, typed_kwargs
+from ..interpreterbase import ContainerTypeInfo, FeatureDeprecated, KwargInfo, noPosargs, FeatureNew, typed_kwargs, typed_pos_args
from ..programs import NonExistingExternalProgram
if T.TYPE_CHECKING:
@@ -62,6 +63,7 @@ class MocCompilerKwArgs(TypedDict):
include_directories: T.List[T.Union[str, build.IncludeDirs]]
dependencies: T.List[T.Union[Dependency, ExternalLibrary]]
preserve_paths: bool
+ output_json: bool
class PreprocessKwArgs(TypedDict):
@@ -73,6 +75,7 @@ class PreprocessKwArgs(TypedDict):
moc_extra_arguments: T.List[str]
rcc_extra_arguments: T.List[str]
uic_extra_arguments: T.List[str]
+ moc_output_json: bool
include_directories: T.List[T.Union[str, build.IncludeDirs]]
dependencies: T.List[T.Union[Dependency, ExternalLibrary]]
method: str
@@ -81,7 +84,7 @@ class PreprocessKwArgs(TypedDict):
class HasToolKwArgs(kwargs.ExtractRequired):
method: str
- tools: T.List[Literal['moc', 'uic', 'rcc', 'lrelease']]
+ tools: T.List[Literal['moc', 'uic', 'rcc', 'lrelease', 'qmlcachegen', 'qmltyperegistrar']]
class CompileTranslationsKwArgs(TypedDict):
@@ -93,6 +96,88 @@ class CompileTranslationsKwArgs(TypedDict):
rcc_extra_arguments: T.List[str]
ts_files: T.List[T.Union[str, File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]]
+ class GenQrcKwArgs(TypedDict):
+
+ sources: T.Sequence[File]
+ aliases: T.Sequence[str]
+ prefix: str
+ output: str
+
+ class GenQmldirKwArgs(TypedDict):
+
+ module_name: str
+ module_version: str
+ module_prefix: str
+ qml_sources: T.Sequence[T.Union[FileOrString, build.GeneratedTypes]]
+ qml_singletons: T.Sequence[T.Union[FileOrString, build.GeneratedTypes]]
+ qml_internals: T.Sequence[T.Union[FileOrString, build.GeneratedTypes]]
+ designer_supported: bool
+ imports: T.List[str]
+ optional_imports: T.List[str]
+ default_imports: T.List[str]
+ depends_imports: T.List[str]
+ typeinfo: str
+ output: str
+
+ class GenQmlCachegenKwArgs(TypedDict):
+
+ target_name: str
+ qml_sources: T.Sequence[T.Union[FileOrString, build.GeneratedTypes]]
+ qml_qrc: T.Union[FileOrString, build.GeneratedTypes]
+ extra_args: T.List[str]
+ module_prefix: str
+ method: str
+
+ class GenQmlTypeRegistrarKwArgs(TypedDict):
+
+ target_name: str
+ import_name: str
+ major_version: str
+ minor_version: str
+ namespace: str
+ typeinfo: str
+ generate_qmltype: bool
+ collected_json: T.Optional[T.Union[FileOrString, build.CustomTarget]]
+ extra_args: T.List[str]
+ method: str
+ install: bool
+ install_dir: T.Optional[str]
+
+ class MocJsonCollectKwArgs(TypedDict):
+
+ target_name: str
+ moc_json: T.Sequence[build.GeneratedList]
+ method: str
+
+ class QmlModuleKwArgs(TypedDict):
+
+ version: str
+ qml_sources: T.List[T.Union[FileOrString, build.GeneratedTypes]]
+ qml_singletons: T.List[T.Union[FileOrString, build.GeneratedTypes]]
+ qml_internals: T.List[T.Union[FileOrString, build.GeneratedTypes]]
+ resources_prefix: str
+ moc_headers: T.List[T.Union[FileOrString, build.GeneratedTypes]]
+ include_directories: T.List[T.Union[str, build.IncludeDirs]]
+ imports: T.List[str]
+ optional_imports: T.List[str]
+ default_imports: T.List[str]
+ depends_imports: T.List[str]
+ designer_supported: bool
+ namespace: str
+ typeinfo: str
+ moc_extra_arguments: T.List[str]
+ rcc_extra_arguments: T.List[str]
+ qmlcachegen_extra_arguments: T.List[str]
+ qmltyperegistrar_extra_arguments: T.List[str]
+ generate_qmldir: bool
+ generate_qmltype: bool
+ cachegen: bool
+ dependencies: T.List[T.Union[Dependency, ExternalLibrary]]
+ method: str
+ preserve_paths: bool
+ install_dir: str
+ install: bool
+
def _list_in_set_validator(choices: T.Set[str]) -> T.Callable[[T.List[str]], T.Optional[str]]:
"""Check that the choice given was one of the given set."""
def inner(checklist: T.List[str]) -> T.Optional[str]:
@@ -103,13 +188,22 @@ def inner(checklist: T.List[str]) -> T.Optional[str]:
return inner
+# While Qt recommends module names to be dot-separated alphanumerics, they can
+# technically be any well-formed ECMAScript Identifier Name.
+# As a best effort we just check for illegal characters here.
+# See https://doc.qt.io/qt-6/qtqml-modules-identifiedmodules.html
+_MODULE_NAME_PUNCT = r'- {}<>()[\].:;~%?&,+^=|!\/*"\''
+_MODULE_NAME_RE = f'[^{_MODULE_NAME_PUNCT}0-9][^{_MODULE_NAME_PUNCT}]*(\\.[^{_MODULE_NAME_PUNCT}0-9][^{_MODULE_NAME_PUNCT}]*)*'
+
class QtBaseModule(ExtensionModule):
_tools_detected = False
_rcc_supports_depfiles = False
_moc_supports_depfiles = False
- _set_of_qt_tools = {'moc', 'uic', 'rcc', 'lrelease'}
+ _set_of_qt_tools = {'moc', 'uic', 'rcc', 'lrelease', 'qmlcachegen', 'qmltyperegistrar'}
+ _moc_supports_json = False
+ _support_qml_module = False
- def __init__(self, interpreter: 'Interpreter', qt_version: int = 5):
+ def __init__(self, interpreter: Interpreter, qt_version: int = 5):
ExtensionModule.__init__(self, interpreter)
self.qt_version = qt_version
# It is important that this list does not change order as the order of
@@ -124,9 +218,10 @@ def __init__(self, interpreter: 'Interpreter', qt_version: int = 5):
'compile_resources': self.compile_resources,
'compile_ui': self.compile_ui,
'compile_moc': self.compile_moc,
+ 'qml_module': self.qml_module,
})
- def compilers_detect(self, state: 'ModuleState', qt_dep: 'QtDependencyType') -> None:
+ def compilers_detect(self, state: ModuleState, qt_dep: QtDependencyType) -> None:
"""Detect Qt (4 or 5) moc, uic, rcc in the specified bindir or in PATH"""
wanted = f'== {qt_dep.version}'
@@ -169,7 +264,7 @@ def get_version(p: T.Union[ExternalProgram, build.Executable]) -> str:
if p.found():
self.tools[name] = p
- def _detect_tools(self, state: 'ModuleState', method: str, required: bool = True) -> None:
+ def _detect_tools(self, state: ModuleState, method: str, required: bool = True) -> None:
if self._tools_detected:
return
self._tools_detected = True
@@ -180,8 +275,12 @@ def _detect_tools(self, state: 'ModuleState', method: str, required: bool = True
if qt.found():
# Get all tools and then make sure that they are the right version
self.compilers_detect(state, qt)
+ if version_compare(qt.version, '>=6.2.0'):
+ #5.1x supports qmlcachegen and other tools to some extent, but arguments/build process marginally differs
+ self._support_qml_module = True
if version_compare(qt.version, '>=5.15.0'):
self._moc_supports_depfiles = True
+ self._moc_supports_json = True
else:
mlog.warning('moc dependencies will not work properly until you move to Qt >= 5.15', fatal=False)
if version_compare(qt.version, '>=5.14.0'):
@@ -197,7 +296,7 @@ def _detect_tools(self, state: 'ModuleState', method: str, required: bool = True
self.tools['lrelease'] = NonExistingExternalProgram(name='lrelease' + suffix)
@staticmethod
- def _qrc_nodes(state: 'ModuleState', rcc_file: 'FileOrString') -> T.Tuple[str, T.List[str]]:
+ def _qrc_nodes(state: ModuleState, rcc_file: FileOrString) -> T.Tuple[str, T.List[str]]:
abspath: str
if isinstance(rcc_file, str):
abspath = os.path.join(state.environment.source_dir, state.subdir, rcc_file)
@@ -225,8 +324,8 @@ def _qrc_nodes(state: 'ModuleState', rcc_file: 'FileOrString') -> T.Tuple[str, T
except Exception:
raise MesonException(f'Unable to parse resource file {abspath}')
- def _parse_qrc_deps(self, state: 'ModuleState',
- rcc_file_: T.Union['FileOrString', build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]) -> T.List[File]:
+ def _parse_qrc_deps(self, state: ModuleState,
+ rcc_file_: T.Union[FileOrString, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]) -> T.List[File]:
result: T.List[File] = []
inputs: T.Sequence['FileOrString'] = []
if isinstance(rcc_file_, (str, File)):
@@ -273,7 +372,7 @@ def _parse_qrc_deps(self, state: 'ModuleState',
validator=_list_in_set_validator(_set_of_qt_tools),
since='1.6.0'),
)
- def has_tools(self, state: 'ModuleState', args: T.Tuple, kwargs: 'HasToolKwArgs') -> bool:
+ def has_tools(self, state: ModuleState, args: T.Tuple, kwargs: HasToolKwArgs) -> bool:
method = kwargs.get('method', 'auto')
# We have to cast here because TypedDicts are invariant, even though
# ExtractRequiredKwArgs is a subset of HasToolKwArgs, type checkers
@@ -443,6 +542,7 @@ def _compile_ui_impl(self, state: ModuleState, kwargs: UICompilerKwArgs) -> buil
KwargInfo('include_directories', ContainerTypeInfo(list, (build.IncludeDirs, str)), listify=True, default=[]),
KwargInfo('dependencies', ContainerTypeInfo(list, (Dependency, ExternalLibrary)), listify=True, default=[]),
KwargInfo('preserve_paths', bool, default=False, since='1.4.0'),
+ KwargInfo('output_json', bool, default=False, since='1.7.0'),
)
def compile_moc(self, state: ModuleState, args: T.Tuple, kwargs: MocCompilerKwArgs) -> ModuleReturnValue:
if any(isinstance(s, (build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)) for s in kwargs['headers']):
@@ -475,20 +575,31 @@ def _compile_moc_impl(self, state: ModuleState, kwargs: MocCompilerKwArgs) -> T.
output: T.List[build.GeneratedList] = []
+ do_output_json: bool = kwargs['output_json']
+ if do_output_json and not self._moc_supports_json:
+ raise MesonException(f'moc-qt{self.qt_version} doesn\'t support "output_json" option')
+
# depfile arguments (defaults to .d)
DEPFILE_ARGS: T.List[str] = ['--output-dep-file'] if self._moc_supports_depfiles else []
+ JSON_ARGS: T.List[str] = ['--output-json'] if do_output_json else []
- arguments = kwargs['extra_args'] + DEPFILE_ARGS + inc + compile_args + ['@INPUT@', '-o', '@OUTPUT@']
+ arguments = kwargs['extra_args'] + DEPFILE_ARGS + JSON_ARGS + inc + compile_args + ['@INPUT@', '-o', '@OUTPUT0@']
preserve_path_from = os.path.join(state.source_root, state.subdir) if kwargs['preserve_paths'] else None
if kwargs['headers']:
+ header_gen_output: T.List[str] = ['moc_@BASENAME@.cpp']
+ if do_output_json:
+ header_gen_output.append('moc_@BASENAME@.cpp.json')
moc_gen = build.Generator(
- self.tools['moc'], arguments, ['moc_@BASENAME@.cpp'],
+ self.tools['moc'], arguments, header_gen_output,
depfile='moc_@BASENAME@.cpp.d',
name=f'Qt{self.qt_version} moc header')
output.append(moc_gen.process_files(kwargs['headers'], state, preserve_path_from))
if kwargs['sources']:
+ source_gen_output: T.List[str] = ['@BASENAME@.moc']
+ if do_output_json:
+ source_gen_output.append('@BASENAME@.moc.json')
moc_gen = build.Generator(
- self.tools['moc'], arguments, ['@BASENAME@.moc'],
+ self.tools['moc'], arguments, source_gen_output,
depfile='@BASENAME@.moc.d',
name=f'Qt{self.qt_version} moc source')
output.append(moc_gen.process_files(kwargs['sources'], state, preserve_path_from))
@@ -510,6 +621,7 @@ def _compile_moc_impl(self, state: ModuleState, kwargs: MocCompilerKwArgs) -> T.
KwargInfo('include_directories', ContainerTypeInfo(list, (build.IncludeDirs, str)), listify=True, default=[]),
KwargInfo('dependencies', ContainerTypeInfo(list, (Dependency, ExternalLibrary)), listify=True, default=[]),
KwargInfo('preserve_paths', bool, default=False, since='1.4.0'),
+ KwargInfo('moc_output_json', bool, default=False, since='1.7.0'),
)
def preprocess(self, state: ModuleState, args: T.List[T.Union[str, File]], kwargs: PreprocessKwArgs) -> ModuleReturnValue:
_sources = args[1:]
@@ -551,6 +663,7 @@ def preprocess(self, state: ModuleState, args: T.List[T.Union[str, File]], kwarg
'dependencies': kwargs['dependencies'],
'method': method,
'preserve_paths': kwargs['preserve_paths'],
+ 'output_json': kwargs['moc_output_json']
}
sources.extend(self._compile_moc_impl(state, moc_kwargs))
@@ -568,7 +681,7 @@ def preprocess(self, state: ModuleState, args: T.List[T.Union[str, File]], kwarg
KwargInfo('rcc_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[], since='0.56.0'),
KwargInfo('ts_files', ContainerTypeInfo(list, (str, File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)), listify=True, default=[]),
)
- def compile_translations(self, state: 'ModuleState', args: T.Tuple, kwargs: 'CompileTranslationsKwArgs') -> ModuleReturnValue:
+ def compile_translations(self, state: ModuleState, args: T.Tuple, kwargs: CompileTranslationsKwArgs) -> ModuleReturnValue:
ts_files = kwargs['ts_files']
if any(isinstance(s, (build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)) for s in ts_files):
FeatureNew.single_use('qt.compile_translations: custom_target or generator for "ts_files" keyword argument',
@@ -631,3 +744,429 @@ def compile_translations(self, state: 'ModuleState', args: T.Tuple, kwargs: 'Com
return ModuleReturnValue(results.return_value[0], [results.new_objects, translations])
else:
return ModuleReturnValue(translations, [translations])
+
+ def _source_to_files(self, state: ModuleState, sources: T.Sequence[T.Union[FileOrString, build.GeneratedTypes]]) -> T.List[File]:
+
+ content_files = []
+ for s in sources:
+ if isinstance(s, (build.CustomTarget, build.CustomTargetIndex)):
+ for o in s.get_outputs():
+ content_files.append(File.from_built_file(state.backend.get_target_dir(s), o))
+ elif isinstance(s, File):
+ content_files.append(s)
+ elif isinstance(s, build.GeneratedList):
+ for gen_src in s.get_outputs():
+ content_files.append(File.from_built_file(state.subdir, gen_src))
+ else:
+ content_files.append(File.from_source_file(
+ state.environment.get_source_dir(),
+ state.subdir,
+ s
+ ))
+ return content_files
+
+ def _gen_qrc(self, state: ModuleState, kwargs: GenQrcKwArgs) -> File:
+
+ fileout = File.from_built_file(state.subdir, kwargs['output'])
+ fileout_abs = os.path.join(state.environment.build_dir, fileout.relative_name())
+ if not os.path.isdir(state.environment.build_dir):
+ os.mkdir(state.environment.build_dir)
+
+ rcc = ET.Element('RCC')
+ qresource = ET.SubElement(rcc, 'qresource', prefix='/' + kwargs['prefix'])
+ assert (len(kwargs['sources']) == len(kwargs['aliases']))
+ for source, alias in zip(kwargs['sources'], kwargs['aliases']):
+ filenode = ET.SubElement(qresource, 'file', alias=alias)
+ filenode.text = source.absolute_path(
+ state.environment.get_source_dir(),
+ state.environment.get_build_dir()
+ )
+
+ tree = ET.ElementTree(rcc)
+ tree.write(fileout_abs)
+ return fileout
+
+ def _gen_qmldir(self, state: ModuleState, kwargs: GenQmldirKwArgs) -> File:
+ module_name: str = kwargs['module_name']
+ module_version: str = kwargs['module_version']
+ module_prefix: str = kwargs['module_prefix']
+ designer_supported: bool = kwargs['designer_supported']
+ typeinfo_file: str = kwargs['typeinfo']
+
+ #Foo.Bar/1.0 foo.bar/auto foo.bar
+ import_re = re.compile(r'^(' + _MODULE_NAME_RE + r')(/((\d+(\.\d+)?)|auto))?$')
+
+ fileout = File.from_built_file(state.subdir, kwargs['output'])
+ fileout_abs = os.path.join(state.environment.build_dir, fileout.relative_name())
+ if not os.path.isdir(state.environment.build_dir):
+ os.mkdir(state.environment.build_dir)
+
+ with open(fileout_abs, 'w', encoding='utf-8') as fd:
+
+ def __gen_import(import_type: str, importlist: T.Sequence[str]) -> None:
+ for import_string in importlist:
+ match = import_re.match(import_string)
+ if not match:
+ raise MesonException(f'invalid syntax for qml import {import_string}')
+ module: str = match.group(1)
+ version: str = match.group(4) or ''
+ fd.write(f'{import_type} {module} {version}\n')
+
+ def __gen_declaration(qualifier: str, version: str, importlist: T.Sequence[T.Union[FileOrString, build.GeneratedTypes]]) -> None:
+ importpathlist = self._source_to_files(state, importlist)
+ for s in importpathlist:
+ basename: str = os.path.basename(s.fname)
+ classname: str = basename.rsplit('.', maxsplit=1)[0]
+
+ if not basename.endswith(('.qml', '.js', '.mjs')):
+ raise MesonException(f'unexpected file type declared in qml sources {s}')
+
+ if not classname or '.' in classname or classname[0].islower():
+ raise MesonException(f'{basename} is not a valid QML file name')
+ if version:
+ fd.write(f'{qualifier}{classname} {version} {basename}\n')
+ else:
+ fd.write(f'{qualifier}{classname} {basename}\n')
+
+ fd.write(f'module {module_name}\n')
+ fd.write(f'prefer :/{module_prefix}/\n')
+
+ __gen_import('import', kwargs['imports'])
+ __gen_import('optional import', kwargs['optional_imports'])
+ __gen_import('default import', kwargs['default_imports'])
+ __gen_import('depends', kwargs['depends_imports'])
+ __gen_declaration('', module_version, kwargs['qml_sources'])
+ __gen_declaration('singleton ', module_version, kwargs['qml_singletons'])
+ __gen_declaration('internal ', '', kwargs['qml_internals'])
+
+ if typeinfo_file:
+ fd.write(f'typeinfo {typeinfo_file}\n')
+
+ if designer_supported:
+ fd.write('designersupported\n')
+ return fileout
+
+ def _moc_json_collect(self, state: ModuleState, kwargs: MocJsonCollectKwArgs) -> build.CustomTarget:
+ self._detect_tools(state, kwargs['method'])
+ if not self.tools['moc'].found():
+ raise MesonException('qt.qml_module: ' +
+ self.tools['moc'].name + ' not found')
+
+ target_name: str = kwargs['target_name']
+ moc_json: T.Sequence[build.GeneratedList] = kwargs['moc_json']
+
+ #there may be a better way :-/
+ input_args: T.List[str] = []
+ input_counter = 0
+ for g in moc_json:
+ for fname in g.get_outputs():
+ if fname.endswith('.json'):
+ input_args.append(f'@INPUT{input_counter}@')
+ input_counter += 1
+
+ return build.CustomTarget(
+ f'moc_collect_json_{target_name}',
+ state.subdir,
+ state.subproject,
+ state.environment,
+ self.tools['moc'].get_command() + ['--collect-json', '-o', '@OUTPUT@'] + input_args,
+ moc_json,
+ [f'{target_name}_json_collect.json'],
+ description=f'Collecting json type information for {target_name}',
+ )
+
+ def _gen_qml_cachegen(self, state: ModuleState, kwargs: GenQmlCachegenKwArgs) -> T.List[T.Union[build.CustomTarget, build.GeneratedList]]:
+ self._detect_tools(state, kwargs['method'])
+ if not self.tools['qmlcachegen'].found():
+ raise MesonException('qt.qml_module: ' +
+ self.tools['qmlcachegen'].name + ' not found')
+
+ target_name: str = kwargs['target_name']
+
+ command_args = ['-o', '@OUTPUT@'] + kwargs['extra_args']
+ for qrc in self._source_to_files(state, [kwargs['qml_qrc']]):
+ command_args.extend(['--resource', qrc.absolute_path(
+ state.environment.get_source_dir(),
+ state.environment.get_build_dir()
+ )])
+
+ command_args.append('@INPUT@')
+
+ cache_gen = build.Generator(
+ self.tools['qmlcachegen'],
+ command_args,
+ [f'{target_name}_@BASENAME@.cpp'],
+ name=f'Qml cache generation for {target_name}')
+
+ output: T.List[T.Union[build.CustomTarget, build.GeneratedList]] = []
+ output.append(cache_gen.process_files(kwargs['qml_sources'], state))
+
+ cachegen_inputs: T.List[str] = []
+ qml_sources_paths = self._source_to_files(state, kwargs['qml_sources'])
+ for s in qml_sources_paths:
+ source_basename = os.path.basename(s.fname)
+ ressource_path = os.path.join('/', kwargs['module_prefix'], source_basename)
+ cachegen_inputs.append(ressource_path)
+
+ cacheloader_target = build.CustomTarget(
+ f'cacheloader_{target_name}',
+ state.subdir,
+ state.subproject,
+ state.environment,
+ self.tools['qmlcachegen'].get_command() + ['-o', '@OUTPUT@'] + ['--resource-name', f'qmlcache_{target_name}'] + kwargs['extra_args'] + ['--resource=@INPUT@'] + cachegen_inputs,
+ [kwargs['qml_qrc']],
+ #output name format matters here
+ [f'{target_name}_qmlcache_loader.cpp'],
+ description=f'Qml cache loader for {target_name}',
+ )
+ output.append(cacheloader_target)
+ return output
+
+ def _qml_type_registrar(self, state: ModuleState, kwargs: GenQmlTypeRegistrarKwArgs) -> build.CustomTarget:
+ self._detect_tools(state, kwargs['method'])
+ if not self.tools['qmltyperegistrar'].found():
+ raise MesonException('qt.qml_module: ' +
+ self.tools['qmltyperegistrar'].name + ' not found')
+
+ import_name: str = kwargs['import_name']
+ major_version: str = kwargs['major_version']
+ minor_version: str = kwargs['minor_version']
+ namespace: str = kwargs['namespace']
+ typeinfo: str = kwargs['typeinfo']
+ target_name: str = kwargs['target_name']
+ collected_json: T.Optional[T.Union[FileOrString, build.CustomTarget]] = kwargs['collected_json']
+
+ inputs: T.Sequence[T.Union[FileOrString, build.CustomTarget]] = [collected_json] if collected_json else []
+ outputs: T.List[str] = [f'{target_name}_qmltyperegistrations.cpp']
+ install_dir: T.List[T.Union[str, Literal[False]]] = [False]
+ install_tag: T.List[T.Union[str, None]] = [None]
+
+ cmd = self.tools['qmltyperegistrar'].get_command() + [
+ '--import-name', import_name,
+ '--major-version', major_version,
+ '--minor-version', minor_version,
+ '-o', '@OUTPUT0@',
+ ]
+
+ cmd.extend(kwargs['extra_args'])
+
+ if namespace:
+ cmd.extend(['--namespace', namespace])
+
+ if kwargs['generate_qmltype']:
+ cmd.extend(['--generate-qmltypes', '@OUTPUT1@'])
+ if typeinfo == '':
+ outputs.append(f'{target_name}.qmltypes')
+ else:
+ outputs.append(f'{typeinfo}')
+ install_dir.append(kwargs['install_dir'])
+ install_tag.append('devel')
+
+ if collected_json:
+ cmd.append('@INPUT@')
+
+ return build.CustomTarget(
+ f'typeregistrar_{target_name}',
+ state.subdir,
+ state.subproject,
+ state.environment,
+ cmd,
+ inputs,
+ outputs,
+ install=kwargs['install'],
+ install_dir=install_dir,
+ install_tag=install_tag,
+ description=f'Qml type registration for {target_name}',
+ )
+
+ @FeatureNew('qt.qml_module', '1.7')
+ @typed_pos_args('qt.qml_module', str)
+ @typed_kwargs(
+ 'qt.qml_module',
+ KwargInfo('version', str, default='254.254'),
+ #qml sources
+ KwargInfo('qml_sources', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]),
+ KwargInfo('qml_singletons', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]),
+ KwargInfo('qml_internals', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]),
+ KwargInfo('resources_prefix', str, default='qt/qml'),
+ #qmldir generation
+ KwargInfo('imports', ContainerTypeInfo(list, (str)), default=[]),
+ KwargInfo('optional_imports', ContainerTypeInfo(list, (str)), default=[]),
+ KwargInfo('default_imports', ContainerTypeInfo(list, (str)), default=[]),
+ #match DEPENDENCIES argument from CMake, but dependencies keyword is already taken
+ KwargInfo('depends_imports', ContainerTypeInfo(list, (str)), default=[]),
+ KwargInfo('designer_supported', bool, default=False),
+ #for type registration, same arguments as moc
+        #moc_sources is voluntarily omitted as typeregistrar needs to import a header
+ KwargInfo('moc_headers', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]),
+ KwargInfo('include_directories', ContainerTypeInfo(list, (build.IncludeDirs, str)), listify=True, default=[]),
+ KwargInfo('namespace', str, default=''),
+ KwargInfo('typeinfo', str, default=''),
+
+ KwargInfo('moc_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[]),
+ KwargInfo('rcc_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[]),
+ KwargInfo('qmlcachegen_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[]),
+ KwargInfo('qmltyperegistrar_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[]),
+
+ KwargInfo('generate_qmldir', bool, default=True),
+ KwargInfo('generate_qmltype', bool, default=True),
+ KwargInfo('cachegen', bool, default=True),
+
+ KwargInfo('dependencies', ContainerTypeInfo(list, (Dependency, ExternalLibrary)), listify=True, default=[]),
+ INSTALL_DIR_KW,
+ INSTALL_KW,
+ KwargInfo('method', str, default='auto'),
+ KwargInfo('preserve_paths', bool, default=False),
+ )
+ def qml_module(self, state: ModuleState, args: T.Tuple[str], kwargs: QmlModuleKwArgs) -> ModuleReturnValue:
+
+ self._detect_tools(state, kwargs['method'])
+ if not self._support_qml_module:
+ raise MesonException('qt.qml_module is not suppported for this version of Qt')
+
+ #Major.Minor(.Patch)
+ version_re = re.compile(r'^(\d+)\.(\d+)(\.(\d+))?$')
+ module_name_re = re.compile(_MODULE_NAME_RE)
+
+ output: T.List[T.Union[build.CustomTarget, build.GeneratedList]] = []
+
+ module_name: str = args[0]
+ if not module_name_re.fullmatch(module_name):
+ raise MesonException(f'qml module URI should be in the form Foo.Bar.xxx, got {module_name}')
+
+ module_version: str = kwargs['version']
+ module_version_match = version_re.match(module_version)
+ if not module_version_match:
+ raise MesonException(f'qml module version should be in the form Major.Minor, got {module_version}')
+ module_version_major: str = module_version_match.group(1)
+ module_version_minor: str = module_version_match.group(2)
+ #qt ignores .patch version
+ module_version_short = f'{module_version_major}.{module_version_minor}'
+
+ module_prefix_list: T.List[str] = module_name.split('.')
+ module_prefix: str = os.path.join(*module_prefix_list)
+ module_prefix_full: str = os.path.join(*(kwargs['resources_prefix'].split('/') + module_prefix_list))
+
+ #same format as the one derived from qmltyperegistrar
+ target_name = re.sub(r'[^A-Za-z0-9]', '_', module_name)
+
+ qrc_resouces: T.List[T.Union[FileOrString, build.GeneratedTypes]] = []
+ all_qml: T.Sequence[T.Union[FileOrString, build.GeneratedTypes]] = kwargs['qml_sources'] + kwargs['qml_singletons'] + kwargs['qml_internals']
+ all_qml_files: T.List[File] = self._source_to_files(state, all_qml)
+ all_qml_basename: T.List[str] = [os.path.basename(p.fname) for p in all_qml_files]
+
+ install_dir: str = kwargs['install_dir'] or 'qml'
+ module_install_dir: str = os.path.join(install_dir, module_prefix)
+
+ if len(all_qml) != 0:
+ qml_qrc_kwargs: GenQrcKwArgs = {
+ 'output': f'{target_name}_qml.qrc',
+ 'sources': all_qml_files,
+ 'aliases': all_qml_basename,
+ 'prefix': module_prefix_full,
+ }
+ qml_qrc = self._gen_qrc(state, qml_qrc_kwargs)
+
+ if not kwargs['cachegen']:
+ qrc_resouces.append(qml_qrc)
+ else:
+ cachegen_kwargs: GenQmlCachegenKwArgs = {
+ 'target_name': target_name,
+ 'qml_qrc': qml_qrc,
+ 'qml_sources': all_qml,
+ 'module_prefix': module_prefix_full,
+ 'extra_args': kwargs['qmlcachegen_extra_arguments'],
+ 'method': kwargs['method'],
+ }
+ output.extend(self._gen_qml_cachegen(state, cachegen_kwargs))
+
+ #copy QML files for Qt tools
+ if kwargs['install']:
+ self.interpreter.install_data_impl(all_qml_files, module_install_dir,
+ FileMode(), all_qml_basename, 'devel')
+
+ collected_json: T.Optional[T.Union[FileOrString, build.CustomTarget]] = None
+ if kwargs['moc_headers']:
+ compile_moc_kwargs: MocCompilerKwArgs = {
+ 'sources': [],
+ 'headers': kwargs['moc_headers'],
+ 'extra_args': kwargs['moc_extra_arguments'],
+ 'method': kwargs['method'],
+ 'include_directories': kwargs['include_directories'],
+ 'dependencies': kwargs['dependencies'],
+ 'preserve_paths': kwargs['preserve_paths'],
+ 'output_json': True,
+ }
+ moc_output = self._compile_moc_impl(state, compile_moc_kwargs)
+ output.extend(moc_output)
+
+ moc_collect_json_kwargs: MocJsonCollectKwArgs = {
+ 'target_name': target_name,
+ 'moc_json': moc_output,
+ 'method': kwargs['method'],
+ }
+ collected_json = self._moc_json_collect(state, moc_collect_json_kwargs)
+ output.append(collected_json)
+
+ typeinfo_file: str = ''
+        #cmake NO_GENERATE_QMLTYPE disables the whole type registration, not just the .qmltype generation
+ if kwargs['generate_qmltype']:
+ qmltyperegistrar_kwargs: GenQmlTypeRegistrarKwArgs = {
+ 'target_name': target_name,
+ 'import_name': module_name,
+ 'major_version': module_version_major,
+ 'minor_version': module_version_minor,
+ 'collected_json': collected_json,
+ 'namespace': kwargs['namespace'],
+ 'generate_qmltype': True,
+ 'extra_args': kwargs['qmltyperegistrar_extra_arguments'],
+ 'typeinfo': kwargs['typeinfo'],
+ 'method': kwargs['method'],
+ 'install': kwargs['install'],
+ 'install_dir': module_install_dir,
+ }
+ type_registrar_output = self._qml_type_registrar(state, qmltyperegistrar_kwargs)
+ output.append(type_registrar_output)
+ if len(type_registrar_output.get_outputs()) == 2:
+ typeinfo_file = type_registrar_output.get_outputs()[1]
+
+ if kwargs['generate_qmldir']:
+ qmldir_kwargs: GenQmldirKwArgs = {
+ 'output': f'{target_name}_qmldir',
+ 'module_name': module_name,
+ 'module_version': module_version_short,
+ 'qml_sources': kwargs['qml_sources'],
+ 'qml_singletons': kwargs['qml_singletons'],
+ 'qml_internals': kwargs['qml_internals'],
+ 'imports': kwargs['imports'],
+ 'optional_imports': kwargs['optional_imports'],
+ 'default_imports': kwargs['default_imports'],
+ 'depends_imports': kwargs['depends_imports'],
+ 'designer_supported': kwargs['designer_supported'],
+ 'typeinfo': typeinfo_file,
+ 'module_prefix': module_prefix_full,
+ }
+ qmldir_file: File = self._gen_qmldir(state, qmldir_kwargs)
+
+ qmldir_qrc_kwargs: GenQrcKwArgs = {
+ 'output': f'{target_name}_qmldir.qrc',
+ 'sources': self._source_to_files(state, [qmldir_file]),
+ 'aliases': ['qmldir'],
+ 'prefix': module_prefix_full,
+ }
+ qrc_resouces.append(self._gen_qrc(state, qmldir_qrc_kwargs))
+
+ if kwargs['install']:
+ self.interpreter.install_data_impl([qmldir_file], module_install_dir,
+ FileMode(), ['qmldir'], 'devel')
+
+ if qrc_resouces:
+ compile_resource_kwargs: ResourceCompilerKwArgs = {
+ 'name': target_name,
+ 'sources': qrc_resouces,
+ 'extra_args': kwargs['rcc_extra_arguments'],
+ 'method': kwargs['method'],
+ }
+ output.extend(self._compile_resources_impl(state, compile_resource_kwargs))
+
+ return ModuleReturnValue(output, [output])
diff --git a/mesonbuild/modules/cmake.py b/mesonbuild/modules/cmake.py
index 2168aaa516c0..f12cc51a623c 100644
--- a/mesonbuild/modules/cmake.py
+++ b/mesonbuild/modules/cmake.py
@@ -154,15 +154,20 @@ def dependency(self, state: ModuleState, args: T.Tuple[str], kwargs: T.Dict[str,
@noKwargs
@typed_pos_args('cmake.subproject.include_directories', str)
- def include_directories(self, state: ModuleState, args: T.Tuple[str], kwargs: TYPE_kwargs) -> build.IncludeDirs:
+ def include_directories(self, state: ModuleState, args: T.Tuple[str], kwargs: TYPE_kwargs) -> T.List[build.IncludeDirs]:
info = self._args_to_info(args[0])
- return self.get_variable(state, [info['inc']], kwargs)
+ inc = self.get_variable(state, [info['inc']], kwargs)
+ assert isinstance(inc, list), 'for mypy'
+ assert isinstance(inc[0], build.IncludeDirs), 'for mypy'
+ return inc
@noKwargs
@typed_pos_args('cmake.subproject.target', str)
def target(self, state: ModuleState, args: T.Tuple[str], kwargs: TYPE_kwargs) -> build.Target:
info = self._args_to_info(args[0])
- return self.get_variable(state, [info['tgt']], kwargs)
+ tgt = self.get_variable(state, [info['tgt']], kwargs)
+ assert isinstance(tgt, build.Target), 'for mypy'
+ return tgt
@noKwargs
@typed_pos_args('cmake.subproject.target_type', str)
@@ -305,7 +310,9 @@ def write_basic_package_version_file(self, state: ModuleState, args: TYPE_var, k
pkgroot = pkgroot_name = kwargs['install_dir']
if pkgroot is None:
- pkgroot = os.path.join(state.environment.coredata.get_option(OptionKey('libdir')), 'cmake', name)
+ libdir = state.environment.coredata.optstore.get_value_for(OptionKey('libdir'))
+ assert isinstance(libdir, str), 'for mypy'
+ pkgroot = os.path.join(libdir, 'cmake', name)
pkgroot_name = os.path.join('{libdir}', 'cmake', name)
template_file = os.path.join(self.cmake_root, 'Modules', f'BasicConfigVersion-{compatibility}.cmake.in')
@@ -376,14 +383,17 @@ def configure_package_config_file(self, state: ModuleState, args: TYPE_var, kwar
install_dir = kwargs['install_dir']
if install_dir is None:
- install_dir = os.path.join(state.environment.coredata.get_option(OptionKey('libdir')), 'cmake', name)
+ libdir = state.environment.coredata.optstore.get_value_for(OptionKey('libdir'))
+ assert isinstance(libdir, str), 'for mypy'
+ install_dir = os.path.join(libdir, 'cmake', name)
conf = kwargs['configuration']
if isinstance(conf, dict):
FeatureNew.single_use('cmake.configure_package_config_file dict as configuration', '0.62.0', state.subproject, location=state.current_node)
conf = build.ConfigurationData(conf)
- prefix = state.environment.coredata.get_option(OptionKey('prefix'))
+ prefix = state.environment.coredata.optstore.get_value_for(OptionKey('prefix'))
+ assert isinstance(prefix, str), 'for mypy'
abs_install_dir = install_dir
if not os.path.isabs(abs_install_dir):
abs_install_dir = os.path.join(prefix, install_dir)
@@ -429,7 +439,7 @@ def subproject(self, state: ModuleState, args: T.Tuple[str], kwargs_: Subproject
'required': kwargs_['required'],
'options': kwargs_['options'],
'cmake_options': kwargs_['cmake_options'],
- 'default_options': {},
+ 'default_options': [],
'version': [],
}
subp = self.interpreter.do_subproject(dirname, kw, force_method='cmake')
@@ -443,5 +453,5 @@ def subproject(self, state: ModuleState, args: T.Tuple[str], kwargs_: Subproject
def subproject_options(self, state: ModuleState, args: TYPE_var, kwargs: TYPE_kwargs) -> CMakeSubprojectOptions:
return CMakeSubprojectOptions()
-def initialize(*args: T.Any, **kwargs: T.Any) -> CmakeModule:
- return CmakeModule(*args, **kwargs)
+def initialize(interp: Interpreter) -> CmakeModule:
+ return CmakeModule(interp)
diff --git a/mesonbuild/modules/external_project.py b/mesonbuild/modules/external_project.py
index fb82a384d919..339d0003fd4a 100644
--- a/mesonbuild/modules/external_project.py
+++ b/mesonbuild/modules/external_project.py
@@ -19,7 +19,7 @@
from ..interpreter.type_checking import ENV_KW, DEPENDS_KW
from ..interpreterbase.decorators import ContainerTypeInfo, KwargInfo, typed_kwargs, typed_pos_args
from ..mesonlib import (EnvironmentException, MesonException, Popen_safe, MachineChoice,
- get_variable_regex, do_replacement, join_args)
+ get_variable_regex, do_replacement, join_args, relpath)
from ..options import OptionKey
if T.TYPE_CHECKING:
@@ -75,13 +75,16 @@ def __init__(self,
self.src_dir = Path(self.env.get_source_dir(), self.subdir)
self.build_dir = Path(self.env.get_build_dir(), self.subdir, 'build')
self.install_dir = Path(self.env.get_build_dir(), self.subdir, 'dist')
- _p = self.env.coredata.get_option(OptionKey('prefix'))
+ _p = self.env.coredata.optstore.get_value_for(OptionKey('prefix'))
assert isinstance(_p, str), 'for mypy'
self.prefix = Path(_p)
- _l = self.env.coredata.get_option(OptionKey('libdir'))
+ _l = self.env.coredata.optstore.get_value_for(OptionKey('libdir'))
assert isinstance(_l, str), 'for mypy'
self.libdir = Path(_l)
- _i = self.env.coredata.get_option(OptionKey('includedir'))
+ _l = self.env.coredata.optstore.get_value_for(OptionKey('bindir'))
+ assert isinstance(_l, str), 'for mypy'
+ self.bindir = Path(_l)
+ _i = self.env.coredata.optstore.get_value_for(OptionKey('includedir'))
assert isinstance(_i, str), 'for mypy'
self.includedir = Path(_i)
self.name = self.src_dir.name
@@ -90,10 +93,10 @@ def __init__(self,
# will install files into "c:/bar/c:/foo" which is an invalid path.
# Work around that issue by removing the drive from prefix.
if self.prefix.drive:
- self.prefix = self.prefix.relative_to(self.prefix.drive)
+ self.prefix = Path(relpath(self.prefix, self.prefix.drive))
# self.prefix is an absolute path, so we cannot append it to another path.
- self.rel_prefix = self.prefix.relative_to(self.prefix.root)
+ self.rel_prefix = Path(relpath(self.prefix, self.prefix.root))
self._configure(state)
@@ -118,6 +121,7 @@ def _configure(self, state: 'ModuleState') -> None:
d = [('PREFIX', '--prefix=@PREFIX@', self.prefix.as_posix()),
('LIBDIR', '--libdir=@PREFIX@/@LIBDIR@', self.libdir.as_posix()),
+ ('BINDIR', '--bindir=@PREFIX@/@BINDIR@', self.bindir.as_posix()),
('INCLUDEDIR', None, self.includedir.as_posix()),
]
self._validate_configure_options(d, state)
@@ -278,6 +282,7 @@ class ExternalProjectModule(ExtensionModule):
def __init__(self, interpreter: 'Interpreter'):
super().__init__(interpreter)
+ self.devenv: T.Optional[EnvironmentVariables] = None
self.methods.update({'add_project': self.add_project,
})
@@ -299,8 +304,19 @@ def add_project(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'AddProj
kwargs['env'],
kwargs['verbose'],
kwargs['depends'])
+ abs_libdir = Path(project.install_dir, project.rel_prefix, project.libdir).as_posix()
+ abs_bindir = Path(project.install_dir, project.rel_prefix, project.bindir).as_posix()
+ env = state.environment.get_env_for_paths({abs_libdir}, {abs_bindir})
+ if self.devenv is None:
+ self.devenv = env
+ else:
+ self.devenv.merge(env)
return ModuleReturnValue(project, project.targets)
+ def postconf_hook(self, b: build.Build) -> None:
+ if self.devenv is not None:
+ b.devenv.append(self.devenv)
+
def initialize(interp: 'Interpreter') -> ExternalProjectModule:
return ExternalProjectModule(interp)
diff --git a/mesonbuild/modules/features/feature.py b/mesonbuild/modules/features/feature.py
index 7e0f621e543f..1bc603773301 100644
--- a/mesonbuild/modules/features/feature.py
+++ b/mesonbuild/modules/features/feature.py
@@ -3,7 +3,7 @@
import re
from typing import (
Dict, Set, Tuple, List, Callable, Optional,
- Union, Any, Iterable, cast, TYPE_CHECKING
+ Union, Any, Iterable, TYPE_CHECKING
)
from dataclasses import dataclass, field
from ...mesonlib import File, MesonException
@@ -18,7 +18,6 @@
from typing import TypedDict
from typing_extensions import NotRequired
from ...interpreterbase import TYPE_var, TYPE_kwargs
- from ...compilers import Compiler
from .. import ModuleState
@dataclass(unsafe_hash=True, order=True)
@@ -80,7 +79,7 @@ def __init__(self, func_name: str, opt_name: str, default: Any = None):
)
@staticmethod
- def convert(func_name:str, opt_name: str, values: 'IMPLIED_ATTR',
+ def convert(func_name: str, opt_name: str, values: 'IMPLIED_ATTR',
) -> Union[None, List[ConflictAttr]]:
if values is None:
return None
@@ -131,6 +130,7 @@ def convert(func_name:str, opt_name: str, values: 'IMPLIED_ATTR',
Union[str, Dict[str, str]]
]
]
+
class FeatureKwArgs(TypedDict):
#implies: Optional[List['FeatureObject']]
implies: NotRequired[List[Any]]
@@ -163,7 +163,8 @@ def __init__(self, state: 'ModuleState',
super().__init__()
@typed_pos_args('features.new', str, int)
- @typed_kwargs('features.new',
+ @typed_kwargs(
+ 'features.new',
KwargInfo(
'implies',
(FeatureObject, ContainerTypeInfo(list, FeatureObject)),
@@ -209,7 +210,8 @@ def init_attrs(state: 'ModuleState',
def update_method(self, state: 'ModuleState', args: List['TYPE_var'],
kwargs: 'TYPE_kwargs') -> 'FeatureObject':
@noPosargs
- @typed_kwargs('features.FeatureObject.update',
+ @typed_kwargs(
+ 'features.FeatureObject.update',
KwargInfo('name', (NoneType, str)),
KwargInfo('interest', (NoneType, int)),
KwargInfo(
@@ -306,7 +308,7 @@ def sort_cb(k: Union[FeatureObject, Iterable[FeatureObject]]) -> int:
# FIXME: that's not a safe way to increase the rank for
# multi features this why this function isn't considerd
# accurate.
- rank += len(prevalent_features) -1
+ rank += len(prevalent_features) - 1
return rank
return sorted(features, reverse=reverse, key=sort_cb)
diff --git a/mesonbuild/modules/features/module.py b/mesonbuild/modules/features/module.py
index a6f357b3f65b..c413c15e81b0 100644
--- a/mesonbuild/modules/features/module.py
+++ b/mesonbuild/modules/features/module.py
@@ -20,7 +20,6 @@
from typing import TypedDict
from ...interpreterbase import TYPE_var, TYPE_kwargs
from .. import ModuleState
- from .feature import FeatureKwArgs
class TestKwArgs(TypedDict):
compiler: Optional[Compiler]
@@ -109,6 +108,7 @@ def add_target(self, features: Union[FeatureObject, List[FeatureObject]],
class Module(NewExtensionModule):
INFO = ModuleInfo('features', '0.1.0')
+
def __init__(self) -> None:
super().__init__()
self.methods.update({
@@ -142,7 +142,8 @@ def _set_cache(self, state: 'ModuleState', key: str,
self._cache_dict(state)[key] = val
@typed_pos_args('features.test', varargs=FeatureObject, min_varargs=1)
- @typed_kwargs('features.test',
+ @typed_kwargs(
+ 'features.test',
KwargInfo('compiler', (NoneType, Compiler)),
KwargInfo('anyfet', bool, default = False),
KwargInfo('cached', bool, default = True),
@@ -255,7 +256,7 @@ def test_any(self, state: 'ModuleState', features: Set[FeatureObject],
features_any = set()
for fet in all_features:
_, test_any_result = self.cached_test(
- state, features={fet,},
+ state, features={fet, },
compiler=compiler,
cached=cached,
anyfet=False,
@@ -293,7 +294,7 @@ def test(self, state: 'ModuleState', features: Set[FeatureObject],
# Set the highest interested feature
prevalent_features = sorted(features)[-1:]
- prevalent_names = [fet.name for fet in prevalent_features]
+ prevalent_names = [fet.name for fet in prevalent_features]
# prepare the result dict
test_result: 'TestResultKwArgs' = {
'target_name': '__'.join(prevalent_names),
@@ -307,6 +308,7 @@ def test(self, state: 'ModuleState', features: Set[FeatureObject],
'is_disabled': False,
'fail_reason': '',
}
+
def fail_result(fail_reason: str, is_disabled: bool = False
) -> 'TestResultKwArgs':
test_result.update({
@@ -337,7 +339,7 @@ def fail_result(fail_reason: str, is_disabled: bool = False
predecessor_features = implied_features.difference(_caller)
for fet in sorted(predecessor_features):
_, pred_result = self.cached_test(
- state, features={fet,},
+ state, features={fet, },
compiler=compiler,
cached=cached,
anyfet=False,
@@ -431,7 +433,8 @@ def fail_result(fail_reason: str, is_disabled: bool = False
build.GeneratedList, build.StructuredSources, build.ExtractedObjects,
build.BuildTarget
))
- @typed_kwargs('features.multi_targets',
+ @typed_kwargs(
+ 'features.multi_targets',
KwargInfo(
'dispatch', (
ContainerTypeInfo(list, (FeatureObject, list)),
@@ -451,8 +454,8 @@ def fail_result(fail_reason: str, is_disabled: bool = False
allow_unknown=True
)
def multi_targets_method(self, state: 'ModuleState',
- args: Tuple[str], kwargs: 'TYPE_kwargs'
- ) -> TargetsObject:
+ args: Tuple[str], kwargs: 'TYPE_kwargs'
+ ) -> TargetsObject:
config_name = args[0]
sources = args[1] # type: ignore
dispatch: List[Union[FeatureObject, List[FeatureObject]]] = (
@@ -467,7 +470,7 @@ def multi_targets_method(self, state: 'ModuleState',
if not compiler:
compiler = get_compiler(state)
- baseline_features : Set[FeatureObject] = set()
+ baseline_features: Set[FeatureObject] = set()
has_baseline = baseline is not None
if has_baseline:
baseline_features = FeatureObject.get_implicit_combine_multi(baseline)
@@ -488,7 +491,7 @@ def multi_targets_method(self, state: 'ModuleState',
]] = []
for d in dispatch:
if isinstance(d, FeatureObject):
- target = {d,}
+ target = {d, }
is_base_part = d in baseline_features
else:
target = set(d)
@@ -647,7 +650,7 @@ def gen_config(self, state: 'ModuleState', config_name: str,
c_detect = '1'
dispatch_calls.append(
f'{prefix}_MTARGETS_EXPAND('
- f'EXEC_CB({c_detect}, {test["target_name"]}, __VA_ARGS__)'
+ f'EXEC_CB({c_detect}, {test["target_name"]}, __VA_ARGS__)'
')'
)
@@ -683,7 +686,8 @@ def gen_config(self, state: 'ModuleState', config_name: str,
return config_path
@typed_pos_args('features.sort', varargs=FeatureObject, min_varargs=1)
- @typed_kwargs('features.sort',
+ @typed_kwargs(
+ 'features.sort',
KwargInfo('reverse', bool, default = False),
)
def sort_method(self, state: 'ModuleState',
diff --git a/mesonbuild/modules/features/utils.py b/mesonbuild/modules/features/utils.py
index 88eb19d82d92..21136e3cdc98 100644
--- a/mesonbuild/modules/features/utils.py
+++ b/mesonbuild/modules/features/utils.py
@@ -33,7 +33,6 @@ def test_code(state: 'ModuleState', compiler: 'Compiler',
def generate_hash(*args: Any) -> str:
hasher = hashlib.sha1()
- test: List[bytes] = []
for a in args:
hasher.update(bytes(str(a), encoding='utf-8'))
return hasher.hexdigest()
diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py
index e0c1214d0851..331cc37c7838 100644
--- a/mesonbuild/modules/gnome.py
+++ b/mesonbuild/modules/gnome.py
@@ -1,5 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2015-2016 The Meson development team
+# Copyright © 2023-2024 Intel Corporation
'''This module provides helper functions for Gnome/GLib related
functionality such as gobject-introspection, gresources and gtk-doc'''
@@ -82,6 +83,7 @@ class GenerateGir(TypedDict):
build_by_default: bool
dependencies: T.List[Dependency]
+ doc_format: T.Optional[str]
export_packages: T.List[str]
extra_args: T.List[str]
fatal_warnings: bool
@@ -520,7 +522,7 @@ def compile_resources(self, state: 'ModuleState', args: T.Tuple[str, 'FileOrStri
if gresource: # Only one target for .gresource files
return ModuleReturnValue(target_c, [target_c])
- install_dir = kwargs['install_dir'] or state.environment.coredata.get_option(OptionKey('includedir'))
+ install_dir = kwargs['install_dir'] or state.environment.coredata.optstore.get_value_for(OptionKey('includedir'))
assert isinstance(install_dir, str), 'for mypy'
target_h = GResourceHeaderTarget(
f'{target_name}_h',
@@ -702,14 +704,14 @@ def _get_dependencies_flags_raw(
lib_dir = os.path.dirname(flag)
external_ldflags.update([f'-L{lib_dir}'])
if include_rpath:
- external_ldflags.update([f'-Wl,-rpath {lib_dir}'])
+ external_ldflags.update([f'-Wl,-rpath,{lib_dir}'])
libname = os.path.basename(flag)
if libname.startswith("lib"):
libname = libname[3:]
libname = libname.split(".so")[0]
flag = f"-l{libname}"
# FIXME: Hack to avoid passing some compiler options in
- if flag.startswith("-W"):
+ if flag.startswith("-W") and not flag.startswith('-Wl,-rpath,'):
continue
# If it's a framework arg, slurp the framework name too
# to preserve the order of arguments
@@ -911,8 +913,8 @@ def _get_langs_compilers_flags(state: 'ModuleState', langs_compilers: T.List[T.T
cflags += state.project_args[lang]
if OptionKey('b_sanitize') in compiler.base_options:
sanitize = state.environment.coredata.optstore.get_value('b_sanitize')
+ assert isinstance(sanitize, list)
cflags += compiler.sanitizer_compile_args(sanitize)
- sanitize = sanitize.split(',')
# These must be first in ldflags
if 'address' in sanitize:
internal_ldflags += ['-lasan']
@@ -955,13 +957,14 @@ def _make_gir_filelist(state: 'ModuleState', srcdir: str, ns: str,
return gir_filelist_filename
- @staticmethod
def _make_gir_target(
+ self,
state: 'ModuleState',
girfile: str,
scan_command: T.Sequence[T.Union['FileOrString', Executable, ExternalProgram, OverrideProgram]],
generated_files: T.Sequence[T.Union[str, mesonlib.File, CustomTarget, CustomTargetIndex, GeneratedList]],
depends: T.Sequence[T.Union['FileOrString', build.BuildTarget, 'build.GeneratedTypes', build.StructuredSources]],
+ env_flags: T.Sequence[str],
kwargs: T.Dict[str, T.Any]) -> GirTarget:
install = kwargs['install_gir']
if install is None:
@@ -982,8 +985,14 @@ def _make_gir_target(
# g-ir-scanner uses Python's distutils to find the compiler, which uses 'CC'
cc_exelist = state.environment.coredata.compilers.host['c'].get_exelist()
run_env.set('CC', [quote_arg(x) for x in cc_exelist], ' ')
+ run_env.set('CFLAGS', [quote_arg(x) for x in env_flags], ' ')
run_env.merge(kwargs['env'])
+ gir_dep, _, _ = self._get_gir_dep(state)
+
+ # response file supported?
+ rspable = mesonlib.version_compare(gir_dep.get_version(), '>= 1.85.0')
+
return GirTarget(
girfile,
state.subdir,
@@ -998,6 +1007,7 @@ def _make_gir_target(
install_dir=[install_dir],
install_tag=['devel'],
env=run_env,
+ rspable=rspable,
)
@staticmethod
@@ -1088,11 +1098,12 @@ def _get_scanner_cflags(cflags: T.Iterable[str]) -> T.Iterable[str]:
yield f
@staticmethod
- def _get_scanner_ldflags(ldflags: T.Iterable[str]) -> T.Iterable[str]:
+ def _get_scanner_ldflags(ldflags: T.Iterable[str]) -> tuple[list[str], list[str]]:
'g-ir-scanner only accepts -L/-l; must ignore -F and other linker flags'
- for f in ldflags:
- if f.startswith(('-L', '-l', '--extra-library')):
- yield f
+ return (
+ [f for f in ldflags if f.startswith(('-L', '-l', '--extra-library'))],
+ [f for f in ldflags if f.startswith(('-Wl,-rpath,'))],
+ )
@typed_pos_args('gnome.generate_gir', varargs=(Executable, build.SharedLibrary, build.StaticLibrary), min_varargs=1)
@typed_kwargs(
@@ -1102,6 +1113,7 @@ def _get_scanner_ldflags(ldflags: T.Iterable[str]) -> T.Iterable[str]:
_EXTRA_ARGS_KW,
ENV_KW.evolve(since='1.2.0'),
KwargInfo('dependencies', ContainerTypeInfo(list, Dependency), default=[], listify=True),
+ KwargInfo('doc_format', (str, NoneType), since='1.8.0'),
KwargInfo('export_packages', ContainerTypeInfo(list, str), default=[], listify=True),
KwargInfo('fatal_warnings', bool, default=False, since='0.55.0'),
KwargInfo('header', ContainerTypeInfo(list, str), default=[], listify=True),
@@ -1161,11 +1173,14 @@ def generate_gir(self, state: 'ModuleState', args: T.Tuple[T.List[T.Union[Execut
scan_cflags += list(self._get_scanner_cflags(dep_cflags))
scan_cflags += list(self._get_scanner_cflags(self._get_external_args_for_langs(state, [lc[0] for lc in langs_compilers])))
scan_internal_ldflags = []
- scan_internal_ldflags += list(self._get_scanner_ldflags(internal_ldflags))
- scan_internal_ldflags += list(self._get_scanner_ldflags(dep_internal_ldflags))
scan_external_ldflags = []
- scan_external_ldflags += list(self._get_scanner_ldflags(external_ldflags))
- scan_external_ldflags += list(self._get_scanner_ldflags(dep_external_ldflags))
+ scan_env_ldflags = state.environment.coredata.get_external_link_args(MachineChoice.HOST, 'c')
+ for cli_flags, env_flags in (self._get_scanner_ldflags(internal_ldflags), self._get_scanner_ldflags(dep_internal_ldflags)):
+ scan_internal_ldflags += cli_flags
+ scan_env_ldflags += env_flags
+ for cli_flags, env_flags in (self._get_scanner_ldflags(external_ldflags), self._get_scanner_ldflags(dep_external_ldflags)):
+ scan_external_ldflags += cli_flags
+ scan_env_ldflags += env_flags
girtargets_inc_dirs = self._get_gir_targets_inc_dirs(girtargets)
inc_dirs = kwargs['include_directories']
@@ -1207,6 +1222,9 @@ def generate_gir(self, state: 'ModuleState', args: T.Tuple[T.List[T.Union[Execut
scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_source_dir(), state.root_subdir)]
scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_build_dir(), state.root_subdir)]
+ if kwargs['doc_format'] is not None and self._gir_has_option('--doc-format'):
+ scan_command += ['--doc-format', kwargs['doc_format']]
+
if '--warn-error' in scan_command:
FeatureDeprecated.single_use('gnome.generate_gir argument --warn-error', '0.55.0',
state.subproject, 'Use "fatal_warnings" keyword argument', state.current_node)
@@ -1216,7 +1234,7 @@ def generate_gir(self, state: 'ModuleState', args: T.Tuple[T.List[T.Union[Execut
generated_files = [f for f in libsources if isinstance(f, (GeneratedList, CustomTarget, CustomTargetIndex))]
scan_target = self._make_gir_target(
- state, girfile, scan_command, generated_files, depends,
+ state, girfile, scan_command, generated_files, depends, scan_env_ldflags,
# We have to cast here because mypy can't figure this out
T.cast('T.Dict[str, T.Any]', kwargs))
@@ -1649,7 +1667,7 @@ def gdbus_codegen(self, state: 'ModuleState', args: T.Tuple[str, T.Optional[T.Un
targets = []
install_header = kwargs['install_header']
- install_dir = kwargs['install_dir'] or state.environment.coredata.get_option(OptionKey('includedir'))
+ install_dir = kwargs['install_dir'] or state.environment.coredata.optstore.get_value_for(OptionKey('includedir'))
assert isinstance(install_dir, str), 'for mypy'
output = namebase + '.c'
@@ -1961,7 +1979,7 @@ def _make_mkenum_impl(
) -> build.CustomTarget:
real_cmd: T.List[T.Union[str, 'ToolType']] = [self._find_tool(state, 'glib-mkenums')]
real_cmd.extend(cmd)
- _install_dir = install_dir or state.environment.coredata.get_option(OptionKey('includedir'))
+ _install_dir = install_dir or state.environment.coredata.optstore.get_value_for(OptionKey('includedir'))
assert isinstance(_install_dir, str), 'for mypy'
return CustomTarget(
@@ -1979,6 +1997,7 @@ def _make_mkenum_impl(
extra_depends=depends,
# https://github.com/mesonbuild/meson/issues/973
absolute_paths=True,
+ rspable=mesonlib.is_windows() or mesonlib.is_cygwin(),
description='Generating GObject enum file {}',
)
@@ -2173,7 +2192,7 @@ def generate_vapi(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'Gener
cmd.append(gir_file)
vapi_output = library + '.vapi'
- datadir = state.environment.coredata.get_option(OptionKey('datadir'))
+ datadir = state.environment.coredata.optstore.get_value_for(OptionKey('datadir'))
assert isinstance(datadir, str), 'for mypy'
install_dir = kwargs['install_dir'] or os.path.join(datadir, 'vala', 'vapi')
diff --git a/mesonbuild/modules/hotdoc.py b/mesonbuild/modules/hotdoc.py
index 50b5fe6f2fed..5099b41bec60 100644
--- a/mesonbuild/modules/hotdoc.py
+++ b/mesonbuild/modules/hotdoc.py
@@ -331,7 +331,7 @@ def make_targets(self) -> T.Tuple[HotdocTarget, mesonlib.ExecutableSerialisation
for path in self.include_paths:
self.cmd.extend(['--include-path', path])
- if self.state.environment.coredata.get_option(OptionKey('werror', subproject=self.state.subproject)):
+ if self.state.environment.coredata.optstore.get_value_for(OptionKey('werror', subproject=self.state.subproject)):
self.cmd.append('--fatal-warnings')
self.generate_hotdoc_config()
diff --git a/mesonbuild/modules/i18n.py b/mesonbuild/modules/i18n.py
index 551e0b36fab6..87baab203089 100644
--- a/mesonbuild/modules/i18n.py
+++ b/mesonbuild/modules/i18n.py
@@ -4,6 +4,7 @@
from __future__ import annotations
from os import path
+from pathlib import Path
import shlex
import typing as T
@@ -13,7 +14,8 @@
from ..options import OptionKey
from .. import mlog
from ..interpreter.type_checking import CT_BUILD_BY_DEFAULT, CT_INPUT_KW, INSTALL_TAG_KW, OUTPUT_KW, INSTALL_DIR_KW, INSTALL_KW, NoneType, in_set_validator
-from ..interpreterbase import FeatureNew, InvalidArguments
+from ..interpreterbase import FeatureNew
+from ..interpreterbase.exceptions import InvalidArguments
from ..interpreterbase.decorators import ContainerTypeInfo, KwargInfo, noPosargs, typed_kwargs, typed_pos_args
from ..programs import ExternalProgram
from ..scripts.gettext import read_linguas
@@ -65,6 +67,16 @@ class ItsJoinFile(TypedDict):
its_files: T.List[str]
mo_targets: T.List[T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex]]
+ class XgettextProgramT(TypedDict):
+
+ args: T.List[str]
+ recursive: bool
+ install: bool
+ install_dir: T.Optional[str]
+ install_tag: T.Optional[str]
+
+ SourcesType = T.Union[str, mesonlib.File, build.BuildTarget, build.BothLibraries, build.CustomTarget]
+
_ARGS: KwargInfo[T.List[str]] = KwargInfo(
'args',
@@ -115,6 +127,125 @@ class ItsJoinFile(TypedDict):
}
+class XgettextProgram:
+
+ pot_files: T.Dict[str, build.CustomTarget] = {}
+
+ def __init__(self, xgettext: ExternalProgram, interpreter: Interpreter):
+ self.xgettext = xgettext
+ self.interpreter = interpreter
+
+ def extract(self,
+ name: str,
+ sources: T.List[SourcesType],
+ args: T.List[str],
+ recursive: bool,
+ install: bool,
+ install_dir: T.Optional[str],
+ install_tag: T.Optional[str]) -> build.CustomTarget:
+
+ if not name.endswith('.pot'):
+ name += '.pot'
+
+ source_files = self._get_source_files(sources)
+
+ command = self.xgettext.command + args
+ command.append(f'--directory={self.interpreter.environment.get_source_dir()}')
+ command.append(f'--directory={self.interpreter.environment.get_build_dir()}')
+ command.append('--output=@OUTPUT@')
+
+ depends = list(self._get_depends(sources)) if recursive else []
+ rsp_file = self._get_rsp_file(name, source_files, depends, command)
+ inputs: T.List[T.Union[mesonlib.File, build.CustomTarget]]
+ if rsp_file:
+ inputs = [rsp_file]
+ depend_files = list(source_files)
+ command.append('--files-from=@INPUT@')
+ else:
+ inputs = list(source_files) + depends
+ depends = None
+ depend_files = None
+ command.append('@INPUT@')
+
+ ct = build.CustomTarget(
+ '',
+ self.interpreter.subdir,
+ self.interpreter.subproject,
+ self.interpreter.environment,
+ command,
+ inputs,
+ [name],
+ depend_files = depend_files,
+ extra_depends = depends,
+ install = install,
+ install_dir = [install_dir] if install_dir else None,
+ install_tag = [install_tag] if install_tag else None,
+ description = 'Extracting translations to {}',
+ )
+
+ for source_id in self._get_source_id(sources):
+ self.pot_files[source_id] = ct
+ self.pot_files[ct.get_id()] = ct
+
+ self.interpreter.add_target(ct.name, ct)
+ return ct
+
+ def _get_source_files(self, sources: T.Iterable[SourcesType]) -> T.Set[mesonlib.File]:
+ source_files = set()
+ for source in sources:
+ if isinstance(source, mesonlib.File):
+ source_files.add(source)
+ elif isinstance(source, str):
+ mesonlib.check_direntry_issues(source)
+ source_files.add(mesonlib.File.from_source_file(self.interpreter.source_root, self.interpreter.subdir, source))
+ elif isinstance(source, build.BuildTarget):
+ source_files.update(source.get_sources())
+ elif isinstance(source, build.BothLibraries):
+ source_files.update(source.get('shared').get_sources())
+ return source_files
+
+ def _get_depends(self, sources: T.Iterable[SourcesType]) -> T.Set[build.CustomTarget]:
+ depends = set()
+ for source in sources:
+ if isinstance(source, build.BuildTarget):
+ for source_id in self._get_source_id(source.get_dependencies()):
+ if source_id in self.pot_files:
+ depends.add(self.pot_files[source_id])
+ elif isinstance(source, build.CustomTarget):
+ # Dependency on another extracted pot file
+ source_id = source.get_id()
+ if source_id in self.pot_files:
+ depends.add(self.pot_files[source_id])
+ return depends
+
+ def _get_rsp_file(self,
+ name: str,
+ source_files: T.Iterable[mesonlib.File],
+ depends: T.Iterable[build.CustomTarget],
+ arguments: T.List[str]) -> T.Optional[mesonlib.File]:
+ source_list = '\n'.join(source.relative_name() for source in source_files)
+ for dep in depends:
+ source_list += '\n' + path.join(dep.subdir, dep.get_filename())
+
+ estimated_cmdline_length = len(source_list) + sum(len(arg) + 1 for arg in arguments) + 1
+ if estimated_cmdline_length < mesonlib.get_rsp_threshold():
+ return None
+
+ rsp_file = Path(self.interpreter.environment.build_dir, self.interpreter.subdir, name+'.rsp')
+ rsp_file.write_text(source_list, encoding='utf-8')
+
+ return mesonlib.File.from_built_file(self.interpreter.subdir, rsp_file.name)
+
+ @staticmethod
+ def _get_source_id(sources: T.Iterable[T.Union[SourcesType, build.CustomTargetIndex]]) -> T.Iterable[str]:
+ for source in sources:
+ if isinstance(source, build.Target):
+ yield source.get_id()
+ elif isinstance(source, build.BothLibraries):
+ yield source.get('static').get_id()
+ yield source.get('shared').get_id()
+
+
class I18nModule(ExtensionModule):
INFO = ModuleInfo('i18n')
@@ -125,6 +256,7 @@ def __init__(self, interpreter: 'Interpreter'):
'merge_file': self.merge_file,
'gettext': self.gettext,
'itstool_join': self.itstool_join,
+ 'xgettext': self.xgettext,
})
self.tools: T.Dict[str, T.Optional[T.Union[ExternalProgram, build.Executable]]] = {
'itstool': None,
@@ -278,7 +410,7 @@ def gettext(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'Gettext') -
targets.append(pottarget)
install = kwargs['install']
- install_dir = kwargs['install_dir'] or state.environment.coredata.get_option(OptionKey('localedir'))
+ install_dir = kwargs['install_dir'] or state.environment.coredata.optstore.get_value_for(OptionKey('localedir'))
assert isinstance(install_dir, str), 'for mypy'
if not languages:
languages = read_linguas(path.join(state.environment.source_dir, state.subdir))
@@ -398,6 +530,27 @@ def itstool_join(self, state: 'ModuleState', args: T.List['TYPE_var'], kwargs: '
return ModuleReturnValue(ct, [ct])
+ @FeatureNew('i18n.xgettext', '1.8.0')
+ @typed_pos_args('i18n.xgettext', str, varargs=(str, mesonlib.File, build.BuildTarget, build.BothLibraries, build.CustomTarget), min_varargs=1)
+ @typed_kwargs(
+ 'i18n.xgettext',
+ _ARGS,
+ KwargInfo('recursive', bool, default=False),
+ INSTALL_KW,
+ INSTALL_DIR_KW,
+ INSTALL_TAG_KW,
+ )
+ def xgettext(self, state: ModuleState, args: T.Tuple[str, T.List[SourcesType]], kwargs: XgettextProgramT) -> build.CustomTarget:
+ toolname = 'xgettext'
+ if self.tools[toolname] is None or not self.tools[toolname].found():
+ self.tools[toolname] = state.find_program(toolname, required=True, for_machine=mesonlib.MachineChoice.BUILD)
+
+ if kwargs['install'] and not kwargs['install_dir']:
+ raise InvalidArguments('i18n.xgettext: "install_dir" keyword argument must be set when "install" is true.')
+
+ xgettext_program = XgettextProgram(T.cast('ExternalProgram', self.tools[toolname]), self.interpreter)
+ return xgettext_program.extract(*args, **kwargs)
+
def initialize(interp: 'Interpreter') -> I18nModule:
return I18nModule(interp)
diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py
index 1bdf82931a94..cc0450a523b4 100644
--- a/mesonbuild/modules/pkgconfig.py
+++ b/mesonbuild/modules/pkgconfig.py
@@ -490,7 +490,7 @@ def _generate_pkgconfig_file(self, state: ModuleState, deps: DependenciesHelper,
srcdir = PurePath(state.environment.get_source_dir())
else:
outdir = state.environment.scratch_dir
- prefix = PurePath(_as_str(coredata.get_option(OptionKey('prefix'))))
+ prefix = PurePath(_as_str(coredata.optstore.get_value_for(OptionKey('prefix'))))
if pkgroot:
pkgroot_ = PurePath(pkgroot)
if not pkgroot_.is_absolute():
@@ -507,7 +507,7 @@ def _generate_pkgconfig_file(self, state: ModuleState, deps: DependenciesHelper,
if optname == 'prefix':
ofile.write('prefix={}\n'.format(self._escape(prefix)))
else:
- dirpath = PurePath(_as_str(coredata.get_option(OptionKey(optname))))
+ dirpath = PurePath(_as_str(coredata.optstore.get_value_for(OptionKey(optname))))
ofile.write('{}={}\n'.format(optname, self._escape('${prefix}' / dirpath)))
if uninstalled and not dataonly:
ofile.write('srcdir={}\n'.format(self._escape(srcdir)))
@@ -701,14 +701,15 @@ def parse_variable_list(vardict: T.Dict[str, str]) -> T.List[T.Tuple[str, str]]:
pcfile = filebase + '.pc'
pkgroot = pkgroot_name = kwargs['install_dir'] or default_install_dir
if pkgroot is None:
- if mesonlib.is_freebsd():
- pkgroot = os.path.join(_as_str(state.environment.coredata.get_option(OptionKey('prefix'))), 'libdata', 'pkgconfig')
+ m = state.environment.machines.host
+ if m.is_freebsd():
+ pkgroot = os.path.join(_as_str(state.environment.coredata.optstore.get_value_for(OptionKey('prefix'))), 'libdata', 'pkgconfig')
pkgroot_name = os.path.join('{prefix}', 'libdata', 'pkgconfig')
- elif mesonlib.is_haiku():
- pkgroot = os.path.join(_as_str(state.environment.coredata.get_option(OptionKey('prefix'))), 'develop', 'lib', 'pkgconfig')
+ elif m.is_haiku():
+ pkgroot = os.path.join(_as_str(state.environment.coredata.optstore.get_value_for(OptionKey('prefix'))), 'develop', 'lib', 'pkgconfig')
pkgroot_name = os.path.join('{prefix}', 'develop', 'lib', 'pkgconfig')
else:
- pkgroot = os.path.join(_as_str(state.environment.coredata.get_option(OptionKey('libdir'))), 'pkgconfig')
+ pkgroot = os.path.join(_as_str(state.environment.coredata.optstore.get_value_for(OptionKey('libdir'))), 'pkgconfig')
pkgroot_name = os.path.join('{libdir}', 'pkgconfig')
relocatable = state.get_option('pkgconfig.relocatable')
self._generate_pkgconfig_file(state, deps, subdirs, name, description, url,
diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py
index f828f0ebf272..e6a44e7de815 100644
--- a/mesonbuild/modules/python.py
+++ b/mesonbuild/modules/python.py
@@ -113,7 +113,7 @@ class PythonInstallation(_ExternalProgramHolder['PythonExternalProgram']):
def __init__(self, python: 'PythonExternalProgram', interpreter: 'Interpreter'):
_ExternalProgramHolder.__init__(self, python, interpreter)
info = python.info
- prefix = self.interpreter.environment.coredata.get_option(OptionKey('prefix'))
+ prefix = self.interpreter.environment.coredata.optstore.get_value_for(OptionKey('prefix'))
assert isinstance(prefix, str), 'for mypy'
self.variables = info['variables']
self.suffix = info['suffix']
@@ -169,7 +169,7 @@ def extension_module_method(self, args: T.Tuple[str, T.List[BuildTargetSource]],
self.current_node)
limited_api_version = kwargs.pop('limited_api')
- allow_limited_api = self.interpreter.environment.coredata.get_option(OptionKey('python.allow_limited_api'))
+ allow_limited_api = self.interpreter.environment.coredata.optstore.get_value_for(OptionKey('python.allow_limited_api'))
if limited_api_version != '' and allow_limited_api:
target_suffix = self.limited_api_suffix
@@ -374,7 +374,7 @@ def __init__(self, interpreter: 'Interpreter') -> None:
def _get_install_scripts(self) -> T.List[mesonlib.ExecutableSerialisation]:
backend = self.interpreter.backend
ret = []
- optlevel = self.interpreter.environment.coredata.get_option(OptionKey('python.bytecompile'))
+ optlevel = self.interpreter.environment.coredata.optstore.get_value_for(OptionKey('python.bytecompile'))
if optlevel == -1:
return ret
if not any(PythonExternalProgram.run_bytecompile.values()):
diff --git a/mesonbuild/modules/rust.py b/mesonbuild/modules/rust.py
index 1368c4c1970f..c5f18e8e5705 100644
--- a/mesonbuild/modules/rust.py
+++ b/mesonbuild/modules/rust.py
@@ -1,9 +1,10 @@
# SPDX-License-Identifier: Apache-2.0
-# Copyright © 2020-2024 Intel Corporation
+# Copyright © 2020-2025 Intel Corporation
from __future__ import annotations
import itertools
import os
+import re
import typing as T
from mesonbuild.interpreterbase.decorators import FeatureNew
@@ -11,35 +12,44 @@
from . import ExtensionModule, ModuleReturnValue, ModuleInfo
from .. import mesonlib, mlog
from ..build import (BothLibraries, BuildTarget, CustomTargetIndex, Executable, ExtractedObjects, GeneratedList,
- CustomTarget, InvalidArguments, Jar, StructuredSources, SharedLibrary)
-from ..compilers.compilers import are_asserts_disabled, lang_suffixes
+ CustomTarget, InvalidArguments, Jar, StructuredSources, SharedLibrary, StaticLibrary)
+from ..compilers.compilers import are_asserts_disabled_for_subproject, lang_suffixes
from ..interpreter.type_checking import (
- DEPENDENCIES_KW, LINK_WITH_KW, SHARED_LIB_KWS, TEST_KWS, OUTPUT_KW,
- INCLUDE_DIRECTORIES, SOURCES_VARARGS, NoneType, in_set_validator
+ DEPENDENCIES_KW, LINK_WITH_KW, LINK_WHOLE_KW, SHARED_LIB_KWS, TEST_KWS, TEST_KWS_NO_ARGS,
+ OUTPUT_KW, INCLUDE_DIRECTORIES, SOURCES_VARARGS, NoneType, in_set_validator
)
from ..interpreterbase import ContainerTypeInfo, InterpreterException, KwargInfo, typed_kwargs, typed_pos_args, noPosargs, permittedKwargs
-from ..mesonlib import File
-from ..programs import ExternalProgram
+from ..interpreter.interpreterobjects import Doctest
+from ..mesonlib import File, MesonException, PerMachine
+from ..programs import ExternalProgram, NonExistingExternalProgram
if T.TYPE_CHECKING:
from . import ModuleState
from ..build import IncludeDirs, LibTypes
+ from ..compilers.rust import RustCompiler
from ..dependencies import Dependency, ExternalLibrary
from ..interpreter import Interpreter
from ..interpreter import kwargs as _kwargs
from ..interpreter.interpreter import SourceInputs, SourceOutputs
+ from ..interpreter.interpreterobjects import Test
from ..programs import OverrideProgram
from ..interpreter.type_checking import SourcesVarargsType
from typing_extensions import TypedDict, Literal
- class FuncTest(_kwargs.BaseTest):
+ ArgsType = T.TypeVar('ArgsType')
+ class FuncRustTest(_kwargs.BaseTest, T.Generic[ArgsType]):
+ args: T.List[ArgsType]
dependencies: T.List[T.Union[Dependency, ExternalLibrary]]
is_parallel: bool
link_with: T.List[LibTypes]
+ link_whole: T.List[LibTypes]
rust_args: T.List[str]
+ FuncTest = FuncRustTest[_kwargs.TestArgs]
+ FuncDoctest = FuncRustTest[str]
+
class FuncBindgen(TypedDict):
args: T.List[str]
@@ -53,37 +63,48 @@ class FuncBindgen(TypedDict):
bindgen_version: T.List[str]
+RUST_TEST_KWS: T.List[KwargInfo] = [
+ KwargInfo(
+ 'rust_args',
+ ContainerTypeInfo(list, str),
+ listify=True,
+ default=[],
+ since='1.2.0',
+ ),
+ KwargInfo('is_parallel', bool, default=False),
+]
+
+def no_spaces_validator(arg: T.Optional[T.Union[str, T.List]]) -> T.Optional[str]:
+ if any(bool(re.search(r'\s', x)) for x in arg):
+ return 'must not contain spaces due to limitations of rustdoc'
+ return None
+
+
class RustModule(ExtensionModule):
"""A module that holds helper functions for rust."""
INFO = ModuleInfo('rust', '0.57.0', stabilized='1.0.0')
+ _bindgen_rust_target: T.Optional[str]
+ rustdoc: PerMachine[T.Optional[ExternalProgram]] = PerMachine(None, None)
def __init__(self, interpreter: Interpreter) -> None:
super().__init__(interpreter)
self._bindgen_bin: T.Optional[T.Union[ExternalProgram, Executable, OverrideProgram]] = None
+ if 'rust' in interpreter.compilers.host:
+ rustc = T.cast('RustCompiler', interpreter.compilers.host['rust'])
+ self._bindgen_rust_target = 'nightly' if rustc.is_nightly else rustc.version
+ else:
+ self._bindgen_rust_target = None
+ self._bindgen_set_std = False
self.methods.update({
'test': self.test,
+ 'doctest': self.doctest,
'bindgen': self.bindgen,
'proc_macro': self.proc_macro,
})
- @typed_pos_args('rust.test', str, BuildTarget)
- @typed_kwargs(
- 'rust.test',
- *TEST_KWS,
- DEPENDENCIES_KW,
- LINK_WITH_KW.evolve(since='1.2.0'),
- KwargInfo(
- 'rust_args',
- ContainerTypeInfo(list, str),
- listify=True,
- default=[],
- since='1.2.0',
- ),
- KwargInfo('is_parallel', bool, default=False),
- )
- def test(self, state: ModuleState, args: T.Tuple[str, BuildTarget], kwargs: FuncTest) -> ModuleReturnValue:
+ def test_common(self, funcname: str, state: ModuleState, args: T.Tuple[str, BuildTarget], kwargs: FuncRustTest) -> T.Tuple[Executable, _kwargs.FuncTest]:
"""Generate a rust test target from a given rust target.
Rust puts its unitests inside its main source files, unlike most
@@ -127,19 +148,21 @@ def test(self, state: ModuleState, args: T.Tuple[str, BuildTarget], kwargs: Func
"""
if any(isinstance(t, Jar) for t in kwargs.get('link_with', [])):
raise InvalidArguments('Rust tests cannot link with Jar targets')
+ if any(isinstance(t, Jar) for t in kwargs.get('link_whole', [])):
+ raise InvalidArguments('Rust tests cannot link with Jar targets')
name = args[0]
base_target: BuildTarget = args[1]
if not base_target.uses_rust():
- raise InterpreterException('Second positional argument to rustmod.test() must be a rust based target')
+ raise InterpreterException(f'Second positional argument to rustmod.{funcname}() must be a rust based target')
extra_args = kwargs['args']
# Delete any arguments we don't want passed
if '--test' in extra_args:
- mlog.warning('Do not add --test to rustmod.test arguments')
+ mlog.warning(f'Do not add --test to rustmod.{funcname}() arguments')
extra_args.remove('--test')
if '--format' in extra_args:
- mlog.warning('Do not add --format to rustmod.test arguments')
+ mlog.warning(f'Do not add --format to rustmod.{funcname}() arguments')
i = extra_args.index('--format')
# Also delete the argument to --format
del extra_args[i + 1]
@@ -161,7 +184,11 @@ def test(self, state: ModuleState, args: T.Tuple[str, BuildTarget], kwargs: Func
new_target_kwargs['install'] = False
new_target_kwargs['dependencies'] = new_target_kwargs.get('dependencies', []) + kwargs['dependencies']
new_target_kwargs['link_with'] = new_target_kwargs.get('link_with', []) + kwargs['link_with']
+ new_target_kwargs['link_whole'] = new_target_kwargs.get('link_whole', []) + kwargs['link_whole']
del new_target_kwargs['rust_crate_type']
+ for kw in ['pic', 'prelink', 'rust_abi', 'version', 'soversion', 'darwin_versions']:
+ if kw in new_target_kwargs:
+ del new_target_kwargs[kw]
lang_args = base_target.extra_args.copy()
lang_args['rust'] = base_target.extra_args['rust'] + kwargs['rust_args'] + ['--test']
@@ -176,12 +203,91 @@ def test(self, state: ModuleState, args: T.Tuple[str, BuildTarget], kwargs: Func
base_target.objects, base_target.environment, base_target.compilers,
new_target_kwargs
)
+ return new_target, tkwargs
- test = self.interpreter.make_test(
+ @typed_pos_args('rust.test', str, BuildTarget)
+ @typed_kwargs(
+ 'rust.test',
+ *TEST_KWS,
+ DEPENDENCIES_KW,
+ LINK_WITH_KW.evolve(since='1.2.0'),
+ LINK_WHOLE_KW.evolve(since='1.8.0'),
+ *RUST_TEST_KWS,
+ )
+ def test(self, state: ModuleState, args: T.Tuple[str, BuildTarget], kwargs: FuncTest) -> ModuleReturnValue:
+ name, _ = args
+ new_target, tkwargs = self.test_common('test', state, args, kwargs)
+ test: Test = self.interpreter.make_test(
self.interpreter.current_node, (name, new_target), tkwargs)
return ModuleReturnValue(None, [new_target, test])
+ @FeatureNew('rust.doctest', '1.8.0')
+ @typed_pos_args('rust.doctest', str, BuildTarget)
+ @typed_kwargs(
+ 'rust.doctest',
+ *TEST_KWS_NO_ARGS,
+ DEPENDENCIES_KW,
+ LINK_WITH_KW,
+ LINK_WHOLE_KW,
+ *RUST_TEST_KWS,
+ KwargInfo(
+ 'args',
+ ContainerTypeInfo(list, str),
+ listify=True,
+ default=[],
+ validator=no_spaces_validator,
+ ),
+ )
+ def doctest(self, state: ModuleState, args: T.Tuple[str, T.Union[SharedLibrary, StaticLibrary]], kwargs: FuncDoctest) -> ModuleReturnValue:
+ name, base_target = args
+
+ if state.environment.is_cross_build() and state.environment.need_exe_wrapper(base_target.for_machine):
+ mlog.notice('skipping Rust doctests due to cross compilation', once=True)
+ return ModuleReturnValue(None, [])
+
+ # Link the base target's crate into the tests
+ kwargs['link_with'].append(base_target)
+ kwargs['depends'].append(base_target)
+ workdir = kwargs['workdir']
+ kwargs['workdir'] = None
+ new_target, tkwargs = self.test_common('doctest', state, args, kwargs)
+
+ # added automatically by rustdoc; keep things simple
+ tkwargs['args'].remove('--test')
+
+ # --test-args= is "parsed" simply via the Rust function split_whitespace().
+ # This means no quoting nightmares (pfew) but it also means no spaces.
+ # Unfortunately it's pretty hard at this point to accept e.g. CustomTarget,
+ # because their paths may not be known. This is not a big deal because the
+ # user does not control the test harness, so make things easy and allow
+ # strings only.
+ if tkwargs['args']:
+ tkwargs['args'] = ['--test-args=' + ' '.join(T.cast('T.Sequence[str]', tkwargs['args']))]
+ if workdir:
+ tkwargs['args'].append('--test-run-directory=' + workdir)
+
+ if self.rustdoc[base_target.for_machine] is None:
+ rustc = T.cast('RustCompiler', base_target.compilers['rust'])
+ rustdoc = rustc.get_rustdoc(state.environment)
+ if rustdoc:
+ self.rustdoc[base_target.for_machine] = ExternalProgram(rustdoc.get_exe())
+ else:
+ self.rustdoc[base_target.for_machine] = NonExistingExternalProgram()
+
+ rustdoc_prog = self.rustdoc[base_target.for_machine]
+ if not rustdoc_prog.found():
+ raise MesonException(f'could not find rustdoc for {base_target.for_machine} machine')
+
+ doctests: Doctest = self.interpreter.make_test(
+ self.interpreter.current_node, (name, rustdoc_prog), tkwargs, Doctest)
+
+ # Note that the new_target is intentionally not returned, as it
+ # is only reached via the base_target and never built by "ninja"
+ doctests.target = new_target
+ base_target.doctests = doctests
+ return ModuleReturnValue(None, [doctests])
+
@noPosargs
@typed_kwargs(
'rust.bindgen',
@@ -231,7 +337,7 @@ def bindgen(self, state: ModuleState, args: T.List, kwargs: FuncBindgen) -> Modu
# bindgen always uses clang, so it's safe to hardcode -I here
clang_args.extend([f'-I{x}' for x in i.to_string_list(
state.environment.get_source_dir(), state.environment.get_build_dir())])
- if are_asserts_disabled(state.environment.coredata.optstore):
+ if are_asserts_disabled_for_subproject(state.subproject, state.environment):
clang_args.append('-DNDEBUG')
for de in kwargs['dependencies']:
@@ -247,6 +353,21 @@ def bindgen(self, state: ModuleState, args: T.List, kwargs: FuncBindgen) -> Modu
if self._bindgen_bin is None:
self._bindgen_bin = state.find_program('bindgen', wanted=kwargs['bindgen_version'])
+ if self._bindgen_rust_target is not None:
+ # ExternalCommand.command's type is bonkers
+ _, _, err = mesonlib.Popen_safe(
+ T.cast('T.List[str]', self._bindgen_bin.get_command()) +
+ ['--rust-target', self._bindgen_rust_target])
+ # < 0.71: Sometimes this is "invalid Rust target" and
+ # sometimes "invalid # rust target"
+ # >= 0.71: error: invalid value '...' for '--rust-target ': "..." is not a valid Rust target, accepted values are of the form ...
+ # It's also much harder to hit this in 0.71 than in previous versions
+ if 'Got an invalid' in err or 'is not a valid Rust target' in err:
+ self._bindgen_rust_target = None
+
+ # TODO: Executable needs to learn about get_version
+ if isinstance(self._bindgen_bin, ExternalProgram):
+ self._bindgen_set_std = mesonlib.version_compare(self._bindgen_bin.get_version(), '>= 0.71')
name: str
if isinstance(header, File):
@@ -314,9 +435,18 @@ def bindgen(self, state: ModuleState, args: T.List, kwargs: FuncBindgen) -> Modu
'@INPUT@', '--output',
os.path.join(state.environment.build_dir, '@OUTPUT0@')
] + \
- kwargs['args'] + inline_wrapper_args + ['--'] + \
- kwargs['c_args'] + clang_args + \
- ['-MD', '-MQ', '@INPUT@', '-MF', '@DEPFILE@']
+ kwargs['args'] + inline_wrapper_args
+ if self._bindgen_rust_target and '--rust-target' not in cmd:
+ cmd.extend(['--rust-target', self._bindgen_rust_target])
+ if self._bindgen_set_std and '--rust-edition' not in cmd:
+ rust_std = state.environment.coredata.optstore.get_value('rust_std')
+ assert isinstance(rust_std, str), 'for mypy'
+ if rust_std != 'none':
+ cmd.extend(['--rust-edition', rust_std])
+ cmd.append('--')
+ cmd.extend(kwargs['c_args'])
+ cmd.extend(clang_args)
+ cmd.extend(['-MD', '-MQ', '@INPUT@', '-MF', '@DEPFILE@'])
target = CustomTarget(
f'rustmod-bindgen-{name}'.replace('/', '_'),
diff --git a/mesonbuild/modules/wayland.py b/mesonbuild/modules/wayland.py
index e17cf995ced3..94c6f819db5e 100644
--- a/mesonbuild/modules/wayland.py
+++ b/mesonbuild/modules/wayland.py
@@ -35,7 +35,7 @@ class FindProtocol(TypedDict):
class WaylandModule(ExtensionModule):
- INFO = ModuleInfo('wayland', '0.62.0', unstable=True)
+ INFO = ModuleInfo('wayland', '0.62.0', stabilized='1.8.0')
def __init__(self, interpreter: Interpreter) -> None:
super().__init__(interpreter)
diff --git a/mesonbuild/mparser.py b/mesonbuild/mparser.py
index 4f43455468c6..116e88fdb31d 100644
--- a/mesonbuild/mparser.py
+++ b/mesonbuild/mparser.py
@@ -94,6 +94,9 @@ def __eq__(self, other: object) -> bool:
return self.tid == other.tid
return NotImplemented
+
+IDENT_RE = re.compile('[_a-zA-Z][_0-9a-zA-Z]*')
+
class Lexer:
def __init__(self, code: str):
if code.startswith(codecs.BOM_UTF8.decode('utf-8')):
@@ -113,7 +116,7 @@ def __init__(self, code: str):
('whitespace', re.compile(r'[ \t]+')),
('multiline_fstring', re.compile(r"f'''(.|\n)*?'''", re.M)),
('fstring', re.compile(r"f'([^'\\]|(\\.))*'")),
- ('id', re.compile('[_a-zA-Z][_0-9a-zA-Z]*')),
+ ('id', IDENT_RE),
('number', re.compile(r'0[bB][01]+|0[oO][0-7]+|0[xX][0-9a-fA-F]+|0|[1-9]\d*')),
('eol_cont', re.compile(r'\\[ \t]*(#.*)?\n')),
('eol', re.compile(r'\n')),
@@ -218,7 +221,7 @@ def lex(self, filename: str) -> T.Generator[Token, None, None]:
yield Token(tid, filename, curline_start, curline, col, bytespan, value)
break
if not matched:
- raise ParseException('lexer', self.getline(line_start), lineno, col)
+ raise ParseException(f'lexer: unrecognized token {self.code[loc]!r}', self.getline(line_start), lineno, loc - line_start)
@dataclass
class BaseNode:
@@ -1099,7 +1102,7 @@ def codeblock(self) -> CodeBlockNode:
e.ast = block
raise
- # Remaining whitespaces will not be catched since there are no more nodes
+ # Remaining whitespaces will not be caught since there are no more nodes
for ws_token in self.current_ws:
block.append_whitespaces(ws_token)
self.current_ws = []
diff --git a/mesonbuild/msetup.py b/mesonbuild/msetup.py
index e634c05ab5aa..d4e745468379 100644
--- a/mesonbuild/msetup.py
+++ b/mesonbuild/msetup.py
@@ -1,6 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2016-2018 The Meson development team
-# Copyright © 2023-2024 Intel Corporation
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
@@ -27,6 +27,7 @@ class CMDOptions(SharedCMDOptions, Protocol):
builddir: str
sourcedir: str
pager: bool
+ unset_opts: T.List[str]
git_ignore_file = '''# This file is autogenerated by Meson. If you change or delete it, it won't be recreated.
*
@@ -179,6 +180,9 @@ def validate_dirs(self) -> T.Tuple[str, str]:
# See class Backend's 'generate' for comments on capture args and returned dictionary.
def generate(self, capture: bool = False, vslite_ctx: T.Optional[dict] = None) -> T.Optional[dict]:
env = environment.Environment(self.source_dir, self.build_dir, self.options)
+ if not env.first_invocation:
+ assert self.options.reconfigure
+ env.coredata.set_from_configure_command(self.options)
mlog.initialize(env.get_log_dir(), self.options.fatal_warnings)
if self.options.profile:
mlog.set_timestamp_start(time.monotonic())
@@ -187,6 +191,24 @@ def generate(self, capture: bool = False, vslite_ctx: T.Optional[dict] = None) -
with mesonlib.BuildDirLock(self.build_dir):
return self._generate(env, capture, vslite_ctx)
+ def check_unused_options(self, coredata: 'coredata.CoreData', cmd_line_options: T.Any, all_subprojects: T.Mapping[str, object]) -> None:
+ pending = coredata.optstore.pending_options
+ errlist: T.List[str] = []
+ known_subprojects = all_subprojects.keys()
+ for opt in pending:
+ # It is not an error to set wrong option for unknown subprojects
+ # because they might be used in future reconfigurations
+ if coredata.optstore.accept_as_pending_option(opt, known_subprojects):
+ continue
+ keystr = str(opt)
+ if keystr in cmd_line_options:
+ errlist.append(f'"{keystr}"')
+ if errlist:
+ errstr = ', '.join(errlist)
+ raise MesonException(f'Unknown options: {errstr}')
+
+ coredata.optstore.clear_pending()
+
def _generate(self, env: environment.Environment, capture: bool, vslite_ctx: T.Optional[dict]) -> T.Optional[dict]:
# Get all user defined options, including options that have been defined
# during a previous invocation or using meson configure.
@@ -242,6 +264,9 @@ def _generate(self, env: environment.Environment, capture: bool, vslite_ctx: T.O
cdf = env.dump_coredata()
self.finalize_postconf_hooks(b, intr)
+ self.check_unused_options(env.coredata,
+ intr.user_defined_options.cmd_line_options,
+ intr.subprojects)
if self.options.profile:
localvars = locals()
fname = f'profile-{intr.backend.name}-backend.log'
@@ -275,9 +300,9 @@ def _generate(self, env: environment.Environment, capture: bool, vslite_ctx: T.O
# collect warnings about unsupported build configurations; must be done after full arg processing
# by Interpreter() init, but this is most visible at the end
- if env.coredata.optstore.get_value('backend') == 'xcode':
+ if env.coredata.optstore.get_value_for('backend') == 'xcode':
mlog.warning('xcode backend is currently unmaintained, patches welcome')
- if env.coredata.optstore.get_value('layout') == 'flat':
+ if env.coredata.optstore.get_value_for('layout') == 'flat':
mlog.warning('-Dlayout=flat is unsupported and probably broken. It was a failed experiment at '
'making Windows build artifacts runnable while uninstalled, due to PATH considerations, '
'but was untested by CI and anyways breaks reasonable use of conflicting targets in different subdirs. '
@@ -321,17 +346,17 @@ def run_genvslite_setup(options: CMDOptions) -> None:
# invoke the appropriate 'meson compile ...' build commands upon the normal visual studio build/rebuild/clean actions, instead of using
# the native VS/msbuild system.
builddir_prefix = options.builddir
- genvsliteval = options.cmd_line_options.pop(OptionKey('genvslite'))
+ genvsliteval = options.cmd_line_options.pop('genvslite') # type: ignore [call-overload]
# The command line may specify a '--backend' option, which doesn't make sense in conjunction with
# '--genvslite', where we always want to use a ninja back end -
- k_backend = OptionKey('backend')
+ k_backend = 'backend'
if k_backend in options.cmd_line_options.keys():
- if options.cmd_line_options[k_backend] != 'ninja':
+ if options.cmd_line_options[k_backend] != 'ninja': # type: ignore [index]
raise MesonException('Explicitly specifying a backend option with \'genvslite\' is not necessary '
'(the ninja backend is always used) but specifying a non-ninja backend '
'conflicts with a \'genvslite\' setup')
else:
- options.cmd_line_options[k_backend] = 'ninja'
+ options.cmd_line_options[k_backend] = 'ninja' # type: ignore [index]
buildtypes_list = coredata.get_genvs_default_buildtype_list()
vslite_ctx = {}
@@ -358,7 +383,7 @@ def run(options: T.Union[CMDOptions, T.List[str]]) -> int:
# lie
options.pager = False
- if OptionKey('genvslite') in options.cmd_line_options.keys():
+ if 'genvslite' in options.cmd_line_options.keys():
run_genvslite_setup(options)
else:
app = MesonApp(options)
diff --git a/mesonbuild/msubprojects.py b/mesonbuild/msubprojects.py
index c15415485217..c74283c29c0f 100755
--- a/mesonbuild/msubprojects.py
+++ b/mesonbuild/msubprojects.py
@@ -324,7 +324,8 @@ def update_git(self) -> bool:
self.log(' -> Not a git repository.')
self.log('Pass --reset option to delete directory and redownload.')
return False
- revision = self.wrap.values.get('revision')
+ revision_val = self.wrap.values.get('revision')
+ revision = revision_val if revision_val is None or revision_val.upper() != 'HEAD' else 'HEAD'
url = self.wrap.values.get('url')
push_url = self.wrap.values.get('push-url')
if not revision or not url:
diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py
index c417bc0b38b7..4a907d89e9a6 100644
--- a/mesonbuild/mtest.py
+++ b/mesonbuild/mtest.py
@@ -14,7 +14,6 @@
import datetime
import enum
import json
-import multiprocessing
import os
import pickle
import platform
@@ -24,7 +23,6 @@
import subprocess
import shlex
import sys
-import textwrap
import time
import typing as T
import unicodedata
@@ -36,9 +34,9 @@
from .coredata import MesonVersionMismatchException, major_versions_differ
from .coredata import version as coredata_version
from .mesonlib import (MesonException, OrderedSet, RealPathAction,
- get_wine_shortpath, join_args, split_args, setup_vsenv)
+ get_wine_shortpath, join_args, split_args, setup_vsenv,
+ determine_worker_count)
from .options import OptionKey
-from .mintro import get_infodir, load_info_file
from .programs import ExternalProgram
from .backend.backends import TestProtocol, TestSerialisation
@@ -81,6 +79,9 @@
UNENCODABLE_XML_CHR_RANGES = [fr'{chr(low)}-{chr(high)}' for (low, high) in UNENCODABLE_XML_UNICHRS]
UNENCODABLE_XML_CHRS_RE = re.compile('([' + ''.join(UNENCODABLE_XML_CHR_RANGES) + '])')
+RUST_TEST_RE = re.compile(r'^test (?!result)(.*) \.\.\. (.*)$')
+RUST_DOCTEST_RE = re.compile(r'^(.*?) - (.*? |)\(line (\d+)\)')
+
def is_windows() -> bool:
platname = platform.system().lower()
@@ -97,26 +98,28 @@ def uniwidth(s: str) -> int:
result += UNIWIDTH_MAPPING[w]
return result
-def determine_worker_count() -> int:
- varname = 'MESON_TESTTHREADS'
- num_workers = 0
- if varname in os.environ:
- try:
- num_workers = int(os.environ[varname])
- if num_workers < 0:
- raise ValueError
- except ValueError:
- print(f'Invalid value in {varname}, using 1 thread.')
- num_workers = 1
+def test_slice(arg: str) -> T.Tuple[int, int]:
+ values = arg.split('/')
+ if len(values) != 2:
+ raise argparse.ArgumentTypeError("value does not conform to format 'SLICE/NUM_SLICES'")
- if num_workers == 0:
- try:
- # Fails in some weird environments such as Debian
- # reproducible build.
- num_workers = multiprocessing.cpu_count()
- except Exception:
- num_workers = 1
- return num_workers
+ try:
+ nrslices = int(values[1])
+ except ValueError:
+ raise argparse.ArgumentTypeError('NUM_SLICES is not an integer')
+ if nrslices <= 0:
+ raise argparse.ArgumentTypeError('NUM_SLICES is not a positive integer')
+
+ try:
+ subslice = int(values[0])
+ except ValueError:
+ raise argparse.ArgumentTypeError('SLICE is not an integer')
+ if subslice <= 0:
+ raise argparse.ArgumentTypeError('SLICE is not a positive integer')
+ if subslice > nrslices:
+ raise argparse.ArgumentTypeError('SLICE exceeds NUM_SLICES')
+
+ return subslice, nrslices
# Note: when adding arguments, please also add them to the completion
# scripts in $MESONSRC/data/shell-completions/
@@ -152,7 +155,7 @@ def add_arguments(parser: argparse.ArgumentParser) -> None:
help="Run benchmarks instead of tests.")
parser.add_argument('--logbase', default='testlog',
help="Base name for log file.")
- parser.add_argument('-j', '--num-processes', default=determine_worker_count(), type=int,
+ parser.add_argument('-j', '--num-processes', default=determine_worker_count(['MESON_TESTTHREADS']), type=int,
help='How many parallel processes to use.')
parser.add_argument('-v', '--verbose', default=False, action='store_true',
help='Do not redirect stdout and stderr')
@@ -168,12 +171,13 @@ def add_arguments(parser: argparse.ArgumentParser) -> None:
help='Arguments to pass to the specified test(s) or all tests')
parser.add_argument('--max-lines', default=100, dest='max_lines', type=int,
help='Maximum number of lines to show from a long test log. Since 1.5.0.')
+ parser.add_argument('--slice', default=None, type=test_slice, metavar='SLICE/NUM_SLICES',
+ help='Split tests into NUM_SLICES slices and execute slice SLICE. Since 1.8.0.')
parser.add_argument('args', nargs='*',
help='Optional list of test names to run. "testname" to run all tests with that name, '
'"subprojname:testname" to specifically run "testname" from "subprojname", '
'"subprojname:" to run all tests defined by "subprojname".')
-
def print_safe(s: str) -> None:
end = '' if s[-1] == '\n' else '\n'
try:
@@ -262,6 +266,7 @@ class TestResult(enum.Enum):
EXPECTEDFAIL = 'EXPECTEDFAIL'
UNEXPECTEDPASS = 'UNEXPECTEDPASS'
ERROR = 'ERROR'
+ IGNORED = 'IGNORED'
@staticmethod
def maxlen() -> int:
@@ -283,7 +288,7 @@ def was_killed(self) -> bool:
def colorize(self, s: str) -> mlog.AnsiDecorator:
if self.is_bad():
decorator = mlog.red
- elif self in (TestResult.SKIP, TestResult.EXPECTEDFAIL):
+ elif self in (TestResult.SKIP, TestResult.IGNORED, TestResult.EXPECTEDFAIL):
decorator = mlog.yellow
elif self.is_finished():
decorator = mlog.green
@@ -345,6 +350,8 @@ class Version(T.NamedTuple):
plan: T.Optional[Plan] = None
lineno = 0
num_tests = 0
+ last_test = 0
+ highest_test = 0
yaml_lineno: T.Optional[int] = None
yaml_indent = ''
state = _MAIN
@@ -415,10 +422,11 @@ def parse_line(self, line: T.Optional[str]) -> T.Iterator[TYPE_TAPResult]:
yield self.Error('unexpected test after late plan')
self.found_late_test = True
self.num_tests += 1
- num = self.num_tests if m.group(2) is None else int(m.group(2))
- if num != self.num_tests:
- yield self.Error('out of order test numbers')
- yield from self.parse_test(m.group(1) == 'ok', num,
+ self.last_test = self.last_test + 1 if m.group(2) is None else int(m.group(2))
+ self.highest_test = max(self.highest_test, self.last_test)
+ if self.plan and self.last_test > self.plan.num_tests:
+ yield self.Error('test number exceeds maximum specified in test plan')
+ yield from self.parse_test(m.group(1) == 'ok', self.last_test,
m.group(3), m.group(4), m.group(5))
self.state = self._AFTER_TEST
return
@@ -468,11 +476,21 @@ def parse_line(self, line: T.Optional[str]) -> T.Iterator[TYPE_TAPResult]:
if self.state == self._YAML:
yield self.Error(f'YAML block not terminated (started on line {self.yaml_lineno})')
- if not self.bailed_out and self.plan and self.num_tests != self.plan.num_tests:
+ if self.bailed_out:
+ return
+
+ if self.plan and self.num_tests != self.plan.num_tests:
if self.num_tests < self.plan.num_tests:
yield self.Error(f'Too few tests run (expected {self.plan.num_tests}, got {self.num_tests})')
else:
yield self.Error(f'Too many tests run (expected {self.plan.num_tests}, got {self.num_tests})')
+ return
+
+ if self.highest_test != self.num_tests:
+ if self.highest_test < self.num_tests:
+ yield self.Error(f'Duplicate test numbers (expected {self.num_tests}, got test numbered {self.highest_test})')
+ else:
+ yield self.Error(f'Missing test numbers (expected {self.num_tests}, got test numbered {self.highest_test})')
class TestLogger:
def flush(self) -> None:
@@ -840,7 +858,8 @@ def log(self, harness: 'TestHarness', test: 'TestRun') -> None:
{TestResult.INTERRUPT, TestResult.ERROR})),
failures=str(sum(1 for r in test.results if r.result in
{TestResult.FAIL, TestResult.UNEXPECTEDPASS, TestResult.TIMEOUT})),
- skipped=str(sum(1 for r in test.results if r.result is TestResult.SKIP)),
+ skipped=str(sum(1 for r in test.results if r.result in
+ {TestResult.SKIP, TestResult.IGNORED})),
time=str(test.duration),
)
@@ -850,13 +869,16 @@ def log(self, harness: 'TestHarness', test: 'TestRun') -> None:
testcase = et.SubElement(suite, 'testcase', name=str(subtest), classname=suitename)
if subtest.result is TestResult.SKIP:
et.SubElement(testcase, 'skipped')
+ elif subtest.result is TestResult.IGNORED:
+ skip = et.SubElement(testcase, 'skipped')
+ skip.text = 'Test output was not parsed.'
elif subtest.result is TestResult.ERROR:
et.SubElement(testcase, 'error')
elif subtest.result is TestResult.FAIL:
et.SubElement(testcase, 'failure')
elif subtest.result is TestResult.UNEXPECTEDPASS:
fail = et.SubElement(testcase, 'failure')
- fail.text = 'Test unexpected passed.'
+ fail.text = 'Test unexpectedly passed.'
elif subtest.result is TestResult.INTERRUPT:
fail = et.SubElement(testcase, 'error')
fail.text = 'Test was interrupted by user.'
@@ -885,12 +907,28 @@ def log(self, harness: 'TestHarness', test: 'TestRun') -> None:
if test.res is TestResult.SKIP:
et.SubElement(testcase, 'skipped')
suite.attrib['skipped'] = str(int(suite.attrib['skipped']) + 1)
+ elif test.res is TestResult.IGNORED:
+ skip = et.SubElement(testcase, 'skipped')
+ skip.text = 'Test output was not parsed.'
+ suite.attrib['skipped'] = str(int(suite.attrib['skipped']) + 1)
elif test.res is TestResult.ERROR:
et.SubElement(testcase, 'error')
suite.attrib['errors'] = str(int(suite.attrib['errors']) + 1)
elif test.res is TestResult.FAIL:
et.SubElement(testcase, 'failure')
suite.attrib['failures'] = str(int(suite.attrib['failures']) + 1)
+ elif test.res is TestResult.UNEXPECTEDPASS:
+ fail = et.SubElement(testcase, 'failure')
+ fail.text = 'Test unexpectedly passed.'
+ suite.attrib['failures'] = str(int(suite.attrib['failures']) + 1)
+ elif test.res is TestResult.INTERRUPT:
+ fail = et.SubElement(testcase, 'error')
+ fail.text = 'Test was interrupted by user.'
+ suite.attrib['errors'] = str(int(suite.attrib['errors']) + 1)
+ elif test.res is TestResult.TIMEOUT:
+ fail = et.SubElement(testcase, 'error')
+ fail.text = 'Test did not finish before configured timeout.'
+ suite.attrib['errors'] = str(int(suite.attrib['errors']) + 1)
if test.stdo:
out = et.SubElement(testcase, 'system-out')
out.text = replace_unencodable_xml_chars(test.stdo.rstrip())
@@ -919,7 +957,8 @@ def __new__(cls, test: TestSerialisation, *args: T.Any, **kwargs: T.Any) -> T.An
return super().__new__(TestRun.PROTOCOL_TO_CLASS[test.protocol])
def __init__(self, test: TestSerialisation, test_env: T.Dict[str, str],
- name: str, timeout: T.Optional[int], is_parallel: bool, verbose: bool):
+ name: str, timeout: T.Optional[int], is_parallel: bool, verbose: bool,
+ interactive: bool):
self.res = TestResult.PENDING
self.test = test
self._num: T.Optional[int] = None
@@ -939,6 +978,7 @@ def __init__(self, test: TestSerialisation, test_env: T.Dict[str, str],
self.junit: T.Optional[et.ElementTree] = None
self.is_parallel = is_parallel
self.verbose = verbose
+ self.interactive = interactive
self.warnings: T.List[str] = []
def start(self, cmd: T.List[str]) -> None:
@@ -953,6 +993,15 @@ def num(self) -> int:
self._num = TestRun.TEST_NUM
return self._num
+ @property
+ def console_mode(self) -> ConsoleUser:
+ if self.interactive:
+ return ConsoleUser.INTERACTIVE
+ elif self.direct_stdout:
+ return ConsoleUser.STDOUT
+ else:
+ return ConsoleUser.LOGGER
+
@property
def direct_stdout(self) -> bool:
return self.verbose and not self.is_parallel and not self.needs_parsing
@@ -961,7 +1010,7 @@ def get_results(self) -> str:
if self.results:
# running or succeeded
passed = sum(x.result.is_ok() for x in self.results)
- ran = sum(x.result is not TestResult.SKIP for x in self.results)
+ ran = sum(x.result not in {TestResult.SKIP, TestResult.IGNORED} for x in self.results)
if passed == ran:
return f'{passed} subtests passed'
else:
@@ -981,6 +1030,8 @@ def get_details(self) -> str:
def _complete(self) -> None:
if self.res == TestResult.RUNNING:
self.res = TestResult.OK
+ if self.needs_parsing and self.console_mode is ConsoleUser.INTERACTIVE:
+ self.res = TestResult.IGNORED
assert isinstance(self.res, TestResult)
if self.should_fail and self.res in (TestResult.OK, TestResult.FAIL):
self.res = TestResult.UNEXPECTEDPASS if self.res is TestResult.OK else TestResult.EXPECTEDFAIL
@@ -1119,7 +1170,8 @@ async def parse(self, harness: 'TestHarness', lines: T.AsyncIterator[str]) -> No
'This is probably a bug in the test; if they are not TAP syntax, prefix them with a #')
if all(t.result is TestResult.SKIP for t in self.results):
# This includes the case where self.results is empty
- res = TestResult.SKIP
+ if res != TestResult.ERROR:
+ res = TestResult.SKIP
if res and self.res == TestResult.RUNNING:
self.res = res
@@ -1145,8 +1197,14 @@ def parse_res(n: int, name: str, result: str) -> TAPParser.Test:
n = 1
async for line in lines:
- if line.startswith('test ') and not line.startswith('test result'):
- _, name, _, result = line.rstrip().split(' ')
+ match = RUST_TEST_RE.match(line)
+ if match:
+ name, result = match.groups()
+ doctest = RUST_DOCTEST_RE.match(name)
+ if doctest:
+ name = ':'.join((x.rstrip() for x in doctest.groups() if x))
+ else:
+ name = name.rstrip()
name = name.replace('::', '.')
t = parse_res(n, name, result)
self.results.append(t)
@@ -1427,6 +1485,15 @@ def __init__(self, test: TestSerialisation, env: T.Dict[str, str], name: str,
if ('MSAN_OPTIONS' not in env or not env['MSAN_OPTIONS']):
env['MSAN_OPTIONS'] = 'halt_on_error=1:abort_on_error=1:print_summary=1:print_stacktrace=1'
+ # Valgrind also doesn't reflect errors in its exit code by default.
+ if 'VALGRIND_OPTS' not in env or not env['VALGRIND_OPTS']:
+ try:
+ wrapper_name = TestHarness.get_wrapper(self.options)[0]
+ if 'valgrind' in wrapper_name:
+ env['VALGRIND_OPTS'] = '--error-exitcode=1'
+ except IndexError:
+ pass
+
if self.options.interactive or self.test.timeout is None or self.test.timeout <= 0:
timeout = None
elif self.options.timeout_multiplier is None:
@@ -1438,14 +1505,11 @@ def __init__(self, test: TestSerialisation, env: T.Dict[str, str], name: str,
is_parallel = test.is_parallel and self.options.num_processes > 1 and not self.options.interactive
verbose = (test.verbose or self.options.verbose) and not self.options.quiet
- self.runobj = TestRun(test, env, name, timeout, is_parallel, verbose)
+ self.runobj = TestRun(test, env, name, timeout, is_parallel, verbose, self.options.interactive)
- if self.options.interactive:
- self.console_mode = ConsoleUser.INTERACTIVE
- elif self.runobj.direct_stdout:
- self.console_mode = ConsoleUser.STDOUT
- else:
- self.console_mode = ConsoleUser.LOGGER
+ @property
+ def console_mode(self) -> ConsoleUser:
+ return self.runobj.console_mode
def _get_test_cmd(self) -> T.Optional[T.List[str]]:
testentry = self.test.fname[0]
@@ -1566,7 +1630,7 @@ async def _run_cmd(self, harness: 'TestHarness', cmd: T.List[str]) -> None:
env=self.runobj.env,
cwd=self.test.workdir)
- if self.runobj.needs_parsing:
+ if self.runobj.needs_parsing and self.console_mode is not ConsoleUser.INTERACTIVE:
parse_coro = self.runobj.parse(harness, p.stdout_lines())
parse_task = asyncio.ensure_future(parse_coro)
else:
@@ -1594,6 +1658,7 @@ def __init__(self, options: argparse.Namespace):
self.unexpectedpass_count = 0
self.success_count = 0
self.skip_count = 0
+ self.ignored_count = 0
self.timeout_count = 0
self.test_count = 0
self.name_max_len = 0
@@ -1737,6 +1802,8 @@ def process_test_result(self, result: TestRun) -> None:
self.timeout_count += 1
elif result.res is TestResult.SKIP:
self.skip_count += 1
+ elif result.res is TestResult.IGNORED:
+ self.ignored_count += 1
elif result.res is TestResult.OK:
self.success_count += 1
elif result.res in {TestResult.FAIL, TestResult.ERROR, TestResult.INTERRUPT}:
@@ -1795,15 +1862,22 @@ def format(self, result: TestRun, colorize: bool,
return prefix + left + middle + right
def summary(self) -> str:
- return textwrap.dedent('''
- Ok: {:<4}
- Expected Fail: {:<4}
- Fail: {:<4}
- Unexpected Pass: {:<4}
- Skipped: {:<4}
- Timeout: {:<4}
- ''').format(self.success_count, self.expectedfail_count, self.fail_count,
- self.unexpectedpass_count, self.skip_count, self.timeout_count)
+ results = {
+ 'Ok: ': self.success_count,
+ 'Expected Fail: ': self.expectedfail_count,
+ 'Fail: ': self.fail_count,
+ 'Unexpected Pass: ': self.unexpectedpass_count,
+ 'Skipped: ': self.skip_count,
+ 'Ignored: ': self.ignored_count,
+ 'Timeout: ': self.timeout_count,
+ }
+
+ summary = []
+ for result, count in results.items():
+ if count > 0 or result.startswith('Ok:') or result.startswith('Fail:'):
+ summary.append(result + '{:<4}'.format(count))
+
+ return '\n{}\n'.format('\n'.join(summary))
def total_failure_count(self) -> int:
return self.fail_count + self.unexpectedpass_count + self.timeout_count
@@ -1813,9 +1887,10 @@ def doit(self) -> int:
raise RuntimeError('Test harness object can only be used once.')
self.is_run = True
tests = self.get_tests()
+ rebuild_only_tests = tests if self.options.args else []
if not tests:
return 0
- if not self.options.no_rebuild and not rebuild_deps(self.ninja, self.options.wd, tests):
+ if not self.options.no_rebuild and not rebuild_deps(self.ninja, self.options.wd, rebuild_only_tests, self.options.benchmark):
# We return 125 here in case the build failed.
# The reason is that exit code 125 tells `git bisect run` that the current
# commit should be skipped. Thus users can directly use `meson test` to
@@ -1963,6 +2038,11 @@ def get_tests(self, errorfile: T.Optional[T.IO] = None) -> T.List[TestSerialisat
tests = [t for t in self.tests if self.test_suitable(t)]
if self.options.args:
tests = list(self.tests_from_args(tests))
+ if self.options.slice:
+ our_slice, nslices = self.options.slice
+ if nslices > len(tests):
+ raise MesonException(f'number of slices ({nslices}) exceeds number of tests ({len(tests)})')
+ tests = tests[our_slice - 1::nslices]
if not tests:
print('No suitable tests defined.', file=errorfile)
@@ -1984,15 +2064,16 @@ def open_logfiles(self) -> None:
@staticmethod
def get_wrapper(options: argparse.Namespace) -> T.List[str]:
- wrap: T.List[str] = []
if options.gdb:
wrap = [options.gdb_path, '--quiet']
if options.repeat > 1:
wrap += ['-ex', 'run', '-ex', 'quit']
# Signal the end of arguments to gdb
wrap += ['--args']
- if options.wrapper:
- wrap += options.wrapper
+ elif options.wrapper:
+ wrap = options.wrapper
+ else:
+ wrap = []
return wrap
def get_pretty_suite(self, test: TestSerialisation) -> str:
@@ -2128,7 +2209,7 @@ def list_tests(th: TestHarness) -> bool:
print(th.get_pretty_suite(t))
return not tests
-def rebuild_deps(ninja: T.List[str], wd: str, tests: T.List[TestSerialisation]) -> bool:
+def rebuild_deps(ninja: T.List[str], wd: str, tests: T.List[TestSerialisation], benchmark: bool) -> bool:
def convert_path_to_target(path: str) -> str:
path = os.path.relpath(path, wd)
if os.sep != '/':
@@ -2137,19 +2218,34 @@ def convert_path_to_target(path: str) -> str:
assert len(ninja) > 0
- depends: T.Set[str] = set()
targets: T.Set[str] = set()
- intro_targets: T.Dict[str, T.List[str]] = {}
- for target in load_info_file(get_infodir(wd), kind='targets'):
- intro_targets[target['id']] = [
- convert_path_to_target(f)
- for f in target['filename']]
- for t in tests:
- for d in t.depends:
- if d in depends:
- continue
- depends.update(d)
- targets.update(intro_targets[d])
+ if tests:
+ targets_file = os.path.join(wd, 'meson-info/intro-targets.json')
+ with open(targets_file, encoding='utf-8') as fp:
+ targets_info = json.load(fp)
+
+ depends: T.Set[str] = set()
+ intro_targets: T.Dict[str, T.List[str]] = {}
+ for target in targets_info:
+ intro_targets[target['id']] = [
+ convert_path_to_target(f)
+ for f in target['filename']]
+ for t in tests:
+ for d in t.depends:
+ if d in depends:
+ continue
+ depends.update(d)
+ targets.update(intro_targets[d])
+ else:
+ if benchmark:
+ targets.add('meson-benchmark-prereq')
+ else:
+ targets.add('meson-test-prereq')
+
+ if not targets:
+    # We want to build minimal deps, but if the subset of targets has no
+    # deps then ninja falls back to 'all'.
+ return True
ret = subprocess.run(ninja + ['-C', wd] + sorted(targets)).returncode
if ret != 0:
@@ -2187,11 +2283,11 @@ def run(options: argparse.Namespace) -> int:
return 1
b = build.load(options.wd)
- need_vsenv = T.cast('bool', b.environment.coredata.get_option(OptionKey('vsenv')))
+ need_vsenv = T.cast('bool', b.environment.coredata.optstore.get_value_for(OptionKey('vsenv')))
setup_vsenv(need_vsenv)
if not options.no_rebuild:
- backend = b.environment.coredata.get_option(OptionKey('backend'))
+ backend = b.environment.coredata.optstore.get_value_for(OptionKey('backend'))
if backend == 'none':
# nothing to build...
options.no_rebuild = True
diff --git a/mesonbuild/munstable_coredata.py b/mesonbuild/munstable_coredata.py
index 409b514b608e..b647dd8fcfb4 100644
--- a/mesonbuild/munstable_coredata.py
+++ b/mesonbuild/munstable_coredata.py
@@ -53,7 +53,7 @@ def run(options):
print('')
coredata = cdata.load(options.builddir)
- backend = coredata.get_option(OptionKey('backend'))
+ backend = coredata.optstore.get_value_for(OptionKey('backend'))
for k, v in sorted(coredata.__dict__.items()):
if k in {'backend_options', 'base_options', 'builtins', 'compiler_options', 'user_options'}:
# use `meson configure` to view these
diff --git a/mesonbuild/optinterpreter.py b/mesonbuild/optinterpreter.py
index 4688ee4c4f49..892d4d5e3786 100644
--- a/mesonbuild/optinterpreter.py
+++ b/mesonbuild/optinterpreter.py
@@ -15,7 +15,6 @@
from .interpreter.type_checking import NoneType, in_set_validator
if T.TYPE_CHECKING:
- from . import coredata
from .interpreterbase import TYPE_var, TYPE_kwargs
from .interpreterbase import SubProject
from typing_extensions import TypedDict, Literal
@@ -67,9 +66,9 @@ class OptionException(mesonlib.MesonException):
class OptionInterpreter:
def __init__(self, optionstore: 'OptionStore', subproject: 'SubProject') -> None:
- self.options: 'coredata.MutableKeyedOptionDictType' = {}
+ self.options: options.MutableKeyedOptionDictType = {}
self.subproject = subproject
- self.option_types: T.Dict[str, T.Callable[..., options.UserOption]] = {
+ self.option_types: T.Dict[str, T.Callable[..., options.AnyOptionType]] = {
'string': self.string_parser,
'boolean': self.boolean_parser,
'combo': self.combo_parser,
@@ -226,7 +225,8 @@ def string_parser(self, name: str, description: str, args: T.Tuple[bool, _DEPREC
),
)
def boolean_parser(self, name: str, description: str, args: T.Tuple[bool, _DEPRECATED_ARGS], kwargs: BooleanArgs) -> options.UserOption:
- return options.UserBooleanOption(name, description, kwargs['value'], *args)
+ yielding, deprecated = args
+ return options.UserBooleanOption(name, description, kwargs['value'], yielding=yielding, deprecated=deprecated)
@typed_kwargs(
'combo option',
@@ -238,7 +238,7 @@ def combo_parser(self, name: str, description: str, args: T.Tuple[bool, _DEPRECA
value = kwargs['value']
if value is None:
value = kwargs['choices'][0]
- return options.UserComboOption(name, description, choices, value, *args)
+ return options.UserComboOption(name, description, value, *args, choices=choices)
@typed_kwargs(
'integer option',
@@ -253,9 +253,8 @@ def combo_parser(self, name: str, description: str, args: T.Tuple[bool, _DEPRECA
KwargInfo('max', (int, NoneType)),
)
def integer_parser(self, name: str, description: str, args: T.Tuple[bool, _DEPRECATED_ARGS], kwargs: IntegerArgs) -> options.UserOption:
- value = kwargs['value']
- inttuple = (kwargs['min'], kwargs['max'], value)
- return options.UserIntegerOption(name, description, inttuple, *args)
+ return options.UserIntegerOption(
+ name, description, kwargs['value'], *args, min_value=kwargs['min'], max_value=kwargs['max'])
@typed_kwargs(
'string array option',
@@ -270,10 +269,11 @@ def string_array_parser(self, name: str, description: str, args: T.Tuple[bool, _
FeatureDeprecated('String value for array option', '1.3.0').use(self.subproject)
else:
raise mesonlib.MesonException('Value does not define an array: ' + value)
- return options.UserArrayOption(name, description, value,
- choices=choices,
- yielding=args[0],
- deprecated=args[1])
+ return options.UserStringArrayOption(
+ name, description, value,
+ choices=choices,
+ yielding=args[0],
+ deprecated=args[1])
@typed_kwargs(
'feature option',
diff --git a/mesonbuild/options.py b/mesonbuild/options.py
index 1566f940c98c..2b2f9d578725 100644
--- a/mesonbuild/options.py
+++ b/mesonbuild/options.py
@@ -1,12 +1,17 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2013-2024 Contributors to the The Meson project
-# Copyright © 2019-2024 Intel Corporation
+# Copyright © 2019-2025 Intel Corporation
from __future__ import annotations
from collections import OrderedDict
from itertools import chain
-from functools import total_ordering
import argparse
+import copy
+import dataclasses
+import itertools
+import os
+import pathlib
+
import typing as T
from .mesonlib import (
@@ -22,13 +27,26 @@
default_sbindir,
default_sysconfdir,
MesonException,
+ MesonBugException,
listify_array_value,
MachineChoice,
)
from . import mlog
if T.TYPE_CHECKING:
- from typing_extensions import TypedDict
+ from typing_extensions import Literal, Final, TypeAlias, TypedDict
+
+ from .interpreterbase import SubProject
+
+ DeprecatedType: TypeAlias = T.Union[bool, str, T.Dict[str, str], T.List[str]]
+ AnyOptionType: TypeAlias = T.Union[
+ 'UserBooleanOption', 'UserComboOption', 'UserFeatureOption',
+ 'UserIntegerOption', 'UserStdOption', 'UserStringArrayOption',
+ 'UserStringOption', 'UserUmaskOption']
+ ElementaryOptionValues: TypeAlias = T.Union[str, int, bool, T.List[str]]
+ MutableKeyedOptionDictType: TypeAlias = T.Dict['OptionKey', AnyOptionType]
+
+ _OptionKeyTuple: TypeAlias = T.Tuple[T.Optional[str], MachineChoice, str]
class ArgparseKWs(TypedDict, total=False):
@@ -46,7 +64,6 @@ class ArgparseKWs(TypedDict, total=False):
genvslitelist = ['vs2022']
buildtypelist = ['plain', 'debug', 'debugoptimized', 'release', 'minsize', 'custom']
-
# This is copied from coredata. There is no way to share this, because this
# is used in the OptionKey constructor, and the coredata lists are
# OptionKeys...
@@ -90,7 +107,10 @@ class ArgparseKWs(TypedDict, total=False):
'vsenv',
}
-@total_ordering
+_BAD_VALUE = 'Qwert Zuiopü'
+_optionkey_cache: T.Dict[_OptionKeyTuple, OptionKey] = {}
+
+
class OptionKey:
"""Represents an option key in the various option dictionaries.
@@ -100,19 +120,42 @@ class OptionKey:
internally easier to reason about and produce.
"""
- __slots__ = ['name', 'subproject', 'machine', '_hash']
+ __slots__ = ('name', 'subproject', 'machine', '_hash')
name: str
- subproject: str
+ subproject: T.Optional[str] # None is global, empty string means top level project
machine: MachineChoice
_hash: int
- def __init__(self, name: str, subproject: str = '',
- machine: MachineChoice = MachineChoice.HOST):
- # the _type option to the constructor is kinda private. We want to be
- # able to save the state and avoid the lookup function when
- # pickling/unpickling, but we need to be able to calculate it when
- # constructing a new OptionKey
+ def __new__(cls,
+ name: str = '',
+ subproject: T.Optional[str] = None,
+ machine: MachineChoice = MachineChoice.HOST) -> OptionKey:
+        """Using the __new__ method makes it possible to add a transparent cache
+        to OptionKey object creation without breaking its API.
+ """
+ if not name:
+ return super().__new__(cls) # for unpickling, do not cache now
+
+ tuple_: _OptionKeyTuple = (subproject, machine, name)
+ try:
+ return _optionkey_cache[tuple_]
+ except KeyError:
+ instance = super().__new__(cls)
+ instance._init(name, subproject, machine)
+ _optionkey_cache[tuple_] = instance
+ return instance
+
+ def _init(self, name: str, subproject: T.Optional[str], machine: MachineChoice) -> None:
+ # We don't use the __init__ method, because it would be called after __new__
+ # while we need __new__ to initialise the object before populating the cache.
+
+ if not isinstance(machine, MachineChoice):
+ raise MesonException(f'Internal error, bad machine type: {machine}')
+ if not isinstance(name, str):
+ raise MesonBugException(f'Key name is not a string: {name}')
+ assert ':' not in name
+
object.__setattr__(self, 'name', name)
object.__setattr__(self, 'subproject', subproject)
object.__setattr__(self, 'machine', machine)
@@ -129,20 +172,14 @@ def __getstate__(self) -> T.Dict[str, T.Any]:
}
def __setstate__(self, state: T.Dict[str, T.Any]) -> None:
- """De-serialize the state of a pickle.
-
- This is very clever. __init__ is not a constructor, it's an
- initializer, therefore it's safe to call more than once. We create a
- state in the custom __getstate__ method, which is valid to pass
- splatted to the initializer.
- """
- # Mypy doesn't like this, because it's so clever.
- self.__init__(**state) # type: ignore
+ # Here, the object is created using __new__()
+ self._init(**state)
+ _optionkey_cache[self._to_tuple()] = self
def __hash__(self) -> int:
return self._hash
- def _to_tuple(self) -> T.Tuple[str, str, str, MachineChoice, str]:
+ def _to_tuple(self) -> _OptionKeyTuple:
return (self.subproject, self.machine, self.name)
def __eq__(self, other: object) -> bool:
@@ -150,16 +187,52 @@ def __eq__(self, other: object) -> bool:
return self._to_tuple() == other._to_tuple()
return NotImplemented
+ def __ne__(self, other: object) -> bool:
+ if isinstance(other, OptionKey):
+ return self._to_tuple() != other._to_tuple()
+ return NotImplemented
+
def __lt__(self, other: object) -> bool:
if isinstance(other, OptionKey):
+ if self.subproject is None:
+ return other.subproject is not None
+ elif other.subproject is None:
+ return False
return self._to_tuple() < other._to_tuple()
return NotImplemented
+ def __le__(self, other: object) -> bool:
+ if isinstance(other, OptionKey):
+ if self.subproject is None and other.subproject is not None:
+ return True
+ elif self.subproject is not None and other.subproject is None:
+ return False
+ return self._to_tuple() <= other._to_tuple()
+ return NotImplemented
+
+ def __gt__(self, other: object) -> bool:
+ if isinstance(other, OptionKey):
+ if other.subproject is None:
+ return self.subproject is not None
+ elif self.subproject is None:
+ return False
+ return self._to_tuple() > other._to_tuple()
+ return NotImplemented
+
+ def __ge__(self, other: object) -> bool:
+ if isinstance(other, OptionKey):
+ if self.subproject is None and other.subproject is not None:
+ return False
+ elif self.subproject is not None and other.subproject is None:
+ return True
+ return self._to_tuple() >= other._to_tuple()
+ return NotImplemented
+
def __str__(self) -> str:
out = self.name
if self.machine is MachineChoice.BUILD:
out = f'build.{out}'
- if self.subproject:
+ if self.subproject is not None:
out = f'{self.subproject}:{out}'
return out
@@ -173,10 +246,11 @@ def from_string(cls, raw: str) -> 'OptionKey':
This takes strings like `mysubproject:build.myoption` and Creates an
OptionKey out of them.
"""
+ assert isinstance(raw, str)
try:
subproject, raw2 = raw.split(':')
except ValueError:
- subproject, raw2 = '', raw
+ subproject, raw2 = None, raw
for_machine = MachineChoice.HOST
try:
@@ -194,7 +268,9 @@ def from_string(cls, raw: str) -> 'OptionKey':
return cls(opt, subproject, for_machine)
- def evolve(self, name: T.Optional[str] = None, subproject: T.Optional[str] = None,
+ def evolve(self,
+ name: T.Optional[str] = None,
+ subproject: T.Optional[str] = _BAD_VALUE,
machine: T.Optional[MachineChoice] = None) -> 'OptionKey':
"""Create a new copy of this key, but with altered members.
@@ -206,29 +282,22 @@ def evolve(self, name: T.Optional[str] = None, subproject: T.Optional[str] = Non
"""
# We have to be a little clever with lang here, because lang is valid
# as None, for non-compiler options
- return OptionKey(
- name if name is not None else self.name,
- subproject if subproject is not None else self.subproject,
- machine if machine is not None else self.machine,
- )
+ return OptionKey(name if name is not None else self.name,
+                         subproject if subproject != _BAD_VALUE else self.subproject, # None is a valid value, so it can't be the default value in the method declaration.
+ machine if machine is not None else self.machine)
- def as_root(self) -> 'OptionKey':
+ def as_root(self) -> OptionKey:
"""Convenience method for key.evolve(subproject='')."""
return self.evolve(subproject='')
- def as_build(self) -> 'OptionKey':
+ def as_build(self) -> OptionKey:
"""Convenience method for key.evolve(machine=MachineChoice.BUILD)."""
return self.evolve(machine=MachineChoice.BUILD)
- def as_host(self) -> 'OptionKey':
+ def as_host(self) -> OptionKey:
"""Convenience method for key.evolve(machine=MachineChoice.HOST)."""
return self.evolve(machine=MachineChoice.HOST)
- def is_project_hack_for_optionsview(self) -> bool:
- """This method will be removed once we can delete OptionsView."""
- import sys
- sys.exit('FATAL internal error. This should not make it into an actual release. File a bug.')
-
def has_module_prefix(self) -> bool:
return '.' in self.name
@@ -237,34 +306,43 @@ def get_module_prefix(self) -> T.Optional[str]:
return self.name.split('.', 1)[0]
return None
- def without_module_prefix(self) -> 'OptionKey':
- if self.has_module_prefix():
- newname = self.name.split('.', 1)[1]
- return self.evolve(newname)
- return self
+ def is_for_build(self) -> bool:
+ return self.machine is MachineChoice.BUILD
+if T.TYPE_CHECKING:
+ OptionStringLikeDict: TypeAlias = T.Dict[T.Union[OptionKey, str], str]
+@dataclasses.dataclass
class UserOption(T.Generic[_T], HoldableObject):
- def __init__(self, name: str, description: str, choices: T.Optional[T.Union[str, T.List[_T]]],
- yielding: bool,
- deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False):
- super().__init__()
- self.name = name
- self.choices = choices
- self.description = description
- if not isinstance(yielding, bool):
- raise MesonException('Value of "yielding" must be a boolean.')
- self.yielding = yielding
- self.deprecated = deprecated
- self.readonly = False
-
- def listify(self, value: T.Any) -> T.List[T.Any]:
+
+ name: str
+ description: str
+ value_: dataclasses.InitVar[_T]
+ yielding: bool = DEFAULT_YIELDING
+ deprecated: DeprecatedType = False
+ readonly: bool = dataclasses.field(default=False)
+
+ def __post_init__(self, value_: _T) -> None:
+ self.value = self.validate_value(value_)
+ # Final isn't technically allowed in a __post_init__ method
+ self.default: Final[_T] = self.value # type: ignore[misc]
+
+ def listify(self, value: ElementaryOptionValues) -> T.List[str]:
+ if isinstance(value, list):
+ return value
+ if isinstance(value, bool):
+ return ['true'] if value else ['false']
+ if isinstance(value, int):
+ return [str(value)]
return [value]
- def printable_value(self) -> T.Union[str, int, bool, T.List[T.Union[str, int, bool]]]:
+ def printable_value(self) -> ElementaryOptionValues:
assert isinstance(self.value, (str, int, bool, list))
return self.value
+ def printable_choices(self) -> T.Optional[T.List[str]]:
+ return None
+
# Check that the input is a valid value and return the
# "cleaned" or "native" version. For example the Boolean
# option could take the string "true" and return True.
@@ -272,29 +350,32 @@ def validate_value(self, value: T.Any) -> _T:
raise RuntimeError('Derived option class did not override validate_value.')
def set_value(self, newvalue: T.Any) -> bool:
- oldvalue = getattr(self, 'value', None)
+ oldvalue = self.value
self.value = self.validate_value(newvalue)
return self.value != oldvalue
-_U = T.TypeVar('_U', bound=UserOption[_T])
+@dataclasses.dataclass
+class EnumeratedUserOption(UserOption[_T]):
+
+ """A generic UserOption that has enumerated values."""
+
+ choices: T.List[_T] = dataclasses.field(default_factory=list)
+
+ def printable_choices(self) -> T.Optional[T.List[str]]:
+ return [str(c) for c in self.choices]
class UserStringOption(UserOption[str]):
- def __init__(self, name: str, description: str, value: T.Any, yielding: bool = DEFAULT_YIELDING,
- deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False):
- super().__init__(name, description, None, yielding, deprecated)
- self.set_value(value)
def validate_value(self, value: T.Any) -> str:
if not isinstance(value, str):
raise MesonException(f'The value of option "{self.name}" is "{value}", which is not a string.')
return value
-class UserBooleanOption(UserOption[bool]):
- def __init__(self, name: str, description: str, value: bool, yielding: bool = DEFAULT_YIELDING,
- deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False):
- super().__init__(name, description, [True, False], yielding, deprecated)
- self.set_value(value)
+@dataclasses.dataclass
+class UserBooleanOption(EnumeratedUserOption[bool]):
+
+ choices: T.List[bool] = dataclasses.field(default_factory=lambda: [True, False])
def __bool__(self) -> bool:
return self.value
@@ -310,31 +391,44 @@ def validate_value(self, value: T.Any) -> bool:
return False
raise MesonException(f'Option "{self.name}" value {value} is not boolean (true or false).')
-class UserIntegerOption(UserOption[int]):
- def __init__(self, name: str, description: str, value: T.Any, yielding: bool = DEFAULT_YIELDING,
- deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False):
- min_value, max_value, default_value = value
- self.min_value = min_value
- self.max_value = max_value
- c: T.List[str] = []
- if min_value is not None:
- c.append('>=' + str(min_value))
- if max_value is not None:
- c.append('<=' + str(max_value))
- choices = ', '.join(c)
- super().__init__(name, description, choices, yielding, deprecated)
- self.set_value(default_value)
-
- def validate_value(self, value: T.Any) -> int:
+
+class _UserIntegerBase(UserOption[_T]):
+
+ min_value: T.Optional[int]
+ max_value: T.Optional[int]
+
+ if T.TYPE_CHECKING:
+ def toint(self, v: str) -> int: ...
+
+ def __post_init__(self, value_: _T) -> None:
+ super().__post_init__(value_)
+ choices: T.List[str] = []
+ if self.min_value is not None:
+ choices.append(f'>= {self.min_value!s}')
+ if self.max_value is not None:
+ choices.append(f'<= {self.max_value!s}')
+ self.__choices: str = ', '.join(choices)
+
+ def printable_choices(self) -> T.Optional[T.List[str]]:
+ return [self.__choices]
+
+ def validate_value(self, value: T.Any) -> _T:
if isinstance(value, str):
- value = self.toint(value)
+ value = T.cast('_T', self.toint(value))
if not isinstance(value, int):
raise MesonException(f'Value {value!r} for option "{self.name}" is not an integer.')
if self.min_value is not None and value < self.min_value:
raise MesonException(f'Value {value} for option "{self.name}" is less than minimum value {self.min_value}.')
if self.max_value is not None and value > self.max_value:
raise MesonException(f'Value {value} for option "{self.name}" is more than maximum value {self.max_value}.')
- return value
+ return T.cast('_T', value)
+
+
+@dataclasses.dataclass
+class UserIntegerOption(_UserIntegerBase[int]):
+
+ min_value: T.Optional[int] = None
+ max_value: T.Optional[int] = None
def toint(self, valuestring: str) -> int:
try:
@@ -342,6 +436,7 @@ def toint(self, valuestring: str) -> int:
except ValueError:
raise MesonException(f'Value string "{valuestring}" for option "{self.name}" is not convertible to an integer.')
+
class OctalInt(int):
# NinjaBackend.get_user_option_args uses str() to converts it to a command line option
# UserUmaskOption.toint() uses int(str, 8) to convert it to an integer
@@ -349,39 +444,32 @@ class OctalInt(int):
def __str__(self) -> str:
return oct(int(self))
-class UserUmaskOption(UserIntegerOption, UserOption[T.Union[str, OctalInt]]):
- def __init__(self, name: str, description: str, value: T.Any, yielding: bool = DEFAULT_YIELDING,
- deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False):
- super().__init__(name, description, (0, 0o777, value), yielding, deprecated)
- self.choices = ['preserve', '0000-0777']
+
+@dataclasses.dataclass
+class UserUmaskOption(_UserIntegerBase[T.Union["Literal['preserve']", OctalInt]]):
+
+ min_value: T.Optional[int] = dataclasses.field(default=0, init=False)
+ max_value: T.Optional[int] = dataclasses.field(default=0o777, init=False)
def printable_value(self) -> str:
- if self.value == 'preserve':
- return self.value
- return format(self.value, '04o')
+ if isinstance(self.value, int):
+ return format(self.value, '04o')
+ return self.value
- def validate_value(self, value: T.Any) -> T.Union[str, OctalInt]:
+ def validate_value(self, value: T.Any) -> T.Union[Literal['preserve'], OctalInt]:
if value == 'preserve':
return 'preserve'
return OctalInt(super().validate_value(value))
- def toint(self, valuestring: T.Union[str, OctalInt]) -> int:
+ def toint(self, valuestring: str) -> int:
try:
return int(valuestring, 8)
except ValueError as e:
raise MesonException(f'Invalid mode for option "{self.name}" {e}')
-class UserComboOption(UserOption[str]):
- def __init__(self, name: str, description: str, choices: T.List[str], value: T.Any,
- yielding: bool = DEFAULT_YIELDING,
- deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False):
- super().__init__(name, description, choices, yielding, deprecated)
- if not isinstance(self.choices, list):
- raise MesonException(f'Combo choices for option "{self.name}" must be an array.')
- for i in self.choices:
- if not isinstance(i, str):
- raise MesonException(f'Combo choice elements for option "{self.name}" must be strings.')
- self.set_value(value)
+
+@dataclasses.dataclass
+class UserComboOption(EnumeratedUserOption[str]):
def validate_value(self, value: T.Any) -> str:
if value not in self.choices:
@@ -395,20 +483,33 @@ def validate_value(self, value: T.Any) -> str:
raise MesonException('Value "{}" (of type "{}") for option "{}" is not one of the choices.'
' Possible choices are (as string): {}.'.format(
value, _type, self.name, optionsstring))
+
+ assert isinstance(value, str), 'for mypy'
return value
-class UserArrayOption(UserOption[T.List[str]]):
- def __init__(self, name: str, description: str, value: T.Union[str, T.List[str]],
- split_args: bool = False,
- allow_dups: bool = False, yielding: bool = DEFAULT_YIELDING,
- choices: T.Optional[T.List[str]] = None,
- deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False):
- super().__init__(name, description, choices if choices is not None else [], yielding, deprecated)
- self.split_args = split_args
- self.allow_dups = allow_dups
- self.set_value(value)
-
- def listify(self, value: T.Any) -> T.List[T.Any]:
+@dataclasses.dataclass
+class UserArrayOption(UserOption[T.List[_T]]):
+
+ value_: dataclasses.InitVar[T.Union[_T, T.List[_T]]]
+ choices: T.Optional[T.List[_T]] = None
+ split_args: bool = False
+ allow_dups: bool = False
+
+ def extend_value(self, value: T.Union[str, T.List[str]]) -> None:
+ """Extend the value with an additional value."""
+ new = self.validate_value(value)
+ self.set_value(self.value + new)
+
+ def printable_choices(self) -> T.Optional[T.List[str]]:
+ if self.choices is None:
+ return None
+ return [str(c) for c in self.choices]
+
+
+@dataclasses.dataclass
+class UserStringArrayOption(UserArrayOption[str]):
+
+ def listify(self, value: ElementaryOptionValues) -> T.List[str]:
try:
return listify_array_value(value, self.split_args)
except MesonException as e:
@@ -436,19 +537,13 @@ def validate_value(self, value: T.Union[str, T.List[str]]) -> T.List[str]:
)
return newvalue
- def extend_value(self, value: T.Union[str, T.List[str]]) -> None:
- """Extend the value with an additional value."""
- new = self.validate_value(value)
- self.set_value(self.value + new)
-
+@dataclasses.dataclass
class UserFeatureOption(UserComboOption):
- static_choices = ['enabled', 'disabled', 'auto']
- def __init__(self, name: str, description: str, value: T.Any, yielding: bool = DEFAULT_YIELDING,
- deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False):
- super().__init__(name, description, self.static_choices, value, yielding, deprecated)
- self.name: T.Optional[str] = None # TODO: Refactor options to all store their name
+ choices: T.List[str] = dataclasses.field(
+ # Ensure we get a copy with the lambda
+ default_factory=lambda: ['enabled', 'disabled', 'auto'], init=False)
def is_enabled(self) -> bool:
return self.value == 'enabled'
@@ -459,6 +554,32 @@ def is_disabled(self) -> bool:
def is_auto(self) -> bool:
return self.value == 'auto'
+
+_U = T.TypeVar('_U', bound=UserOption)
+
+
+def choices_are_different(a: _U, b: _U) -> bool:
+ """Are the choices between two options the same?
+
+ :param a: A UserOption[T]
+ :param b: A second UserOption[T]
+ :return: True if the choices have changed, otherwise False
+ """
+ if isinstance(a, EnumeratedUserOption):
+ # We expect `a` and `b` to be of the same type, but can't really annotate it that way.
+ assert isinstance(b, EnumeratedUserOption), 'for mypy'
+ return a.choices != b.choices
+ elif isinstance(a, UserArrayOption):
+ # We expect `a` and `b` to be of the same type, but can't really annotate it that way.
+ assert isinstance(b, UserArrayOption), 'for mypy'
+ return a.choices != b.choices
+ elif isinstance(a, _UserIntegerBase):
+ assert isinstance(b, _UserIntegerBase), 'for mypy'
+ return a.max_value != b.max_value or a.min_value != b.min_value
+
+ return False
+
+
class UserStdOption(UserComboOption):
'''
UserOption specific to c_std and cpp_std options. User can set a list of
@@ -479,7 +600,7 @@ def __init__(self, lang: str, all_stds: T.List[str]) -> None:
# Map a deprecated std to its replacement. e.g. gnu11 -> c11.
self.deprecated_stds: T.Dict[str, str] = {}
opt_name = 'cpp_std' if lang == 'c++' else f'{lang}_std'
- super().__init__(opt_name, f'{lang} language standard to use', ['none'], 'none')
+ super().__init__(opt_name, f'{lang} language standard to use', 'none', choices=['none'])
def set_versions(self, versions: T.List[str], gnu: bool = False, gnu_deprecated: bool = False) -> None:
assert all(std in self.all_stds for std in versions)
@@ -512,165 +633,125 @@ def validate_value(self, value: T.Union[str, T.List[str]]) -> str:
f'However, the deprecated {std} std currently falls back to {newstd}.\n' +
'This will be an error in meson 2.0.\n' +
'If the project supports both GNU and MSVC compilers, a value such as\n' +
- '"c_std=gnu11,c11" specifies that GNU is preferred but it can safely fallback to plain c11.')
+ '"c_std=gnu11,c11" specifies that GNU is preferred but it can safely fallback to plain c11.', once=True)
return newstd
raise MesonException(f'None of values {candidates} are supported by the {self.lang.upper()} compiler. ' +
f'Possible values for option "{self.name}" are {self.choices}')
-class BuiltinOption(T.Generic[_T, _U]):
+def argparse_name_to_arg(name: str) -> str:
+ if name == 'warning_level':
+ return '--warnlevel'
+ return '--' + name.replace('_', '-')
- """Class for a builtin option type.
- There are some cases that are not fully supported yet.
- """
+def argparse_prefixed_default(opt: AnyOptionType, name: OptionKey, prefix: str = '') -> ElementaryOptionValues:
+ if isinstance(opt, (UserComboOption, UserIntegerOption, UserUmaskOption)):
+ return T.cast('ElementaryOptionValues', opt.default)
+ try:
+ return BUILTIN_DIR_NOPREFIX_OPTIONS[name][prefix]
+ except KeyError:
+ return T.cast('ElementaryOptionValues', opt.default)
- def __init__(self, opt_type: T.Type[_U], description: str, default: T.Any, yielding: bool = True, *,
- choices: T.Any = None, readonly: bool = False):
- self.opt_type = opt_type
- self.description = description
- self.default = default
- self.choices = choices
- self.yielding = yielding
- self.readonly = readonly
-
- def init_option(self, name: 'OptionKey', value: T.Optional[T.Any], prefix: str) -> _U:
- """Create an instance of opt_type and return it."""
- if value is None:
- value = self.prefixed_default(name, prefix)
- keywords = {'yielding': self.yielding, 'value': value}
- if self.choices:
- keywords['choices'] = self.choices
- o = self.opt_type(name.name, self.description, **keywords)
- o.readonly = self.readonly
- return o
-
- def _argparse_action(self) -> T.Optional[str]:
- # If the type is a boolean, the presence of the argument in --foo form
- # is to enable it. Disabling happens by using -Dfoo=false, which is
- # parsed under `args.projectoptions` and does not hit this codepath.
- if isinstance(self.default, bool):
- return 'store_true'
- return None
-
- def _argparse_choices(self) -> T.Any:
- if self.opt_type is UserBooleanOption:
- return [True, False]
- elif self.opt_type is UserFeatureOption:
- return UserFeatureOption.static_choices
- return self.choices
-
- @staticmethod
- def argparse_name_to_arg(name: str) -> str:
- if name == 'warning_level':
- return '--warnlevel'
- else:
- return '--' + name.replace('_', '-')
-
- def prefixed_default(self, name: 'OptionKey', prefix: str = '') -> T.Any:
- if self.opt_type in [UserComboOption, UserIntegerOption]:
- return self.default
- try:
- return BUILTIN_DIR_NOPREFIX_OPTIONS[name][prefix]
- except KeyError:
- pass
- return self.default
- def add_to_argparse(self, name: str, parser: argparse.ArgumentParser, help_suffix: str) -> None:
- kwargs: ArgparseKWs = {}
+def option_to_argparse(option: AnyOptionType, name: OptionKey, parser: argparse.ArgumentParser, help_suffix: str) -> None:
+ kwargs: ArgparseKWs = {}
- c = self._argparse_choices()
- b = self._argparse_action()
- h = self.description
- if not b:
- h = '{} (default: {}).'.format(h.rstrip('.'), self.prefixed_default(name))
- else:
- kwargs['action'] = b
- if c and not b:
- kwargs['choices'] = c
- kwargs['default'] = argparse.SUPPRESS
- kwargs['dest'] = name
+ if isinstance(option, (EnumeratedUserOption, UserArrayOption)):
+ c = option.choices
+ else:
+ c = None
+ b = 'store_true' if isinstance(option.default, bool) else None
+ h = option.description
+ if not b:
+ h = '{} (default: {}).'.format(h.rstrip('.'), argparse_prefixed_default(option, name))
+ else:
+ kwargs['action'] = b
+ if c and not b:
+ kwargs['choices'] = c
+ kwargs['default'] = argparse.SUPPRESS
+ kwargs['dest'] = str(name)
- cmdline_name = self.argparse_name_to_arg(name)
- parser.add_argument(cmdline_name, help=h + help_suffix, **kwargs)
+ cmdline_name = argparse_name_to_arg(str(name))
+ parser.add_argument(cmdline_name, help=h + help_suffix, **kwargs)
# Update `docs/markdown/Builtin-options.md` after changing the options below
# Also update mesonlib._BUILTIN_NAMES. See the comment there for why this is required.
# Please also update completion scripts in $MESONSRC/data/shell-completions/
-BUILTIN_DIR_OPTIONS: T.Dict['OptionKey', 'BuiltinOption'] = OrderedDict([
- (OptionKey('prefix'), BuiltinOption(UserStringOption, 'Installation prefix', default_prefix())),
- (OptionKey('bindir'), BuiltinOption(UserStringOption, 'Executable directory', 'bin')),
- (OptionKey('datadir'), BuiltinOption(UserStringOption, 'Data file directory', default_datadir())),
- (OptionKey('includedir'), BuiltinOption(UserStringOption, 'Header file directory', default_includedir())),
- (OptionKey('infodir'), BuiltinOption(UserStringOption, 'Info page directory', default_infodir())),
- (OptionKey('libdir'), BuiltinOption(UserStringOption, 'Library directory', default_libdir())),
- (OptionKey('licensedir'), BuiltinOption(UserStringOption, 'Licenses directory', '')),
- (OptionKey('libexecdir'), BuiltinOption(UserStringOption, 'Library executable directory', default_libexecdir())),
- (OptionKey('localedir'), BuiltinOption(UserStringOption, 'Locale data directory', default_localedir())),
- (OptionKey('localstatedir'), BuiltinOption(UserStringOption, 'Localstate data directory', 'var')),
- (OptionKey('mandir'), BuiltinOption(UserStringOption, 'Manual page directory', default_mandir())),
- (OptionKey('sbindir'), BuiltinOption(UserStringOption, 'System executable directory', default_sbindir())),
- (OptionKey('sharedstatedir'), BuiltinOption(UserStringOption, 'Architecture-independent data directory', 'com')),
- (OptionKey('sysconfdir'), BuiltinOption(UserStringOption, 'Sysconf data directory', default_sysconfdir())),
-])
-
-BUILTIN_CORE_OPTIONS: T.Dict['OptionKey', 'BuiltinOption'] = OrderedDict([
- (OptionKey('auto_features'), BuiltinOption(UserFeatureOption, "Override value of all 'auto' features", 'auto')),
- (OptionKey('backend'), BuiltinOption(UserComboOption, 'Backend to use', 'ninja', choices=backendlist,
- readonly=True)),
- (OptionKey('genvslite'),
- BuiltinOption(
- UserComboOption,
- 'Setup multiple buildtype-suffixed ninja-backend build directories, '
- 'and a [builddir]_vs containing a Visual Studio meta-backend with multiple configurations that calls into them',
- 'vs2022',
- choices=genvslitelist)
- ),
- (OptionKey('buildtype'), BuiltinOption(UserComboOption, 'Build type to use', 'debug',
- choices=buildtypelist)),
- (OptionKey('debug'), BuiltinOption(UserBooleanOption, 'Enable debug symbols and other information', True)),
- (OptionKey('default_library'), BuiltinOption(UserComboOption, 'Default library type', 'shared', choices=['shared', 'static', 'both'],
- yielding=False)),
- (OptionKey('default_both_libraries'), BuiltinOption(UserComboOption, 'Default library type for both_libraries', 'shared', choices=['shared', 'static', 'auto'])),
- (OptionKey('errorlogs'), BuiltinOption(UserBooleanOption, "Whether to print the logs from failing tests", True)),
- (OptionKey('install_umask'), BuiltinOption(UserUmaskOption, 'Default umask to apply on permissions of installed files', '022')),
- (OptionKey('layout'), BuiltinOption(UserComboOption, 'Build directory layout', 'mirror', choices=['mirror', 'flat'])),
- (OptionKey('optimization'), BuiltinOption(UserComboOption, 'Optimization level', '0', choices=['plain', '0', 'g', '1', '2', '3', 's'])),
- (OptionKey('prefer_static'), BuiltinOption(UserBooleanOption, 'Whether to try static linking before shared linking', False)),
- (OptionKey('stdsplit'), BuiltinOption(UserBooleanOption, 'Split stdout and stderr in test logs', True)),
- (OptionKey('strip'), BuiltinOption(UserBooleanOption, 'Strip targets on install', False)),
- (OptionKey('unity'), BuiltinOption(UserComboOption, 'Unity build', 'off', choices=['on', 'off', 'subprojects'])),
- (OptionKey('unity_size'), BuiltinOption(UserIntegerOption, 'Unity block size', (2, None, 4))),
- (OptionKey('warning_level'), BuiltinOption(UserComboOption, 'Compiler warning level to use', '1', choices=['0', '1', '2', '3', 'everything'], yielding=False)),
- (OptionKey('werror'), BuiltinOption(UserBooleanOption, 'Treat warnings as errors', False, yielding=False)),
- (OptionKey('wrap_mode'), BuiltinOption(UserComboOption, 'Wrap mode', 'default', choices=['default', 'nofallback', 'nodownload', 'forcefallback', 'nopromote'])),
- (OptionKey('force_fallback_for'), BuiltinOption(UserArrayOption, 'Force fallback for those subprojects', [])),
- (OptionKey('vsenv'), BuiltinOption(UserBooleanOption, 'Activate Visual Studio environment', False, readonly=True)),
-
- # Pkgconfig module
- (OptionKey('pkgconfig.relocatable'),
- BuiltinOption(UserBooleanOption, 'Generate pkgconfig files as relocatable', False)),
-
- # Python module
- (OptionKey('python.bytecompile'),
- BuiltinOption(UserIntegerOption, 'Whether to compile bytecode', (-1, 2, 0))),
- (OptionKey('python.install_env'),
- BuiltinOption(UserComboOption, 'Which python environment to install to', 'prefix', choices=['auto', 'prefix', 'system', 'venv'])),
- (OptionKey('python.platlibdir'),
- BuiltinOption(UserStringOption, 'Directory for site-specific, platform-specific files.', '')),
- (OptionKey('python.purelibdir'),
- BuiltinOption(UserStringOption, 'Directory for site-specific, non-platform-specific files.', '')),
- (OptionKey('python.allow_limited_api'),
- BuiltinOption(UserBooleanOption, 'Whether to allow use of the Python Limited API', True)),
-])
+BUILTIN_DIR_OPTIONS: T.Mapping[OptionKey, AnyOptionType] = {
+ OptionKey(o.name): o for o in [
+ UserStringOption('prefix', 'Installation prefix', default_prefix()),
+ UserStringOption('bindir', 'Executable directory', 'bin'),
+ UserStringOption('datadir', 'Data file directory', default_datadir()),
+ UserStringOption('includedir', 'Header file directory', default_includedir()),
+ UserStringOption('infodir', 'Info page directory', default_infodir()),
+ UserStringOption('libdir', 'Library directory', default_libdir()),
+ UserStringOption('licensedir', 'Licenses directory', ''),
+ UserStringOption('libexecdir', 'Library executable directory', default_libexecdir()),
+ UserStringOption('localedir', 'Locale data directory', default_localedir()),
+ UserStringOption('localstatedir', 'Localstate data directory', 'var'),
+ UserStringOption('mandir', 'Manual page directory', default_mandir()),
+ UserStringOption('sbindir', 'System executable directory', default_sbindir()),
+ UserStringOption('sharedstatedir', 'Architecture-independent data directory', 'com'),
+ UserStringOption('sysconfdir', 'Sysconf data directory', default_sysconfdir()),
+ ]
+}
+
+BUILTIN_CORE_OPTIONS: T.Mapping[OptionKey, AnyOptionType] = {
+ OptionKey(o.name): o for o in T.cast('T.List[AnyOptionType]', [
+ UserFeatureOption('auto_features', "Override value of all 'auto' features", 'auto'),
+ UserComboOption('backend', 'Backend to use', 'ninja', choices=backendlist, readonly=True),
+ UserComboOption(
+ 'genvslite',
+ 'Setup multiple buildtype-suffixed ninja-backend build directories, '
+ 'and a [builddir]_vs containing a Visual Studio meta-backend with multiple configurations that calls into them',
+ 'vs2022',
+ choices=genvslitelist
+ ),
+ UserComboOption('buildtype', 'Build type to use', 'debug', choices=buildtypelist),
+ UserBooleanOption('debug', 'Enable debug symbols and other information', True),
+ UserComboOption('default_library', 'Default library type', 'shared', choices=['shared', 'static', 'both'],
+ yielding=False),
+ UserComboOption('default_both_libraries', 'Default library type for both_libraries', 'shared',
+ choices=['shared', 'static', 'auto']),
+ UserBooleanOption('errorlogs', "Whether to print the logs from failing tests", True),
+ UserUmaskOption('install_umask', 'Default umask to apply on permissions of installed files', OctalInt(0o022)),
+ UserComboOption('layout', 'Build directory layout', 'mirror', choices=['mirror', 'flat']),
+ UserComboOption('optimization', 'Optimization level', '0', choices=['plain', '0', 'g', '1', '2', '3', 's']),
+ UserBooleanOption('prefer_static', 'Whether to try static linking before shared linking', False),
+ UserBooleanOption('stdsplit', 'Split stdout and stderr in test logs', True),
+ UserBooleanOption('strip', 'Strip targets on install', False),
+ UserComboOption('unity', 'Unity build', 'off', choices=['on', 'off', 'subprojects']),
+ UserIntegerOption('unity_size', 'Unity block size', 4, min_value=2),
+ UserComboOption('warning_level', 'Compiler warning level to use', '1', choices=['0', '1', '2', '3', 'everything'],
+ yielding=False),
+ UserBooleanOption('werror', 'Treat warnings as errors', False, yielding=False),
+ UserComboOption('wrap_mode', 'Wrap mode', 'default', choices=['default', 'nofallback', 'nodownload', 'forcefallback', 'nopromote']),
+ UserStringArrayOption('force_fallback_for', 'Force fallback for those subprojects', []),
+ UserBooleanOption('vsenv', 'Activate Visual Studio environment', False, readonly=True),
+
+ # Pkgconfig module
+ UserBooleanOption('pkgconfig.relocatable', 'Generate pkgconfig files as relocatable', False),
+
+ # Python module
+ UserIntegerOption('python.bytecompile', 'Whether to compile bytecode', 0, min_value=-1, max_value=2),
+ UserComboOption('python.install_env', 'Which python environment to install to', 'prefix',
+ choices=['auto', 'prefix', 'system', 'venv']),
+ UserStringOption('python.platlibdir', 'Directory for site-specific, platform-specific files.', ''),
+ UserStringOption('python.purelibdir', 'Directory for site-specific, non-platform-specific files.', ''),
+ UserBooleanOption('python.allow_limited_api', 'Whether to allow use of the Python Limited API', True),
+ ])
+}
BUILTIN_OPTIONS = OrderedDict(chain(BUILTIN_DIR_OPTIONS.items(), BUILTIN_CORE_OPTIONS.items()))
-BUILTIN_OPTIONS_PER_MACHINE: T.Dict['OptionKey', 'BuiltinOption'] = OrderedDict([
- (OptionKey('pkg_config_path'), BuiltinOption(UserArrayOption, 'List of additional paths for pkg-config to search', [])),
- (OptionKey('cmake_prefix_path'), BuiltinOption(UserArrayOption, 'List of additional prefixes for cmake to search', [])),
-])
+BUILTIN_OPTIONS_PER_MACHINE: T.Mapping[OptionKey, AnyOptionType] = {
+ OptionKey(o.name): o for o in [
+ UserStringArrayOption('pkg_config_path', 'List of additional paths for pkg-config to search', []),
+ UserStringArrayOption('cmake_prefix_path', 'List of additional prefixes for cmake to search', []),
+ ]
+}
# Special prefix-dependent defaults for installation directories that reside in
# a path outside of the prefix in FHS and common usage.
@@ -682,52 +763,183 @@ def add_to_argparse(self, name: str, parser: argparse.ArgumentParser, help_suffi
OptionKey('python.purelibdir'): {},
}
+MSCRT_VALS = ['none', 'md', 'mdd', 'mt', 'mtd']
+
+COMPILER_BASE_OPTIONS: T.Mapping[OptionKey, AnyOptionType] = {
+ OptionKey(o.name): o for o in T.cast('T.List[AnyOptionType]', [
+ UserBooleanOption('b_pch', 'Use precompiled headers', True),
+ UserBooleanOption('b_lto', 'Use link time optimization', False),
+ UserIntegerOption('b_lto_threads', 'Use multiple threads for Link Time Optimization', 0),
+ UserComboOption('b_lto_mode', 'Select between different LTO modes.', 'default', choices=['default', 'thin']),
+ UserBooleanOption('b_thinlto_cache', 'Use LLVM ThinLTO caching for faster incremental builds', False),
+ UserStringOption('b_thinlto_cache_dir', 'Directory to store ThinLTO cache objects', ''),
+ UserStringArrayOption('b_sanitize', 'Code sanitizer to use', []),
+ UserBooleanOption('b_lundef', 'Use -Wl,--no-undefined when linking', True),
+ UserBooleanOption('b_asneeded', 'Use -Wl,--as-needed when linking', True),
+ UserComboOption(
+ 'b_pgo', 'Use profile guided optimization', 'off', choices=['off', 'generate', 'use']),
+ UserBooleanOption('b_coverage', 'Enable coverage tracking.', False),
+ UserComboOption(
+ 'b_colorout', 'Use colored output', 'always', choices=['auto', 'always', 'never']),
+ UserComboOption(
+ 'b_ndebug', 'Disable asserts', 'false', choices=['true', 'false', 'if-release']),
+ UserBooleanOption('b_staticpic', 'Build static libraries as position independent', True),
+ UserBooleanOption('b_pie', 'Build executables as position independent', False),
+ UserBooleanOption('b_bitcode', 'Generate and embed bitcode (only macOS/iOS/tvOS)', False),
+ UserComboOption(
+ 'b_vscrt', 'VS run-time library type to use.', 'from_buildtype',
+ choices=MSCRT_VALS + ['from_buildtype', 'static_from_buildtype']),
+ ])
+}
+
class OptionStore:
- def __init__(self) -> None:
- self.d: T.Dict['OptionKey', 'UserOption[T.Any]'] = {}
+ DEFAULT_DEPENDENTS = {'plain': ('plain', False),
+ 'debug': ('0', True),
+ 'debugoptimized': ('2', True),
+ 'release': ('3', False),
+ 'minsize': ('s', True),
+ }
+
+ def __init__(self, is_cross: bool) -> None:
+ self.options: T.Dict['OptionKey', 'AnyOptionType'] = {}
self.project_options: T.Set[OptionKey] = set()
self.module_options: T.Set[OptionKey] = set()
from .compilers import all_languages
self.all_languages = set(all_languages)
+ self.project_options = set()
+ self.augments: T.Dict[str, str] = {}
+ self.is_cross = is_cross
- def __len__(self) -> int:
- return len(self.d)
+ # Pending options are options that need to be initialized later, either
+ # configuration dependent options like compiler options, or options for
+ # a different subproject
+ self.pending_options: T.Dict[OptionKey, ElementaryOptionValues] = {}
+
+ def clear_pending(self) -> None:
+ self.pending_options = {}
- def ensure_key(self, key: T.Union[OptionKey, str]) -> OptionKey:
+ def ensure_and_validate_key(self, key: T.Union[OptionKey, str]) -> OptionKey:
if isinstance(key, str):
return OptionKey(key)
+ # FIXME. When not cross building all "build" options need to fall back
+ # to "host" options due to how the old code worked.
+ #
+ # This is NOT how it should be.
+ #
+ # This needs to be changed so that trying to add or access "build" keys
+ # is a hard error and fix issues that arise.
+ #
+ # I did not do this yet, because it would make this MR even
+ # more massive than it already is. Later then.
+ if not self.is_cross and key.machine == MachineChoice.BUILD:
+ key = key.as_host()
return key
- def get_value_object(self, key: T.Union[OptionKey, str]) -> 'UserOption[T.Any]':
- return self.d[self.ensure_key(key)]
+ def get_pending_value(self, key: T.Union[OptionKey, str], default: T.Optional[ElementaryOptionValues] = None) -> ElementaryOptionValues:
+ key = self.ensure_and_validate_key(key)
+ if key in self.options:
+ return self.options[key].value
+ return self.pending_options.get(key, default)
- def get_value(self, key: T.Union[OptionKey, str]) -> 'T.Any':
+ def get_value(self, key: T.Union[OptionKey, str]) -> ElementaryOptionValues:
return self.get_value_object(key).value
- def add_system_option(self, key: T.Union[OptionKey, str], valobj: 'UserOption[T.Any]') -> None:
- key = self.ensure_key(key)
+ def __len__(self) -> int:
+ return len(self.options)
+
+ def get_value_object_for(self, key: 'T.Union[OptionKey, str]') -> AnyOptionType:
+ key = self.ensure_and_validate_key(key)
+ potential = self.options.get(key, None)
+ if self.is_project_option(key):
+ assert key.subproject is not None
+ if potential is not None and potential.yielding:
+ parent_key = key.as_root()
+ try:
+ parent_option = self.options[parent_key]
+ except KeyError:
+ # Subproject is set to yield, but top level
+ # project does not have an option of the same
+ # name. Return the subproject option.
+ return potential
+ # If parent object has different type, do not yield.
+ # This should probably be an error.
+ if type(parent_option) is type(potential):
+ return parent_option
+ return potential
+ if potential is None:
+ raise KeyError(f'Tried to access nonexistent project option {key}.')
+ return potential
+ else:
+ if potential is None:
+ parent_key = OptionKey(key.name, subproject=None, machine=key.machine)
+ if parent_key not in self.options:
+ raise KeyError(f'Tried to access nonexistent project parent option {parent_key}.')
+ return self.options[parent_key]
+ return potential
+
+ def get_value_object_and_value_for(self, key: OptionKey) -> T.Tuple[AnyOptionType, ElementaryOptionValues]:
+ assert isinstance(key, OptionKey)
+ vobject = self.get_value_object_for(key)
+ computed_value = vobject.value
+ if key.subproject is not None:
+ keystr = str(key)
+ if keystr in self.augments:
+ computed_value = vobject.validate_value(self.augments[keystr])
+ return (vobject, computed_value)
+
+ def get_value_for(self, name: 'T.Union[OptionKey, str]', subproject: T.Optional[str] = None) -> ElementaryOptionValues:
+ if isinstance(name, str):
+ key = OptionKey(name, subproject)
+ else:
+ assert subproject is None
+ key = name
+ vobject, resolved_value = self.get_value_object_and_value_for(key)
+ return resolved_value
+
+ def add_system_option(self, key: T.Union[OptionKey, str], valobj: AnyOptionType) -> None:
+ key = self.ensure_and_validate_key(key)
if '.' in key.name:
raise MesonException(f'Internal error: non-module option has a period in its name {key.name}.')
self.add_system_option_internal(key, valobj)
- def add_system_option_internal(self, key: T.Union[OptionKey, str], valobj: 'UserOption[T.Any]') -> None:
- key = self.ensure_key(key)
+ def add_system_option_internal(self, key: OptionKey, valobj: AnyOptionType) -> None:
assert isinstance(valobj, UserOption)
- self.d[key] = valobj
+ if not isinstance(valobj.name, str):
+ assert isinstance(valobj.name, str)
+ if key in self.options:
+ return
+
+ pval = self.pending_options.pop(key, None)
+ if key.subproject:
+ proj_key = key.evolve(subproject=None)
+ self.add_system_option_internal(proj_key, valobj)
+ if pval is not None:
+ self.augments[key] = pval
+ else:
+ self.options[key] = valobj
+ if pval is not None:
+ self.set_option(key, pval)
- def add_compiler_option(self, language: str, key: T.Union[OptionKey, str], valobj: 'UserOption[T.Any]') -> None:
- key = self.ensure_key(key)
+ def add_compiler_option(self, language: str, key: T.Union[OptionKey, str], valobj: AnyOptionType) -> None:
+ key = self.ensure_and_validate_key(key)
if not key.name.startswith(language + '_'):
raise MesonException(f'Internal error: all compiler option names must start with language prefix. ({key.name} vs {language}_)')
self.add_system_option(key, valobj)
- def add_project_option(self, key: T.Union[OptionKey, str], valobj: 'UserOption[T.Any]') -> None:
- key = self.ensure_key(key)
- self.d[key] = valobj
+ def add_project_option(self, key: T.Union[OptionKey, str], valobj: AnyOptionType) -> None:
+ key = self.ensure_and_validate_key(key)
+ assert key.subproject is not None
+ if key in self.options:
+ raise MesonException(f'Internal error: tried to add a project option {key} that already exists.')
+
+ self.options[key] = valobj
self.project_options.add(key)
+ pval = self.pending_options.pop(key, None)
+ if pval is not None:
+ self.set_option(key, pval)
- def add_module_option(self, modulename: str, key: T.Union[OptionKey, str], valobj: 'UserOption[T.Any]') -> None:
- key = self.ensure_key(key)
+ def add_module_option(self, modulename: str, key: T.Union[OptionKey, str], valobj: AnyOptionType) -> None:
+ key = self.ensure_and_validate_key(key)
if key.name.startswith('build.'):
raise MesonException('FATAL internal error: somebody goofed option handling.')
if not key.name.startswith(modulename + '.'):
@@ -735,48 +947,223 @@ def add_module_option(self, modulename: str, key: T.Union[OptionKey, str], valob
self.add_system_option_internal(key, valobj)
self.module_options.add(key)
- def set_value(self, key: T.Union[OptionKey, str], new_value: 'T.Any') -> bool:
- key = self.ensure_key(key)
- return self.d[key].set_value(new_value)
+ def sanitize_prefix(self, prefix: str) -> str:
+ prefix = os.path.expanduser(prefix)
+ if not os.path.isabs(prefix):
+ raise MesonException(f'prefix value {prefix!r} must be an absolute path')
+ if prefix.endswith('/') or prefix.endswith('\\'):
+ # On Windows we need to preserve the trailing slash if the
+ # string is of type 'C:\' because 'C:' is not an absolute path.
+ if len(prefix) == 3 and prefix[1] == ':':
+ pass
+ # If prefix is a single character, preserve it since it is
+ # the root directory.
+ elif len(prefix) == 1:
+ pass
+ else:
+ prefix = prefix[:-1]
+ return prefix
+
+ def sanitize_dir_option_value(self, prefix: str, option: OptionKey, value: T.Any) -> T.Any:
+ '''
+ If the option is an installation directory option, the value is an
+ absolute path and resides within prefix, return the value
+ as a path relative to the prefix. Otherwise, return it as is.
+
+ This way everyone can do f.ex, get_option('libdir') and usually get
+ the library directory relative to prefix, even though it really
+ should not be relied upon.
+ '''
+ try:
+ value = pathlib.PurePath(value)
+ except TypeError:
+ return value
+ if option.name.endswith('dir') and value.is_absolute() and \
+ option not in BUILTIN_DIR_NOPREFIX_OPTIONS:
+ try:
+ # Try to relativize the path.
+ value = value.relative_to(prefix)
+ except ValueError:
+ # Path is not relative, let’s keep it as is.
+ pass
+ if '..' in value.parts:
+ raise MesonException(
+ f"The value of the '{option}' option is '{value}' but "
+ "directory options are not allowed to contain '..'.\n"
+ f"If you need a path outside of the {prefix!r} prefix, "
+ "please use an absolute path."
+ )
+ # .as_posix() keeps the posix-like file separators Meson uses.
+ return value.as_posix()
+
+ def set_option(self, key: OptionKey, new_value: ElementaryOptionValues, first_invocation: bool = False) -> bool:
+ if key.name == 'prefix':
+ assert isinstance(new_value, str), 'for mypy'
+ new_value = self.sanitize_prefix(new_value)
+ elif self.is_builtin_option(key):
+ prefix = self.get_value_for('prefix')
+ assert isinstance(prefix, str), 'for mypy'
+ new_value = self.sanitize_dir_option_value(prefix, key, new_value)
+
+ try:
+ opt = self.get_value_object_for(key)
+ except KeyError:
+ raise MesonException(f'Unknown options: "{key!s}" not found.')
+
+ if opt.deprecated is True:
+ mlog.deprecation(f'Option {key.name!r} is deprecated')
+ elif isinstance(opt.deprecated, list):
+ for v in opt.listify(new_value):
+ if v in opt.deprecated:
+ mlog.deprecation(f'Option {key.name!r} value {v!r} is deprecated')
+ elif isinstance(opt.deprecated, dict):
+ def replace(v: str) -> str:
+ assert isinstance(opt.deprecated, dict) # No, Mypy can not tell this from two lines above
+ newvalue = opt.deprecated.get(v)
+ if newvalue is not None:
+ mlog.deprecation(f'Option {key.name!r} value {v!r} is replaced by {newvalue!r}')
+ return newvalue
+ return v
+ valarr = [replace(v) for v in opt.listify(new_value)]
+ new_value = ','.join(valarr)
+ elif isinstance(opt.deprecated, str):
+ mlog.deprecation(f'Option {key.name!r} is replaced by {opt.deprecated!r}')
+ # Change both this option and the new one pointed to.
+ dirty = self.set_option(key.evolve(name=opt.deprecated), new_value)
+ dirty |= opt.set_value(new_value)
+ return dirty
+
+ old_value = opt.value
+ changed = opt.set_value(new_value)
+
+ if opt.readonly and changed and not first_invocation:
+ raise MesonException(f'Tried to modify read only option {str(key)!r}')
+
+ if key.name == 'prefix' and first_invocation and changed:
+ assert isinstance(old_value, str), 'for mypy'
+ assert isinstance(new_value, str), 'for mypy'
+ self.reset_prefixed_options(old_value, new_value)
+
+ if changed and key.name == 'buildtype' and new_value != 'custom':
+ assert isinstance(new_value, str), 'for mypy'
+ optimization, debug = self.DEFAULT_DEPENDENTS[new_value]
+ dkey = key.evolve(name='debug')
+ optkey = key.evolve(name='optimization')
+ self.options[dkey].set_value(debug)
+ self.options[optkey].set_value(optimization)
+
+ return changed
+
+ def set_option_maybe_root(self, o: OptionKey, new_value: str) -> bool:
+ if not self.is_cross and o.is_for_build():
+ return False
+
+ if o in self.options:
+ return self.set_option(o, new_value)
+ if self.accept_as_pending_option(o):
+ old_value = self.pending_options.get(o, None)
+ self.pending_options[o] = new_value
+ return old_value is None or str(old_value) == new_value
+ else:
+ o = o.as_root()
+ return self.set_option(o, new_value)
+
+ def set_from_configure_command(self, D_args: T.List[str], U_args: T.List[str]) -> bool:
+ dirty = False
+ D_args = [] if D_args is None else D_args
+ (global_options, perproject_global_options, project_options) = self.classify_D_arguments(D_args)
+ U_args = [] if U_args is None else U_args
+ for key, valstr in global_options:
+ dirty |= self.set_option_maybe_root(key, valstr)
+ for key, valstr in project_options:
+ dirty |= self.set_option_maybe_root(key, valstr)
+ for keystr, valstr in perproject_global_options:
+ if keystr in self.augments:
+ if self.augments[keystr] != valstr:
+ self.augments[keystr] = valstr
+ dirty = True
+ else:
+ self.augments[keystr] = valstr
+ dirty = True
+ for delete in U_args:
+ if delete in self.augments:
+ del self.augments[delete]
+ dirty = True
+ return dirty
+
+ def reset_prefixed_options(self, old_prefix: str, new_prefix: str) -> None:
+ for optkey, prefix_mapping in BUILTIN_DIR_NOPREFIX_OPTIONS.items():
+ valobj = self.options[optkey]
+ new_value = valobj.value
+ if new_prefix not in prefix_mapping:
+ new_value = BUILTIN_OPTIONS[optkey].default
+ else:
+ if old_prefix in prefix_mapping:
+ # Only reset the value if it has not been changed from the default.
+ if prefix_mapping[old_prefix] == valobj.value:
+ new_value = prefix_mapping[new_prefix]
+ else:
+ new_value = prefix_mapping[new_prefix]
+ valobj.set_value(new_value)
# FIXME, this should be removed.or renamed to "change_type_of_existing_object" or something like that
- def set_value_object(self, key: T.Union[OptionKey, str], new_object: 'UserOption[T.Any]') -> None:
- key = self.ensure_key(key)
- self.d[key] = new_object
+ def set_value_object(self, key: T.Union[OptionKey, str], new_object: AnyOptionType) -> None:
+ key = self.ensure_and_validate_key(key)
+ self.options[key] = new_object
+
+ def get_value_object(self, key: T.Union[OptionKey, str]) -> AnyOptionType:
+ key = self.ensure_and_validate_key(key)
+ return self.options[key]
+
+ def get_default_for_b_option(self, key: OptionKey) -> ElementaryOptionValues:
+ assert self.is_base_option(key)
+ try:
+ return T.cast('ElementaryOptionValues', COMPILER_BASE_OPTIONS[key.evolve(subproject=None)].default)
+ except KeyError:
+ raise MesonBugException(f'Requested base option {key} which does not exist.')
def remove(self, key: OptionKey) -> None:
- del self.d[key]
+ del self.options[key]
+ try:
+ self.project_options.remove(key)
+ except KeyError:
+ pass
- def __contains__(self, key: OptionKey) -> bool:
- key = self.ensure_key(key)
- return key in self.d
+ def __contains__(self, key: T.Union[str, OptionKey]) -> bool:
+ key = self.ensure_and_validate_key(key)
+ return key in self.options
def __repr__(self) -> str:
- return repr(self.d)
+ return repr(self.options)
def keys(self) -> T.KeysView[OptionKey]:
- return self.d.keys()
+ return self.options.keys()
- def values(self) -> T.ValuesView[UserOption[T.Any]]:
- return self.d.values()
+ def values(self) -> T.ValuesView[AnyOptionType]:
+ return self.options.values()
- def items(self) -> T.ItemsView['OptionKey', 'UserOption[T.Any]']:
- return self.d.items()
+ def items(self) -> T.ItemsView['OptionKey', 'AnyOptionType']:
+ return self.options.items()
# FIXME: this method must be deleted and users moved to use "add_xxx_option"s instead.
- def update(self, **kwargs: UserOption[T.Any]) -> None:
- self.d.update(**kwargs)
+ def update(self, **kwargs: AnyOptionType) -> None:
+ self.options.update(**kwargs)
- def setdefault(self, k: OptionKey, o: UserOption[T.Any]) -> UserOption[T.Any]:
- return self.d.setdefault(k, o)
+ def setdefault(self, k: OptionKey, o: AnyOptionType) -> AnyOptionType:
+ return self.options.setdefault(k, o)
- def get(self, o: OptionKey, default: T.Optional[UserOption[T.Any]] = None) -> T.Optional[UserOption[T.Any]]:
- return self.d.get(o, default)
+ def get(self, o: OptionKey, default: T.Optional[AnyOptionType] = None, **kwargs: T.Any) -> T.Optional[AnyOptionType]:
+ return self.options.get(o, default, **kwargs)
def is_project_option(self, key: OptionKey) -> bool:
"""Convenience method to check if this is a project option."""
return key in self.project_options
+ def is_per_machine_option(self, optname: OptionKey) -> bool:
+ if optname.evolve(subproject=None, machine=MachineChoice.HOST) in BUILTIN_OPTIONS_PER_MACHINE:
+ return True
+ return self.is_compiler_option(optname)
+
def is_reserved_name(self, key: OptionKey) -> bool:
if key.name in _BUILTIN_NAMES:
return True
@@ -802,7 +1189,11 @@ def is_base_option(self, key: OptionKey) -> bool:
def is_backend_option(self, key: OptionKey) -> bool:
"""Convenience method to check if this is a backend option."""
- return key.name.startswith('backend_')
+ if isinstance(key, str):
+ name: str = key
+ else:
+ name = key.name
+ return name.startswith('backend_')
def is_compiler_option(self, key: OptionKey) -> bool:
"""Convenience method to check if this is a compiler option."""
@@ -817,3 +1208,282 @@ def is_compiler_option(self, key: OptionKey) -> bool:
def is_module_option(self, key: OptionKey) -> bool:
return key in self.module_options
+
+ def classify_D_arguments(self, D: T.List[str]) -> T.Tuple[T.List[T.Tuple[OptionKey, str]],
+ T.List[T.Tuple[str, str]],
+ T.List[T.Tuple[OptionKey, str]]]:
+ global_options = []
+ project_options = []
+ perproject_global_options = []
+ for setval in D:
+ keystr, valstr = setval.split('=', 1)
+ key = OptionKey.from_string(keystr)
+ valuetuple = (key, valstr)
+ if self.is_project_option(key):
+ project_options.append(valuetuple)
+ elif key.subproject is None:
+ global_options.append(valuetuple)
+ else:
+ # FIXME, augments are currently stored as strings, not OptionKeys
+ strvaluetuple = (keystr, valstr)
+ perproject_global_options.append(strvaluetuple)
+ return (global_options, perproject_global_options, project_options)
+
+ def optlist2optdict(self, optlist: T.List[str]) -> T.Dict[str, str]:
+ optdict = {}
+ for p in optlist:
+ k, v = p.split('=', 1)
+ optdict[k] = v
+ return optdict
+
+ def prefix_split_options(self, coll: T.Union[T.List[str], OptionStringLikeDict]) -> T.Tuple[str, T.Union[T.List[str], OptionStringLikeDict]]:
+ prefix = None
+ if isinstance(coll, list):
+ others: T.List[str] = []
+ for e in coll:
+ if e.startswith('prefix='):
+ prefix = e.split('=', 1)[1]
+ else:
+ others.append(e)
+ return (prefix, others)
+ else:
+ others_d: OptionStringLikeDict = {}
+ for k, v in coll.items():
+ if isinstance(k, OptionKey) and k.name == 'prefix':
+ prefix = v
+ elif k == 'prefix':
+ prefix = v
+ else:
+ others_d[k] = v
+ return (prefix, others_d)
+
+ def first_handle_prefix(self,
+ project_default_options: T.Union[T.List[str], OptionStringLikeDict],
+ cmd_line_options: OptionStringLikeDict,
+ machine_file_options: T.Mapping[OptionKey, ElementaryOptionValues]) \
+ -> T.Tuple[T.Union[T.List[str], OptionStringLikeDict],
+ T.Union[T.List[str], OptionStringLikeDict],
+ T.MutableMapping[OptionKey, ElementaryOptionValues]]:
+ # Copy to avoid later mutation
+ nopref_machine_file_options = T.cast(
+ 'T.MutableMapping[OptionKey, ElementaryOptionValues]', copy.copy(machine_file_options))
+
+ prefix = None
+ (possible_prefix, nopref_project_default_options) = self.prefix_split_options(project_default_options)
+ prefix = prefix if possible_prefix is None else possible_prefix
+
+ possible_prefixv = nopref_machine_file_options.pop(OptionKey('prefix'), None)
+ assert possible_prefixv is None or isinstance(possible_prefixv, str), 'mypy: prefix from machine file was not a string?'
+ prefix = prefix if possible_prefixv is None else possible_prefixv
+
+ (possible_prefix, nopref_cmd_line_options) = self.prefix_split_options(cmd_line_options)
+ prefix = prefix if possible_prefix is None else possible_prefix
+
+ if prefix is not None:
+ self.hard_reset_from_prefix(prefix)
+ return (nopref_project_default_options, nopref_cmd_line_options, nopref_machine_file_options)
+
+ def hard_reset_from_prefix(self, prefix: str) -> None:
+ prefix = self.sanitize_prefix(prefix)
+ for optkey, prefix_mapping in BUILTIN_DIR_NOPREFIX_OPTIONS.items():
+ valobj = self.options[optkey]
+ if prefix in prefix_mapping:
+ new_value = prefix_mapping[prefix]
+ else:
+ _v = BUILTIN_OPTIONS[optkey].default
+ assert isinstance(_v, str), 'for mypy'
+ new_value = _v
+ valobj.set_value(new_value)
+ self.options[OptionKey('prefix')].set_value(prefix)
+
+ def initialize_from_top_level_project_call(self,
+ project_default_options_in: T.Union[T.List[str], OptionStringLikeDict],
+ cmd_line_options_in: OptionStringLikeDict,
+ machine_file_options_in: T.Mapping[OptionKey, ElementaryOptionValues]) -> None:
+ first_invocation = True
+ (project_default_options, cmd_line_options, machine_file_options) = self.first_handle_prefix(project_default_options_in,
+ cmd_line_options_in,
+ machine_file_options_in)
+ if isinstance(project_default_options, str):
+ project_default_options = [project_default_options]
+ if isinstance(project_default_options, list):
+ project_default_options = self.optlist2optdict(project_default_options) # type: ignore [assignment]
+ if project_default_options is None:
+ project_default_options = {}
+ assert isinstance(project_default_options, dict)
+ for keystr, valstr in project_default_options.items():
+            # This is complicated by the fact that a string can have two meanings:
+ #
+ # default_options: 'foo=bar'
+ #
+ # can be either
+ #
+ # A) a system option in which case the subproject is None
+ # B) a project option, in which case the subproject is '' (this method is only called from top level)
+ #
+ # The key parsing function can not handle the difference between the two
+ # and defaults to A.
+ if isinstance(keystr, str):
+ key = OptionKey.from_string(keystr)
+ else:
+ key = keystr
+ # Due to backwards compatibility we ignore build-machine options
+ # when building natively.
+ if not self.is_cross and key.is_for_build():
+ continue
+ if key.subproject:
+ augstr = str(key)
+ self.augments[augstr] = valstr
+ elif key in self.options:
+ self.set_option(key, valstr, first_invocation)
+ else:
+ # Setting a project option with default_options.
+                # Arguably this should be a hard error, the default
+ # value of project option should be set in the option
+ # file, not in the project call.
+ proj_key = key.as_root()
+ if self.is_project_option(proj_key):
+ self.set_option(proj_key, valstr)
+ else:
+ self.pending_options[key] = valstr
+ assert isinstance(machine_file_options, dict)
+ for keystr, valstr in machine_file_options.items():
+ if isinstance(keystr, str):
+ # FIXME, standardise on Key or string.
+ key = OptionKey.from_string(keystr)
+ else:
+ key = keystr
+ # Due to backwards compatibility we ignore all build-machine options
+ # when building natively.
+ if not self.is_cross and key.is_for_build():
+ continue
+ if key.subproject:
+ augstr = str(key)
+ self.augments[augstr] = valstr
+ elif key in self.options:
+ self.set_option(key, valstr, first_invocation)
+ else:
+ proj_key = key.as_root()
+ if proj_key in self.options:
+ self.set_option(proj_key, valstr, first_invocation)
+ else:
+ self.pending_options[key] = valstr
+ assert isinstance(cmd_line_options, dict)
+ for keystr, valstr in cmd_line_options.items():
+ if isinstance(keystr, str):
+ key = OptionKey.from_string(keystr)
+ else:
+ key = keystr
+ # Due to backwards compatibility we ignore all build-machine options
+ # when building natively.
+ if not self.is_cross and key.is_for_build():
+ continue
+ if key.subproject:
+ augstr = str(key)
+ self.augments[augstr] = valstr
+ elif key in self.options:
+ self.set_option(key, valstr, True)
+ else:
+ proj_key = key.as_root()
+ if proj_key in self.options:
+ self.set_option(proj_key, valstr, True)
+ else:
+ self.pending_options[key] = valstr
+
+ def accept_as_pending_option(self, key: OptionKey, known_subprojects: T.Optional[T.Union[T.Set[str], T.KeysView[str]]] = None) -> bool:
+ # Fail on unknown options that we can know must exist at this point in time.
+ # Subproject and compiler options are resolved later.
+ #
+ # Some base options (sanitizers etc) might get added later.
+ # Permitting them all is not strictly correct.
+ if key.subproject:
+ if known_subprojects is None or key.subproject not in known_subprojects:
+ return True
+ if self.is_compiler_option(key):
+ return True
+ return (self.is_base_option(key) and
+ key.evolve(subproject=None, machine=MachineChoice.HOST) in COMPILER_BASE_OPTIONS)
+
+ def validate_cmd_line_options(self, cmd_line_options: OptionStringLikeDict) -> None:
+ unknown_options = []
+ for keystr, valstr in cmd_line_options.items():
+ if isinstance(keystr, str):
+ key = OptionKey.from_string(keystr)
+ else:
+ key = keystr
+
+ if key in self.pending_options and not self.accept_as_pending_option(key):
+ unknown_options.append(f'"{key}"')
+
+ if unknown_options:
+ keys = ', '.join(unknown_options)
+ raise MesonException(f'Unknown options: {keys}')
+
+ def hacky_mchackface_back_to_list(self, optdict: T.Union[T.List[str], OptionStringLikeDict]) -> T.List[str]:
+ if isinstance(optdict, dict):
+ return [f'{k}={v}' for k, v in optdict.items()]
+ return optdict
+
+ def initialize_from_subproject_call(self,
+ subproject: str,
+ spcall_default_options_in: OptionStringLikeDict,
+ project_default_options_in: T.Union[T.List[str], OptionStringLikeDict],
+ cmd_line_options: OptionStringLikeDict) -> None:
+ is_first_invocation = True
+ spcall_default_options = self.hacky_mchackface_back_to_list(spcall_default_options_in)
+ project_default_options = self.hacky_mchackface_back_to_list(project_default_options_in)
+ for o in itertools.chain(project_default_options, spcall_default_options):
+ keystr, valstr = o.split('=', 1)
+ key = OptionKey.from_string(keystr)
+ if key.subproject is None:
+ key = key.evolve(subproject=subproject)
+ elif key.subproject == subproject:
+ without_subp = key.evolve(subproject=None)
+ raise MesonException(f'subproject name not needed in default_options; use "{without_subp}" instead of "{key}"')
+ # If the key points to a project option, set the value from that.
+ # Otherwise set an augment.
+ if key in self.project_options:
+ self.set_option(key, valstr, is_first_invocation)
+ else:
+ self.pending_options.pop(key, None)
+ aug_str = str(key)
+ self.augments[aug_str] = valstr
+ # Check for pending options
+ for key, valstr in cmd_line_options.items(): # type: ignore [assignment]
+ if not isinstance(key, OptionKey):
+ key = OptionKey.from_string(key)
+ if key.subproject != subproject:
+ continue
+ self.pending_options.pop(key, None)
+ if key in self.options:
+ self.set_option(key, valstr, is_first_invocation)
+ else:
+ self.augments[str(key)] = valstr
+
+ def update_project_options(self, project_options: MutableKeyedOptionDictType, subproject: SubProject) -> None:
+ for key, value in project_options.items():
+ if key not in self.options:
+ self.add_project_option(key, value)
+ continue
+ if key.subproject != subproject:
+ raise MesonBugException(f'Tried to set an option for subproject {key.subproject} from {subproject}!')
+
+ oldval = self.get_value_object(key)
+ if type(oldval) is not type(value):
+ self.set_option(key, value.value)
+ elif choices_are_different(oldval, value):
+ # If the choices have changed, use the new value, but attempt
+ # to keep the old options. If they are not valid keep the new
+ # defaults but warn.
+ self.set_value_object(key, value)
+ try:
+ value.set_value(oldval.value)
+ except MesonException:
+ mlog.warning(f'Old value(s) of {key} are no longer valid, resetting to default ({value.value}).',
+ fatal=False)
+
+        # Find any extraneous keys for this project and remove them
+ potential_removed_keys = self.options.keys() - project_options.keys()
+ for key in potential_removed_keys:
+ if self.is_project_option(key) and key.subproject == subproject:
+ self.remove(key)
diff --git a/mesonbuild/programs.py b/mesonbuild/programs.py
index 9ad38e126b60..d01440cce193 100644
--- a/mesonbuild/programs.py
+++ b/mesonbuild/programs.py
@@ -55,6 +55,15 @@ def __init__(self, name: str, command: T.Optional[T.List[str]] = None,
if ret:
self.command = ret + args
else:
+ if os.path.isabs(cmd) and not os.path.exists(cmd):
+ # Maybe the name is an absolute path to a native Windows
+ # executable, but without the extension. This is technically wrong,
+ # but many people do it because it works in the MinGW shell.
+ for ext in self.windows_exts:
+ trial_ext = f'{cmd}.{ext}'
+ if os.path.exists(trial_ext):
+ cmd = trial_ext
+ break
self.command = [cmd] + args
else:
if search_dirs is None:
@@ -113,7 +122,10 @@ def get_version(self, interpreter: T.Optional['Interpreter'] = None) -> str:
output = o.strip()
if not output:
output = e.strip()
- match = re.search(r'([0-9][0-9\.]+)', output)
+
+ match = re.search(r'([0-9]+(\.[0-9]+)+)', output)
+ if not match:
+ match = re.search(r'([0-9][0-9\.]+)', output)
if not match:
raise mesonlib.MesonException(f'Could not find a version number in output of {raw_cmd!r}')
self.cached_version = match.group(1)
diff --git a/mesonbuild/scripts/clangformat.py b/mesonbuild/scripts/clangformat.py
index 9ce050458986..a3c19e9adbec 100644
--- a/mesonbuild/scripts/clangformat.py
+++ b/mesonbuild/scripts/clangformat.py
@@ -4,37 +4,41 @@
from __future__ import annotations
import argparse
-import subprocess
from pathlib import Path
+import sys
-from .run_tool import run_tool
+from .run_tool import run_clang_tool, run_with_buffered_output
from ..environment import detect_clangformat
from ..mesonlib import version_compare
from ..programs import ExternalProgram
import typing as T
-def run_clang_format(fname: Path, exelist: T.List[str], check: bool, cformat_ver: T.Optional[str]) -> subprocess.CompletedProcess:
+async def run_clang_format(fname: Path, exelist: T.List[str], options: argparse.Namespace, cformat_ver: T.Optional[str]) -> int:
clangformat_10 = False
- if check and cformat_ver:
+ if options.check and cformat_ver:
if version_compare(cformat_ver, '>=10'):
clangformat_10 = True
exelist = exelist + ['--dry-run', '--Werror']
+ # The option is not documented but it exists in version 10
+ if options.color == 'always' or options.color == 'auto' and sys.stdout.isatty():
+ exelist += ['--color=1']
else:
original = fname.read_bytes()
before = fname.stat().st_mtime
- ret = subprocess.run(exelist + ['-style=file', '-i', str(fname)])
+ ret = await run_with_buffered_output(exelist + ['-style=file', '-i', str(fname)])
after = fname.stat().st_mtime
if before != after:
print('File reformatted: ', fname)
- if check and not clangformat_10:
+ if options.check and not clangformat_10:
# Restore the original if only checking.
fname.write_bytes(original)
- ret.returncode = 1
+ return 1
return ret
def run(args: T.List[str]) -> int:
parser = argparse.ArgumentParser()
parser.add_argument('--check', action='store_true')
+ parser.add_argument('--color', default='always')
parser.add_argument('sourcedir')
parser.add_argument('builddir')
options = parser.parse_args(args)
@@ -52,4 +56,4 @@ def run(args: T.List[str]) -> int:
else:
cformat_ver = None
- return run_tool('clang-format', srcdir, builddir, run_clang_format, exelist, options.check, cformat_ver)
+ return run_clang_tool('clang-format', srcdir, builddir, run_clang_format, exelist, options, cformat_ver)
diff --git a/mesonbuild/scripts/clangtidy.py b/mesonbuild/scripts/clangtidy.py
index a922f8514062..550faeef354e 100644
--- a/mesonbuild/scripts/clangtidy.py
+++ b/mesonbuild/scripts/clangtidy.py
@@ -11,21 +11,22 @@
import shutil
import sys
-from .run_tool import run_tool
+from .run_tool import run_clang_tool, run_with_buffered_output
from ..environment import detect_clangtidy, detect_clangapply
import typing as T
-def run_clang_tidy(fname: Path, tidyexe: list, builddir: Path, fixesdir: T.Optional[Path]) -> subprocess.CompletedProcess:
+async def run_clang_tidy(fname: Path, tidyexe: list, builddir: Path, fixesdir: T.Optional[Path]) -> int:
args = []
if fixesdir is not None:
handle, name = tempfile.mkstemp(prefix=fname.name + '.', suffix='.yaml', dir=fixesdir)
os.close(handle)
args.extend(['-export-fixes', name])
- return subprocess.run(tidyexe + args + ['-quiet', '-p', str(builddir), str(fname)])
+ return await run_with_buffered_output(tidyexe + args + ['-quiet', '-p', str(builddir), str(fname)])
def run(args: T.List[str]) -> int:
parser = argparse.ArgumentParser()
parser.add_argument('--fix', action='store_true')
+ parser.add_argument('--color', default='always')
parser.add_argument('sourcedir')
parser.add_argument('builddir')
options = parser.parse_args(args)
@@ -38,6 +39,9 @@ def run(args: T.List[str]) -> int:
print(f'Could not execute clang-tidy "{" ".join(tidyexe)}"')
return 1
+ if options.color == 'always' or options.color == 'auto' and sys.stdout.isatty():
+ tidyexe += ['--use-color']
+
fixesdir: T.Optional[Path] = None
if options.fix:
applyexe = detect_clangapply()
@@ -52,7 +56,7 @@ def run(args: T.List[str]) -> int:
fixesdir.unlink()
fixesdir.mkdir(parents=True)
- tidyret = run_tool('clang-tidy', srcdir, builddir, run_clang_tidy, tidyexe, builddir, fixesdir)
+ tidyret = run_clang_tool('clang-tidy', srcdir, builddir, run_clang_tidy, tidyexe, builddir, fixesdir)
if fixesdir is not None:
print('Applying fix-its...')
applyret = subprocess.run(applyexe + ['-format', '-style=file', '-ignore-insert-conflict', fixesdir]).returncode
diff --git a/mesonbuild/scripts/clippy.py b/mesonbuild/scripts/clippy.py
new file mode 100644
index 000000000000..6d282e4f81e9
--- /dev/null
+++ b/mesonbuild/scripts/clippy.py
@@ -0,0 +1,76 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2024 The Meson development team
+
+from __future__ import annotations
+from collections import defaultdict
+import os
+import tempfile
+import typing as T
+
+from .run_tool import run_tool_on_targets, run_with_buffered_output
+from .. import build, mlog
+from ..mesonlib import MachineChoice, PerMachine
+
+if T.TYPE_CHECKING:
+ from ..compilers.rust import RustCompiler
+
+class ClippyDriver:
+ def __init__(self, build: build.Build, tempdir: str):
+ self.tools: PerMachine[T.List[str]] = PerMachine([], [])
+ self.warned: T.DefaultDict[str, bool] = defaultdict(lambda: False)
+ self.tempdir = tempdir
+ for machine in MachineChoice:
+ compilers = build.environment.coredata.compilers[machine]
+ if 'rust' in compilers:
+ compiler = T.cast('RustCompiler', compilers['rust'])
+ self.tools[machine] = compiler.get_rust_tool('clippy-driver', build.environment)
+
+ def warn_missing_clippy(self, machine: str) -> None:
+ if self.warned[machine]:
+ return
+ mlog.warning(f'clippy-driver not found for {machine} machine')
+ self.warned[machine] = True
+
+ def __call__(self, target: T.Dict[str, T.Any]) -> T.Iterable[T.Coroutine[None, None, int]]:
+ for src_block in target['target_sources']:
+ if 'compiler' in src_block and src_block['language'] == 'rust':
+ clippy = getattr(self.tools, src_block['machine'])
+ if not clippy:
+ self.warn_missing_clippy(src_block['machine'])
+ continue
+
+ cmdlist = list(clippy)
+ prev = None
+ lints_cap = None
+ for arg in src_block['parameters']:
+ if prev == '--cap-lints':
+ cmdlist.append(prev)
+ lints_cap = arg
+ prev = None
+ elif prev:
+ prev = None
+ continue
+ if arg in {'--emit', '--out-dir', '--cap-lints'}:
+ prev = arg
+ else:
+ cmdlist.append(arg)
+
+ # no use in running clippy if it wouldn't print anything anyway
+ if lints_cap == 'allow':
+ break
+
+ cmdlist.extend(src_block['sources'])
+ # the default for --emit is to go all the way to linking,
+ # and --emit dep-info= is not enough for clippy to do
+ # enough analysis, so use --emit metadata.
+ cmdlist.append('--emit')
+ cmdlist.append('metadata')
+ cmdlist.append('--out-dir')
+ cmdlist.append(self.tempdir)
+ yield run_with_buffered_output(cmdlist)
+
+def run(args: T.List[str]) -> int:
+ os.chdir(args[0])
+ build_data = build.load(os.getcwd())
+ with tempfile.TemporaryDirectory() as d:
+ return run_tool_on_targets(ClippyDriver(build_data, d))
diff --git a/mesonbuild/scripts/depaccumulate.py b/mesonbuild/scripts/depaccumulate.py
new file mode 100644
index 000000000000..7576390d4380
--- /dev/null
+++ b/mesonbuild/scripts/depaccumulate.py
@@ -0,0 +1,129 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2021-2024 Intel Corporation
+
+"""Accumulator for p1689r5 module dependencies.
+
+See: https://www.open-std.org/jtc1/sc22/wg21/docs/papers/2022/p1689r5.html
+"""
+
+from __future__ import annotations
+import json
+import re
+import textwrap
+import typing as T
+
+if T.TYPE_CHECKING:
+ from .depscan import Description, Rule
+
+# The quoting logic has been copied from the ninjabackend to avoid having to
+# import half of Meson just to quote outputs, which is a performance problem
+_QUOTE_PAT = re.compile(r'[$ :\n]')
+
+
+def quote(text: str) -> str:
+ # Fast path for when no quoting is necessary
+ if not _QUOTE_PAT.search(text):
+ return text
+ if '\n' in text:
+ errmsg = textwrap.dedent(f'''\
+ Ninja does not support newlines in rules. The content was:
+
+ {text}
+
+ Please report this error with a test case to the Meson bug tracker.''')
+ raise RuntimeError(errmsg)
+ return _QUOTE_PAT.sub(r'$\g<0>', text)
+
+
+_PROVIDER_CACHE: T.Dict[str, str] = {}
+
+
+def get_provider(rules: T.List[Rule], name: str) -> T.Optional[str]:
+ """Get the object that a module from another Target provides
+
+ We must rely on the object file here instead of the module itself, because
+ the object rule is part of the generated build.ninja, while the module is
+    only declared inside a dyndep. This creates a requirement for the dyndep
+    generator to depend on previous dyndeps as order deps. Since the module
+    interface file will be generated when the object is generated, we can rely
+    on that as a proxy and simplify generation.
+
+ :param rules: The list of rules to check
+ :param name: The logical-name to look for
+ :raises RuntimeError: If no provider can be found
+ :return: The object file of the rule providing the module
+ """
+ # Cache the result for performance reasons
+ if name in _PROVIDER_CACHE:
+ return _PROVIDER_CACHE[name]
+
+ for r in rules:
+ for p in r.get('provides', []):
+ if p['logical-name'] == name:
+ obj = r['primary-output']
+ _PROVIDER_CACHE[name] = obj
+ return obj
+ return None
+
+
+def process_rules(rules: T.List[Rule],
+ extra_rules: T.List[Rule],
+ ) -> T.Iterable[T.Tuple[str, T.Optional[T.List[str]], T.List[str]]]:
+ """Process the rules for this Target
+
+ :param rules: the rules for this target
+ :param extra_rules: the rules for all of the targets this one links with, to use their provides
+ :yield: A tuple of the output, the exported modules, and the consumed modules
+ """
+ for rule in rules:
+ prov: T.Optional[T.List[str]] = None
+ req: T.List[str] = []
+ if 'provides' in rule:
+ prov = [p['compiled-module-path'] for p in rule['provides']]
+ if 'requires' in rule:
+ for p in rule['requires']:
+ modfile = p.get('compiled-module-path')
+ if modfile is not None:
+ req.append(modfile)
+ else:
+ # We can't error if this is not found because of compiler
+ # provided modules
+ found = get_provider(extra_rules, p['logical-name'])
+ if found:
+ req.append(found)
+ yield rule['primary-output'], prov, req
+
+
+def formatter(files: T.Optional[T.List[str]]) -> str:
+ if files:
+ fmt = ' '.join(quote(f) for f in files)
+ return f'| {fmt}'
+ return ''
+
+
+def gen(outfile: str, desc: Description, extra_rules: T.List[Rule]) -> int:
+ with open(outfile, 'w', encoding='utf-8') as f:
+ f.write('ninja_dyndep_version = 1\n\n')
+
+ for obj, provides, requires in process_rules(desc['rules'], extra_rules):
+ ins = formatter(requires)
+ out = formatter(provides)
+ f.write(f'build {quote(obj)} {out}: dyndep {ins}\n\n')
+
+ return 0
+
+
+def run(args: T.List[str]) -> int:
+ assert len(args) >= 2, 'got wrong number of arguments!'
+ outfile, jsonfile, *jsondeps = args
+ with open(jsonfile, 'r', encoding='utf-8') as f:
+ desc: Description = json.load(f)
+
+ # All rules, necessary for fulfilling across TU and target boundaries
+ rules = desc['rules'].copy()
+ for dep in jsondeps:
+ with open(dep, encoding='utf-8') as f:
+ d: Description = json.load(f)
+ rules.extend(d['rules'])
+
+ return gen(outfile, desc, rules)
diff --git a/mesonbuild/scripts/depscan.py b/mesonbuild/scripts/depscan.py
index 44e805447713..6bd5cde9aac0 100644
--- a/mesonbuild/scripts/depscan.py
+++ b/mesonbuild/scripts/depscan.py
@@ -1,22 +1,60 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2020 The Meson development team
-# Copyright © 2023 Intel Corporation
+# Copyright © 2023-2024 Intel Corporation
from __future__ import annotations
import collections
+import json
import os
import pathlib
import pickle
import re
import typing as T
-from ..backend.ninjabackend import ninja_quote
-
if T.TYPE_CHECKING:
- from typing_extensions import Literal
+ from typing_extensions import Literal, TypedDict, NotRequired
from ..backend.ninjabackend import TargetDependencyScannerInfo
+ Require = TypedDict(
+ 'Require',
+ {
+ 'logical-name': str,
+ 'compiled-module-path': NotRequired[str],
+ 'source-path': NotRequired[str],
+ 'unique-on-source-path': NotRequired[bool],
+ 'lookup-method': NotRequired[Literal['by-name', 'include-angle', 'include-quote']]
+ },
+ )
+
+ Provide = TypedDict(
+ 'Provide',
+ {
+ 'logical-name': str,
+ 'compiled-module-path': NotRequired[str],
+ 'source-path': NotRequired[str],
+ 'unique-on-source-path': NotRequired[bool],
+ 'is-interface': NotRequired[bool],
+ },
+ )
+
+ Rule = TypedDict(
+ 'Rule',
+ {
+ 'primary-output': NotRequired[str],
+ 'outputs': NotRequired[T.List[str]],
+ 'provides': NotRequired[T.List[Provide]],
+ 'requires': NotRequired[T.List[Require]],
+ }
+ )
+
+ class Description(TypedDict):
+
+ version: int
+ revision: int
+ rules: T.List[Rule]
+
+
CPP_IMPORT_RE = re.compile(r'\w*import ([a-zA-Z0-9]+);')
CPP_EXPORT_RE = re.compile(r'\w*export module ([a-zA-Z0-9]+);')
@@ -37,7 +75,7 @@ def __init__(self, pickle_file: str, outfile: str):
self.sources = self.target_data.sources
self.provided_by: T.Dict[str, str] = {}
self.exports: T.Dict[str, str] = {}
- self.needs: collections.defaultdict[str, T.List[str]] = collections.defaultdict(list)
+ self.imports: collections.defaultdict[str, T.List[str]] = collections.defaultdict(list)
self.sources_with_exports: T.List[str] = []
def scan_file(self, fname: str, lang: Literal['cpp', 'fortran']) -> None:
@@ -58,7 +96,7 @@ def scan_fortran_file(self, fname: str) -> None:
# In Fortran you have an using declaration also for the module
# you define in the same file. Prevent circular dependencies.
if needed not in modules_in_this_file:
- self.needs[fname].append(needed)
+ self.imports[fname].append(needed)
if export_match:
exported_module = export_match.group(1).lower()
assert exported_module not in modules_in_this_file
@@ -89,7 +127,7 @@ def scan_fortran_file(self, fname: str) -> None:
# submodule (a1:a2) a3 <- requires a1@a2.smod
#
# a3 does not depend on the a1 parent module directly, only transitively.
- self.needs[fname].append(parent_module_name_full)
+ self.imports[fname].append(parent_module_name_full)
def scan_cpp_file(self, fname: str) -> None:
fpath = pathlib.Path(fname)
@@ -98,7 +136,7 @@ def scan_cpp_file(self, fname: str) -> None:
export_match = CPP_EXPORT_RE.match(line)
if import_match:
needed = import_match.group(1)
- self.needs[fname].append(needed)
+ self.imports[fname].append(needed)
if export_match:
exported_module = export_match.group(1)
if exported_module in self.provided_by:
@@ -123,47 +161,44 @@ def module_name_for(self, src: str, lang: Literal['cpp', 'fortran']) -> str:
def scan(self) -> int:
for s, lang in self.sources:
self.scan_file(s, lang)
- with open(self.outfile, 'w', encoding='utf-8') as ofile:
- ofile.write('ninja_dyndep_version = 1\n')
- for src, lang in self.sources:
- objfilename = self.target_data.source2object[src]
- mods_and_submods_needed = []
- module_files_generated = []
- module_files_needed = []
- if src in self.sources_with_exports:
- module_files_generated.append(self.module_name_for(src, lang))
- if src in self.needs:
- for modname in self.needs[src]:
- if modname not in self.provided_by:
- # Nothing provides this module, we assume that it
- # comes from a dependency library somewhere and is
- # already built by the time this compilation starts.
- pass
- else:
- mods_and_submods_needed.append(modname)
-
- for modname in mods_and_submods_needed:
- provider_src = self.provided_by[modname]
- provider_modfile = self.module_name_for(provider_src, lang)
- # Prune self-dependencies
- if provider_src != src:
- module_files_needed.append(provider_modfile)
-
- quoted_objfilename = ninja_quote(objfilename, True)
- quoted_module_files_generated = [ninja_quote(x, True) for x in module_files_generated]
- quoted_module_files_needed = [ninja_quote(x, True) for x in module_files_needed]
- if quoted_module_files_generated:
- mod_gen = '| ' + ' '.join(quoted_module_files_generated)
- else:
- mod_gen = ''
- if quoted_module_files_needed:
- mod_dep = '| ' + ' '.join(quoted_module_files_needed)
- else:
- mod_dep = ''
- build_line = 'build {} {}: dyndep {}'.format(quoted_objfilename,
- mod_gen,
- mod_dep)
- ofile.write(build_line + '\n')
+ description: Description = {
+ 'version': 1,
+ 'revision': 0,
+ 'rules': [],
+ }
+ for src, lang in self.sources:
+ rule: Rule = {
+ 'primary-output': self.target_data.source2object[src],
+ 'requires': [],
+ 'provides': [],
+ }
+ if src in self.sources_with_exports:
+ rule['outputs'] = [self.module_name_for(src, lang)]
+ if src in self.imports:
+ for modname in self.imports[src]:
+ provider_src = self.provided_by.get(modname)
+ if provider_src == src:
+ continue
+ rule['requires'].append({
+ 'logical-name': modname,
+ })
+ if provider_src:
+ rule['requires'][-1].update({
+ 'source-path': provider_src,
+ 'compiled-module-path': self.module_name_for(provider_src, lang),
+ })
+ if src in self.exports:
+ modname = self.exports[src]
+ rule['provides'].append({
+ 'logical-name': modname,
+ 'source-path': src,
+ 'compiled-module-path': self.module_name_for(src, lang),
+ })
+ description['rules'].append(rule)
+
+ with open(self.outfile, 'w', encoding='utf-8') as f:
+ json.dump(description, f)
+
return 0
def run(args: T.List[str]) -> int:
diff --git a/mesonbuild/scripts/externalproject.py b/mesonbuild/scripts/externalproject.py
index ce49fbcbf26e..4013b0acf233 100644
--- a/mesonbuild/scripts/externalproject.py
+++ b/mesonbuild/scripts/externalproject.py
@@ -5,12 +5,11 @@
import os
import argparse
-import multiprocessing
import subprocess
from pathlib import Path
import typing as T
-from ..mesonlib import Popen_safe, split_args
+from ..mesonlib import Popen_safe, split_args, determine_worker_count
class ExternalProject:
def __init__(self, options: argparse.Namespace):
@@ -48,7 +47,7 @@ def supports_jobs_flag(self) -> bool:
def build(self) -> int:
make_cmd = self.make.copy()
if self.supports_jobs_flag():
- make_cmd.append(f'-j{multiprocessing.cpu_count()}')
+ make_cmd.append(f'-j{determine_worker_count()}')
rc = self._run('build', make_cmd)
if rc != 0:
return rc
diff --git a/mesonbuild/scripts/gtkdochelper.py b/mesonbuild/scripts/gtkdochelper.py
index 06844289fc1a..a0b090584f63 100644
--- a/mesonbuild/scripts/gtkdochelper.py
+++ b/mesonbuild/scripts/gtkdochelper.py
@@ -7,7 +7,10 @@
import subprocess
import shutil
import argparse
-from ..mesonlib import MesonException, Popen_safe, is_windows, is_cygwin, split_args
+from ..mesonlib import (
+ MesonException, Popen_safe, is_windows, is_cygwin, is_parent_path,
+ split_args,
+)
from . import destdir_join
import typing as T
@@ -112,7 +115,7 @@ def build_gtkdoc(source_root: str, build_root: str, doc_subdir: str, src_subdirs
# FIXME: Use mesonlib.File objects so we don't need to do this
if not os.path.isabs(f):
f = os.path.join(doc_src, f)
- elif os.path.commonpath([f, build_root]) == build_root:
+ elif is_parent_path(build_root, f):
continue
shutil.copyfile(f, os.path.join(abs_out, os.path.basename(f)))
diff --git a/mesonbuild/scripts/python_info.py b/mesonbuild/scripts/python_info.py
index d3bf71b5e294..21d42b99326a 100755
--- a/mesonbuild/scripts/python_info.py
+++ b/mesonbuild/scripts/python_info.py
@@ -101,7 +101,7 @@ def links_against_libpython():
except Exception:
pass
-# pypy supports modules targetting the limited api but
+# pypy supports modules targeting the limited api but
# does not use a special suffix to distinguish them:
# https://doc.pypy.org/en/latest/cpython_differences.html#permitted-abi-tags-in-extensions
if is_pypy:
diff --git a/mesonbuild/scripts/regen_checker.py b/mesonbuild/scripts/regen_checker.py
index fc69ed7d50a1..9874607334e6 100644
--- a/mesonbuild/scripts/regen_checker.py
+++ b/mesonbuild/scripts/regen_checker.py
@@ -44,7 +44,7 @@ def run(args: T.List[str]) -> int:
with open(coredata_file, 'rb') as f:
coredata = pickle.load(f)
assert isinstance(coredata, CoreData)
- backend = coredata.get_option(OptionKey('backend'))
+ backend = coredata.optstore.get_value_for(OptionKey('backend'))
assert isinstance(backend, str)
regen_timestamp = os.stat(dumpfile).st_mtime
if need_regen(regeninfo, regen_timestamp):
diff --git a/mesonbuild/scripts/run_tool.py b/mesonbuild/scripts/run_tool.py
index a84de15b12df..e206ff7fe8d7 100644
--- a/mesonbuild/scripts/run_tool.py
+++ b/mesonbuild/scripts/run_tool.py
@@ -3,17 +3,86 @@
from __future__ import annotations
-import itertools
+import asyncio.subprocess
import fnmatch
-import concurrent.futures
+import itertools
+import json
+import signal
+import sys
from pathlib import Path
+from .. import mlog
from ..compilers import lang_suffixes
-from ..mesonlib import quiet_git
+from ..mesonlib import quiet_git, join_args, determine_worker_count
+from ..mtest import complete_all
import typing as T
-if T.TYPE_CHECKING:
- import subprocess
+Info = T.TypeVar("Info")
+
+async def run_with_buffered_output(cmdlist: T.List[str]) -> int:
+    """Run the command in cmdlist, buffering its output so that output
+    from concurrent child processes is not interleaved. Kill the child
+    process on cancellation."""
+ quoted_cmdline = join_args(cmdlist)
+ p: T.Optional[asyncio.subprocess.Process] = None
+ try:
+ p = await asyncio.create_subprocess_exec(*cmdlist,
+ stdin=asyncio.subprocess.DEVNULL,
+ stdout=asyncio.subprocess.PIPE,
+ stderr=asyncio.subprocess.STDOUT)
+ stdo, _ = await p.communicate()
+ except FileNotFoundError as e:
+ print(mlog.blue('>>>'), quoted_cmdline, file=sys.stderr)
+ print(mlog.red('not found:'), e.filename, file=sys.stderr)
+ return 1
+ except asyncio.CancelledError:
+ if p:
+ p.kill()
+ await p.wait()
+ return p.returncode or 1
+ else:
+ return 0
+
+ if stdo:
+ print(mlog.blue('>>>'), quoted_cmdline, flush=True)
+ sys.stdout.buffer.write(stdo)
+ return p.returncode
+
+async def _run_workers(infos: T.Iterable[Info],
+ fn: T.Callable[[Info], T.Iterable[T.Coroutine[None, None, int]]]) -> int:
+ futures: T.List[asyncio.Future[int]] = []
+ semaphore = asyncio.Semaphore(determine_worker_count())
+
+ async def run_one(worker_coro: T.Coroutine[None, None, int]) -> int:
+ try:
+ async with semaphore:
+ return await worker_coro
+ except asyncio.CancelledError as e:
+ worker_coro.throw(e)
+ return await worker_coro
+
+ def sigterm_handler() -> None:
+ for f in futures:
+ f.cancel()
+
+ if sys.platform != 'win32':
+ loop = asyncio.get_running_loop()
+ loop.add_signal_handler(signal.SIGINT, sigterm_handler)
+ loop.add_signal_handler(signal.SIGTERM, sigterm_handler)
+
+ for i in infos:
+ futures.extend((asyncio.ensure_future(run_one(x)) for x in fn(i)))
+ if not futures:
+ return 0
+
+ try:
+ await complete_all(futures)
+ except BaseException:
+ for f in futures:
+ f.cancel()
+ raise
+
+ return max(f.result() for f in futures if f.done() and not f.cancelled())
def parse_pattern_file(fname: Path) -> T.List[str]:
patterns = []
@@ -27,7 +96,7 @@ def parse_pattern_file(fname: Path) -> T.List[str]:
pass
return patterns
-def run_tool(name: str, srcdir: Path, builddir: Path, fn: T.Callable[..., subprocess.CompletedProcess], *args: T.Any) -> int:
+def all_clike_files(name: str, srcdir: Path, builddir: Path) -> T.Iterable[Path]:
patterns = parse_pattern_file(srcdir / f'.{name}-include')
globs: T.Union[T.List[T.List[Path]], T.List[T.Generator[Path, None, None]]]
if patterns:
@@ -44,29 +113,26 @@ def run_tool(name: str, srcdir: Path, builddir: Path, fn: T.Callable[..., subpro
suffixes = set(lang_suffixes['c']).union(set(lang_suffixes['cpp']))
suffixes.add('h')
suffixes = {f'.{s}' for s in suffixes}
- futures = []
- returncode = 0
- e = concurrent.futures.ThreadPoolExecutor()
- try:
- for f in itertools.chain(*globs):
- strf = str(f)
- if f.is_dir() or f.suffix not in suffixes or \
- any(fnmatch.fnmatch(strf, i) for i in ignore):
- continue
- futures.append(e.submit(fn, f, *args))
- concurrent.futures.wait(
- futures,
- return_when=concurrent.futures.FIRST_EXCEPTION
- )
- finally:
- # We try to prevent new subprocesses from being started by canceling
- # the futures, but this is not water-tight: some may have started
- # between the wait being interrupted or exited and the futures being
- # canceled. (A fundamental fix would probably require the ability to
- # terminate such subprocesses upon cancellation of the future.)
- for x in futures: # Python >=3.9: e.shutdown(cancel_futures=True)
- x.cancel()
- e.shutdown()
- if futures:
- returncode = max(x.result().returncode for x in futures)
- return returncode
+ for f in itertools.chain.from_iterable(globs):
+ strf = str(f)
+ if f.is_dir() or f.suffix not in suffixes or \
+ any(fnmatch.fnmatch(strf, i) for i in ignore):
+ continue
+ yield f
+
+def run_clang_tool(name: str, srcdir: Path, builddir: Path, fn: T.Callable[..., T.Coroutine[None, None, int]], *args: T.Any) -> int:
+ if sys.platform == 'win32':
+ asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy())
+
+ def wrapper(path: Path) -> T.Iterable[T.Coroutine[None, None, int]]:
+ yield fn(path, *args)
+ return asyncio.run(_run_workers(all_clike_files(name, srcdir, builddir), wrapper))
+
+def run_tool_on_targets(fn: T.Callable[[T.Dict[str, T.Any]],
+ T.Iterable[T.Coroutine[None, None, int]]]) -> int:
+ if sys.platform == 'win32':
+ asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy())
+
+ with open('meson-info/intro-targets.json', encoding='utf-8') as fp:
+ targets = json.load(fp)
+ return asyncio.run(_run_workers(targets, fn))
diff --git a/mesonbuild/scripts/rustdoc.py b/mesonbuild/scripts/rustdoc.py
new file mode 100644
index 000000000000..f5f74c4e3300
--- /dev/null
+++ b/mesonbuild/scripts/rustdoc.py
@@ -0,0 +1,101 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2024 The Meson development team
+
+from __future__ import annotations
+from collections import defaultdict
+import os
+import tempfile
+import typing as T
+
+from .run_tool import run_tool_on_targets, run_with_buffered_output
+from .. import build, mlog
+from ..mesonlib import MachineChoice, PerMachine
+from ..wrap import WrapMode, wrap
+
+if T.TYPE_CHECKING:
+ from ..compilers.rust import RustCompiler
+
+async def run_and_confirm_success(cmdlist: T.List[str], crate: str) -> int:
+ returncode = await run_with_buffered_output(cmdlist)
+ if returncode == 0:
+ print(mlog.green('Generated'), os.path.join('doc', crate))
+ return returncode
+
+class Rustdoc:
+ def __init__(self, build: build.Build, tempdir: str, subprojects: T.Set[str]) -> None:
+ self.tools: PerMachine[T.List[str]] = PerMachine([], [])
+ self.warned: T.DefaultDict[str, bool] = defaultdict(lambda: False)
+ self.tempdir = tempdir
+ self.subprojects = subprojects
+ for machine in MachineChoice:
+ compilers = build.environment.coredata.compilers[machine]
+ if 'rust' in compilers:
+ compiler = T.cast('RustCompiler', compilers['rust'])
+ self.tools[machine] = compiler.get_rust_tool('rustdoc', build.environment)
+
+ def warn_missing_rustdoc(self, machine: str) -> None:
+ if self.warned[machine]:
+ return
+ mlog.warning(f'rustdoc not found for {machine} machine')
+ self.warned[machine] = True
+
+ def __call__(self, target: T.Dict[str, T.Any]) -> T.Iterable[T.Coroutine[None, None, int]]:
+ if target['subproject'] is not None and target['subproject'] not in self.subprojects:
+ return
+
+ for src_block in target['target_sources']:
+ if 'compiler' in src_block and src_block['language'] == 'rust':
+ rustdoc = getattr(self.tools, src_block['machine'])
+ if not rustdoc:
+ self.warn_missing_rustdoc(src_block['machine'])
+ continue
+
+ cmdlist = list(rustdoc)
+ prev = None
+ crate_name = None
+ is_test = False
+ for arg in src_block['parameters']:
+ if prev:
+ if prev == '--crate-name':
+ cmdlist.extend((prev, arg))
+ crate_name = arg
+ prev = None
+ continue
+
+ if arg == '--test':
+ is_test = True
+ break
+ elif arg in {'--crate-name', '--emit', '--out-dir', '-l'}:
+ prev = arg
+ elif arg != '-g' and not arg.startswith('-l'):
+ cmdlist.append(arg)
+
+ if is_test:
+ # --test has a completely different meaning for rustc and rustdoc;
+ # when using rust.test(), only the non-test target is documented
+ continue
+ if crate_name:
+ cmdlist.extend(src_block['sources'])
+ # Assume documentation is generated for the developer's use
+ cmdlist.append('--document-private-items')
+ cmdlist.append('-o')
+ cmdlist.append('doc')
+ yield run_and_confirm_success(cmdlist, crate_name)
+ else:
+ print(mlog.yellow('Skipping'), target['name'], '(no crate name)')
+
+def get_nonwrap_subprojects(build_data: build.Build) -> T.Set[str]:
+ wrap_resolver = wrap.Resolver(
+ build_data.environment.get_source_dir(),
+ build_data.subproject_dir,
+ wrap_mode=WrapMode.nodownload)
+ return set(sp
+ for sp in build_data.environment.coredata.initialized_subprojects
+ if sp and (sp not in wrap_resolver.wraps or wrap_resolver.wraps[sp].type is None))
+
+def run(args: T.List[str]) -> int:
+ os.chdir(args[0])
+ build_data = build.load(os.getcwd())
+ subproject_list = get_nonwrap_subprojects(build_data)
+ with tempfile.TemporaryDirectory() as d:
+ return run_tool_on_targets(Rustdoc(build_data, d, subproject_list))
diff --git a/mesonbuild/scripts/symbolextractor.py b/mesonbuild/scripts/symbolextractor.py
index 5c45253d5702..b0a07d906553 100644
--- a/mesonbuild/scripts/symbolextractor.py
+++ b/mesonbuild/scripts/symbolextractor.py
@@ -207,7 +207,7 @@ def _get_implib_dllname(impfilename: str) -> T.Tuple[T.List[str], str]:
# var which is the list of library paths MSVC will search for import
# libraries while linking.
for lib in (['lib'], get_tool('llvm-lib')):
- output, e = call_tool_nowarn(lib + ['-list', impfilename])
+ output, e = call_tool_nowarn(lib + ['-list', '-nologo', impfilename])
if output:
# The output is a list of DLLs that each symbol exported by the import
# library is available in. We only build import libraries that point to
@@ -269,7 +269,10 @@ def gen_symbols(libfilename: str, impfilename: str, outfilename: str, cross_host
# In case of cross builds just always relink. In theory we could
# determine the correct toolset, but we would need to use the correct
# `nm`, `readelf`, etc, from the cross info which requires refactoring.
- dummy_syms(outfilename)
+ if cross_host == 'windows' and os.path.isfile(impfilename):
+ windows_syms(impfilename, outfilename)
+ else:
+ dummy_syms(outfilename)
elif mesonlib.is_linux() or mesonlib.is_hurd():
gnu_syms(libfilename, outfilename)
elif mesonlib.is_osx():
diff --git a/mesonbuild/templates/cpptemplates.py b/mesonbuild/templates/cpptemplates.py
index 70e4dd42b2a7..1bfa2ae4fa25 100644
--- a/mesonbuild/templates/cpptemplates.py
+++ b/mesonbuild/templates/cpptemplates.py
@@ -1,17 +1,21 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2019 The Meson development team
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
+import typing as T
from mesonbuild.templates.sampleimpl import FileHeaderImpl
+if T.TYPE_CHECKING:
+ from ..minit import Arguments
hello_cpp_template = '''#include
#define PROJECT_NAME "{project_name}"
int main(int argc, char **argv) {{
- if(argc != 1) {{
+ if (argc != 1) {{
std::cout << argv[0] << "takes no arguments.\\n";
return 1;
}}
@@ -20,13 +24,23 @@
}}
'''
-hello_cpp_meson_template = '''project('{project_name}', 'cpp',
+hello_cpp_meson_template = '''project(
+ '{project_name}',
+ 'cpp',
version : '{version}',
- default_options : ['warning_level=3',
- 'cpp_std=c++14'])
+ meson_version : '>= {meson_version}',
+ default_options : ['warning_level=3', 'cpp_std=c++14'],
+)
-exe = executable('{exe_name}', '{source_name}',
- install : true)
+dependencies = [{dependencies}
+]
+
+exe = executable(
+ '{exe_name}',
+ '{source_name}',
+ install : true,
+ dependencies : dependencies,
+)
test('basic', exe)
'''
@@ -83,7 +97,7 @@ class {utoken}_PUBLIC {class_name} {{
#include
int main(int argc, char **argv) {{
- if(argc != 1) {{
+ if (argc != 1) {{
std::cout << argv[0] << " takes no arguments.\\n";
return 1;
}}
@@ -92,28 +106,45 @@ class {utoken}_PUBLIC {class_name} {{
}}
'''
-lib_cpp_meson_template = '''project('{project_name}', 'cpp',
+lib_cpp_meson_template = '''project(
+ '{project_name}',
+ 'cpp',
version : '{version}',
- default_options : ['warning_level=3', 'cpp_std=c++14'])
+ meson_version : '>= {meson_version}',
+ default_options : ['warning_level=3', 'cpp_std=c++14'],
+)
+
+dependencies = [{dependencies}
+]
# These arguments are only used to build the shared library
# not the executables that use the library.
lib_args = ['-DBUILDING_{utoken}']
-shlib = shared_library('{lib_name}', '{source_file}',
+lib = library(
+ '{lib_name}',
+ '{source_file}',
install : true,
- cpp_args : lib_args,
+ cpp_shared_args : lib_args,
gnu_symbol_visibility : 'hidden',
+ dependencies : dependencies,
)
-test_exe = executable('{test_exe_name}', '{test_source_file}',
- link_with : shlib)
+test_exe = executable(
+ '{test_exe_name}',
+ '{test_source_file}',
+ dependencies : dependencies,
+ link_with : lib,
+)
test('{test_name}', test_exe)
# Make this library usable as a Meson subproject.
{ltoken}_dep = declare_dependency(
- include_directories: include_directories('.'),
- link_with : shlib)
+ include_directories : include_directories('.'),
+ dependencies : dependencies,
+ link_with : lib,
+)
+meson.override_dependency('{project_name}', {ltoken}_dep)
# Make this library usable from the system's
# package manager.
@@ -121,12 +152,9 @@ class {utoken}_PUBLIC {class_name} {{
pkg_mod = import('pkgconfig')
pkg_mod.generate(
- name : '{project_name}',
- filebase : '{ltoken}',
+ lib,
description : 'Meson sample project.',
subdirs : '{header_dir}',
- libraries : shlib,
- version : '{version}',
)
'''
@@ -141,3 +169,7 @@ class CppProject(FileHeaderImpl):
lib_header_template = lib_hpp_template
lib_test_template = lib_cpp_test_template
lib_meson_template = lib_cpp_meson_template
+
+ def __init__(self, args: Arguments):
+ super().__init__(args)
+ self.meson_version = '1.3.0'
diff --git a/mesonbuild/templates/cstemplates.py b/mesonbuild/templates/cstemplates.py
index 4b16b7265ec2..59c718953271 100644
--- a/mesonbuild/templates/cstemplates.py
+++ b/mesonbuild/templates/cstemplates.py
@@ -1,5 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2019 The Meson development team
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
@@ -23,12 +24,23 @@
'''
-hello_cs_meson_template = '''project('{project_name}', 'cs',
+hello_cs_meson_template = '''project(
+ '{project_name}',
+ 'cs',
version : '{version}',
- default_options : ['warning_level=3'])
+ meson_version : '>= {meson_version}',
+ default_options : ['warning_level=3'],
+)
+
+dependencies = [{dependencies}
+]
-exe = executable('{exe_name}', '{source_name}',
- install : true)
+exe = executable(
+ '{exe_name}',
+ '{source_name}',
+ install : true,
+ dependencies : dependencies,
+)
test('basic', exe)
'''
@@ -60,22 +72,39 @@
'''
-lib_cs_meson_template = '''project('{project_name}', 'cs',
+lib_cs_meson_template = '''project(
+ '{project_name}',
+ 'cs',
version : '{version}',
- default_options : ['warning_level=3'])
+ meson_version : '>= {meson_version}',
+ default_options : ['warning_level=3'],
+)
+
+dependencies = [{dependencies}
+]
-stlib = shared_library('{lib_name}', '{source_file}',
+stlib = shared_library(
+ '{lib_name}',
+ '{source_file}',
+ dependencies : dependencies,
install : true,
)
-test_exe = executable('{test_exe_name}', '{test_source_file}',
- link_with : stlib)
+test_exe = executable(
+ '{test_exe_name}',
+ '{test_source_file}',
+ dependencies : dependencies,
+ link_with : stlib,
+)
test('{test_name}', test_exe)
# Make this library usable as a Meson subproject.
{ltoken}_dep = declare_dependency(
- include_directories: include_directories('.'),
- link_with : stlib)
+ include_directories : include_directories('.'),
+ dependencies : dependencies,
+ link_with : stlib,
+)
+meson.override_dependency('{project_name}', {ltoken}_dep)
'''
diff --git a/mesonbuild/templates/ctemplates.py b/mesonbuild/templates/ctemplates.py
index d7616054a814..559cef91b2bf 100644
--- a/mesonbuild/templates/ctemplates.py
+++ b/mesonbuild/templates/ctemplates.py
@@ -1,10 +1,15 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2019 The Meson development team
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
+import typing as T
from mesonbuild.templates.sampleimpl import FileHeaderImpl
+if T.TYPE_CHECKING:
+ from ..minit import Arguments
+
lib_h_template = '''#pragma once
#if defined _WIN32 || defined __CYGWIN__
@@ -43,7 +48,7 @@
#include
int main(int argc, char **argv) {{
- if(argc != 1) {{
+ if (argc != 1) {{
printf("%s takes no arguments.\\n", argv[0]);
return 1;
}}
@@ -51,28 +56,45 @@
}}
'''
-lib_c_meson_template = '''project('{project_name}', 'c',
+lib_c_meson_template = '''project(
+ '{project_name}',
+ 'c',
version : '{version}',
- default_options : ['warning_level=3'])
+ meson_version : '>= {meson_version}',
+ default_options : ['warning_level=3'],
+)
# These arguments are only used to build the shared library
# not the executables that use the library.
lib_args = ['-DBUILDING_{utoken}']
-shlib = shared_library('{lib_name}', '{source_file}',
+dependencies = [{dependencies}
+]
+
+lib = library(
+ '{lib_name}',
+ '{source_file}',
install : true,
- c_args : lib_args,
+ c_shared_args : lib_args,
gnu_symbol_visibility : 'hidden',
+ dependencies : dependencies,
)
-test_exe = executable('{test_exe_name}', '{test_source_file}',
- link_with : shlib)
+test_exe = executable(
+ '{test_exe_name}',
+ '{test_source_file}',
+ dependencies : dependencies,
+ link_with : lib,
+)
test('{test_name}', test_exe)
# Make this library usable as a Meson subproject.
{ltoken}_dep = declare_dependency(
- include_directories: include_directories('.'),
- link_with : shlib)
+ include_directories : include_directories('.'),
+ dependencies : dependencies,
+ link_with : lib,
+)
+meson.override_dependency('{project_name}', {ltoken}_dep)
# Make this library usable from the system's
# package manager.
@@ -80,12 +102,9 @@
pkg_mod = import('pkgconfig')
pkg_mod.generate(
- name : '{project_name}',
- filebase : '{ltoken}',
+ lib,
description : 'Meson sample project.',
subdirs : '{header_dir}',
- libraries : shlib,
- version : '{version}',
)
'''
@@ -94,7 +113,7 @@
#define PROJECT_NAME "{project_name}"
int main(int argc, char **argv) {{
- if(argc != 1) {{
+ if (argc != 1) {{
printf("%s takes no arguments.\\n", argv[0]);
return 1;
}}
@@ -103,12 +122,23 @@
}}
'''
-hello_c_meson_template = '''project('{project_name}', 'c',
+hello_c_meson_template = '''project(
+ '{project_name}',
+ 'c',
+ meson_version : '>= {meson_version}',
version : '{version}',
- default_options : ['warning_level=3'])
+ default_options : ['warning_level=3'],
+)
-exe = executable('{exe_name}', '{source_name}',
- install : true)
+dependencies = [{dependencies}
+]
+
+exe = executable(
+ '{exe_name}',
+ '{source_name}',
+ dependencies : dependencies,
+ install : true,
+)
test('basic', exe)
'''
@@ -124,3 +154,7 @@ class CProject(FileHeaderImpl):
lib_header_template = lib_h_template
lib_test_template = lib_c_test_template
lib_meson_template = lib_c_meson_template
+
+ def __init__(self, args: Arguments):
+ super().__init__(args)
+ self.meson_version = '1.3.0'
diff --git a/mesonbuild/templates/cudatemplates.py b/mesonbuild/templates/cudatemplates.py
index 12eefa5a86ef..252f44a276a9 100644
--- a/mesonbuild/templates/cudatemplates.py
+++ b/mesonbuild/templates/cudatemplates.py
@@ -1,17 +1,22 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2019 The Meson development team
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
+import typing as T
from mesonbuild.templates.sampleimpl import FileHeaderImpl
+if T.TYPE_CHECKING:
+ from ..minit import Arguments
+
hello_cuda_template = '''#include
#define PROJECT_NAME "{project_name}"
int main(int argc, char **argv) {{
- if(argc != 1) {{
+ if (argc != 1) {{
std::cout << argv[0] << " takes no arguments.\\n";
return 1;
}}
@@ -20,13 +25,23 @@
}}
'''
-hello_cuda_meson_template = '''project('{project_name}', ['cuda', 'cpp'],
+hello_cuda_meson_template = '''project(
+ '{project_name}',
+ ['cuda', 'cpp'],
version : '{version}',
- default_options : ['warning_level=3',
- 'cpp_std=c++14'])
+ meson_version : '>= {meson_version}',
+ default_options : ['warning_level=3', 'cpp_std=c++14'],
+)
+
+dependencies = [{dependencies}
+]
-exe = executable('{exe_name}', '{source_name}',
- install : true)
+exe = executable(
+ '{exe_name}',
+ '{source_name}',
+ dependencies : dependencies,
+ install : true,
+)
test('basic', exe)
'''
@@ -83,7 +98,7 @@ class {utoken}_PUBLIC {class_name} {{
#include
int main(int argc, char **argv) {{
- if(argc != 1) {{
+ if (argc != 1) {{
std::cout << argv[0] << " takes no arguments.\\n";
return 1;
}}
@@ -92,28 +107,45 @@ class {utoken}_PUBLIC {class_name} {{
}}
'''
-lib_cuda_meson_template = '''project('{project_name}', ['cuda', 'cpp'],
+lib_cuda_meson_template = '''project(
+ '{project_name}',
+ ['cuda', 'cpp'],
version : '{version}',
- default_options : ['warning_level=3'])
+ meson_version : '>= {meson_version}',
+ default_options : ['warning_level=3'],
+)
# These arguments are only used to build the shared library
# not the executables that use the library.
lib_args = ['-DBUILDING_{utoken}']
-shlib = shared_library('{lib_name}', '{source_file}',
+dependencies = [{dependencies}
+]
+
+lib = library(
+ '{lib_name}',
+ '{source_file}',
install : true,
- cpp_args : lib_args,
+ cpp_shared_args : lib_args,
gnu_symbol_visibility : 'hidden',
+ dependencies : dependencies,
)
-test_exe = executable('{test_exe_name}', '{test_source_file}',
- link_with : shlib)
+test_exe = executable(
+ '{test_exe_name}',
+ '{test_source_file}',
+ link_with : lib,
+ dependencies : dependencies,
+)
test('{test_name}', test_exe)
# Make this library usable as a Meson subproject.
{ltoken}_dep = declare_dependency(
- include_directories: include_directories('.'),
- link_with : shlib)
+ include_directories : include_directories('.'),
+ dependencies : dependencies,
+ link_with : lib,
+)
+meson.override_dependency('{project_name}', {ltoken}_dep)
# Make this library usable from the system's
# package manager.
@@ -121,12 +153,9 @@ class {utoken}_PUBLIC {class_name} {{
pkg_mod = import('pkgconfig')
pkg_mod.generate(
- name : '{project_name}',
- filebase : '{ltoken}',
+ lib,
description : 'Meson sample project.',
subdirs : '{header_dir}',
- libraries : shlib,
- version : '{version}',
)
'''
@@ -141,3 +170,7 @@ class CudaProject(FileHeaderImpl):
lib_header_template = lib_h_template
lib_test_template = lib_cuda_test_template
lib_meson_template = lib_cuda_meson_template
+
+ def __init__(self, args: Arguments):
+ super().__init__(args)
+ self.meson_version = '1.3.0'
diff --git a/mesonbuild/templates/dlangtemplates.py b/mesonbuild/templates/dlangtemplates.py
index 2e9a32915e59..db3bdbf16bff 100644
--- a/mesonbuild/templates/dlangtemplates.py
+++ b/mesonbuild/templates/dlangtemplates.py
@@ -1,5 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2019 The Meson development team
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
@@ -23,12 +24,23 @@
}}
'''
-hello_d_meson_template = '''project('{project_name}', 'd',
- version : '{version}',
- default_options: ['warning_level=3'])
+hello_d_meson_template = '''project(
+ '{project_name}',
+ 'd',
+ version : '{version}',
+ meson_version : '>= {meson_version}',
+ default_options : ['warning_level=3'],
+)
-exe = executable('{exe_name}', '{source_name}',
- install : true)
+dependencies = [{dependencies}
+]
+
+exe = executable(
+ '{exe_name}',
+ '{source_name}',
+ dependencies : dependencies,
+ install : true,
+)
test('basic', exe)
'''
@@ -61,31 +73,51 @@
}}
'''
-lib_d_meson_template = '''project('{project_name}', 'd',
+lib_d_meson_template = '''project(
+ '{project_name}',
+ 'd',
version : '{version}',
- default_options : ['warning_level=3'])
+ meson_version : '>= {meson_version}',
+ default_options : ['warning_level=3'],
+)
+
+dependencies = [{dependencies}
+]
+
-stlib = static_library('{lib_name}', '{source_file}',
+stlib = static_library(
+ '{lib_name}',
+ '{source_file}',
install : true,
gnu_symbol_visibility : 'hidden',
+ dependencies : dependencies,
)
-test_exe = executable('{test_exe_name}', '{test_source_file}',
- link_with : stlib)
+test_exe = executable(
+ '{test_exe_name}',
+ '{test_source_file}',
+ link_with : stlib,
+ dependencies : dependencies,
+)
test('{test_name}', test_exe)
# Make this library usable as a Meson subproject.
{ltoken}_dep = declare_dependency(
- include_directories: include_directories('.'),
- link_with : stlib)
+ include_directories : include_directories('.'),
+ dependencies : dependencies,
+ link_with : stlib,
+)
+meson.override_dependency('{project_name}', {ltoken}_dep)
# Make this library usable from the Dlang
# build system.
dlang_mod = import('dlang')
-if find_program('dub', required: false).found()
- dlang_mod.generate_dub_file(meson.project_name().to_lower(), meson.source_root(),
+if find_program('dub', required : false).found()
+ dlang_mod.generate_dub_file(
+ meson.project_name().to_lower(),
+ meson.source_root(),
name : meson.project_name(),
- license: meson.project_license(),
+ license : meson.project_license(),
sourceFiles : '{source_file}',
description : 'Meson sample project.',
version : '{version}',
diff --git a/mesonbuild/templates/fortrantemplates.py b/mesonbuild/templates/fortrantemplates.py
index 9ac001564c70..7aaa9d39cf40 100644
--- a/mesonbuild/templates/fortrantemplates.py
+++ b/mesonbuild/templates/fortrantemplates.py
@@ -1,10 +1,15 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2019 The Meson development team
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
+import typing as T
from mesonbuild.templates.sampleimpl import FileImpl
+if T.TYPE_CHECKING:
+ from ..minit import Arguments
+
lib_fortran_template = '''
! This procedure will not be exported and is not
! directly callable by users of this library.
@@ -36,37 +41,51 @@
end program
'''
-lib_fortran_meson_template = '''project('{project_name}', 'fortran',
+lib_fortran_meson_template = '''project(
+ '{project_name}',
+ 'fortran',
version : '{version}',
- default_options : ['warning_level=3'])
+ meson_version : '>= {meson_version}',
+ default_options : ['warning_level=3'],
+)
# These arguments are only used to build the shared library
# not the executables that use the library.
lib_args = ['-DBUILDING_{utoken}']
-shlib = shared_library('{lib_name}', '{source_file}',
+dependencies = [{dependencies}
+]
+
+lib = library(
+ '{lib_name}',
+ '{source_file}',
install : true,
- fortran_args : lib_args,
+ fortran_shared_args : lib_args,
gnu_symbol_visibility : 'hidden',
+ dependencies : dependencies,
)
-test_exe = executable('{test_exe_name}', '{test_source_file}',
- link_with : shlib)
+test_exe = executable(
+ '{test_exe_name}',
+ '{test_source_file}',
+ link_with : lib,
+ dependencies : dependencies,
+)
test('{test_name}', test_exe)
# Make this library usable as a Meson subproject.
{ltoken}_dep = declare_dependency(
- include_directories: include_directories('.'),
- link_with : shlib)
+ include_directories : include_directories('.'),
+ dependencies : dependencies,
+ link_with : lib,
+)
+meson.override_dependency('{project_name}', {ltoken}_dep)
pkg_mod = import('pkgconfig')
pkg_mod.generate(
- name : '{project_name}',
- filebase : '{ltoken}',
+ lib,
description : 'Meson sample project.',
subdirs : '{header_dir}',
- libraries : shlib,
- version : '{version}',
)
'''
@@ -80,12 +99,23 @@
end program
'''
-hello_fortran_meson_template = '''project('{project_name}', 'fortran',
+hello_fortran_meson_template = '''project(
+ '{project_name}',
+ 'fortran',
version : '{version}',
- default_options : ['warning_level=3'])
+ meson_version : '>= {meson_version}',
+ default_options : ['warning_level=3'],
+)
-exe = executable('{exe_name}', '{source_name}',
- install : true)
+dependencies = [{dependencies}
+]
+
+exe = executable(
+ '{exe_name}',
+ '{source_name}',
+ dependencies : dependencies,
+ install : true,
+)
test('basic', exe)
'''
@@ -99,3 +129,7 @@ class FortranProject(FileImpl):
lib_template = lib_fortran_template
lib_meson_template = lib_fortran_meson_template
lib_test_template = lib_fortran_test_template
+
+ def __init__(self, args: Arguments):
+ super().__init__(args)
+ self.meson_version = '1.3.0'
diff --git a/mesonbuild/templates/javatemplates.py b/mesonbuild/templates/javatemplates.py
index e229d7add2fe..c30c7f7b5224 100644
--- a/mesonbuild/templates/javatemplates.py
+++ b/mesonbuild/templates/javatemplates.py
@@ -1,5 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2019 The Meson development team
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
@@ -12,7 +13,7 @@
final static String PROJECT_NAME = "{project_name}";
public static void main (String args[]) {{
- if(args.length != 0) {{
+ if (args.length != 0) {{
System.out.println(args + " takes no arguments.");
System.exit(0);
}}
@@ -23,13 +24,24 @@
'''
-hello_java_meson_template = '''project('{project_name}', 'java',
+hello_java_meson_template = '''project(
+ '{project_name}',
+ 'java',
version : '{version}',
- default_options : ['warning_level=3'])
+ meson_version : '>= {meson_version}',
+ default_options : ['warning_level=3'],
+)
+
+dependencies = [{dependencies}
+]
-exe = jar('{exe_name}', '{source_name}',
+exe = jar(
+ '{exe_name}',
+ '{source_name}',
main_class : '{exe_name}',
- install : true)
+ dependencies : dependencies,
+ install : true,
+)
test('basic', exe)
'''
@@ -50,7 +62,7 @@
public class {class_test} {{
public static void main (String args[]) {{
- if(args.length != 0) {{
+ if (args.length != 0) {{
System.out.println(args + " takes no arguments.");
System.exit(1);
}}
@@ -63,24 +75,41 @@
'''
-lib_java_meson_template = '''project('{project_name}', 'java',
+lib_java_meson_template = '''project(
+ '{project_name}',
+ 'java',
version : '{version}',
- default_options : ['warning_level=3'])
+ meson_version : '>= {meson_version}',
+ default_options : ['warning_level=3'],
+)
+
+dependencies = [{dependencies}
+]
-jarlib = jar('{class_name}', '{source_file}',
+jarlib = jar(
+ '{class_name}',
+ '{source_file}',
+ dependencies : dependencies,
main_class : '{class_name}',
install : true,
)
-test_jar = jar('{class_test}', '{test_source_file}',
+test_jar = jar(
+ '{class_test}',
+ '{test_source_file}',
main_class : '{class_test}',
- link_with : jarlib)
+ dependencies : dependencies,
+ link_with : jarlib,
+)
test('{test_name}', test_jar)
# Make this library usable as a Meson subproject.
{ltoken}_dep = declare_dependency(
- include_directories: include_directories('.'),
- link_with : jarlib)
+ include_directories : include_directories('.'),
+ dependencies : dependencies,
+ link_with : jarlib,
+)
+meson.override_dependency('{project_name}', {ltoken}_dep)
'''
diff --git a/mesonbuild/templates/mesontemplates.py b/mesonbuild/templates/mesontemplates.py
index db553c09dd75..23269392fdbe 100644
--- a/mesonbuild/templates/mesontemplates.py
+++ b/mesonbuild/templates/mesontemplates.py
@@ -1,70 +1,20 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2019 The Meson development team
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
import typing as T
+from .samplefactory import sample_generator
+
if T.TYPE_CHECKING:
from ..minit import Arguments
-meson_executable_template = '''project('{project_name}', {language},
- version : '{version}',
- default_options : [{default_options}])
-
-executable('{executable}',
- {sourcespec},{depspec}
- install : true)
-'''
-
-
-meson_jar_template = '''project('{project_name}', '{language}',
- version : '{version}',
- default_options : [{default_options}])
-
-jar('{executable}',
- {sourcespec},{depspec}
- main_class: '{main_class}',
- install : true)
-'''
-
def create_meson_build(options: Arguments) -> None:
- if options.type != 'executable':
- raise SystemExit('\nGenerating a meson.build file from existing sources is\n'
- 'supported only for project type "executable".\n'
- 'Run meson init in an empty directory to create a sample project.')
- default_options = ['warning_level=3']
- if options.language == 'cpp':
- # This shows how to set this very common option.
- default_options += ['cpp_std=c++14']
- # If we get a meson.build autoformatter one day, this code could
- # be simplified quite a bit.
- formatted_default_options = ', '.join(f"'{x}'" for x in default_options)
- sourcespec = ',\n '.join(f"'{x}'" for x in options.srcfiles)
- depspec = ''
- if options.deps:
- depspec = '\n dependencies : [\n '
- depspec += ',\n '.join(f"dependency('{x}')"
- for x in options.deps.split(','))
- depspec += '],'
- if options.language != 'java':
- language = f"'{options.language}'" if options.language != 'vala' else ['c', 'vala']
- content = meson_executable_template.format(project_name=options.name,
- language=language,
- version=options.version,
- executable=options.executable,
- sourcespec=sourcespec,
- depspec=depspec,
- default_options=formatted_default_options)
+ proj = sample_generator(options)
+ if options.type == 'executable':
+ proj.create_executable()
else:
- content = meson_jar_template.format(project_name=options.name,
- language=options.language,
- version=options.version,
- executable=options.executable,
- main_class=options.name,
- sourcespec=sourcespec,
- depspec=depspec,
- default_options=formatted_default_options)
- open('meson.build', 'w', encoding='utf-8').write(content)
- print('Generated meson.build file:\n\n' + content)
+ proj.create_library()
diff --git a/mesonbuild/templates/objcpptemplates.py b/mesonbuild/templates/objcpptemplates.py
index 33bff2d79fcf..1fdfa06a4fa9 100644
--- a/mesonbuild/templates/objcpptemplates.py
+++ b/mesonbuild/templates/objcpptemplates.py
@@ -1,10 +1,15 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2019 The Meson development team
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
+import typing as T
from mesonbuild.templates.sampleimpl import FileHeaderImpl
+if T.TYPE_CHECKING:
+ from ..minit import Arguments
+
lib_h_template = '''#pragma once
#if defined _WIN32 || defined __CYGWIN__
@@ -43,7 +48,7 @@
#import
int main(int argc, char **argv) {{
- if(argc != 1) {{
+ if (argc != 1) {{
std::cout << argv[0] << " takes no arguments." << std::endl;
return 1;
}}
@@ -51,28 +56,45 @@
}}
'''
-lib_objcpp_meson_template = '''project('{project_name}', 'objcpp',
+lib_objcpp_meson_template = '''project(
+ '{project_name}',
+ 'objcpp',
version : '{version}',
- default_options : ['warning_level=3'])
+ meson_version : '>= {meson_version}',
+ default_options : ['warning_level=3'],
+)
+
+dependencies = [{dependencies}
+]
# These arguments are only used to build the shared library
# not the executables that use the library.
lib_args = ['-DBUILDING_{utoken}']
-shlib = shared_library('{lib_name}', '{source_file}',
+lib = library(
+ '{lib_name}',
+ '{source_file}',
install : true,
- objcpp_args : lib_args,
+ objcpp_shared_args : lib_args,
+ dependencies : dependencies,
gnu_symbol_visibility : 'hidden',
)
-test_exe = executable('{test_exe_name}', '{test_source_file}',
- link_with : shlib)
+test_exe = executable(
+ '{test_exe_name}',
+ '{test_source_file}',
+ dependencies : dependencies,
+ link_with : lib,
+)
test('{test_name}', test_exe)
# Make this library usable as a Meson subproject.
{ltoken}_dep = declare_dependency(
- include_directories: include_directories('.'),
- link_with : shlib)
+ include_directories : include_directories('.'),
+ dependencies : dependencies,
+ link_with : lib,
+)
+meson.override_dependency('{project_name}', {ltoken}_dep)
# Make this library usable from the system's
# package manager.
@@ -80,12 +102,9 @@
pkg_mod = import('pkgconfig')
pkg_mod.generate(
- name : '{project_name}',
- filebase : '{ltoken}',
+ lib,
description : 'Meson sample project.',
subdirs : '{header_dir}',
- libraries : shlib,
- version : '{version}',
)
'''
@@ -94,7 +113,7 @@
#define PROJECT_NAME "{project_name}"
int main(int argc, char **argv) {{
- if(argc != 1) {{
+ if (argc != 1) {{
std::cout << argv[0] << " takes no arguments." << std::endl;
return 1;
}}
@@ -103,12 +122,23 @@
}}
'''
-hello_objcpp_meson_template = '''project('{project_name}', 'objcpp',
+hello_objcpp_meson_template = '''project(
+ '{project_name}',
+ 'objcpp',
version : '{version}',
- default_options : ['warning_level=3'])
+ meson_version : '>= {meson_version}',
+ default_options : ['warning_level=3'],
+)
-exe = executable('{exe_name}', '{source_name}',
- install : true)
+dependencies = [{dependencies}
+]
+
+exe = executable(
+ '{exe_name}',
+ '{source_name}',
+ dependencies : dependencies,
+ install : true,
+)
test('basic', exe)
'''
@@ -124,3 +154,7 @@ class ObjCppProject(FileHeaderImpl):
lib_header_template = lib_h_template
lib_test_template = lib_objcpp_test_template
lib_meson_template = lib_objcpp_meson_template
+
+ def __init__(self, args: Arguments):
+ super().__init__(args)
+ self.meson_version = '1.3.0'
diff --git a/mesonbuild/templates/objctemplates.py b/mesonbuild/templates/objctemplates.py
index 8f46d91fd9b2..5603bae3bd76 100644
--- a/mesonbuild/templates/objctemplates.py
+++ b/mesonbuild/templates/objctemplates.py
@@ -1,10 +1,15 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2019 The Meson development team
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
+import typing as T
from mesonbuild.templates.sampleimpl import FileHeaderImpl
+if T.TYPE_CHECKING:
+ from ..minit import Arguments
+
lib_h_template = '''#pragma once
#if defined _WIN32 || defined __CYGWIN__
@@ -43,7 +48,7 @@
#import
int main(int argc, char **argv) {{
- if(argc != 1) {{
+ if (argc != 1) {{
printf("%s takes no arguments.\\n", argv[0]);
return 1;
}}
@@ -51,28 +56,44 @@
}}
'''
-lib_objc_meson_template = '''project('{project_name}', 'objc',
+lib_objc_meson_template = '''project(
+ '{project_name}',
+ 'objc',
version : '{version}',
- default_options : ['warning_level=3'])
+ meson_version : '>= {meson_version}',
+ default_options : ['warning_level=3'],
+)
+
+dependencies = [{dependencies}
+]
# These arguments are only used to build the shared library
# not the executables that use the library.
lib_args = ['-DBUILDING_{utoken}']
-shlib = shared_library('{lib_name}', '{source_file}',
+lib = library(
+ '{lib_name}',
+ '{source_file}',
install : true,
- objc_args : lib_args,
+ objc_shared_args : lib_args,
+ dependencies : dependencies,
gnu_symbol_visibility : 'hidden',
)
-test_exe = executable('{test_exe_name}', '{test_source_file}',
- link_with : shlib)
+test_exe = executable(
+ '{test_exe_name}',
+ '{test_source_file}',
+ dependencies : dependencies,
+ link_with : lib)
test('{test_name}', test_exe)
# Make this library usable as a Meson subproject.
{ltoken}_dep = declare_dependency(
- include_directories: include_directories('.'),
- link_with : shlib)
+ include_directories : include_directories('.'),
+ dependencies : dependencies,
+ link_with : lib,
+)
+meson.override_dependency('{project_name}', {ltoken}_dep)
# Make this library usable from the system's
# package manager.
@@ -80,12 +101,9 @@
pkg_mod = import('pkgconfig')
pkg_mod.generate(
- name : '{project_name}',
- filebase : '{ltoken}',
+ lib,
description : 'Meson sample project.',
subdirs : '{header_dir}',
- libraries : shlib,
- version : '{version}',
)
'''
@@ -94,7 +112,7 @@
#define PROJECT_NAME "{project_name}"
int main(int argc, char **argv) {{
- if(argc != 1) {{
+ if (argc != 1) {{
printf("%s takes no arguments.\\n", argv[0]);
return 1;
}}
@@ -103,12 +121,23 @@
}}
'''
-hello_objc_meson_template = '''project('{project_name}', 'objc',
+hello_objc_meson_template = '''project(
+ '{project_name}',
+ 'objc',
version : '{version}',
- default_options : ['warning_level=3'])
+ meson_version : '>= {meson_version}',
+ default_options : ['warning_level=3'],
+)
-exe = executable('{exe_name}', '{source_name}',
- install : true)
+dependencies = [{dependencies}
+]
+
+exe = executable(
+ '{exe_name}',
+ '{source_name}',
+ dependencies : dependencies,
+ install : true,
+)
test('basic', exe)
'''
@@ -124,3 +153,7 @@ class ObjCProject(FileHeaderImpl):
lib_header_template = lib_h_template
lib_test_template = lib_objc_test_template
lib_meson_template = lib_objc_meson_template
+
+ def __init__(self, args: Arguments):
+ super().__init__(args)
+ self.meson_version = '1.3.0'
diff --git a/mesonbuild/templates/rusttemplates.py b/mesonbuild/templates/rusttemplates.py
index 1dbf5b614115..ee1f0081dcd6 100644
--- a/mesonbuild/templates/rusttemplates.py
+++ b/mesonbuild/templates/rusttemplates.py
@@ -1,5 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2019 The Meson development team
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
@@ -7,6 +8,9 @@
from mesonbuild.templates.sampleimpl import FileImpl
+if T.TYPE_CHECKING:
+ from ..minit import Arguments
+
lib_rust_template = '''#![crate_name = "{crate_file}"]
@@ -33,20 +37,35 @@
'''
-lib_rust_meson_template = '''project('{project_name}', 'rust',
- version : '{version}', meson_version: '>=1.3.0',
- default_options : ['rust_std=2021', 'warning_level=3'])
+lib_rust_meson_template = '''project(
+ '{project_name}',
+ 'rust',
+ version : '{version}',
+ meson_version : '>= {meson_version}',
+ default_options : ['rust_std=2021', 'warning_level=3'],
+)
rust = import('rust')
-shlib = static_library('{lib_name}', '{source_file}', install : true)
+dependencies = [{dependencies}
+]
+
+lib = static_library(
+ '{lib_name}',
+ '{source_file}',
+ dependencies : dependencies,
+ install : true,
+)
-rust.test('{test_name}', shlib)
+rust.test('{test_name}', lib)
# Make this library usable as a Meson subproject.
{ltoken}_dep = declare_dependency(
- include_directories: include_directories('.'),
- link_with : shlib)
+ include_directories : include_directories('.'),
+ dependencies : dependencies,
+ link_with : lib,
+)
+meson.override_dependency('{project_name}', {ltoken}_dep)
'''
hello_rust_template = '''
@@ -56,12 +75,23 @@
}}
'''
-hello_rust_meson_template = '''project('{project_name}', 'rust',
- version : '{version}', meson_version: '>=1.3.0',
- default_options : ['rust_std=2021', 'warning_level=3'])
+hello_rust_meson_template = '''project(
+ '{project_name}',
+ 'rust',
+ version : '{version}',
+ meson_version : '>= {meson_version}',
+ default_options : ['rust_std=2021', 'warning_level=3'],
+)
-exe = executable('{exe_name}', '{source_name}',
- install : true)
+dependencies = [{dependencies}
+]
+
+exe = executable(
+ '{exe_name}',
+ '{source_name}',
+ dependencies : dependencies,
+ install : true,
+)
test('basic', exe)
'''
@@ -76,6 +106,10 @@ class RustProject(FileImpl):
lib_test_template = None
lib_meson_template = lib_rust_meson_template
+ def __init__(self, args: Arguments):
+ super().__init__(args)
+ self.meson_version = '1.3.0'
+
def lib_kwargs(self) -> T.Dict[str, str]:
kwargs = super().lib_kwargs()
kwargs['crate_file'] = self.lowercase_token
diff --git a/mesonbuild/templates/samplefactory.py b/mesonbuild/templates/samplefactory.py
index 0083c614a36d..438f90c9ac32 100644
--- a/mesonbuild/templates/samplefactory.py
+++ b/mesonbuild/templates/samplefactory.py
@@ -1,5 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2019 The Meson development team
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
diff --git a/mesonbuild/templates/sampleimpl.py b/mesonbuild/templates/sampleimpl.py
index c222a1bf9aa7..d033f3c143d5 100644
--- a/mesonbuild/templates/sampleimpl.py
+++ b/mesonbuild/templates/sampleimpl.py
@@ -1,9 +1,11 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2019 The Meson development team
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
import abc
+import os
import re
import typing as T
@@ -19,6 +21,9 @@ def __init__(self, args: Arguments):
self.lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
self.uppercase_token = self.lowercase_token.upper()
self.capitalized_token = self.lowercase_token.capitalize()
+ self.meson_version = '1.0.0'
+ self.force = args.force
+ self.dependencies = args.deps.split(',') if args.deps else []
@abc.abstractmethod
def create_executable(self) -> None:
@@ -28,30 +33,39 @@ def create_executable(self) -> None:
def create_library(self) -> None:
pass
- @abc.abstractproperty
+ @property
+ @abc.abstractmethod
def exe_template(self) -> str:
pass
- @abc.abstractproperty
+ @property
+ @abc.abstractmethod
def exe_meson_template(self) -> str:
pass
- @abc.abstractproperty
+ @property
+ @abc.abstractmethod
def lib_template(self) -> str:
pass
- @abc.abstractproperty
+ @property
+ @abc.abstractmethod
def lib_test_template(self) -> T.Optional[str]:
pass
- @abc.abstractproperty
+ @property
+ @abc.abstractmethod
def lib_meson_template(self) -> str:
pass
- @abc.abstractproperty
+ @property
+ @abc.abstractmethod
def source_ext(self) -> str:
pass
+ def _format_dependencies(self) -> str:
+ return ''.join(f"\n dependency('{d}')," for d in self.dependencies)
+
class ClassImpl(SampleImpl):
@@ -59,14 +73,18 @@ class ClassImpl(SampleImpl):
def create_executable(self) -> None:
source_name = f'{self.capitalized_token}.{self.source_ext}'
- with open(source_name, 'w', encoding='utf-8') as f:
- f.write(self.exe_template.format(project_name=self.name,
- class_name=self.capitalized_token))
- with open('meson.build', 'w', encoding='utf-8') as f:
- f.write(self.exe_meson_template.format(project_name=self.name,
- exe_name=self.name,
- source_name=source_name,
- version=self.version))
+ if not os.path.exists(source_name):
+ with open(source_name, 'w', encoding='utf-8') as f:
+ f.write(self.exe_template.format(project_name=self.name,
+ class_name=self.capitalized_token))
+ if self.force or not os.path.exists('meson.build'):
+ with open('meson.build', 'w', encoding='utf-8') as f:
+ f.write(self.exe_meson_template.format(project_name=self.name,
+ exe_name=self.name,
+ source_name=source_name,
+ version=self.version,
+ meson_version=self.meson_version,
+ dependencies=self._format_dependencies()))
def create_library(self) -> None:
lib_name = f'{self.capitalized_token}.{self.source_ext}'
@@ -82,14 +100,18 @@ def create_library(self) -> None:
'lib_name': self.lowercase_token,
'test_name': self.lowercase_token,
'version': self.version,
+ 'meson_version': self.meson_version,
+ 'dependencies': self._format_dependencies(),
}
- with open(lib_name, 'w', encoding='utf-8') as f:
- f.write(self.lib_template.format(**kwargs))
- if self.lib_test_template:
+ if not os.path.exists(lib_name):
+ with open(lib_name, 'w', encoding='utf-8') as f:
+ f.write(self.lib_template.format(**kwargs))
+ if self.lib_test_template and not os.path.exists(test_name):
with open(test_name, 'w', encoding='utf-8') as f:
f.write(self.lib_test_template.format(**kwargs))
- with open('meson.build', 'w', encoding='utf-8') as f:
- f.write(self.lib_meson_template.format(**kwargs))
+ if self.force or not os.path.exists('meson.build'):
+ with open('meson.build', 'w', encoding='utf-8') as f:
+ f.write(self.lib_meson_template.format(**kwargs))
class FileImpl(SampleImpl):
@@ -98,13 +120,17 @@ class FileImpl(SampleImpl):
def create_executable(self) -> None:
source_name = f'{self.lowercase_token}.{self.source_ext}'
- with open(source_name, 'w', encoding='utf-8') as f:
- f.write(self.exe_template.format(project_name=self.name))
- with open('meson.build', 'w', encoding='utf-8') as f:
- f.write(self.exe_meson_template.format(project_name=self.name,
- exe_name=self.name,
- source_name=source_name,
- version=self.version))
+ if not os.path.exists(source_name):
+ with open(source_name, 'w', encoding='utf-8') as f:
+ f.write(self.exe_template.format(project_name=self.name))
+ if self.force or not os.path.exists('meson.build'):
+ with open('meson.build', 'w', encoding='utf-8') as f:
+ f.write(self.exe_meson_template.format(project_name=self.name,
+ exe_name=self.name,
+ source_name=source_name,
+ version=self.version,
+ meson_version=self.meson_version,
+ dependencies=self._format_dependencies()))
def lib_kwargs(self) -> T.Dict[str, str]:
"""Get Language specific keyword arguments
@@ -125,28 +151,34 @@ def lib_kwargs(self) -> T.Dict[str, str]:
'lib_name': self.lowercase_token,
'test_name': self.lowercase_token,
'version': self.version,
+ 'meson_version': self.meson_version,
+ 'dependencies': self._format_dependencies(),
}
def create_library(self) -> None:
lib_name = f'{self.lowercase_token}.{self.source_ext}'
test_name = f'{self.lowercase_token}_test.{self.source_ext}'
kwargs = self.lib_kwargs()
- with open(lib_name, 'w', encoding='utf-8') as f:
- f.write(self.lib_template.format(**kwargs))
- if self.lib_test_template:
+ if not os.path.exists(lib_name):
+ with open(lib_name, 'w', encoding='utf-8') as f:
+ f.write(self.lib_template.format(**kwargs))
+ if self.lib_test_template and not os.path.exists(test_name):
with open(test_name, 'w', encoding='utf-8') as f:
f.write(self.lib_test_template.format(**kwargs))
- with open('meson.build', 'w', encoding='utf-8') as f:
- f.write(self.lib_meson_template.format(**kwargs))
+ if self.force or not os.path.exists('meson.build'):
+ with open('meson.build', 'w', encoding='utf-8') as f:
+ f.write(self.lib_meson_template.format(**kwargs))
class FileHeaderImpl(FileImpl):
- @abc.abstractproperty
+ @property
+ @abc.abstractmethod
def header_ext(self) -> str:
pass
- @abc.abstractproperty
+ @property
+ @abc.abstractmethod
def lib_header_template(self) -> str:
pass
@@ -158,5 +190,6 @@ def lib_kwargs(self) -> T.Dict[str, str]:
def create_library(self) -> None:
super().create_library()
kwargs = self.lib_kwargs()
- with open(kwargs['header_file'], 'w', encoding='utf-8') as f:
- f.write(self.lib_header_template.format_map(kwargs))
+ if not os.path.exists(kwargs['header_file']):
+ with open(kwargs['header_file'], 'w', encoding='utf-8') as f:
+ f.write(self.lib_header_template.format_map(kwargs))
diff --git a/mesonbuild/templates/valatemplates.py b/mesonbuild/templates/valatemplates.py
index 1520de0a708f..b2aab3f31be0 100644
--- a/mesonbuild/templates/valatemplates.py
+++ b/mesonbuild/templates/valatemplates.py
@@ -1,5 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2019 The Meson development team
+# Copyright © 2023-2025 Intel Corporation
from __future__ import annotations
@@ -11,16 +12,24 @@
}}
'''
-hello_vala_meson_template = '''project('{project_name}', ['c', 'vala'],
- version : '{version}')
+hello_vala_meson_template = '''project(
+ '{project_name}',
+ 'vala',
+ meson_version : '>= {meson_version}',
+ version : '{version}',
+)
dependencies = [
- dependency('glib-2.0'),
- dependency('gobject-2.0'),
+ dependency('glib-2.0'),
+ dependency('gobject-2.0'),{dependencies}
]
-exe = executable('{exe_name}', '{source_name}', dependencies : dependencies,
- install : true)
+exe = executable(
+ '{exe_name}',
+ '{source_name}',
+ dependencies : dependencies,
+ install : true,
+)
test('basic', exe)
'''
@@ -46,29 +55,44 @@
}}
'''
-lib_vala_meson_template = '''project('{project_name}', ['c', 'vala'],
- version : '{version}')
+lib_vala_meson_template = '''project(
+ '{project_name}',
+ 'vala',
+ meson_version : '>= {meson_version}',
+ version : '{version}',
+)
dependencies = [
- dependency('glib-2.0'),
- dependency('gobject-2.0'),
+ dependency('glib-2.0'),
+ dependency('gobject-2.0'),{dependencies}
]
# These arguments are only used to build the shared library
# not the executables that use the library.
-shlib = shared_library('foo', '{source_file}',
- dependencies: dependencies,
- install: true,
- install_dir: [true, true, true])
-
-test_exe = executable('{test_exe_name}', '{test_source_file}', dependencies : dependencies,
- link_with : shlib)
+lib = shared_library(
+ 'foo',
+ '{source_file}',
+ dependencies : dependencies,
+ install : true,
+ install_dir : [true, true, true],
+)
+
+test_exe = executable(
+ '{test_exe_name}',
+ '{test_source_file}',
+ dependencies : dependencies,
+ link_with : lib,
+)
test('{test_name}', test_exe)
# Make this library usable as a Meson subproject.
{ltoken}_dep = declare_dependency(
- include_directories: include_directories('.'),
- link_with : shlib)
+ include_directories : include_directories('.'),
+ dependencies : dependencies,
+ link_with : lib,
+)
+meson.override_dependency('{project_name}', {ltoken}_dep)
+
'''
diff --git a/mesonbuild/utils/universal.py b/mesonbuild/utils/universal.py
index 88d8e1f891c7..f3b4355bf6db 100644
--- a/mesonbuild/utils/universal.py
+++ b/mesonbuild/utils/universal.py
@@ -13,6 +13,7 @@
import stat
import time
import abc
+import multiprocessing
import platform, subprocess, operator, os, shlex, shutil, re
import collections
from functools import lru_cache, wraps
@@ -23,6 +24,7 @@
import pickle
import errno
import json
+import dataclasses
from mesonbuild import mlog
from .core import MesonException, HoldableObject
@@ -36,6 +38,7 @@
from ..environment import Environment
from ..compilers.compilers import Compiler
from ..interpreterbase.baseobjects import SubProject
+ from .. import programs
class _EnvPickleLoadable(Protocol):
@@ -94,6 +97,7 @@ class _VerPickleLoadable(Protocol):
'default_sysconfdir',
'detect_subprojects',
'detect_vcs',
+ 'determine_worker_count',
'do_conf_file',
'do_conf_str',
'do_replacement',
@@ -103,6 +107,7 @@ class _VerPickleLoadable(Protocol):
'generate_list',
'get_compiler_for_source',
'get_filenames_templates_dict',
+ 'get_rsp_threshold',
'get_variable_regex',
'get_wine_shortpath',
'git',
@@ -121,12 +126,14 @@ class _VerPickleLoadable(Protocol):
'is_netbsd',
'is_openbsd',
'is_osx',
+ 'is_parent_path',
'is_qnx',
'is_sunos',
'is_windows',
'is_wsl',
'iter_regexin_iter',
'join_args',
+ 'lazy_property',
'listify',
'listify_array_value',
'partition',
@@ -394,19 +401,21 @@ def __repr__(self) -> str:
@staticmethod
@lru_cache(maxsize=None)
- def from_source_file(source_root: str, subdir: str, fname: str) -> 'File':
+ def from_source_file(source_root: str, subdir: str, fname: str) -> File:
if not os.path.isfile(os.path.join(source_root, subdir, fname)):
raise MesonException(f'File {fname} does not exist.')
return File(False, subdir, fname)
@staticmethod
+ @lru_cache(maxsize=None)
def from_built_file(subdir: str, fname: str) -> 'File':
return File(True, subdir, fname)
@staticmethod
+ @lru_cache(maxsize=None)
def from_built_relative(relative: str) -> 'File':
dirpart, fnamepart = os.path.split(relative)
- return File(True, dirpart, fnamepart)
+ return File.from_built_file(dirpart, fnamepart)
@staticmethod
def from_absolute_file(fname: str) -> 'File':
@@ -473,6 +482,10 @@ def classify_unity_sources(compilers: T.Iterable['Compiler'], sources: T.Sequenc
return compsrclist
+MACHINE_NAMES = ['build', 'host']
+MACHINE_PREFIXES = ['build.', '']
+
+
class MachineChoice(enum.IntEnum):
"""Enum class representing one of the two abstract machine names used in
@@ -486,27 +499,24 @@ def __str__(self) -> str:
return f'{self.get_lower_case_name()} machine'
def get_lower_case_name(self) -> str:
- return PerMachine('build', 'host')[self]
+ return MACHINE_NAMES[self.value]
def get_prefix(self) -> str:
- return PerMachine('build.', '')[self]
+ return MACHINE_PREFIXES[self.value]
+@dataclasses.dataclass(eq=False, order=False)
class PerMachine(T.Generic[_T]):
- def __init__(self, build: _T, host: _T) -> None:
- self.build = build
- self.host = host
+ build: _T
+ host: _T
def __getitem__(self, machine: MachineChoice) -> _T:
- return {
- MachineChoice.BUILD: self.build,
- MachineChoice.HOST: self.host,
- }[machine]
+ return [self.build, self.host][machine.value]
def __setitem__(self, machine: MachineChoice, val: _T) -> None:
setattr(self, machine.get_lower_case_name(), val)
- def miss_defaulting(self) -> "PerMachineDefaultable[T.Optional[_T]]":
+ def miss_defaulting(self) -> PerMachineDefaultable[T.Optional[_T]]:
"""Unset definition duplicated from their previous to None
This is the inverse of ''default_missing''. By removing defaulted
@@ -524,10 +534,8 @@ def assign(self, build: _T, host: _T) -> None:
self.build = build
self.host = host
- def __repr__(self) -> str:
- return f'PerMachine({self.build!r}, {self.host!r})'
-
+@dataclasses.dataclass(eq=False, order=False)
class PerThreeMachine(PerMachine[_T]):
"""Like `PerMachine` but includes `target` too.
@@ -535,9 +543,8 @@ class PerThreeMachine(PerMachine[_T]):
need to computer the `target` field so we don't bother overriding the
`__getitem__`/`__setitem__` methods.
"""
- def __init__(self, build: _T, host: _T, target: _T) -> None:
- super().__init__(build, host)
- self.target = target
+
+ target: _T
def miss_defaulting(self) -> "PerThreeMachineDefaultable[T.Optional[_T]]":
"""Unset definition duplicated from their previous to None
@@ -559,29 +566,23 @@ def miss_defaulting(self) -> "PerThreeMachineDefaultable[T.Optional[_T]]":
def matches_build_machine(self, machine: MachineChoice) -> bool:
return self.build == self[machine]
- def __repr__(self) -> str:
- return f'PerThreeMachine({self.build!r}, {self.host!r}, {self.target!r})'
-
+@dataclasses.dataclass(eq=False, order=False)
class PerMachineDefaultable(PerMachine[T.Optional[_T]]):
"""Extends `PerMachine` with the ability to default from `None`s.
"""
- def __init__(self, build: T.Optional[_T] = None, host: T.Optional[_T] = None) -> None:
- super().__init__(build, host)
- def default_missing(self) -> "PerMachine[_T]":
+ build: T.Optional[_T] = None
+ host: T.Optional[_T] = None
+
+ def default_missing(self) -> PerMachine[_T]:
"""Default host to build
This allows just specifying nothing in the native case, and just host in the
cross non-compiler case.
"""
- freeze = PerMachine(self.build, self.host)
- if freeze.host is None:
- freeze.host = freeze.build
- return freeze
-
- def __repr__(self) -> str:
- return f'PerMachineDefaultable({self.build!r}, {self.host!r})'
+ assert self.build is not None, 'Cannot fill in missing when all fields are empty'
+ return PerMachine(self.build, self.host if self.host is not None else self.build)
@classmethod
def default(cls, is_cross: bool, build: _T, host: _T) -> PerMachine[_T]:
@@ -598,28 +599,24 @@ def default(cls, is_cross: bool, build: _T, host: _T) -> PerMachine[_T]:
return m.default_missing()
+@dataclasses.dataclass(eq=False, order=False)
class PerThreeMachineDefaultable(PerMachineDefaultable[T.Optional[_T]], PerThreeMachine[T.Optional[_T]]):
"""Extends `PerThreeMachine` with the ability to default from `None`s.
"""
- def __init__(self) -> None:
- PerThreeMachine.__init__(self, None, None, None)
- def default_missing(self) -> "PerThreeMachine[T.Optional[_T]]":
+ target: T.Optional[_T] = None
+
+ def default_missing(self) -> PerThreeMachine[_T]:
"""Default host to build and target to host.
This allows just specifying nothing in the native case, just host in the
cross non-compiler case, and just target in the native-built
cross-compiler case.
"""
- freeze = PerThreeMachine(self.build, self.host, self.target)
- if freeze.host is None:
- freeze.host = freeze.build
- if freeze.target is None:
- freeze.target = freeze.host
- return freeze
-
- def __repr__(self) -> str:
- return f'PerThreeMachineDefaultable({self.build!r}, {self.host!r}, {self.target!r})'
+ assert self.build is not None, 'Cannot default a PerMachine when all values are None'
+ host = self.host if self.host is not None else self.build
+ target = self.target if self.target is not None else host
+ return PerThreeMachine(self.build, host, target)
def is_sunos() -> bool:
@@ -751,40 +748,50 @@ def windows_detect_native_arch() -> str:
raise EnvironmentException('Unable to detect native OS architecture')
return arch
-def detect_vcs(source_dir: T.Union[str, Path]) -> T.Optional[T.Dict[str, str]]:
+@dataclasses.dataclass
+class VcsData:
+ name: str
+ cmd: str
+ repo_dir: str
+ get_rev: T.List[str]
+ rev_regex: str
+ dep: str
+ wc_dir: T.Optional[str] = None
+
+def detect_vcs(source_dir: T.Union[str, Path]) -> T.Optional[VcsData]:
vcs_systems = [
- {
- 'name': 'git',
- 'cmd': 'git',
- 'repo_dir': '.git',
- 'get_rev': 'git describe --dirty=+ --always',
- 'rev_regex': '(.*)',
- 'dep': '.git/logs/HEAD'
- },
- {
- 'name': 'mercurial',
- 'cmd': 'hg',
- 'repo_dir': '.hg',
- 'get_rev': 'hg id -i',
- 'rev_regex': '(.*)',
- 'dep': '.hg/dirstate'
- },
- {
- 'name': 'subversion',
- 'cmd': 'svn',
- 'repo_dir': '.svn',
- 'get_rev': 'svn info',
- 'rev_regex': 'Revision: (.*)',
- 'dep': '.svn/wc.db'
- },
- {
- 'name': 'bazaar',
- 'cmd': 'bzr',
- 'repo_dir': '.bzr',
- 'get_rev': 'bzr revno',
- 'rev_regex': '(.*)',
- 'dep': '.bzr'
- },
+ VcsData(
+ name = 'git',
+ cmd = 'git',
+ repo_dir = '.git',
+ get_rev = ['git', 'describe', '--dirty=+', '--always'],
+ rev_regex = '(.*)',
+ dep = '.git/logs/HEAD',
+ ),
+ VcsData(
+ name = 'mercurial',
+ cmd = 'hg',
+ repo_dir = '.hg',
+ get_rev = ['hg', 'id', '-i'],
+ rev_regex = '(.*)',
+            dep = '.hg/dirstate',
+ ),
+ VcsData(
+ name = 'subversion',
+ cmd = 'svn',
+ repo_dir = '.svn',
+ get_rev = ['svn', 'info'],
+ rev_regex = 'Revision: (.*)',
+ dep = '.svn/wc.db',
+ ),
+ VcsData(
+ name = 'bazaar',
+ cmd = 'bzr',
+ repo_dir = '.bzr',
+ get_rev = ['bzr', 'revno'],
+ rev_regex = '(.*)',
+ dep = '.bzr',
+ ),
]
if isinstance(source_dir, str):
source_dir = Path(source_dir)
@@ -795,8 +802,10 @@ def detect_vcs(source_dir: T.Union[str, Path]) -> T.Optional[T.Dict[str, str]]:
parent_paths_and_self.appendleft(source_dir)
for curdir in parent_paths_and_self:
for vcs in vcs_systems:
- if Path.is_dir(curdir.joinpath(vcs['repo_dir'])) and shutil.which(vcs['cmd']):
- vcs['wc_dir'] = str(curdir)
+ repodir = vcs.repo_dir
+ cmd = vcs.cmd
+ if curdir.joinpath(repodir).is_dir() and shutil.which(cmd):
+ vcs.wc_dir = str(curdir)
return vcs
return None
@@ -810,21 +819,18 @@ def current_vs_supports_modules() -> bool:
return True
return vsver.startswith('16.9.0') and '-pre.' in vsver
+_VERSION_TOK_RE = re.compile(r'(\d+)|([a-zA-Z]+)')
+
# a helper class which implements the same version ordering as RPM
class Version:
def __init__(self, s: str) -> None:
self._s = s
- # split into numeric, alphabetic and non-alphanumeric sequences
- sequences1 = re.finditer(r'(\d+|[a-zA-Z]+|[^a-zA-Z\d]+)', s)
-
- # non-alphanumeric separators are discarded
- sequences2 = [m for m in sequences1 if not re.match(r'[^a-zA-Z\d]+', m.group(1))]
-
+ # extract numeric and alphabetic sequences
# numeric sequences are converted from strings to ints
- sequences3 = [int(m.group(1)) if m.group(1).isdigit() else m.group(1) for m in sequences2]
-
- self._v = sequences3
+ self._v = [
+ int(m.group(1)) if m.group(1) else m.group(2)
+ for m in _VERSION_TOK_RE.finditer(s)]
def __str__(self) -> str:
return '{} (V={})'.format(self._s, str(self._v))
@@ -1085,6 +1091,51 @@ def default_sysconfdir() -> str:
return 'etc'
+def determine_worker_count(varnames: T.Optional[T.List[str]] = None) -> int:
+ num_workers = 0
+ varnames = varnames or []
+ # Add MESON_NUM_PROCESSES last, so it will prevail if more than one
+ # variable is present.
+ varnames.append('MESON_NUM_PROCESSES')
+ for varname in varnames:
+ if varname in os.environ:
+ try:
+ num_workers = int(os.environ[varname])
+ if num_workers < 0:
+ raise ValueError
+ except ValueError:
+ print(f'Invalid value in {varname}, using 1 thread.')
+ num_workers = 1
+
+ if num_workers == 0:
+ try:
+ # Fails in some weird environments such as Debian
+ # reproducible build.
+ num_workers = multiprocessing.cpu_count()
+ except Exception:
+ num_workers = 1
+ return num_workers
+
+def is_parent_path(parent: str, trial: str) -> bool:
+ '''Checks if @trial is a file under the directory @parent. Both @trial and @parent should be
+ adequately normalized, though empty and '.' segments in @parent and @trial are accepted
+ and discarded, matching the behavior of os.path.commonpath. Either both or none should
+ be absolute.'''
+ assert os.path.isabs(parent) == os.path.isabs(trial)
+ if is_windows():
+ parent = parent.replace('\\', '/')
+ trial = trial.replace('\\', '/')
+
+ split_parent = parent.split('/')
+ split_trial = trial.split('/')
+
+ split_parent = [c for c in split_parent if c and c != '.']
+ split_trial = [c for c in split_trial if c and c != '.']
+
+ components = len(split_parent)
+ return len(split_trial) >= components and split_trial[:components] == split_parent
+
+
def has_path_sep(name: str, sep: str = '/\\') -> bool:
'Checks if any of the specified @sep path separators are in @name'
for each in sep:
@@ -1173,6 +1224,46 @@ def join_args(args: T.Iterable[str]) -> str:
def do_replacement(regex: T.Pattern[str], line: str,
variable_format: Literal['meson', 'cmake', 'cmake@'],
confdata: T.Union[T.Dict[str, T.Tuple[str, T.Optional[str]]], 'ConfigurationData']) -> T.Tuple[str, T.Set[str]]:
+ if variable_format == 'meson':
+ return do_replacement_meson(regex, line, confdata)
+ elif variable_format in {'cmake', 'cmake@'}:
+ return do_replacement_cmake(regex, line, variable_format == 'cmake@', confdata)
+ else:
+ raise MesonException('Invalid variable format')
+
+def do_replacement_meson(regex: T.Pattern[str], line: str,
+ confdata: T.Union[T.Dict[str, T.Tuple[str, T.Optional[str]]], 'ConfigurationData']) -> T.Tuple[str, T.Set[str]]:
+ missing_variables: T.Set[str] = set()
+
+ def variable_replace(match: T.Match[str]) -> str:
+ # Pairs of escape characters before '@', '\@', '${' or '\${'
+ if match.group(0).endswith('\\'):
+ num_escapes = match.end(0) - match.start(0)
+ return '\\' * (num_escapes // 2)
+ # \@escaped\@ variables
+ elif match.groupdict().get('escaped') is not None:
+ return match.group('escaped')[1:-2]+'@'
+ else:
+ # Template variable to be replaced
+ varname = match.group('variable')
+ var_str = ''
+ if varname in confdata:
+ var, _ = confdata.get(varname)
+ if isinstance(var, str):
+ var_str = var
+ elif isinstance(var, int):
+ var_str = str(var)
+ else:
+ msg = f'Tried to replace variable {varname!r} value with ' \
+ f'something other than a string or int: {var!r}'
+ raise MesonException(msg)
+ else:
+ missing_variables.add(varname)
+ return var_str
+ return re.sub(regex, variable_replace, line), missing_variables
+
+def do_replacement_cmake(regex: T.Pattern[str], line: str, at_only: bool,
+ confdata: T.Union[T.Dict[str, T.Tuple[str, T.Optional[str]]], 'ConfigurationData']) -> T.Tuple[str, T.Set[str]]:
missing_variables: T.Set[str] = set()
def variable_replace(match: T.Match[str]) -> str:
@@ -1181,7 +1272,7 @@ def variable_replace(match: T.Match[str]) -> str:
num_escapes = match.end(0) - match.start(0)
return '\\' * (num_escapes // 2)
# Handle cmake escaped \${} tags
- elif variable_format == 'cmake' and match.group(0) == '\\${':
+ elif not at_only and match.group(0) == '\\${':
return '${'
# \@escaped\@ variables
elif match.groupdict().get('escaped') is not None:
@@ -1189,12 +1280,15 @@ def variable_replace(match: T.Match[str]) -> str:
else:
# Template variable to be replaced
varname = match.group('variable')
+ if not varname:
+ varname = match.group('cmake_variable')
+
var_str = ''
if varname in confdata:
var, _ = confdata.get(varname)
if isinstance(var, str):
var_str = var
- elif variable_format.startswith("cmake") and isinstance(var, bool):
+ elif isinstance(var, bool):
var_str = str(int(var))
elif isinstance(var, int):
var_str = str(var)
@@ -1207,11 +1301,36 @@ def variable_replace(match: T.Match[str]) -> str:
return var_str
return re.sub(regex, variable_replace, line), missing_variables
-def do_define(regex: T.Pattern[str], line: str, confdata: 'ConfigurationData',
- variable_format: Literal['meson', 'cmake', 'cmake@'], subproject: T.Optional[SubProject] = None) -> str:
- cmake_bool_define = False
- if variable_format != "meson":
- cmake_bool_define = "cmakedefine01" in line
+def do_define_meson(regex: T.Pattern[str], line: str, confdata: 'ConfigurationData',
+ subproject: T.Optional[SubProject] = None) -> str:
+
+ arr = line.split()
+ if len(arr) != 2:
+ raise MesonException('#mesondefine does not contain exactly two tokens: %s' % line.strip())
+
+ varname = arr[1]
+ try:
+ v, _ = confdata.get(varname)
+ except KeyError:
+ return '/* #undef %s */\n' % varname
+
+ if isinstance(v, str):
+ result = f'#define {varname} {v}'.strip() + '\n'
+ result, _ = do_replacement_meson(regex, result, confdata)
+ return result
+ elif isinstance(v, bool):
+ if v:
+ return '#define %s\n' % varname
+ else:
+ return '#undef %s\n' % varname
+ elif isinstance(v, int):
+ return '#define %s %d\n' % (varname, v)
+ else:
+ raise MesonException('#mesondefine argument "%s" is of unknown type.' % varname)
+
+def do_define_cmake(regex: T.Pattern[str], line: str, confdata: 'ConfigurationData', at_only: bool,
+ subproject: T.Optional[SubProject] = None) -> str:
+ cmake_bool_define = 'cmakedefine01' in line
def get_cmake_define(line: str, confdata: 'ConfigurationData') -> str:
arr = line.split()
@@ -1230,12 +1349,10 @@ def get_cmake_define(line: str, confdata: 'ConfigurationData') -> str:
return ' '.join(define_value)
arr = line.split()
- if len(arr) != 2:
- if variable_format == 'meson':
- raise MesonException('#mesondefine does not contain exactly two tokens: %s' % line.strip())
- elif subproject is not None:
- from ..interpreterbase.decorators import FeatureNew
- FeatureNew.single_use('cmakedefine without exactly two tokens', '0.54.1', subproject)
+
+ if len(arr) != 2 and subproject is not None:
+ from ..interpreterbase.decorators import FeatureNew
+ FeatureNew.single_use('cmakedefine without exactly two tokens', '0.54.1', subproject)
varname = arr[1]
try:
@@ -1246,26 +1363,13 @@ def get_cmake_define(line: str, confdata: 'ConfigurationData') -> str:
else:
return '/* #undef %s */\n' % varname
- if isinstance(v, str) or variable_format != "meson":
- if variable_format == 'meson':
- result = v
- else:
- if not cmake_bool_define and not v:
- return '/* #undef %s */\n' % varname
+ if not cmake_bool_define and not v:
+ return '/* #undef %s */\n' % varname
- result = get_cmake_define(line, confdata)
- result = f'#define {varname} {result}'.strip() + '\n'
- result, _ = do_replacement(regex, result, variable_format, confdata)
- return result
- elif isinstance(v, bool):
- if v:
- return '#define %s\n' % varname
- else:
- return '#undef %s\n' % varname
- elif isinstance(v, int):
- return '#define %s %d\n' % (varname, v)
- else:
- raise MesonException('#mesondefine argument "%s" is of unknown type.' % varname)
+ result = get_cmake_define(line, confdata)
+ result = f'#define {varname} {result}'.strip() + '\n'
+ result, _ = do_replacement_cmake(regex, result, at_only, confdata)
+ return result
def get_variable_regex(variable_format: Literal['meson', 'cmake', 'cmake@'] = 'meson') -> T.Pattern[str]:
# Only allow (a-z, A-Z, 0-9, _, -) as valid characters for a define
@@ -1280,31 +1384,65 @@ def get_variable_regex(variable_format: Literal['meson', 'cmake', 'cmake@'] = 'm
''', re.VERBOSE)
else:
regex = re.compile(r'''
- (?:\\\\)+(?=\\?\$) # Match multiple backslashes followed by a dollar sign
+ (?:\\\\)+(?=\\?(\$|@)) # Match multiple backslashes followed by a dollar sign or an @ symbol
| # OR
\\\${ # Match a backslash followed by a dollar sign and an opening curly brace
| # OR
- \${(?P<variable>[-a-zA-Z0-9_]+)} # Match a variable enclosed in curly braces and capture the variable name
+ \${(?P<cmake_variable>[-a-zA-Z0-9_]+)} # Match a variable enclosed in curly braces and capture the variable name
+ | # OR
+ (?<!\\)@(?P<variable>[-a-zA-Z0-9_]+)@ # Match a variable enclosed in @ symbols and capture the variable name; no matches beginning with '\@'
+ | # OR
+ (?P<escaped>\\@[-a-zA-Z0-9_]+\\@) # Match an escaped variable enclosed in @ symbols
''', re.VERBOSE)
return regex
def do_conf_str(src: str, data: T.List[str], confdata: 'ConfigurationData',
variable_format: Literal['meson', 'cmake', 'cmake@'],
subproject: T.Optional[SubProject] = None) -> T.Tuple[T.List[str], T.Set[str], bool]:
- def line_is_valid(line: str, variable_format: str) -> bool:
- if variable_format == 'meson':
+ if variable_format == 'meson':
+ return do_conf_str_meson(src, data, confdata, subproject)
+ elif variable_format in {'cmake', 'cmake@'}:
+ return do_conf_str_cmake(src, data, confdata, variable_format == 'cmake@', subproject)
+ else:
+ raise MesonException('Invalid variable format')
+
+def do_conf_str_meson(src: str, data: T.List[str], confdata: 'ConfigurationData',
+ subproject: T.Optional[SubProject] = None) -> T.Tuple[T.List[str], T.Set[str], bool]:
+
+ regex = get_variable_regex('meson')
+
+ search_token = '#mesondefine'
+
+ result: T.List[str] = []
+ missing_variables: T.Set[str] = set()
+ # Detect when the configuration data is empty and no tokens were found
+ # during substitution so we can warn the user to use the `copy:` kwarg.
+ confdata_useless = not confdata.keys()
+ for line in data:
+ if line.lstrip().startswith(search_token):
+ confdata_useless = False
+ line = do_define_meson(regex, line, confdata, subproject)
+ else:
if '#cmakedefine' in line:
- return False
- else: # cmake format
- if '#mesondefine' in line:
- return False
- return True
+ raise MesonException(f'Format error in {src}: saw "{line.strip()}" when format set to "meson"')
+ line, missing = do_replacement_meson(regex, line, confdata)
+ missing_variables.update(missing)
+ if missing:
+ confdata_useless = False
+ result.append(line)
+
+ return result, missing_variables, confdata_useless
+
+def do_conf_str_cmake(src: str, data: T.List[str], confdata: 'ConfigurationData', at_only: bool,
+ subproject: T.Optional[SubProject] = None) -> T.Tuple[T.List[str], T.Set[str], bool]:
+
+ variable_format: Literal['cmake', 'cmake@'] = 'cmake'
+ if at_only:
+ variable_format = 'cmake@'
regex = get_variable_regex(variable_format)
- search_token = '#mesondefine'
- if variable_format != 'meson':
- search_token = '#cmakedefine'
+ search_token = '#cmakedefine'
result: T.List[str] = []
missing_variables: T.Set[str] = set()
@@ -1314,11 +1452,11 @@ def line_is_valid(line: str, variable_format: str) -> bool:
for line in data:
if line.lstrip().startswith(search_token):
confdata_useless = False
- line = do_define(regex, line, confdata, variable_format, subproject)
+ line = do_define_cmake(regex, line, confdata, at_only, subproject)
else:
- if not line_is_valid(line, variable_format):
+ if '#mesondefine' in line:
raise MesonException(f'Format error in {src}: saw "{line.strip()}" when format set to "{variable_format}"')
- line, missing = do_replacement(regex, line, variable_format, confdata)
+ line, missing = do_replacement_cmake(regex, line, at_only, confdata)
missing_variables.update(missing)
if missing:
confdata_useless = False
@@ -1441,7 +1579,7 @@ def listify(item: T.Any, flatten: bool = True) -> T.List[T.Any]:
result.append(i)
return result
-def listify_array_value(value: T.Union[str, T.List[str]], shlex_split_args: bool = False) -> T.List[str]:
+def listify_array_value(value: object, shlex_split_args: bool = False) -> T.List[str]:
if isinstance(value, str):
if value.startswith('['):
try:
@@ -1601,7 +1739,7 @@ def Popen_safe_logged(args: T.List[str], msg: str = 'Called', **kwargs: T.Any) -
return p, o, e
-def iter_regexin_iter(regexiter: T.Iterable[str], initer: T.Iterable[str]) -> T.Optional[str]:
+def iter_regexin_iter(regexiter: T.Iterable[str], initer: T.Iterable[str | programs.ExternalProgram]) -> T.Optional[str]:
'''
Takes each regular expression in @regexiter and tries to search for it in
every item in @initer. If there is a match, returns that match.
@@ -1617,7 +1755,7 @@ def iter_regexin_iter(regexiter: T.Iterable[str], initer: T.Iterable[str]) -> T.
return None
-def _substitute_values_check_errors(command: T.List[str], values: T.Dict[str, T.Union[str, T.List[str]]]) -> None:
+def _substitute_values_check_errors(command: T.List[str | programs.ExternalProgram], values: T.Dict[str, T.Union[str, T.List[str]]]) -> None:
# Error checking
inregex: T.List[str] = ['@INPUT([0-9]+)?@', '@PLAINNAME@', '@BASENAME@']
outregex: T.List[str] = ['@OUTPUT([0-9]+)?@', '@OUTDIR@']
@@ -1657,7 +1795,7 @@ def _substitute_values_check_errors(command: T.List[str], values: T.Dict[str, T.
raise MesonException(m.format(match2.group(), len(values['@OUTPUT@'])))
-def substitute_values(command: T.List[str], values: T.Dict[str, T.Union[str, T.List[str]]]) -> T.List[str]:
+def substitute_values(command: T.List[str | programs.ExternalProgram], values: T.Dict[str, T.Union[str, T.List[str]]]) -> T.List[str | programs.ExternalProgram]:
'''
Substitute the template strings in the @values dict into the list of
strings @command and return a new list. For a full list of the templates,
@@ -1684,7 +1822,7 @@ def replace(m: T.Match[str]) -> str:
_substitute_values_check_errors(command, values)
# Substitution
- outcmd: T.List[str] = []
+ outcmd: T.List[str | programs.ExternalProgram] = []
rx_keys = [re.escape(key) for key in values if key not in ('@INPUT@', '@OUTPUT@')]
value_rx = re.compile('|'.join(rx_keys)) if rx_keys else None
for vv in command:
@@ -1787,7 +1925,7 @@ def _make_tree_writable(topdir: T.Union[str, Path]) -> None:
os.chmod(d, os.stat(d).st_mode | stat.S_IWRITE | stat.S_IREAD)
for fname in files:
fpath = os.path.join(d, fname)
- if os.path.isfile(fpath):
+ if not os.path.islink(fpath) and os.path.isfile(fpath):
os.chmod(fpath, os.stat(fpath).st_mode | stat.S_IWRITE | stat.S_IREAD)
@@ -2252,3 +2390,50 @@ def first(iter: T.Iterable[_T], predicate: T.Callable[[_T], bool]) -> T.Optional
if predicate(i):
return i
return None
+
+
+def get_rsp_threshold() -> int:
+ '''Return a conservative estimate of the commandline size in bytes
+ above which a response file should be used. May be overridden for
+ debugging by setting environment variable MESON_RSP_THRESHOLD.'''
+
+ if is_windows():
+ # Usually 32k, but some projects might use cmd.exe,
+ # and that has a limit of 8k.
+ limit = 8192
+ else:
+ # Unix-like OSes usually have very large command line limits, (On Linux,
+ # for example, this is limited by the kernel's MAX_ARG_STRLEN). However,
+ # some programs place much lower limits, notably Wine which enforces a
+ # 32k limit like Windows. Therefore, we limit the command line to 32k.
+ limit = 32768
+
+ # Be conservative
+ limit = limit // 2
+ return int(os.environ.get('MESON_RSP_THRESHOLD', limit))
+
+
+class lazy_property(T.Generic[_T]):
+ """Descriptor that replaces the function it wraps with the value generated.
+
+ This property will only be calculated the first time it's queried, and will
+ be cached and the cached value used for subsequent calls.
+
+ This works by shadowing itself with the calculated value, in the instance.
+ Due to Python's MRO that means that the calculated value will be found
+ before this property, speeding up subsequent lookups.
+ """
+ def __init__(self, func: T.Callable[[T.Any], _T]) -> None:
+ self.__name: T.Optional[str] = None
+ self.__func = func
+
+ def __set_name__(self, owner: T.Any, name: str) -> None:
+ if self.__name is None:
+ self.__name = name
+ else:
+ assert name == self.__name
+
+ def __get__(self, instance: object, cls: T.Type) -> _T:
+ value = self.__func(instance)
+ setattr(instance, self.__name, value)
+ return value
diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py
index 197a4478a40e..e22dabfec45b 100644
--- a/mesonbuild/wrap/wrap.py
+++ b/mesonbuild/wrap/wrap.py
@@ -53,7 +53,21 @@
ALL_TYPES = ['file', 'git', 'hg', 'svn', 'redirect']
-PATCH = shutil.which('patch')
+if mesonlib.is_windows():
+ from ..programs import ExternalProgram
+ from ..mesonlib import version_compare
+ _exclude_paths: T.List[str] = []
+ while True:
+ _patch = ExternalProgram('patch', silent=True, exclude_paths=_exclude_paths)
+ if not _patch.found():
+ break
+ if version_compare(_patch.get_version(), '>=2.6.1'):
+ break
+ _exclude_paths.append(os.path.dirname(_patch.get_path()))
+ PATCH = _patch.get_path() if _patch.found() else None
+else:
+ PATCH = shutil.which('patch')
+
def whitelist_wrapdb(urlstr: str) -> urllib.parse.ParseResult:
""" raises WrapException if not whitelisted subdomain """
@@ -394,7 +408,7 @@ def get_from_wrapdb(self, subp_name: str) -> T.Optional[PackageDefinition]:
self.add_wrap(wrap)
return wrap
- def merge_wraps(self, other_resolver: 'Resolver') -> None:
+ def _merge_wraps(self, other_resolver: 'Resolver') -> None:
for k, v in other_resolver.wraps.items():
self.wraps.setdefault(k, v)
for k, v in other_resolver.provided_deps.items():
@@ -402,6 +416,11 @@ def merge_wraps(self, other_resolver: 'Resolver') -> None:
for k, v in other_resolver.provided_programs.items():
self.provided_programs.setdefault(k, v)
+ def load_and_merge(self, subdir: str, subproject: SubProject) -> None:
+ if self.wrap_mode != WrapMode.nopromote:
+ other_resolver = Resolver(self.source_dir, subdir, subproject, self.wrap_mode, self.wrap_frontend, self.allow_insecure, self.silent)
+ self._merge_wraps(other_resolver)
+
def find_dep_provider(self, packagename: str) -> T.Tuple[T.Optional[str], T.Optional[str]]:
# Python's ini parser converts all key values to lowercase.
# Thus the query name must also be in lower case.
@@ -865,4 +884,4 @@ def copy_tree(self, root_src_dir: str, root_dst_dir: str) -> None:
except PermissionError:
os.chmod(dst_file, stat.S_IWUSR)
os.remove(dst_file)
- shutil.copy2(src_file, dst_dir)
+ shutil.copy2(src_file, dst_dir, follow_symlinks=False)
diff --git a/packaging/mpackage.py b/packaging/mpackage.py
new file mode 100755
index 000000000000..a075e0627439
--- /dev/null
+++ b/packaging/mpackage.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python3
+
+# Converts a release tarball to a Debian package.
+
+# This script only works on Jussi's private release machine.
+
+import os, sys, subprocess, re, shutil
+import tarfile
+from glob import glob
+import pathlib
+
+assert(os.getcwd() == '/home/jpakkane')
+
+packdir = 'mesonpackaging'
+relfile = packdir + '/releases'
+
+files = glob('meson/dist/*.tar.gz')
+assert(len(files) == 1)
+infile = files[0]
+
+with tarfile.open(infile , 'r') as tf:
+ for e in tf.getmembers():
+ if '__pycache__' in e.name or e.name.endswith('.pyc'):
+ sys.exit('Source archive has Python binary files:' + str(e.name))
+
+fname = os.path.split(infile)[1]
+tmp = fname.replace('-', '_')
+if '0rc' in fname:
+ version = tmp[6:-7]
+ base_version = tmp[6:-10]
+ extension = tmp[-7:]
+ rcnum = tmp[-8:-7]
+ dchversion = base_version + '~rc' + rcnum
+ origname = tmp[:11] + '~rc' + rcnum + '.orig' + extension
+else:
+ origname = tmp[:11] + '.orig.' + tmp[-6:]
+ version = tmp[6:-7]
+ dchversion = version
+version_lines = pathlib.Path(relfile).read_text().split('\n')[:-1]
+prev_ver = version_lines[-1]
+version_lines.append(version)
+print('Deb orig name is', origname)
+print('Version is', version)
+print('Previous version is', prev_ver)
+assert(prev_ver)
+outdir = os.path.join(packdir, version)
+origfile = os.path.join(packdir, version, origname)
+if not os.path.exists(outdir):
+ os.mkdir(outdir)
+ shutil.copyfile(infile, origfile)
+ subprocess.check_call(['tar', 'xf', origname], cwd=outdir)
+ extractdir = glob(os.path.join(packdir, version, 'meson-*'))[0]
+ fromdeb = glob(os.path.join(packdir, prev_ver, 'meson-*/debian'))[0]
+ todeb = os.path.join(extractdir, 'debian')
+ shutil.copytree(fromdeb, todeb)
+ myenv = os.environ.copy()
+ myenv['EDITOR'] = 'emacs'
+ subprocess.check_call(['dch', '-v', dchversion + '-1'], cwd=extractdir, env=myenv)
+ pathlib.Path(relfile).write_text('\n'.join(version_lines) + '\n')
+else:
+ extractdir = glob(os.path.join(packdir, version, 'meson-*'))[0]
+ print('Outdir already exists')
+
+subprocess.check_call(['debuild', '-S'], cwd=extractdir)
+
+subprocess.call(['sudo rm -rf /var/cache/pbuilder/result/*'], shell=True)
+subprocess.check_call('sudo pbuilder --build *.dsc 2>&1 | tee buildlog.txt',
+ shell=True,
+ cwd=outdir)
+subprocess.check_call('sudo dpkg -i /var/cache/pbuilder/result/meson*all.deb',
+ shell=True)
+
+if os.path.exists('smoke/build'):
+ shutil.rmtree('smoke/build')
+if os.path.exists('smoke/buildcross'):
+ shutil.rmtree('smoke/buildcross')
+subprocess.check_call(['meson', 'setup', 'build'], cwd='smoke')
+subprocess.check_call(['ninja', 'test'], cwd='smoke/build')
+subprocess.check_call(['ninja', 'reconfigure'], cwd='smoke/build')
+subprocess.check_call(['ninja', 'test'], cwd='smoke/build')
+#subprocess.check_call(['/usr/bin/meson',
+# 'env2mfile',
+# '--cross',
+# '--debarch',
+# 'armhf',
+# '-o',
+# 'cross-file.txt'], cwd='smoke')
+subprocess.check_call(['/usr/share/meson/debcrossgen',
+ '--arch',
+ 'armhf',
+ '-o',
+ 'cross-file.txt'], cwd='smoke')
+subprocess.check_call(['meson',
+ 'setup',
+ 'buildcross',
+ '--cross-file',
+ 'cross-file.txt'], cwd='smoke')
+subprocess.check_call(['ninja', 'test'], cwd='smoke/buildcross')
+subprocess.check_call(['sudo', 'apt-get', '-y', 'remove', 'meson'])
+subprocess.call('rm meson-*tar.gz*', shell=True)
+subprocess.check_call(['cp', infile, '.'])
+subprocess.check_call(['gpg', '--detach-sign', '--armor', fname])
diff --git a/run_meson_command_tests.py b/run_meson_command_tests.py
index f9faca9af6fe..2877b9873e1f 100755
--- a/run_meson_command_tests.py
+++ b/run_meson_command_tests.py
@@ -46,6 +46,11 @@ def get_pybindir():
return sysconfig.get_path('scripts', scheme=scheme, vars={'base': ''}).strip('\\/')
return sysconfig.get_path('scripts', vars={'base': ''}).strip('\\/')
+def has_python_module(module: str) -> bool:
+ result = subprocess.run(python_command + ['-c', f'import {module}'])
+ return result.returncode == 0
+
+
class CommandTests(unittest.TestCase):
'''
Test that running meson in various ways works as expected by checking the
@@ -141,11 +146,17 @@ def test_meson_installed(self):
# distutils complains that prefix isn't contained in PYTHONPATH
os.environ['PYTHONPATH'] = os.path.join(str(pylibdir), '')
os.environ['PATH'] = str(bindir) + os.pathsep + os.environ['PATH']
- self._run(python_command + ['setup.py', 'install', '--prefix', str(prefix)])
- # Fix importlib-metadata by appending all dirs in pylibdir
- PYTHONPATHS = [pylibdir] + [x for x in pylibdir.iterdir()]
- PYTHONPATHS = [os.path.join(str(x), '') for x in PYTHONPATHS]
- os.environ['PYTHONPATH'] = os.pathsep.join(PYTHONPATHS)
+ if has_python_module('gpep517'):
+ self._run(python_command + ['-m', 'gpep517', 'install-from-source', '--destdir', '/', '--prefix', str(prefix)])
+ elif has_python_module('pip'):
+ self._run(python_command + ['-m', 'pip', 'install', '--prefix', str(prefix), '.'])
+ else:
+ # Legacy deprecated setuptools command used as fallback
+ self._run(python_command + ['setup.py', 'install', '--prefix', str(prefix)])
+ # Fix importlib-metadata by appending all dirs in pylibdir
+ PYTHONPATHS = [pylibdir] + [x for x in pylibdir.iterdir() if x.name.endswith('.egg')]
+ PYTHONPATHS = [os.path.join(str(x), '') for x in PYTHONPATHS]
+ os.environ['PYTHONPATH'] = os.pathsep.join(PYTHONPATHS)
# Check that all the files were installed correctly
self.assertTrue(bindir.is_dir())
self.assertTrue(pylibdir.is_dir())
diff --git a/run_mypy.py b/run_mypy.py
index f59ec89c071e..fe5f7780fc73 100755
--- a/run_mypy.py
+++ b/run_mypy.py
@@ -36,6 +36,7 @@
# 'mesonbuild/coredata.py',
'mesonbuild/depfile.py',
'mesonbuild/envconfig.py',
+ 'mesonbuild/environment.py',
'mesonbuild/interpreter/compiler.py',
'mesonbuild/interpreter/mesonmain.py',
'mesonbuild/interpreter/interpreterobjects.py',
@@ -56,6 +57,7 @@
'mesonbuild/mlog.py',
'mesonbuild/msubprojects.py',
'mesonbuild/modules/__init__.py',
+ 'mesonbuild/modules/cmake.py',
'mesonbuild/modules/cuda.py',
'mesonbuild/modules/external_project.py',
'mesonbuild/modules/fs.py',
@@ -80,6 +82,7 @@
'mesonbuild/msetup.py',
'mesonbuild/mtest.py',
'mesonbuild/optinterpreter.py',
+ 'mesonbuild/options.py',
'mesonbuild/programs.py',
]
additional = [
@@ -109,8 +112,6 @@ def check_mypy() -> None:
sys.exit(1)
def main() -> int:
- check_mypy()
-
root = Path(__file__).absolute().parent
parser = argparse.ArgumentParser(description='Process some integers.')
@@ -122,6 +123,9 @@ def main() -> int:
parser.add_argument('--allver', action='store_true', help='Check all supported versions of python')
opts, args = parser.parse_known_args()
+ if not opts.mypy:
+ check_mypy()
+
if opts.pretty:
args.append('--pretty')
diff --git a/run_project_tests.py b/run_project_tests.py
index ab34c27f21d2..fa7c8a6eb3dd 100755
--- a/run_project_tests.py
+++ b/run_project_tests.py
@@ -76,7 +76,7 @@ class ArgumentType(CompilerArgumentType):
v: bool
ALL_TESTS = ['cmake', 'common', 'native', 'warning-meson', 'failing-meson', 'failing-build', 'failing-test',
- 'keyval', 'platform-osx', 'platform-windows', 'platform-linux',
+ 'keyval', 'platform-osx', 'platform-windows', 'platform-linux', 'platform-android',
'java', 'C#', 'vala', 'cython', 'rust', 'd', 'objective c', 'objective c++',
'fortran', 'swift', 'cuda', 'python3', 'python', 'fpga', 'frameworks', 'nasm', 'wasm', 'wayland',
'format',
@@ -712,7 +712,14 @@ def _run_test(test: TestDef,
# Build with subprocess
def build_step() -> None:
build_start = time.time()
- pc, o, _ = Popen_safe(compile_commands + dir_args, cwd=test_build_dir, stderr=subprocess.STDOUT)
+
+ if backend is Backend.ninja:
+ # FIXME: meson test inprocess does not handle running ninja via StringIO
+ targets = ['all', 'meson-test-prereq', 'meson-benchmark-prereq']
+ else:
+ targets = []
+
+ pc, o, _ = Popen_safe(compile_commands + dir_args + targets, cwd=test_build_dir, stderr=subprocess.STDOUT)
testresult.add_step(BuildStep.build, o, '', '', time.time() - build_start)
if should_fail == 'build':
if pc.returncode != 0:
@@ -1116,6 +1123,8 @@ def __init__(self, category: str, subdir: str, skip: bool = False, stdout_mandat
TestCategory('platform-osx', 'osx', not mesonlib.is_osx()),
TestCategory('platform-windows', 'windows', not mesonlib.is_windows() and not mesonlib.is_cygwin()),
TestCategory('platform-linux', 'linuxlike', mesonlib.is_osx() or mesonlib.is_windows()),
+ # FIXME, does not actually run in CI, change to run the test if an Android cross toolchain is detected.
+ TestCategory('platform-android', 'android', not mesonlib.is_android()),
TestCategory('java', 'java', backend is not Backend.ninja or not have_java()),
TestCategory('C#', 'csharp', skip_csharp(backend)),
TestCategory('vala', 'vala', backend is not Backend.ninja or not shutil.which(os.environ.get('VALAC', 'valac'))),
diff --git a/setup.py b/setup.py
index 305005416c60..425d51d33b70 100644
--- a/setup.py
+++ b/setup.py
@@ -3,7 +3,7 @@
# Copyright 2016 The Meson development team
-import sys
+import os, sys
if sys.version_info < (3, 7):
raise SystemExit('ERROR: Tried to install Meson with an unsupported Python version: \n{}'
@@ -11,10 +11,24 @@
from setuptools import setup
+scm_args = {}
+HERE = os.path.dirname(__file__)
+if os.path.exists(os.path.join(HERE, '.git')):
+ try:
+ import setuptools_scm
+ except ModuleNotFoundError:
+ pass
+ else:
+ sys.path.insert(0, HERE)
+ from mesonbuild import coredata
+
+ scheme = 'guess-next-dev' if 'rc' in coredata.version else 'release-branch-semver'
+ scm_args = {'use_scm_version': {'version_scheme': scheme}}
+
data_files = []
if sys.platform != 'win32':
# Only useful on UNIX-like systems
data_files = [('share/man/man1', ['man/meson.1']),
('share/polkit-1/actions', ['data/com.mesonbuild.install.policy'])]
-setup(data_files=data_files,)
+setup(data_files=data_files,**scm_args)
diff --git a/test cases/android/1 exe_type/exe_type.c b/test cases/android/1 exe_type/exe_type.c
new file mode 100644
index 000000000000..cd9ca7db9479
--- /dev/null
+++ b/test cases/android/1 exe_type/exe_type.c
@@ -0,0 +1,5 @@
+#include <stdio.h>
+
+int main(void) {
+ return 0;
+}
diff --git a/test cases/android/1 exe_type/meson.build b/test cases/android/1 exe_type/meson.build
new file mode 100644
index 000000000000..5b0e64a5182d
--- /dev/null
+++ b/test cases/android/1 exe_type/meson.build
@@ -0,0 +1,15 @@
+project('android exe type', 'c')
+fs = import('fs')
+
+e = executable('executable', 'exe_type.c',
+ android_exe_type : 'executable')
+a = executable('application', 'exe_type.c',
+ android_exe_type : 'application')
+
+if fs.name(e.full_path()).contains('.')
+ error('Executable with exe_type `executable` did not have expected filename')
+endif
+
+if not fs.name(a.full_path()).startswith('lib') or not fs.name(a.full_path()).endswith('.so')
+ error('Executable with exe_type `application` did not have expected filename')
+endif
diff --git a/test cases/cmake/13 system includes/main2.cpp b/test cases/cmake/13 system includes/main2.cpp
new file mode 100644
index 000000000000..a94a11679dd3
--- /dev/null
+++ b/test cases/cmake/13 system includes/main2.cpp
@@ -0,0 +1,5 @@
+#include <cmMod.hpp>
+
+int main(void) {
+ return 0;
+}
diff --git a/test cases/cmake/13 system includes/meson.build b/test cases/cmake/13 system includes/meson.build
index 1265d4607421..fe7158070da5 100644
--- a/test cases/cmake/13 system includes/meson.build
+++ b/test cases/cmake/13 system includes/meson.build
@@ -13,6 +13,10 @@ endif
cm = import('cmake')
sub_pro = cm.subproject('cmMod')
sub_dep = sub_pro.dependency('cmModLib')
+sub_inc = sub_pro.include_directories('cmModLib')
exe1 = executable('main1', ['main.cpp'], dependencies: [sub_dep])
test('test1', exe1)
+
+exe2 = executable('main2', ['main2.cpp'], include_directories: sub_inc)
+test('test2', exe2)
diff --git a/test cases/cmake/28 include directories/main.c b/test cases/cmake/28 include directories/main.c
new file mode 100644
index 000000000000..bb7fe2966e18
--- /dev/null
+++ b/test cases/cmake/28 include directories/main.c
@@ -0,0 +1,9 @@
+#include <cmTest.h>
+#include <stdio.h>
+
+int main(void)
+{
+ cmTestFunc();
+ return 0;
+}
+
diff --git a/test cases/cmake/28 include directories/meson.build b/test cases/cmake/28 include directories/meson.build
new file mode 100644
index 000000000000..e2c92d70f069
--- /dev/null
+++ b/test cases/cmake/28 include directories/meson.build
@@ -0,0 +1,17 @@
+project('include directories test', 'c')
+
+cm = import('cmake')
+
+sub_pro = cm.subproject('cmTest')
+sub_dep = sub_pro.dependency('cmTest')
+
+missing_inc_dir_arg = '-Werror=missing-include-dirs'
+has_missing_inc_dir = meson.get_compiler('c').has_argument(missing_inc_dir_arg)
+if not has_missing_inc_dir
+ error('MESON_SKIP_TEST: Compiler does not support ' + missing_inc_dir_arg)
+else
+ add_project_arguments(missing_inc_dir_arg, language: 'c')
+endif
+
+exe1 = executable('exe1', ['main.c'], dependencies: [sub_dep])
+test('test1', exe1)
diff --git a/test cases/cmake/28 include directories/subprojects/cmTest/CMakeLists.txt b/test cases/cmake/28 include directories/subprojects/cmTest/CMakeLists.txt
new file mode 100644
index 000000000000..66ea3cb309e8
--- /dev/null
+++ b/test cases/cmake/28 include directories/subprojects/cmTest/CMakeLists.txt
@@ -0,0 +1,6 @@
+cmake_minimum_required(VERSION ${CMAKE_VERSION})
+
+project(cmTest C)
+
+add_library(cmTest src/cmTest.c)
+target_include_directories(cmTest PUBLIC include)
diff --git a/test cases/cmake/28 include directories/subprojects/cmTest/include/cmTest.h b/test cases/cmake/28 include directories/subprojects/cmTest/include/cmTest.h
new file mode 100644
index 000000000000..3af821cc37c3
--- /dev/null
+++ b/test cases/cmake/28 include directories/subprojects/cmTest/include/cmTest.h
@@ -0,0 +1,3 @@
+#pragma once
+
+void cmTestFunc(void);
diff --git a/test cases/cmake/28 include directories/subprojects/cmTest/src/cmTest.c b/test cases/cmake/28 include directories/subprojects/cmTest/src/cmTest.c
new file mode 100644
index 000000000000..f77b694cc58e
--- /dev/null
+++ b/test cases/cmake/28 include directories/subprojects/cmTest/src/cmTest.c
@@ -0,0 +1,7 @@
+#include "include/cmTest.h"
+#include <stdio.h>
+
+void cmTestFunc(void)
+{
+ printf ("Hello\n");
+}
diff --git a/test cases/common/104 has arg/meson.build b/test cases/common/104 has arg/meson.build
index c85ec9f25ab8..500b8a9ad097 100644
--- a/test cases/common/104 has arg/meson.build
+++ b/test cases/common/104 has arg/meson.build
@@ -58,6 +58,22 @@ if cpp.get_id() == 'gcc' and cpp.version().version_compare('>=12.1.0')
assert(cpp.has_argument('-Wno-attributes=meson::i_do_not_exist'))
endif
+if cpp.get_id() == 'gcc'
+ # Handle negative flags whose positive counterparts require a value to be
+ # specified.
+ if cpp.version().version_compare('>=4.4.0')
+ assert(cpp.has_argument('-Wno-frame-larger-than'))
+ endif
+ if cpp.version().version_compare('>=4.7.0')
+ assert(cpp.has_argument('-Wno-stack-usage'))
+ endif
+ if cpp.version().version_compare('>=7.1.0')
+ assert(cpp.has_argument('-Wno-alloc-size-larger-than'))
+ assert(cpp.has_argument('-Wno-alloca-larger-than'))
+ assert(cpp.has_argument('-Wno-vla-larger-than'))
+ endif
+endif
+
if cc.get_id() == 'clang' and cc.version().version_compare('<=4.0.0')
# 4.0.0 does not support -fpeel-loops. Newer versions may.
# Please adjust above version number as new versions of clang are released.
diff --git a/test cases/common/14 configure file/config10.h.in b/test cases/common/14 configure file/config10.h.in
new file mode 100644
index 000000000000..6c0661a6ae9e
--- /dev/null
+++ b/test cases/common/14 configure file/config10.h.in
@@ -0,0 +1,4 @@
+/* Should both be the same */
+#define MESSAGE1 "@var@"
+#define MESSAGE2 "${var}"
+
diff --git a/test cases/common/14 configure file/meson.build b/test cases/common/14 configure file/meson.build
index 036a562b796c..3a4ff4dc9f40 100644
--- a/test cases/common/14 configure file/meson.build
+++ b/test cases/common/14 configure file/meson.build
@@ -331,6 +331,16 @@ configure_file(output : 'config9b.h',
test('test9', executable('prog9', 'prog9.c'))
+# Test @ and curly braces at the same time with cmake format
+conf10 = configuration_data()
+conf10.set('var', 'foo')
+configure_file(
+ input : 'config10.h.in',
+ output : '@BASENAME@',
+ format : 'cmake',
+ configuration : conf10)
+test('test10', executable('prog10', 'prog10.c'))
+
check_inputs = find_program('check_inputs.py')
configure_file(output : 'check_inputs.txt',
input : ['prog.c', files('prog2.c', 'prog4.c')],
diff --git a/test cases/common/14 configure file/prog10.c b/test cases/common/14 configure file/prog10.c
new file mode 100644
index 000000000000..cdca6dae43b2
--- /dev/null
+++ b/test cases/common/14 configure file/prog10.c
@@ -0,0 +1,7 @@
+#include <string.h>
+#include <config10.h>
+
+int main(void) {
+ return strcmp(MESSAGE1, "foo")
+ || strcmp(MESSAGE2, "foo");
+}
diff --git a/test cases/common/247 deprecated option/meson.build b/test cases/common/247 deprecated option/meson.build
index 4c734eefbed3..f311421301f5 100644
--- a/test cases/common/247 deprecated option/meson.build
+++ b/test cases/common/247 deprecated option/meson.build
@@ -7,14 +7,33 @@ project('deprecated options',
'o5=auto',
'o6=false',
'o8=/foo',
+ ':p1=false',
+ ':p2=a,b',
+ ':p3=a,b',
+ ':p4=true',
+ ':p5=auto',
+ ':p6=false',
+ ':p8=/foo',
]
)
assert(get_option('o1') == false)
+assert(get_option('p1') == false)
+assert(get_option('c1') == false)
assert(get_option('o2') == ['a', 'b'])
+assert(get_option('p2') == ['a', 'b'])
+assert(get_option('c2') == ['a', 'b'])
assert(get_option('o3') == ['c', 'b'])
+assert(get_option('p3') == ['c', 'b'])
+assert(get_option('c3') == ['c', 'b'])
assert(get_option('o4').enabled())
+assert(get_option('p4').enabled())
+assert(get_option('c4').enabled())
assert(get_option('o5') == false)
+assert(get_option('p5') == false)
+assert(get_option('c5') == false)
assert(get_option('o6') == false)
+assert(get_option('p6') == false)
+assert(get_option('c6') == false)
assert(get_option('o7').disabled())
assert(get_option('python.platlibdir') == '/foo')
diff --git a/test cases/common/247 deprecated option/meson_options.txt b/test cases/common/247 deprecated option/meson_options.txt
index 88cd8aa5175e..36fcb914c164 100644
--- a/test cases/common/247 deprecated option/meson_options.txt
+++ b/test cases/common/247 deprecated option/meson_options.txt
@@ -1,23 +1,38 @@
# Option fully deprecated, it warns when any value is set.
option('o1', type: 'boolean', deprecated: true)
+option('p1', type: 'boolean', deprecated: true)
+option('c1', type: 'boolean', deprecated: true)
# One of the choices is deprecated, it warns only when 'a' is in the list of values.
option('o2', type: 'array', choices: ['a', 'b'], deprecated: ['a'])
+option('p2', type: 'array', choices: ['a', 'b'], deprecated: ['a'])
+option('c2', type: 'array', choices: ['a', 'b'], deprecated: ['a'])
# One of the choices is deprecated, it warns only when 'a' is in the list of values
# and replace it by 'c'.
option('o3', type: 'array', choices: ['a', 'b', 'c'], deprecated: {'a': 'c'})
+option('p3', type: 'array', choices: ['a', 'b', 'c'], deprecated: {'a': 'c'})
+option('c3', type: 'array', choices: ['a', 'b', 'c'], deprecated: {'a': 'c'})
# A boolean option has been replaced by a feature, old true/false values are remapped.
option('o4', type: 'feature', deprecated: {'true': 'enabled', 'false': 'disabled'})
+option('p4', type: 'feature', deprecated: {'true': 'enabled', 'false': 'disabled'})
+option('c4', type: 'feature', deprecated: {'true': 'enabled', 'false': 'disabled'})
# A feature option has been replaced by a boolean, enabled/disabled/auto values are remapped.
option('o5', type: 'boolean', deprecated: {'enabled': 'true', 'disabled': 'false', 'auto': 'false'})
+option('p5', type: 'boolean', deprecated: {'enabled': 'true', 'disabled': 'false', 'auto': 'false'})
+option('c5', type: 'boolean', deprecated: {'enabled': 'true', 'disabled': 'false', 'auto': 'false'})
# A boolean option has been replaced by a feature with another name, old true/false values
# are accepted by the new option for backward compatibility.
option('o6', type: 'boolean', value: true, deprecated: 'o7')
+option('p6', type: 'boolean', value: true, deprecated: 'o7')
+option('c6', type: 'boolean', value: true, deprecated: 'o7')
+
option('o7', type: 'feature', value: 'enabled', deprecated: {'true': 'enabled', 'false': 'disabled'})
# A project option is replaced by a module option
option('o8', type: 'string', value: '', deprecated: 'python.platlibdir')
+option('p8', type: 'string', value: '', deprecated: 'python.platlibdir')
+option('c8', type: 'string', value: '', deprecated: 'python.platlibdir')
diff --git a/test cases/common/247 deprecated option/test.json b/test cases/common/247 deprecated option/test.json
index c2f2ca325f2a..a644b0405a5c 100644
--- a/test cases/common/247 deprecated option/test.json
+++ b/test cases/common/247 deprecated option/test.json
@@ -1,4 +1,29 @@
{
+ "matrix": {
+ "options": {
+ "c1": [
+ { "val": "false" }
+ ],
+ "c2": [
+ { "val": "a,b" }
+ ],
+ "c3": [
+ { "val": "a,b" }
+ ],
+ "c4": [
+ { "val": "true" }
+ ],
+ "c5": [
+ { "val": "auto" }
+ ],
+ "c6": [
+ { "val": "false" }
+ ],
+ "c8": [
+ { "val": "/foo" }
+ ]
+ }
+ },
"stdout": [
{
"line": ".*DEPRECATION: Option 'o1' is deprecated",
@@ -24,6 +49,56 @@
"line": ".*DEPRECATION: Option 'o5' value 'auto' is replaced by 'false'",
"match": "re",
"count": 1
+ },
+ {
+ "line": ".*DEPRECATION: Option 'p1' is deprecated",
+ "match": "re",
+ "count": 1
+ },
+ {
+ "line": ".*DEPRECATION: Option 'p2' value 'a' is deprecated",
+ "match": "re",
+ "count": 1
+ },
+ {
+ "line": ".*DEPRECATION: Option 'p3' value 'a' is replaced by 'c'",
+ "match": "re",
+ "count": 1
+ },
+ {
+ "line": ".*DEPRECATION: Option 'p4' value 'true' is replaced by 'enabled'",
+ "match": "re",
+ "count": 1
+ },
+ {
+ "line": ".*DEPRECATION: Option 'p5' value 'auto' is replaced by 'false'",
+ "match": "re",
+ "count": 1
+ },
+ {
+ "line": ".*DEPRECATION: Option 'c1' is deprecated",
+ "match": "re",
+ "count": 1
+ },
+ {
+ "line": ".*DEPRECATION: Option 'c2' value 'a' is deprecated",
+ "match": "re",
+ "count": 1
+ },
+ {
+ "line": ".*DEPRECATION: Option 'c3' value 'a' is replaced by 'c'",
+ "match": "re",
+ "count": 1
+ },
+ {
+ "line": ".*DEPRECATION: Option 'c4' value 'true' is replaced by 'enabled'",
+ "match": "re",
+ "count": 1
+ },
+ {
+ "line": ".*DEPRECATION: Option 'c5' value 'auto' is replaced by 'false'",
+ "match": "re",
+ "count": 1
}
]
}
diff --git a/test cases/common/273 both libraries/meson.build b/test cases/common/273 both libraries/meson.build
index 00da1c8e6cf5..789f4205e82e 100644
--- a/test cases/common/273 both libraries/meson.build
+++ b/test cases/common/273 both libraries/meson.build
@@ -111,3 +111,32 @@ if get_option('default_library') == 'both' and get_option('default_both_librarie
)
test('test shared', main_shared)
endif
+
+# Test case for https://github.com/mesonbuild/meson/pull/14098
+if get_option('default_library') == 'shared'
+
+ if get_option('use_dep')
+ lib_deps = [with_bl_dep.as_static(recursive: true)]
+ lib_links = []
+ else
+ lib_deps = []
+ lib_links = [with_bl.get_static_lib()]
+ endif
+
+ lib_with_static_dep = library(
+ 'lib_with_static_dep',
+ files('src/library.c'),
+ c_shared_args: ['-DEXPORT'],
+ link_with: lib_links,
+ dependencies: lib_deps,
+ )
+
+ main_with_static_dep = executable(
+ 'main_with_static_dep',
+ files('src/main.c'),
+ c_args: [f'-DEXPECTED=1'],
+ link_with: lib_with_static_dep,
+ )
+ test('test static dep', main_with_static_dep)
+
+endif
diff --git a/test cases/common/281 subproj options/crossfile.ini b/test cases/common/281 subproj options/crossfile.ini
new file mode 100644
index 000000000000..01b45a650d1c
--- /dev/null
+++ b/test cases/common/281 subproj options/crossfile.ini
@@ -0,0 +1,2 @@
+[sub:project options]
+bar = true
diff --git a/test cases/common/281 subproj options/meson.build b/test cases/common/281 subproj options/meson.build
new file mode 100644
index 000000000000..d450004900a8
--- /dev/null
+++ b/test cases/common/281 subproj options/meson.build
@@ -0,0 +1,3 @@
+project('pkg_opt_test', default_options: ['werror=false', 'sub:from_toplevel=true', 'sub:werror=true'])
+
+subproject('sub', default_options: ['sub2:from_subp=true'])
diff --git a/test cases/common/281 subproj options/subprojects/sub/meson.build b/test cases/common/281 subproj options/subprojects/sub/meson.build
new file mode 100644
index 000000000000..6cc4906cc71c
--- /dev/null
+++ b/test cases/common/281 subproj options/subprojects/sub/meson.build
@@ -0,0 +1,12 @@
+project('subproject', 'c')
+assert(get_option('bar') == true)
+assert(get_option('werror') == true)
+assert(get_option('from_toplevel') == true)
+
+# b_lto is only initialized if used, see test "common/40 options"
+cc = meson.get_compiler('c')
+if cc.get_id() in ['gcc', 'clang', 'clang-cl']
+ assert(get_option('b_lto') == true)
+endif
+
+subproject('sub2')
diff --git a/test cases/common/281 subproj options/subprojects/sub/meson_options.txt b/test cases/common/281 subproj options/subprojects/sub/meson_options.txt
new file mode 100644
index 000000000000..7f94d02cc60a
--- /dev/null
+++ b/test cases/common/281 subproj options/subprojects/sub/meson_options.txt
@@ -0,0 +1,2 @@
+option('bar', type: 'boolean', value: false)
+option('from_toplevel', type: 'boolean', value: false)
diff --git a/test cases/common/281 subproj options/subprojects/sub2/f.c b/test cases/common/281 subproj options/subprojects/sub2/f.c
new file mode 100644
index 000000000000..0aae46148dc8
--- /dev/null
+++ b/test cases/common/281 subproj options/subprojects/sub2/f.c
@@ -0,0 +1,3 @@
+int f(void)
+{
+}
diff --git a/test cases/common/281 subproj options/subprojects/sub2/meson.build b/test cases/common/281 subproj options/subprojects/sub2/meson.build
new file mode 100644
index 000000000000..65f3e5af0501
--- /dev/null
+++ b/test cases/common/281 subproj options/subprojects/sub2/meson.build
@@ -0,0 +1,9 @@
+project('subproject', 'c')
+
+assert(get_option('from_subp') == true)
+
+# b_lto is only initialized if used, see test "common/40 options"
+cc = meson.get_compiler('c')
+if cc.get_id() in ['gcc', 'clang', 'clang-cl']
+ assert(get_option('b_lto') == true)
+endif
diff --git a/test cases/common/281 subproj options/subprojects/sub2/meson_options.txt b/test cases/common/281 subproj options/subprojects/sub2/meson_options.txt
new file mode 100644
index 000000000000..d645182a2c45
--- /dev/null
+++ b/test cases/common/281 subproj options/subprojects/sub2/meson_options.txt
@@ -0,0 +1 @@
+option('from_subp', type: 'boolean', value: false)
diff --git a/test cases/common/281 subproj options/test.json b/test cases/common/281 subproj options/test.json
new file mode 100644
index 000000000000..fcdf4ddd8257
--- /dev/null
+++ b/test cases/common/281 subproj options/test.json
@@ -0,0 +1,7 @@
+{
+ "matrix": {
+ "options": {
+ "b_lto": [{ "val": "true" }]
+ }
+ }
+}
diff --git a/test cases/common/282 test args and depends in path/libs/a/lib_a.c b/test cases/common/282 test args and depends in path/libs/a/lib_a.c
new file mode 100644
index 000000000000..7191a69c316d
--- /dev/null
+++ b/test cases/common/282 test args and depends in path/libs/a/lib_a.c
@@ -0,0 +1,5 @@
+char
+func_a (void)
+{
+ return 'a';
+}
diff --git a/test cases/common/282 test args and depends in path/libs/a/lib_a.def b/test cases/common/282 test args and depends in path/libs/a/lib_a.def
new file mode 100644
index 000000000000..4af3bdb165e6
--- /dev/null
+++ b/test cases/common/282 test args and depends in path/libs/a/lib_a.def
@@ -0,0 +1,3 @@
+LIBRARY LIBA
+EXPORTS
+ func_a
diff --git a/test cases/common/282 test args and depends in path/libs/a/meson.build b/test cases/common/282 test args and depends in path/libs/a/meson.build
new file mode 100644
index 000000000000..0b4b6a4addf8
--- /dev/null
+++ b/test cases/common/282 test args and depends in path/libs/a/meson.build
@@ -0,0 +1,5 @@
+lib_a = shared_library('a',
+ ['lib_a.c'],
+ name_prefix: 'lib',
+ gnu_symbol_visibility: 'default',
+ vs_module_defs: 'lib_a.def')
diff --git a/test cases/common/282 test args and depends in path/libs/b/lib_b.c b/test cases/common/282 test args and depends in path/libs/b/lib_b.c
new file mode 100644
index 000000000000..17e5730f65b8
--- /dev/null
+++ b/test cases/common/282 test args and depends in path/libs/b/lib_b.c
@@ -0,0 +1,5 @@
+char
+func_b (void)
+{
+ return 'b';
+}
diff --git a/test cases/common/282 test args and depends in path/libs/b/lib_b.def b/test cases/common/282 test args and depends in path/libs/b/lib_b.def
new file mode 100644
index 000000000000..403a731aa771
--- /dev/null
+++ b/test cases/common/282 test args and depends in path/libs/b/lib_b.def
@@ -0,0 +1,3 @@
+LIBRARY LIBB
+EXPORTS
+ func_b
diff --git a/test cases/common/282 test args and depends in path/libs/b/meson.build b/test cases/common/282 test args and depends in path/libs/b/meson.build
new file mode 100644
index 000000000000..766125d5901c
--- /dev/null
+++ b/test cases/common/282 test args and depends in path/libs/b/meson.build
@@ -0,0 +1,5 @@
+lib_b = shared_library('b',
+ ['lib_b.c'],
+ name_prefix: 'lib',
+ gnu_symbol_visibility: 'default',
+ vs_module_defs: 'lib_b.def')
diff --git a/test cases/common/282 test args and depends in path/libs/meson.build b/test cases/common/282 test args and depends in path/libs/meson.build
new file mode 100644
index 000000000000..b00ea8a2ecb6
--- /dev/null
+++ b/test cases/common/282 test args and depends in path/libs/meson.build
@@ -0,0 +1,2 @@
+subdir('a')
+subdir('b')
diff --git a/test cases/common/282 test args and depends in path/meson.build b/test cases/common/282 test args and depends in path/meson.build
new file mode 100644
index 000000000000..d9dd9ad5da6d
--- /dev/null
+++ b/test cases/common/282 test args and depends in path/meson.build
@@ -0,0 +1,19 @@
+project('test-args-and-depends-in-path', 'c')
+
+subdir('libs')
+
+dl_dep = dependency('dl', required: false)
+
+fs = import('fs')
+
+test_exe = executable('test-exe',
+ c_args: [
+ '-DLIBA=' + fs.name(lib_a.full_path()),
+ '-DLIBB=' + fs.name(lib_b.full_path()),
+ ],
+ sources: ['test.c'],
+ dependencies: [dl_dep])
+
+test ('test', test_exe,
+ args: [lib_a],
+ depends: [lib_b])
diff --git a/test cases/common/282 test args and depends in path/test.c b/test cases/common/282 test args and depends in path/test.c
new file mode 100644
index 000000000000..82452bab319b
--- /dev/null
+++ b/test cases/common/282 test args and depends in path/test.c
@@ -0,0 +1,67 @@
+#include <assert.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <stddef.h>
+
+#ifndef _WIN32
+#include <dlfcn.h>
+#else
+#include <windows.h>
+#endif
+
+typedef struct {
+ const char *library_name;
+ const char *func_name;
+ char expected_result;
+} test_t;
+
+static void
+load (test_t *test)
+{
+#ifndef _WIN32
+ void *h = dlopen (test->library_name, RTLD_NOW | RTLD_LOCAL);
+ if (h == NULL) {
+ fprintf (stderr, "dlopen (%s) failed: %s\n",
+ test->library_name, dlerror ());
+ exit (EXIT_FAILURE);
+ }
+
+ typedef char (*func_t)(void);
+ func_t func = (func_t) dlsym (h, test->func_name);
+ assert (func != NULL);
+
+ assert (func () == test->expected_result);
+ dlclose (h);
+#else /* _WIN32 */
+ HMODULE h = LoadLibraryA (test->library_name);
+ if (h == NULL) {
+ fprintf (stderr, "LoadLibrary (%s) failed with error code %u\n",
+ test->library_name, (unsigned int) GetLastError ());
+ exit (EXIT_FAILURE);
+ }
+
+ typedef char (*func_t)(void);
+ func_t func = (func_t) GetProcAddress (h, test->func_name);
+ assert (func != NULL);
+
+ assert (func () == test->expected_result);
+ FreeLibrary (h);
+#endif
+}
+
+#define STRINGIFY_HELPER(x) #x
+#define STRINGIFY(x) STRINGIFY_HELPER(x)
+
+int
+main (void)
+{
+ test_t tests[] = {
+ {STRINGIFY (LIBA), "func_a", 'a'},
+ {STRINGIFY (LIBB), "func_b", 'b'},
+ };
+
+ for (size_t i = 0; i < sizeof (tests) / sizeof (tests[0]); i++)
+ load (&tests[i]);
+
+ return 0;
+}
diff --git a/test cases/common/35 string operations/meson.build b/test cases/common/35 string operations/meson.build
index 27cc0d8c8671..ab77b4947c56 100644
--- a/test cases/common/35 string operations/meson.build
+++ b/test cases/common/35 string operations/meson.build
@@ -79,6 +79,11 @@ assert(not version_number.version_compare('!=1.2.8'), 'Version_compare neq broke
assert(version_number.version_compare('<2.0'), 'Version_compare major less broken')
assert(version_number.version_compare('>0.9'), 'Version_compare major greater broken')
+assert(version_number.version_compare('>1.2', '<1.3'))
+assert(not version_number.version_compare('>1.2', '>1.3'))
+assert(not version_number.version_compare('<1.2', '<1.3'))
+assert(version_number.version_compare('>1.0', '>1.2'))
+
assert(' spaces tabs '.strip() == 'spaces tabs', 'Spaces and tabs badly stripped')
assert('''
multiline string '''.strip() == '''multiline string''', 'Newlines badly stripped')
diff --git a/test cases/common/40 options/meson.build b/test cases/common/40 options/meson.build
index de4a7d50db14..f41265af7cb3 100644
--- a/test cases/common/40 options/meson.build
+++ b/test cases/common/40 options/meson.build
@@ -1,4 +1,13 @@
-project('options', 'c', meson_version : '>= 1.0.0')
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2025 Intel Corporation
+
+project('options', meson_version : '>= 1.0.0')
+
+# This must happen before any language is added, or we wont be sure that the
+# compiler didn't cause b_lto to be initialized
+assert(get_option('b_lto') == false, 'Unused b_ option does not have default value')
+
+add_languages('c', required : true)
if get_option('testoption') != 'optval'
error('Incorrect value to test option')
@@ -18,7 +27,7 @@ if get_option('array_opt') != ['one', 'two']
endif
# If the default changes, update test cases/unit/13 reconfigure
-if get_option('b_lto') != false
+if get_option('b_pch') != true
error('Incorrect value in base option.')
endif
@@ -30,8 +39,9 @@ if get_option('integer_opt') != 3
error('Incorrect value in integer option.')
endif
-if get_option('neg_int_opt') != -3
- error('Incorrect value in negative integer option.')
+negint = get_option('neg_int_opt')
+if negint not in [-2, -3, -10]
+ error('Incorrect value @0@ in negative integer option.'.format(negint))
endif
if get_option('CaseSenSiTivE') != 'Some CAPS'
diff --git a/test cases/common/42 subproject/meson.build b/test cases/common/42 subproject/meson.build
index c2c0122f1f2e..bc24adbcc9c5 100644
--- a/test cases/common/42 subproject/meson.build
+++ b/test cases/common/42 subproject/meson.build
@@ -1,7 +1,8 @@
project('subproj user', 'c',
version : '2.3.4',
license : 'mylicense',
- license_files: 'mylicense.txt',
+ # also grab the meson license to test monorepo support
+ license_files: ['mylicense.txt', '../../../COPYING'],
)
assert(meson.project_name() == 'subproj user', 'Incorrect project name')
diff --git a/test cases/common/42 subproject/test.json b/test cases/common/42 subproject/test.json
index 949cb79fa0cc..e469052f4396 100644
--- a/test cases/common/42 subproject/test.json
+++ b/test cases/common/42 subproject/test.json
@@ -4,6 +4,7 @@
{"type": "pdb", "file": "usr/bin/user"},
{"type": "file", "file": "usr/share/sublib/sublib.depmf"},
{"type": "file", "file": "usr/share/sublib/mylicense.txt"},
+ {"type": "file", "file": "usr/share/sublib/COPYING"},
{"type": "file", "file": "usr/share/sublib/subprojects/sublib/sublicense1.txt"},
{"type": "file", "file": "usr/share/sublib/subprojects/sublib/sublicense2.txt"}
]
diff --git a/test cases/common/66 vcstag/meson.build b/test cases/common/66 vcstag/meson.build
index 38fa590385cf..53f90d8571f9 100644
--- a/test cases/common/66 vcstag/meson.build
+++ b/test cases/common/66 vcstag/meson.build
@@ -32,7 +32,9 @@ tagprog = executable('tagprog', 'tagprog.c', version_src)
version_src_executable = vcs_tag(input : 'vcstag.c.in',
output : 'vcstag-executable.c',
-command : [tagprog])
+command : [tagprog],
+install: true,
+install_dir: get_option('includedir'))
executable('tagprog-custom', 'tagprog.c', version_src_custom)
executable('tagprog-fallback', 'tagprog.c', version_src_fallback)
diff --git a/test cases/common/66 vcstag/test.json b/test cases/common/66 vcstag/test.json
new file mode 100644
index 000000000000..4c9e3a197215
--- /dev/null
+++ b/test cases/common/66 vcstag/test.json
@@ -0,0 +1,8 @@
+{
+ "installed": [
+ {
+ "type": "file",
+ "file": "usr/include/vcstag-executable.c"
+ }
+ ]
+}
diff --git a/test cases/common/87 default options/meson.build b/test cases/common/87 default options/meson.build
index 51b5cdac9fc8..1b482f1e38be 100644
--- a/test cases/common/87 default options/meson.build
+++ b/test cases/common/87 default options/meson.build
@@ -30,4 +30,4 @@ assert(w_level == '3', 'warning level "' + w_level + '" instead of "3"')
# assert(cc.compiles('int foobar;', no_builtin_args : true), 'No_builtin did not disable builtins.')
# endif
-subproject('sub1')
+subproject('sub1', default_options: 'func_test_option=true')
diff --git a/test cases/common/87 default options/subprojects/sub1/meson.build b/test cases/common/87 default options/subprojects/sub1/meson.build
index de0dc216c115..d6f79609592c 100644
--- a/test cases/common/87 default options/subprojects/sub1/meson.build
+++ b/test cases/common/87 default options/subprojects/sub1/meson.build
@@ -1,3 +1,4 @@
project('sub1')
assert(get_option('test_option') == false)
+assert(get_option('func_test_option') == true)
diff --git a/test cases/common/87 default options/subprojects/sub1/meson_options.txt b/test cases/common/87 default options/subprojects/sub1/meson_options.txt
index fc96f5e099c3..37ce4d4bb79e 100644
--- a/test cases/common/87 default options/subprojects/sub1/meson_options.txt
+++ b/test cases/common/87 default options/subprojects/sub1/meson_options.txt
@@ -1 +1,2 @@
option('test_option', type : 'boolean', value : true, description : 'Test option. Superproject overrides default to "false"')
+option('func_test_option', type : 'boolean', value : false, description : 'Test option. Superproject overrides default to "true"')
diff --git a/test cases/common/98 subproject subdir/meson.build b/test cases/common/98 subproject subdir/meson.build
index d2bafedf5119..5d92772c1354 100644
--- a/test cases/common/98 subproject subdir/meson.build
+++ b/test cases/common/98 subproject subdir/meson.build
@@ -83,7 +83,7 @@ d = dependency('subsubsub')
assert(d.found(), 'Should be able to fallback to sub-sub-subproject')
# Verify that `static: true` implies 'default_library=static'.
-d = dependency('sub_static', static: true)
+d = dependency('sub_static', static: true, default_options: ['bar=true'])
assert(d.found())
# Verify that when not specifying static kwarg we can still get fallback dep.
d = dependency('sub_static')
diff --git a/test cases/common/98 subproject subdir/subprojects/sub_static/meson.build b/test cases/common/98 subproject subdir/subprojects/sub_static/meson.build
index 6c00623a1d22..8de7cb40674a 100644
--- a/test cases/common/98 subproject subdir/subprojects/sub_static/meson.build
+++ b/test cases/common/98 subproject subdir/subprojects/sub_static/meson.build
@@ -1,6 +1,7 @@
project('sub_static')
assert(get_option('default_library') == 'static')
+assert(get_option('bar') == true)
meson.override_dependency('sub_static', declare_dependency())
meson.override_dependency('sub_static2', declare_dependency(), static: true)
meson.override_dependency('sub_static3', declare_dependency(variables: {'static': 'true'}), static: true)
diff --git a/test cases/common/98 subproject subdir/subprojects/sub_static/meson_options.txt b/test cases/common/98 subproject subdir/subprojects/sub_static/meson_options.txt
new file mode 100644
index 000000000000..129a7d4a07a4
--- /dev/null
+++ b/test cases/common/98 subproject subdir/subprojects/sub_static/meson_options.txt
@@ -0,0 +1 @@
+option('bar', type: 'boolean', value: false)
diff --git a/test cases/cuda/13 cuda compiler setting/meson.build b/test cases/cuda/13 cuda compiler setting/meson.build
index 4f111d1b9ee1..ba560b1e6f80 100644
--- a/test cases/cuda/13 cuda compiler setting/meson.build
+++ b/test cases/cuda/13 cuda compiler setting/meson.build
@@ -1,4 +1,5 @@
-project('simple', 'cuda', version : '1.0.0')
+project('simple', ['cpp', 'cuda'], version : '1.0.0',
+ default_options: ['cpp_std=c++2a', 'cuda_std=c++17'])
exe = executable('prog', 'prog.cu')
test('cudatest', exe)
diff --git a/test cases/cython/4 override_options/foo_cpp.pyx b/test cases/cython/4 override_options/foo_cpp.pyx
new file mode 100644
index 000000000000..01ff64d97d3d
--- /dev/null
+++ b/test cases/cython/4 override_options/foo_cpp.pyx
@@ -0,0 +1,2 @@
+def the_answer():
+ return 43
diff --git a/test cases/cython/4 override_options/meson.build b/test cases/cython/4 override_options/meson.build
new file mode 100644
index 000000000000..896993c0be12
--- /dev/null
+++ b/test cases/cython/4 override_options/meson.build
@@ -0,0 +1,11 @@
+project('my project', 'cython', 'cpp',
+ default_options : ['buildtype=release'])
+
+py = import('python').find_installation(pure: false)
+
+py.extension_module(
+ 'foo',
+ 'foo_cpp.pyx',
+ override_options : ['cython_language=cpp'],
+)
+
diff --git a/test cases/d/11 dub/meson.build b/test cases/d/11 dub/meson.build
index 91955710e709..fe79e046eb78 100644
--- a/test cases/d/11 dub/meson.build
+++ b/test cases/d/11 dub/meson.build
@@ -6,7 +6,7 @@ if not dub_exe.found()
endif
dub_ver = dub_exe.version()
-if dub_ver.version_compare('>1.31.1') and dub_ver.version_compare('<1.35.0')
+if dub_ver.version_compare('>1.31.1', '<1.35.0')
error('MESON_SKIP_TEST: Incompatible Dub version ' + dub_ver)
endif
@@ -17,7 +17,7 @@ test('test urld', test_exe)
# If you want meson to generate/update a dub.json file
dlang = import('dlang')
-dlang.generate_dub_file(meson.project_name().to_lower(), meson.source_root(),
+dlang.generate_dub_file(meson.project_name().to_lower(), meson.build_root(),
authors: 'Meson Team',
description: 'Test executable',
copyright: 'Copyright © 2018, Meson Team',
@@ -25,4 +25,4 @@ dlang.generate_dub_file(meson.project_name().to_lower(), meson.source_root(),
sourceFiles: 'test.d',
targetType: 'executable',
dependencies: urld_dep
-)
\ No newline at end of file
+)
diff --git a/test cases/d/14 dub with deps/meson.build b/test cases/d/14 dub with deps/meson.build
index 2e3bce87d37d..f7fbd0aedcc2 100644
--- a/test cases/d/14 dub with deps/meson.build
+++ b/test cases/d/14 dub with deps/meson.build
@@ -6,18 +6,15 @@ if not dub_exe.found()
endif
dub_ver = dub_exe.version()
-if dub_ver.version_compare('>1.31.1') and dub_ver.version_compare('<1.35.0')
+if dub_ver.version_compare('>1.31.1', '<1.35.0')
error('MESON_SKIP_TEST: Incompatible Dub version')
endif
if meson.get_compiler('d').get_id() == 'gcc'
error('MESON_SKIP_TEST: can\'t build dependencies with GDC')
-elif meson.get_compiler('d').get_id() == 'llvm'
- dc = 'ldc2'
-elif meson.get_compiler('d').get_id() == 'dmd'
- dc = 'dmd'
endif
+dc = meson.get_compiler('d').cmd_array()[0]
arch = host_machine.cpu_family()
if host_machine.system() == 'windows'
diff --git a/test cases/d/17 dub and meson project/.gitignore b/test cases/d/17 dub and meson project/.gitignore
new file mode 100644
index 000000000000..0036ff3946de
--- /dev/null
+++ b/test cases/d/17 dub and meson project/.gitignore
@@ -0,0 +1,2 @@
+17-dub-meson-project*
+lib17-dub-meson-project*
diff --git a/test cases/d/17 dub and meson project/dub.json b/test cases/d/17 dub and meson project/dub.json
new file mode 100644
index 000000000000..f04e51b551c4
--- /dev/null
+++ b/test cases/d/17 dub and meson project/dub.json
@@ -0,0 +1,11 @@
+{
+ "name": "17-dub-meson-project",
+ "dependencies": {
+ "urld": ">=3.0.0 <3.0.1",
+ "dubtestproject:test3": "1.2.0",
+ ":multi-configuration": "*"
+ },
+ "subPackages": [
+ "multi-configuration"
+ ]
+}
diff --git a/test cases/d/17 dub and meson project/dub.selections.json b/test cases/d/17 dub and meson project/dub.selections.json
new file mode 100644
index 000000000000..ad72f31683c5
--- /dev/null
+++ b/test cases/d/17 dub and meson project/dub.selections.json
@@ -0,0 +1,7 @@
+{
+ "fileVersion": 1,
+ "versions": {
+ "dubtestproject": "1.2.0",
+ "urld": "3.0.0"
+ }
+}
diff --git a/test cases/d/17 dub and meson project/meson.build b/test cases/d/17 dub and meson project/meson.build
new file mode 100644
index 000000000000..9070e6c720ff
--- /dev/null
+++ b/test cases/d/17 dub and meson project/meson.build
@@ -0,0 +1,32 @@
+project('Meson integration with dub.json', 'd')
+
+dub_exe = find_program('dub', required : false)
+if not dub_exe.found()
+ error('MESON_SKIP_TEST: Dub not found')
+endif
+
+dub_ver = dub_exe.version()
+if not dub_ver.version_compare('>=1.35.0')
+ error('MESON_SKIP_TEST: test requires dub >=1.35.0')
+endif
+
+# Multiple versions supported
+urld = dependency('urld', method: 'dub', version: [ '>=3.0.0', '<3.0.1' ])
+
+# The version we got is the one in dub.selections.json
+version = urld.version()
+if version != '3.0.0'
+ error(f'Expected urld version to be the one selected in dub.selections.json but got @version@')
+endif
+
+# dependency calls from subdirectories respect meson.project_source_root()/dub.selections.json
+subdir('x/y/z')
+
+# dependencies respect their configuration selected in dub.json
+run_command(dub_exe, 'build', '--deep', ':multi-configuration',
+ '--compiler', meson.get_compiler('d').cmd_array()[0],
+ '--arch', host_machine.cpu_family(),
+ '--root', meson.project_source_root(),
+ '--config', 'lib',
+ check: true)
+found = dependency('17-dub-meson-project:multi-configuration', method: 'dub')
diff --git a/test cases/d/17 dub and meson project/multi-configuration/.gitignore b/test cases/d/17 dub and meson project/multi-configuration/.gitignore
new file mode 100644
index 000000000000..61763ee20b1b
--- /dev/null
+++ b/test cases/d/17 dub and meson project/multi-configuration/.gitignore
@@ -0,0 +1,2 @@
+libmulti-configuration*
+multi-configuration*
diff --git a/test cases/d/17 dub and meson project/multi-configuration/dub.json b/test cases/d/17 dub and meson project/multi-configuration/dub.json
new file mode 100644
index 000000000000..373176d9ecc8
--- /dev/null
+++ b/test cases/d/17 dub and meson project/multi-configuration/dub.json
@@ -0,0 +1,14 @@
+{
+ "name": "multi-configuration",
+ "configurations": {
+ "app": {
+ "targetType": "executable"
+ },
+ "lib": {
+ "targetType": "library",
+ "excludedSourceFiles": [
+ "source/app.d"
+ ]
+ }
+ }
+}
diff --git a/test cases/d/17 dub and meson project/multi-configuration/dub.selections.json b/test cases/d/17 dub and meson project/multi-configuration/dub.selections.json
new file mode 100644
index 000000000000..322586b10676
--- /dev/null
+++ b/test cases/d/17 dub and meson project/multi-configuration/dub.selections.json
@@ -0,0 +1,5 @@
+{
+ "fileVersion": 1,
+ "versions": {
+ }
+}
diff --git a/test cases/d/17 dub and meson project/multi-configuration/source/app.d b/test cases/d/17 dub and meson project/multi-configuration/source/app.d
new file mode 100644
index 000000000000..d66321b3c581
--- /dev/null
+++ b/test cases/d/17 dub and meson project/multi-configuration/source/app.d
@@ -0,0 +1 @@
+void main () {}
diff --git a/test cases/d/17 dub and meson project/multi-configuration/source/foo.d b/test cases/d/17 dub and meson project/multi-configuration/source/foo.d
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/test cases/d/17 dub and meson project/source/app.d b/test cases/d/17 dub and meson project/source/app.d
new file mode 100644
index 000000000000..d66321b3c581
--- /dev/null
+++ b/test cases/d/17 dub and meson project/source/app.d
@@ -0,0 +1 @@
+void main () {}
diff --git a/test cases/d/17 dub and meson project/x/y/z/dub.json b/test cases/d/17 dub and meson project/x/y/z/dub.json
new file mode 100644
index 000000000000..dc0ef515964a
--- /dev/null
+++ b/test cases/d/17 dub and meson project/x/y/z/dub.json
@@ -0,0 +1,6 @@
+{
+ "dependencies": {
+ "dubtestproject:test3": "*"
+ },
+ "name": "dub-respects-project-root-subdir"
+}
diff --git a/test cases/d/17 dub and meson project/x/y/z/dub.selections.json b/test cases/d/17 dub and meson project/x/y/z/dub.selections.json
new file mode 100644
index 000000000000..daf7a2de9823
--- /dev/null
+++ b/test cases/d/17 dub and meson project/x/y/z/dub.selections.json
@@ -0,0 +1,6 @@
+{
+ "fileVersion": 1,
+ "versions": {
+ "dubtestproject": "1.3.0"
+ }
+}
diff --git a/test cases/d/17 dub and meson project/x/y/z/meson.build b/test cases/d/17 dub and meson project/x/y/z/meson.build
new file mode 100644
index 000000000000..99943b8a723e
--- /dev/null
+++ b/test cases/d/17 dub and meson project/x/y/z/meson.build
@@ -0,0 +1,6 @@
+root = meson.project_source_root()
+dep = dependency('dubtestproject:test3', method: 'dub')
+version = dep.version()
+if version != '1.2.0'
+ error(f'Expected dubtestproject:test3 version to be the one selected in "@root@/dub.selections.json" but got @version@')
+endif
diff --git a/test cases/d/9 features/app.d b/test cases/d/9 features/app.d
index ae59be139d42..e7faec1d648a 100644
--- a/test cases/d/9 features/app.d
+++ b/test cases/d/9 features/app.d
@@ -1,4 +1,4 @@
-
+import std.conv;
import std.stdio;
import std.array : split;
import std.string : strip;
@@ -16,6 +16,22 @@ auto getPeople ()
return import ("people.txt").strip.split ("\n");
}
+// Put these in templates to prevent the compiler from failing to parse them
+// since frontend version 2.111
+template VersionInt (int v) {
+ mixin(`version(` ~ v.to!string ~ `)
+ enum VersionInt = true;
+ else
+ enum VersionInt = false;`);
+}
+template DebugInt (int v) {
+ import std.conv;
+ mixin(`debug(` ~ v.to!string ~ `)
+ enum DebugInt = true;
+ else
+ enum DebugInt = false;`);
+}
+
void main (string[] args)
{
import std.array : join;
@@ -43,13 +59,13 @@ void main (string[] args)
}
version (With_VersionInteger)
- version(3) exit(0);
+ static if (VersionInt!3) exit(0);
version (With_Debug)
debug exit(0);
version (With_DebugInteger)
- debug(3) exit(0);
+ static if (DebugInt!(3)) exit(0);
version (With_DebugIdentifier)
debug(DebugIdentifier) exit(0);
@@ -57,9 +73,9 @@ void main (string[] args)
version (With_DebugAll) {
int dbg = 0;
debug dbg++;
- debug(2) dbg++;
- debug(3) dbg++;
- debug(4) dbg++;
+ static if (DebugInt!2) dbg++;
+ static if (DebugInt!3) dbg++;
+ static if (DebugInt!4) dbg++;
debug(DebugIdentifier) dbg++;
if (dbg == 5)
diff --git a/test cases/d/9 features/meson.build b/test cases/d/9 features/meson.build
index 065ef3a6ddef..736ce7535fa3 100644
--- a/test cases/d/9 features/meson.build
+++ b/test cases/d/9 features/meson.build
@@ -2,13 +2,16 @@ project('D Features', 'd', meson_version: '>=1.6', default_options : ['debug=fal
dc = meson.get_compiler('d')
-# GDC 13 hard errors if options are given number values.
-# https://github.com/mesonbuild/meson/pull/11996
-
-if dc.get_id() == 'gcc' and dc.version().version_compare('>=13')
- number_options_supported = false
+# See: https://dlang.org/changelog/2.111.0.html#dmd.deprecation-version-debug-number
+# GDC fails even before that: https://github.com/mesonbuild/meson/pull/11996
+if dc.get_id() == 'gcc'
+ number_options_supported = dc.version().version_compare('<13')
+elif dc.get_id() == 'dmd'
+ number_options_supported = dc.version().version_compare('<2.111')
+elif dc.get_id() == 'llvm'
+ number_options_supported = dc.version().version_compare('<1.41')
else
- number_options_supported = true
+ error('Unknown D compiler id')
endif
# ONLY FOR BACKWARDS COMPATIBILITY.
diff --git a/test cases/failing test/5 tap tests/meson.build b/test cases/failing test/5 tap tests/meson.build
index 664ac34d2f8b..27b9fe7ac82e 100644
--- a/test cases/failing test/5 tap tests/meson.build
+++ b/test cases/failing test/5 tap tests/meson.build
@@ -7,3 +7,4 @@ test('nonzero return code with tests', test_with_status, protocol: 'tap')
test('missing test', tester, args : ['1..1'], protocol: 'tap')
test('incorrect skip', tester, args : ['1..1 # skip\nok 1'], protocol: 'tap')
test('partially skipped', tester, args : ['not ok 1\nok 2 # skip'], protocol: 'tap')
+test('premature bailout', tester, args : ['Bail out!'], protocol: 'tap')
diff --git a/test cases/failing/126 generator host binary/test.json b/test cases/failing/126 generator host binary/test.json
index 7e354d60dcad..c633622cf2f8 100644
--- a/test cases/failing/126 generator host binary/test.json
+++ b/test cases/failing/126 generator host binary/test.json
@@ -1,5 +1,8 @@
{
"stdout": [
- { "line": "ERROR: An exe_wrapper is needed but was not found. Please define one in cross file and check the command and/or add it to PATH." }
+ {
+ "match": "re",
+ "line": "ERROR: An exe_wrapper is needed for .* but was not found. Please define one in cross file and check the command and/or add it to PATH."
+ }
]
}
diff --git a/test cases/failing/133 dub missing dependency/dub.json b/test cases/failing/133 dub missing dependency/dub.json
new file mode 100644
index 000000000000..1ad9ddcc1f19
--- /dev/null
+++ b/test cases/failing/133 dub missing dependency/dub.json
@@ -0,0 +1,3 @@
+{
+ "name": "133-missing-dep"
+}
diff --git a/test cases/failing/133 dub missing dependency/dub.selections.json b/test cases/failing/133 dub missing dependency/dub.selections.json
new file mode 100644
index 000000000000..322586b10676
--- /dev/null
+++ b/test cases/failing/133 dub missing dependency/dub.selections.json
@@ -0,0 +1,5 @@
+{
+ "fileVersion": 1,
+ "versions": {
+ }
+}
diff --git a/test cases/failing/133 dub missing dependency/meson.build b/test cases/failing/133 dub missing dependency/meson.build
new file mode 100644
index 000000000000..fcccb3b415d4
--- /dev/null
+++ b/test cases/failing/133 dub missing dependency/meson.build
@@ -0,0 +1,17 @@
+project('Dub dependency not in dub.json')
+
+if not add_languages('d', required: false)
+ error('MESON_SKIP_TEST test requires D compiler')
+endif
+
+dub_exe = find_program('dub', required : false)
+if not dub_exe.found()
+ error('MESON_SKIP_TEST: Dub not found')
+endif
+
+dub_ver = dub_exe.version()
+if not dub_ver.version_compare('>=1.35.0')
+ error('MESON_SKIP_TEST: test requires dub >=1.35.0')
+endif
+
+dep = dependency('urld', method: 'dub') # not in dub.json
diff --git a/test cases/failing/133 dub missing dependency/source/app.d b/test cases/failing/133 dub missing dependency/source/app.d
new file mode 100644
index 000000000000..d66321b3c581
--- /dev/null
+++ b/test cases/failing/133 dub missing dependency/source/app.d
@@ -0,0 +1 @@
+void main () {}
diff --git a/test cases/failing/133 dub missing dependency/test.json b/test cases/failing/133 dub missing dependency/test.json
new file mode 100644
index 000000000000..e58dbc7aabee
--- /dev/null
+++ b/test cases/failing/133 dub missing dependency/test.json
@@ -0,0 +1,10 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/133 dub missing dependency/meson.build:17:6: ERROR: Dependency \"urld\" not found"
+ },
+ {
+ "line": "dub add urld"
+ }
+ ]
+}
diff --git a/test cases/failing/134 java sources in non jar target/Test.java b/test cases/failing/134 java sources in non jar target/Test.java
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/test cases/failing/134 java sources in non jar target/meson.build b/test cases/failing/134 java sources in non jar target/meson.build
new file mode 100644
index 000000000000..0aa802a48304
--- /dev/null
+++ b/test cases/failing/134 java sources in non jar target/meson.build
@@ -0,0 +1,3 @@
+# https://github.com/mesonbuild/meson/issues/13870
+project('134 java sources in non jar target')
+executable('Test.jar', 'Test.java')
diff --git a/test cases/failing/134 java sources in non jar target/test.json b/test cases/failing/134 java sources in non jar target/test.json
new file mode 100644
index 000000000000..271ac36affdf
--- /dev/null
+++ b/test cases/failing/134 java sources in non jar target/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/134 java sources in non jar target/meson.build:3:0: ERROR: Build target of type \"executable\" cannot build java source: \"Test.java\". Use \"jar\" instead."
+ }
+ ]
+}
diff --git a/test cases/failing/95 invalid option file/test.json b/test cases/failing/95 invalid option file/test.json
index 073ac67177af..debb4a1440e8 100644
--- a/test cases/failing/95 invalid option file/test.json
+++ b/test cases/failing/95 invalid option file/test.json
@@ -1,7 +1,7 @@
{
"stdout": [
{
- "line": "test cases/failing/95 invalid option file/meson_options.txt:1:0: ERROR: lexer"
+ "line": "test cases/failing/95 invalid option file/meson_options.txt:1:0: ERROR: lexer: unrecognized token \"'\""
}
]
}
diff --git a/test cases/fortran/7 generated/gen.py b/test cases/fortran/7 generated/gen.py
new file mode 100755
index 000000000000..86d9bf712261
--- /dev/null
+++ b/test cases/fortran/7 generated/gen.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2024 Intel Corporation
+
+from __future__ import annotations
+import argparse
+import typing as T
+
+if T.TYPE_CHECKING:
+ class Arguments(T.Protocol):
+
+ input: str
+ output: str
+ replacements: T.List[T.Tuple[str, str]]
+
+
+def process(txt: str, replacements: T.List[T.Tuple[str, str]]) -> str:
+ for k, v in replacements:
+ txt = txt.replace(k, v)
+ return txt
+
+
+def split_arg(arg: str) -> T.Tuple[str, str]:
+ args = arg.split('=', maxsplit=1)
+ assert len(args) == 2, 'Did not get the right number of args?'
+ return T.cast('T.Tuple[str, str]', tuple(args))
+
+
+def main() -> None:
+ parser = argparse.ArgumentParser()
+ parser.add_argument('input')
+ parser.add_argument('output')
+ parser.add_argument('--replace', action='append', required=True, dest='replacements', type=split_arg)
+ args = T.cast('Arguments', parser.parse_args())
+
+ with open(args.input, 'r', encoding='utf-8') as f:
+ content = f.read()
+
+ content = process(content, args.replacements)
+
+ with open(args.output, 'w', encoding='utf-8') as f:
+ f.write(content)
+
+if __name__ == "__main__":
+ main()
diff --git a/test cases/fortran/7 generated/meson.build b/test cases/fortran/7 generated/meson.build
index f021309d1c75..257ea1e2207f 100644
--- a/test cases/fortran/7 generated/meson.build
+++ b/test cases/fortran/7 generated/meson.build
@@ -8,20 +8,7 @@ conf_data = configuration_data()
conf_data.set('ONE', 1)
conf_data.set('TWO', 2)
-mod3_f = custom_target(
- 'mod3.f',
- input : 'mod3.f90',
- output : 'mod3.f90',
- # We need a platform agnostic way to do a copy a file, using a custom_target
- # and we need to use the @OUTDIR@, not @OUTPUT@ in order to exercise
- # https://github.com/mesonbuild/meson/issues/9258
- command : [
- find_program('python', 'python3'), '-c',
- 'import sys, shutil; shutil.copy(sys.argv[1], sys.argv[2])',
- '@INPUT@', '@OUTDIR@',
- ],
-)
-
+mod3_f = import('fs').copyfile('mod3.f90', 'mod3.f90')
three = library('mod3', mod3_f)
templates_basenames = ['mod2', 'mod1']
@@ -35,5 +22,22 @@ foreach template_basename : templates_basenames
endforeach
sources = ['prog.f90'] + generated_sources
-exe = executable('generated', sources, link_with: three)
-test('generated', exe)
+exe = executable('configure_file', sources, link_with: three)
+test('configure_file', exe)
+
+gen = generator(
+ find_program('gen.py'),
+ arguments : [
+ '@INPUT@', '@OUTPUT@',
+ '--replace', '@ONE@=1',
+ '--replace', '@TWO@=2',
+ ],
+ output : ['@BASENAME@.f90'],
+)
+
+exe = executable(
+ 'generator',
+ 'prog.f90', gen.process('mod1.fpp', 'mod2.fpp'),
+ link_with: three,
+)
+test('generator', exe)
diff --git a/test cases/fortran/8 module names/lib.f90 b/test cases/fortran/8 module names/lib.f90
new file mode 100644
index 000000000000..f8a8bfdacbb6
--- /dev/null
+++ b/test cases/fortran/8 module names/lib.f90
@@ -0,0 +1,9 @@
+program lib
+use MyMod1
+use MyMod2 ! test inline comment
+
+implicit none
+
+call showvalues()
+
+end program
diff --git a/test cases/fortran/8 module names/meson.build b/test cases/fortran/8 module names/meson.build
index 632c597889bf..9340c79d7789 100644
--- a/test cases/fortran/8 module names/meson.build
+++ b/test cases/fortran/8 module names/meson.build
@@ -1,6 +1,15 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2024 Intel Corporation
+
project('mod_name_case', 'fortran')
sources = ['test.f90', 'mod1.f90', 'mod2.f90']
-exe = executable('mod_name_case', sources)
-test('mod_name_case', exe)
+l = static_library('s1', 'mod1.f90')
+l2 = static_library('s2', 'mod2.f90', link_whole : l)
+if get_option('unittest')
+ sh = static_library('library', 'lib.f90', link_with : l2)
+else
+ exe = executable('mod_name_case', 'test.f90', link_with : l2)
+ test('mod_name_case', exe)
+endif
diff --git a/test cases/fortran/8 module names/meson_options.txt b/test cases/fortran/8 module names/meson_options.txt
new file mode 100644
index 000000000000..b5b7ee9a2394
--- /dev/null
+++ b/test cases/fortran/8 module names/meson_options.txt
@@ -0,0 +1 @@
+option('unittest', type : 'boolean', value : false)
diff --git a/test cases/fortran/8 module names/mod2.f90 b/test cases/fortran/8 module names/mod2.f90
index 2087750debfc..3061c211ffab 100644
--- a/test cases/fortran/8 module names/mod2.f90
+++ b/test cases/fortran/8 module names/mod2.f90
@@ -1,6 +1,14 @@
module mymod2
+use mymod1
implicit none
integer, parameter :: myModVal2 = 2
+contains
+ subroutine showvalues()
+ print*, "myModVal1 = ", myModVal1
+ print*, "myModVal2 = ", myModVal2
+ end subroutine showvalues
+
+
end module mymod2
diff --git a/test cases/fortran/8 module names/test.f90 b/test cases/fortran/8 module names/test.f90
index 60ff16e9034c..fcfc23f919f8 100644
--- a/test cases/fortran/8 module names/test.f90
+++ b/test cases/fortran/8 module names/test.f90
@@ -1,9 +1,8 @@
program main
-use mymod1
use MyMod2 ! test inline comment
implicit none
-integer, parameter :: testVar = myModVal1 + myModVal2
+call showvalues()
end program
diff --git a/test cases/frameworks/15 llvm/test.json b/test cases/frameworks/15 llvm/test.json
index fa883b1f43c6..b9cdc20a3970 100644
--- a/test cases/frameworks/15 llvm/test.json
+++ b/test cases/frameworks/15 llvm/test.json
@@ -2,9 +2,9 @@
"matrix": {
"options": {
"method": [
- { "val": "config-tool", "expect_skip_on_jobname": ["msys2-gcc"] },
- { "val": "cmake", "expect_skip_on_jobname": ["msys2-gcc"] },
- { "val": "combination", "expect_skip_on_jobname": ["msys2-gcc"] }
+ { "val": "config-tool" },
+ { "val": "cmake" },
+ { "val": "combination" }
],
"link-static": [
{ "val": true, "expect_skip_on_jobname": ["arch", "opensuse", "linux-gentoo-gcc"] },
diff --git a/test cases/frameworks/17 mpi/test.json b/test cases/frameworks/17 mpi/test.json
index 3a46657ef112..cbd1686121c5 100644
--- a/test cases/frameworks/17 mpi/test.json
+++ b/test cases/frameworks/17 mpi/test.json
@@ -2,10 +2,8 @@
"matrix": {
"options": {
"method": [
- { "val": "auto",
- "expect_skip_on_jobname": ["ubuntu"] },
- { "val": "pkg-config",
- "expect_skip_on_jobname": ["ubuntu"] },
+ { "val": "auto" },
+ { "val": "pkg-config" },
{ "val": "config-tool",
"expect_skip_on_jobname": ["fedora"] },
{
diff --git a/test cases/frameworks/25 hdf5/meson.build b/test cases/frameworks/25 hdf5/meson.build
index 38e001202bfc..095c63f66760 100644
--- a/test cases/frameworks/25 hdf5/meson.build
+++ b/test cases/frameworks/25 hdf5/meson.build
@@ -28,6 +28,7 @@ test_fortran = add_languages('fortran', required: false)
if test_fortran
cpp = meson.get_compiler('cpp')
fc = meson.get_compiler('fortran')
+ fs = import('fs')
if host_machine.system() == 'darwin' and cpp.get_id() == 'clang' and fc.get_id() == 'gcc'
# Search paths don't work correctly here and -lgfortran doesn't work
@@ -35,6 +36,10 @@ if test_fortran
elif host_machine.system() == 'windows' and cpp.get_id() != 'gcc' and fc.get_id() == 'gcc'
# mixing gfortran with non-gcc doesn't work on windows
test_fortran = false
+ elif fs.is_dir('/ci') and '-I' not in run_command('h5fc', '-show').stdout()
+ # h5fc does not include needed -I flags when HDF5 is built using CMake
+ # https://github.com/HDFGroup/hdf5/issues/5660
+ test_fortran = false
endif
# --- Fortran tests
diff --git a/test cases/frameworks/34 gir static lib/test.json b/test cases/frameworks/34 gir static lib/test.json
index c2ba9b2a886e..f6b50c380e20 100644
--- a/test cases/frameworks/34 gir static lib/test.json
+++ b/test cases/frameworks/34 gir static lib/test.json
@@ -5,5 +5,5 @@
{"type": "file", "platform": "cygwin", "file": "usr/lib/libgirlib.dll.a"},
{"type": "file", "file": "usr/share/gir-1.0/Meson-1.0.gir"}
],
- "expect_skip_on_jobname": ["azure", "bionic", "cygwin", "macos", "msys2"]
+ "expect_skip_on_jobname": ["azure", "bionic", "macos", "msys2"]
}
diff --git a/test cases/frameworks/38 gettext extractor/meson.build b/test cases/frameworks/38 gettext extractor/meson.build
new file mode 100644
index 000000000000..a31c87d0aa69
--- /dev/null
+++ b/test cases/frameworks/38 gettext extractor/meson.build
@@ -0,0 +1,19 @@
+project(
+ 'gettext extractor',
+ 'c',
+ default_options: {'default_library': 'static'},
+ meson_version: '>=1.8.0',
+)
+
+if not find_program('xgettext', required: false).found()
+ error('MESON_SKIP_TEST xgettext command not found')
+endif
+
+if host_machine.system() == 'darwin'
+ error('MESON_SKIP_TEST test is unstable on macOS for unknown reasons')
+endif
+
+i18n = import('i18n')
+xgettext_args = ['-ktr', '--add-comments=TRANSLATOR:', '--from-code=UTF-8']
+
+subdir('src')
diff --git a/test cases/frameworks/38 gettext extractor/src/lib1/lib1.c b/test cases/frameworks/38 gettext extractor/src/lib1/lib1.c
new file mode 100644
index 000000000000..723edda00637
--- /dev/null
+++ b/test cases/frameworks/38 gettext extractor/src/lib1/lib1.c
@@ -0,0 +1,10 @@
+#include "lib1.h"
+
+#include <stdio.h>
+
+#define tr(STRING) (STRING)
+
+void say_something(void)
+{
+ printf("%s\n", tr("Something!"));
+}
diff --git a/test cases/frameworks/38 gettext extractor/src/lib1/lib1.h b/test cases/frameworks/38 gettext extractor/src/lib1/lib1.h
new file mode 100644
index 000000000000..6199d29c4ec6
--- /dev/null
+++ b/test cases/frameworks/38 gettext extractor/src/lib1/lib1.h
@@ -0,0 +1,6 @@
+#ifndef LIB1_H
+#define LIB1_H
+
+void say_something(void);
+
+#endif
diff --git a/test cases/frameworks/38 gettext extractor/src/lib1/meson.build b/test cases/frameworks/38 gettext extractor/src/lib1/meson.build
new file mode 100644
index 000000000000..3ec7fa987d6d
--- /dev/null
+++ b/test cases/frameworks/38 gettext extractor/src/lib1/meson.build
@@ -0,0 +1,3 @@
+lib1 = library('mylib1', 'lib1.c')
+lib1_pot = i18n.xgettext('lib1', lib1, args: xgettext_args)
+lib1_includes = include_directories('.')
diff --git a/test cases/frameworks/38 gettext extractor/src/lib2/lib2.c b/test cases/frameworks/38 gettext extractor/src/lib2/lib2.c
new file mode 100644
index 000000000000..051271ec703d
--- /dev/null
+++ b/test cases/frameworks/38 gettext extractor/src/lib2/lib2.c
@@ -0,0 +1,13 @@
+#include "lib2.h"
+
+#include <lib1.h>
+
+#include <stdio.h>
+
+#define tr(STRING) (STRING)
+
+void say_something_else(void)
+{
+ say_something();
+ printf("%s\n", tr("Something else!"));
+}
diff --git a/test cases/frameworks/38 gettext extractor/src/lib2/lib2.h b/test cases/frameworks/38 gettext extractor/src/lib2/lib2.h
new file mode 100644
index 000000000000..faf693f7ceb3
--- /dev/null
+++ b/test cases/frameworks/38 gettext extractor/src/lib2/lib2.h
@@ -0,0 +1,6 @@
+#ifndef LIB2_H
+#define LIB2_H
+
+void say_something_else(void);
+
+#endif
diff --git a/test cases/frameworks/38 gettext extractor/src/lib2/meson.build b/test cases/frameworks/38 gettext extractor/src/lib2/meson.build
new file mode 100644
index 000000000000..ac5e7fe4b050
--- /dev/null
+++ b/test cases/frameworks/38 gettext extractor/src/lib2/meson.build
@@ -0,0 +1,3 @@
+lib2 = library('mylib2', 'lib2.c', include_directories: lib1_includes, link_with: lib1)
+lib2_pot = i18n.xgettext('lib2', lib2, args: xgettext_args)
+lib2_includes = include_directories('.')
diff --git a/test cases/frameworks/38 gettext extractor/src/main.c b/test cases/frameworks/38 gettext extractor/src/main.c
new file mode 100644
index 000000000000..807096bd7925
--- /dev/null
+++ b/test cases/frameworks/38 gettext extractor/src/main.c
@@ -0,0 +1,8 @@
+#include <lib2.h>
+
+int main(void)
+{
+ say_something_else();
+
+ return 0;
+}
diff --git a/test cases/frameworks/38 gettext extractor/src/meson.build b/test cases/frameworks/38 gettext extractor/src/meson.build
new file mode 100644
index 000000000000..27fc81326450
--- /dev/null
+++ b/test cases/frameworks/38 gettext extractor/src/meson.build
@@ -0,0 +1,6 @@
+subdir('lib1')
+subdir('lib2')
+
+main = executable('say', 'main.c', link_with: [lib2], include_directories: lib2_includes)
+
+main_pot = i18n.xgettext('main', main, args: xgettext_args, install: true, install_dir: 'intl', install_tag: 'intl', recursive: true)
diff --git a/test cases/frameworks/38 gettext extractor/test.json b/test cases/frameworks/38 gettext extractor/test.json
new file mode 100644
index 000000000000..032698e20a34
--- /dev/null
+++ b/test cases/frameworks/38 gettext extractor/test.json
@@ -0,0 +1,6 @@
+{
+ "installed": [
+ { "type": "file", "file": "usr/intl/main.pot" }
+ ],
+ "expect_skip_on_jobname": ["azure", "cygwin", "macos"]
+}
diff --git a/test cases/frameworks/39 gir both_libraries/bar.c b/test cases/frameworks/39 gir both_libraries/bar.c
new file mode 100644
index 000000000000..4cb41f798294
--- /dev/null
+++ b/test cases/frameworks/39 gir both_libraries/bar.c
@@ -0,0 +1,7 @@
+#include "bar.h"
+#include "foo.h"
+
+int bar_func(void)
+{
+ return foo_func() + 42;
+}
diff --git a/test cases/frameworks/39 gir both_libraries/bar.h b/test cases/frameworks/39 gir both_libraries/bar.h
new file mode 100644
index 000000000000..d22827b837f7
--- /dev/null
+++ b/test cases/frameworks/39 gir both_libraries/bar.h
@@ -0,0 +1 @@
+int bar_func(void);
diff --git a/test cases/frameworks/39 gir both_libraries/foo.c b/test cases/frameworks/39 gir both_libraries/foo.c
new file mode 100644
index 000000000000..b88aa91dabb4
--- /dev/null
+++ b/test cases/frameworks/39 gir both_libraries/foo.c
@@ -0,0 +1,6 @@
+#include "foo.h"
+
+int foo_func(void)
+{
+ return 42;
+}
diff --git a/test cases/frameworks/39 gir both_libraries/foo.h b/test cases/frameworks/39 gir both_libraries/foo.h
new file mode 100644
index 000000000000..2a0867249307
--- /dev/null
+++ b/test cases/frameworks/39 gir both_libraries/foo.h
@@ -0,0 +1 @@
+int foo_func(void);
diff --git a/test cases/frameworks/39 gir both_libraries/meson.build b/test cases/frameworks/39 gir both_libraries/meson.build
new file mode 100644
index 000000000000..cb9cdd31f3ed
--- /dev/null
+++ b/test cases/frameworks/39 gir both_libraries/meson.build
@@ -0,0 +1,42 @@
+project('gir both libraries', 'c')
+
+gir = dependency('gobject-introspection-1.0', required: false)
+if not gir.found()
+ error('MESON_SKIP_TEST gobject-introspection not found.')
+endif
+
+if host_machine.system() == 'cygwin'
+ # FIXME: g-ir-scanner seems broken on cygwin:
+ # ERROR: can't resolve libraries to shared libraries: foo++
+ error('MESON_SKIP_TEST g-ir-scanner is broken on cygwin.')
+endif
+
+gnome = import('gnome')
+
+# Regression test simulating how GStreamer generates its GIRs.
+# Generated gobject-introspection binaries for every GStreamer library must
+# first call gst_init() defined in the main libgstreamer, which means they need
+# to link on that lib.
+# A regression caused by https://github.com/mesonbuild/meson/pull/12632 made
+# Meson not link the binary generated for bar with libfoo in the case it uses
+# both_libraries().
+
+libfoo = both_libraries('foo', 'foo.c')
+foo_gir = gnome.generate_gir(libfoo,
+ namespace: 'foo',
+ nsversion: '1.0',
+ sources: ['foo.c', 'foo.h'],
+)
+foo_dep = declare_dependency(
+ link_with: libfoo,
+ sources: foo_gir,
+)
+
+libbar = both_libraries('bar', 'bar.c', dependencies: foo_dep)
+gnome.generate_gir(libbar,
+ namespace: 'bar',
+ nsversion: '1.0',
+ sources: ['bar.c', 'bar.h'],
+ extra_args: '--add-init-section=extern void foo_func(void);foo_func();',
+ dependencies: foo_dep,
+)
diff --git a/test cases/frameworks/39 gir both_libraries/test.json b/test cases/frameworks/39 gir both_libraries/test.json
new file mode 100644
index 000000000000..82ac42a293b3
--- /dev/null
+++ b/test cases/frameworks/39 gir both_libraries/test.json
@@ -0,0 +1,3 @@
+{
+ "expect_skip_on_jobname": ["azure", "macos", "msys2", "cygwin"]
+}
diff --git a/test cases/frameworks/40 qt qml/Basic.qml b/test cases/frameworks/40 qt qml/Basic.qml
new file mode 100644
index 000000000000..33c0a28c70b0
--- /dev/null
+++ b/test cases/frameworks/40 qt qml/Basic.qml
@@ -0,0 +1,5 @@
+import QtQuick 2.0
+
+Item {
+ property int ok: 1
+}
diff --git a/test cases/frameworks/40 qt qml/Internal.qml b/test cases/frameworks/40 qt qml/Internal.qml
new file mode 100644
index 000000000000..e8eee472379b
--- /dev/null
+++ b/test cases/frameworks/40 qt qml/Internal.qml
@@ -0,0 +1,5 @@
+import QtQuick
+
+Item {
+ property int ok: 5
+}
diff --git a/test cases/frameworks/40 qt qml/Main.qml b/test cases/frameworks/40 qt qml/Main.qml
new file mode 100644
index 000000000000..94b67186dd35
--- /dev/null
+++ b/test cases/frameworks/40 qt qml/Main.qml
@@ -0,0 +1,53 @@
+import QtQuick
+import My.Module1 as M1
+
+Item {
+ id: root
+
+ Component.onCompleted: {
+ function checkInstance(label, instance, value) {
+ if (!instance) {
+ console.log(label, "KO instance is null")
+ return false
+ } else if (instance.ok !== value) {
+ console.log(label, "KO got", instance.ok, "expected", value)
+ return false
+ } else {
+ console.log(label, "OK")
+ return true
+ }
+ }
+
+ function checkClass(namespace, classname, value) {
+ let newObject = null;
+ try {
+ newObject = Qt.createQmlObject(
+ "import %1; %2 {}".arg(namespace).arg(classname),
+ root,
+ "some path"
+ )
+ } catch (e) {
+ console.log(namespace, classname, "KO failed to instantiate object")
+ return false
+ }
+ return checkInstance("%1 %2".arg(namespace).arg(classname), newObject, value)
+ }
+
+ let ret = true
+ ret &= checkClass("My.Module1", "Basic", 1);
+ ret &= checkClass("My.Module1", "Thing", 2);
+ ret &= checkClass("My.Module1", "QmlCppExposed", 3);
+ ret &= checkInstance("My.Module1 QmlSingleton", M1.QmlSingleton, 5)
+
+ ret &= checkClass("My.Module2", "Thing", 2);
+ ret &= checkClass("My.Module3", "Basic", 1);
+ ret &= checkClass("My.Module4", "BasicAliased", 1);
+ ret &= checkClass("My.Module5", "SubdirHeader", 6);
+ ret &= checkClass("My.Module6", "Basic", 1);
+
+ if (!ret)
+ Qt.exit(1)
+ else
+ Qt.quit()
+ }
+}
diff --git a/test cases/frameworks/40 qt qml/QmlCppExposed.hpp b/test cases/frameworks/40 qt qml/QmlCppExposed.hpp
new file mode 100644
index 000000000000..10568c8b717e
--- /dev/null
+++ b/test cases/frameworks/40 qt qml/QmlCppExposed.hpp
@@ -0,0 +1,25 @@
+#pragma once
+#include <QObject>
+#include <qqmlregistration.h>
+
+class QmlCppExposed : public QObject
+{
+ Q_OBJECT
+ QML_ELEMENT
+ Q_PROPERTY(int ok READ getOk WRITE setOk NOTIFY okChanged)
+
+public:
+ inline int getOk() const { return m_ok; }
+ inline void setOk(int value) {
+ if (value == m_ok)
+ return;
+ m_ok = value;
+ emit okChanged();
+ }
+
+signals:
+ void okChanged();
+
+private:
+ int m_ok = 3;
+};
diff --git a/test cases/frameworks/40 qt qml/QmlCppOtherExposed.hpp b/test cases/frameworks/40 qt qml/QmlCppOtherExposed.hpp
new file mode 100644
index 000000000000..78426163566f
--- /dev/null
+++ b/test cases/frameworks/40 qt qml/QmlCppOtherExposed.hpp
@@ -0,0 +1,25 @@
+#pragma once
+#include <QObject>
+#include <qqmlregistration.h>
+
+class QmlCppOtherExposed : public QObject
+{
+ Q_OBJECT
+ QML_ELEMENT
+ Q_PROPERTY(int ok READ getOk WRITE setOk NOTIFY okChanged)
+
+public:
+ inline int getOk() const { return m_ok; }
+ inline void setOk(int value) {
+ if (value == m_ok)
+ return;
+ m_ok = value;
+ emit okChanged();
+ }
+
+signals:
+ void okChanged();
+
+private:
+ int m_ok = 42;
+};
diff --git a/test cases/frameworks/40 qt qml/QmlMain.cpp b/test cases/frameworks/40 qt qml/QmlMain.cpp
new file mode 100644
index 000000000000..0cec6f3a3f1c
--- /dev/null
+++ b/test cases/frameworks/40 qt qml/QmlMain.cpp
@@ -0,0 +1,31 @@
+#include <QGuiApplication>
+#include <QQmlApplicationEngine>
+#include <QUrl>
+
+//extern type registration
+extern void qml_register_types_My_Module6();
+
+int main(int argCount, char* argVector[])
+{
+ //register resources from static libraries
+ Q_INIT_RESOURCE(My_Module6);
+ Q_INIT_RESOURCE(qmlcache_My_Module6);
+ qml_register_types_My_Module6();
+
+ //don't require a graphical environment to run the test
+ qputenv("QT_QPA_PLATFORM", "offscreen");
+
+ QGuiApplication app(argCount, argVector);
+ QQmlApplicationEngine engine;
+
+ QObject::connect(&engine, &QQmlApplicationEngine::objectCreated, [](QObject *object, const QUrl &url){
+ if (object == nullptr) {
+ qFatal("unable to load scene");
+ }
+ });
+
+ engine.addImportPath("qrc:///qt/qml");
+ engine.addImportPath("qrc:///test");
+ engine.load("qrc:///qt/qml/My/Module0/Main.qml");
+ return app.exec();
+}
diff --git a/test cases/frameworks/40 qt qml/QmlSingleton.qml b/test cases/frameworks/40 qt qml/QmlSingleton.qml
new file mode 100644
index 000000000000..73ea95d11bf6
--- /dev/null
+++ b/test cases/frameworks/40 qt qml/QmlSingleton.qml
@@ -0,0 +1,10 @@
+pragma Singleton
+import QtQuick
+
+Item {
+ property alias ok: sub.ok
+
+ Internal {
+ id: sub
+ }
+}
diff --git a/test cases/frameworks/40 qt qml/custom_qmldir b/test cases/frameworks/40 qt qml/custom_qmldir
new file mode 100644
index 000000000000..9d84db651401
--- /dev/null
+++ b/test cases/frameworks/40 qt qml/custom_qmldir
@@ -0,0 +1,4 @@
+module My.Module4
+prefer :/qt/qml/My/Module4/
+BasicAliased 1.0 Basic.qml
+Thing 1.0 Thing.qml
diff --git a/test cases/frameworks/40 qt qml/custom_qmldir.qrc b/test cases/frameworks/40 qt qml/custom_qmldir.qrc
new file mode 100644
index 000000000000..bee52092c587
--- /dev/null
+++ b/test cases/frameworks/40 qt qml/custom_qmldir.qrc
@@ -0,0 +1,5 @@
+<RCC>
+ <qresource prefix="/qt/qml/My/Module4">
+ <file alias="qmldir">custom_qmldir</file>
+ </qresource>
+</RCC>
diff --git a/test cases/frameworks/40 qt qml/meson.build b/test cases/frameworks/40 qt qml/meson.build
new file mode 100644
index 000000000000..060e044a5ec8
--- /dev/null
+++ b/test cases/frameworks/40 qt qml/meson.build
@@ -0,0 +1,115 @@
+project('qt6 qml build test', 'cpp',
+ meson_version: '>= 1.7.0',
+ # Qt6 requires C++ 17 support
+ default_options : ['cpp_std=c++17']
+)
+
+qt_modules = ['Core', 'Gui', 'Qml']
+
+qtdep = dependency('qt6', modules : qt_modules, main : true, private_headers: true, required : false, method : get_option('method'))
+if not qtdep.found()
+ error('MESON_SKIP_TEST qt6 not found.')
+endif
+
+qtmodule = import('qt6')
+fs = import('fs')
+
+qmlmodule1 = qtmodule.qml_module(
+ 'My.Module1',
+ version: '1.0',
+ qml_sources: files('Basic.qml', 'subdir/Thing.qml'),
+ qml_singletons: files('QmlSingleton.qml'),
+ qml_internals: files('Internal.qml'),
+ moc_headers: files('QmlCppExposed.hpp', 'QmlCppOtherExposed.hpp'),
+ designer_supported: true,
+ dependencies: [qtdep],
+ install: true
+)
+
+#with a different resource prefix
+qmlmodule2 = qtmodule.qml_module(
+ 'My.Module2',
+ version: '1.0',
+ qml_sources: ['Basic.qml', 'subdir/Thing.qml'],
+ resources_prefix: '/test',
+ dependencies: [qtdep],
+)
+
+#test with generated targets
+basic_copy = fs.copyfile('Basic.qml')
+thing_copy = fs.copyfile('subdir/Thing.qml')
+
+#build without cachegen
+qmlmodule3 = qtmodule.qml_module(
+ 'My.Module3',
+ version: '1.10.42',
+ qml_sources: [basic_copy, thing_copy],
+ cachegen: false,
+ dependencies: [qtdep],
+)
+
+#build without generated qmldir
+qmlmodule4 = qtmodule.qml_module(
+ 'My.Module4',
+ qml_sources: files('Basic.qml', 'subdir/Thing.qml'),
+ generate_qmldir: false,
+ dependencies: [qtdep],
+)
+
+qmlmodule4_res = qtmodule.compile_resources(
+ name : 'qmlmodule4_resource',
+ sources : files(['custom_qmldir.qrc']),
+ method : get_option('method')
+)
+
+#a module with only C++ classes
+cpponly_module = qtmodule.qml_module(
+ 'My.Module5',
+ version: '1.0',
+ moc_headers: files('subdir/SubdirHeader.hpp'),
+ dependencies: [qtdep],
+ install: true
+)
+
+#module as static library
+qmlmodule6 = qtmodule.qml_module(
+ 'My.Module6',
+ version: '1.0',
+ qml_sources: files('Basic.qml'),
+ moc_headers: files('subdir/SubdirHeader.hpp'),
+ cachegen: true,
+ dependencies: [qtdep],
+)
+
+qmlmodule6_static = static_library(
+ 'Qmlmodule6Lib',
+ sources: qmlmodule6,
+ include_directories: include_directories('subdir'),
+ dependencies: [qtdep],
+ override_options: 'unity=off',
+)
+
+#qml entry point and qmldir dependencies
+qmlmodule0 = qtmodule.qml_module(
+ 'My.Module0',
+ version: '1.0',
+ qml_sources: files('Main.qml'),
+ imports: ['QtQuick/2.0', 'My.Module1'],
+ optional_imports: ['My.Module2/auto'],
+ dependencies: [qtdep],
+)
+
+qmltest = executable(
+ 'qmlmodule',
+ sources : [
+ 'QmlMain.cpp', qmlmodule0, qmlmodule1, qmlmodule2,
+ qmlmodule3, qmlmodule4, qmlmodule4_res, cpponly_module
+ ],
+ link_with : qmlmodule6_static,
+ dependencies : qtdep,
+ # headers in subdirectory need to have their include path explicitly
+ # added for the code generated by qmltyperegistrar. see QTBUG-87221
+ include_directories: include_directories('subdir'),
+ #generated code doesn't support unity build
+ override_options: 'unity=off',
+)
diff --git a/test cases/frameworks/40 qt qml/meson_options.txt b/test cases/frameworks/40 qt qml/meson_options.txt
new file mode 100644
index 000000000000..bc1069ebc881
--- /dev/null
+++ b/test cases/frameworks/40 qt qml/meson_options.txt
@@ -0,0 +1 @@
+option('method', type : 'string', value : 'auto', description : 'The method to use to find Qt')
diff --git a/test cases/frameworks/40 qt qml/subdir/SubdirHeader.hpp b/test cases/frameworks/40 qt qml/subdir/SubdirHeader.hpp
new file mode 100644
index 000000000000..019a1692335d
--- /dev/null
+++ b/test cases/frameworks/40 qt qml/subdir/SubdirHeader.hpp
@@ -0,0 +1,27 @@
+#pragma once
+#include
+#include
+
+#include "QmlCppExposed.hpp"
+
+class SubdirHeader : public QObject
+{
+ Q_OBJECT
+ QML_ELEMENT
+ Q_PROPERTY(int ok READ getOk WRITE setOk NOTIFY okChanged)
+
+public:
+ inline int getOk() const { return m_ok; }
+ inline void setOk(int value) {
+ if (value == m_ok)
+ return;
+ m_ok = value;
+ emit okChanged();
+ }
+
+signals:
+ void okChanged();
+
+private:
+ int m_ok = 6;
+};
diff --git a/test cases/frameworks/40 qt qml/subdir/Thing.qml b/test cases/frameworks/40 qt qml/subdir/Thing.qml
new file mode 100644
index 000000000000..5b015c35ca01
--- /dev/null
+++ b/test cases/frameworks/40 qt qml/subdir/Thing.qml
@@ -0,0 +1,5 @@
+import QtQuick 2.0
+
+Item {
+ property int ok: 2
+}
diff --git a/test cases/frameworks/40 qt qml/test.json b/test cases/frameworks/40 qt qml/test.json
new file mode 100644
index 000000000000..d1f868400658
--- /dev/null
+++ b/test cases/frameworks/40 qt qml/test.json
@@ -0,0 +1,22 @@
+{
+ "matrix": {
+ "options": {
+ "method": [
+ { "val": "config-tool" },
+ { "val": "qmake" },
+ { "val": "pkg-config" }
+ ]
+ }
+ },
+ "installed": [
+ {"type": "file", "file": "usr/qml/My/Module1/QmlSingleton.qml"},
+ {"type": "file", "file": "usr/qml/My/Module1/qmldir"},
+ {"type": "file", "file": "usr/qml/My/Module1/Basic.qml"},
+ {"type": "file", "file": "usr/qml/My/Module1/Internal.qml"},
+ {"type": "file", "file": "usr/qml/My/Module1/Thing.qml"},
+ {"type": "file", "file": "usr/qml/My/Module1/My_Module1.qmltypes"},
+ {"type": "file", "file": "usr/qml/My/Module5/qmldir"},
+ {"type": "file", "file": "usr/qml/My/Module5/My_Module5.qmltypes"}
+ ],
+ "expect_skip_on_jobname": ["cygwin", "msys2", "azure", "bionic", "macos"]
+}
diff --git a/test cases/frameworks/7 gnome/gir/meson-python-sample.c b/test cases/frameworks/7 gnome/gir/meson-python-sample.c
new file mode 100644
index 000000000000..0ab7439d023e
--- /dev/null
+++ b/test cases/frameworks/7 gnome/gir/meson-python-sample.c
@@ -0,0 +1,51 @@
+#include "meson-python-sample.h"
+
+#include
+
+struct _MesonPythonSample
+{
+ GObject parent_instance;
+};
+
+G_DEFINE_TYPE (MesonPythonSample, meson_python_sample, G_TYPE_OBJECT)
+
+/**
+ * meson_python_sample_new:
+ *
+ * Allocates a new #MesonPythonSample.
+ *
+ * Returns: (transfer full): a #MesonPythonSample.
+ */
+MesonPythonSample *
+meson_python_sample_new (void)
+{
+ return g_object_new (MESON_TYPE_PYTHON_SAMPLE, NULL);
+}
+
+static void
+meson_python_sample_class_init (MesonPythonSampleClass *klass)
+{
+ if (!Py_IsInitialized ()) {
+ Py_Initialize ();
+ Py_Finalize ();
+ }
+}
+
+static void
+meson_python_sample_init (MesonPythonSample *self)
+{
+}
+
+/**
+ * meson_python_sample_print_message:
+ * @self: a #MesonPythonSample.
+ *
+ * Prints Hello.
+ *
+ * Returns: Nothing.
+ */
+void
+meson_python_sample_print_message (MesonPythonSample *self)
+{
+ g_print ("Message: Hello\n");
+}
diff --git a/test cases/frameworks/7 gnome/gir/meson-python-sample.def b/test cases/frameworks/7 gnome/gir/meson-python-sample.def
new file mode 100644
index 000000000000..c5542b96bebd
--- /dev/null
+++ b/test cases/frameworks/7 gnome/gir/meson-python-sample.def
@@ -0,0 +1,4 @@
+EXPORTS
+ meson_python_sample_new
+ meson_python_sample_print_message
+ meson_python_sample_get_type
diff --git a/test cases/frameworks/7 gnome/gir/meson-python-sample.h b/test cases/frameworks/7 gnome/gir/meson-python-sample.h
new file mode 100644
index 000000000000..6dab2f7b8ef4
--- /dev/null
+++ b/test cases/frameworks/7 gnome/gir/meson-python-sample.h
@@ -0,0 +1,17 @@
+#ifndef MESON_PYTHON_SAMPLE_H
+#define MESON_PYTHON_SAMPLE_H
+
+#include
+
+G_BEGIN_DECLS
+
+#define MESON_TYPE_PYTHON_SAMPLE (meson_python_sample_get_type())
+
+G_DECLARE_FINAL_TYPE (MesonPythonSample, meson_python_sample, MESON, SAMPLE, GObject)
+
+MesonPythonSample *meson_python_sample_new (void);
+void meson_python_sample_print_message (MesonPythonSample *self);
+
+G_END_DECLS
+
+#endif /* MESON_PYTHON_SAMPLE_H */
diff --git a/test cases/frameworks/7 gnome/gir/meson.build b/test cases/frameworks/7 gnome/gir/meson.build
index d2ceaee4256b..b02a80618a8a 100644
--- a/test cases/frameworks/7 gnome/gir/meson.build
+++ b/test cases/frameworks/7 gnome/gir/meson.build
@@ -2,6 +2,7 @@ subdir('dep1')
libsources = ['meson-sample.c', 'meson-sample.h']
lib2sources = ['meson-sample2.c', 'meson-sample2.h']
+pythonsources = ['meson-python-sample.c', 'meson-python-sample.h']
gen_source = custom_target(
'meson_sample3.h',
@@ -26,6 +27,23 @@ girlib2 = shared_library(
install : true
)
+if get_option('b_sanitize') == 'none'
+ py3_dep = py3.dependency(embed: true)
+else
+ warning('Python 3 test not supported with b_sanitize')
+ py3_dep = disabler()
+endif
+
+if py3_dep.found()
+ pythongirlib = shared_library(
+ 'python_gir_lib',
+ sources: pythonsources,
+ dependencies: [gobj, py3_dep],
+ vs_module_defs: 'meson-python-sample.def',
+ install: true
+ )
+endif
+
girexe = executable(
'girprog',
sources : 'prog.c',
@@ -36,17 +54,31 @@ girexe = executable(
fake_dep = dependency('no-way-this-exists', required: false)
+# g-ir-scanner ignores CFLAGS for MSVC
+flags_dep_for_msvc = declare_dependency(
+ compile_args: ['-DMESON_TEST_2']
+)
+
+girs = [girlib, girlib2]
+girs_sources = [libsources, lib2sources, gen_source]
+# dep1_dep pulls in dep2_dep for us
+girs_deps = [fake_dep, dep1_dep, flags_dep_for_msvc]
+if py3_dep.found()
+ girs += [pythongirlib]
+ girs_sources += [pythonsources]
+ girs_deps += [py3_dep]
+endif
+
gnome.generate_gir(
- girlib, girlib2,
- sources : [libsources, lib2sources, gen_source],
- env : {'CPPFLAGS': '-DMESON_TEST_2'},
+ girs,
+ sources : girs_sources,
nsversion : '1.0',
namespace : 'Meson',
symbol_prefix : 'meson',
identifier_prefix : 'Meson',
includes : ['GObject-2.0', 'MesonDep1-1.0'],
- # dep1_dep pulls in dep2_dep for us
- dependencies : [[fake_dep, dep1_dep]],
+ dependencies : girs_deps,
+ doc_format: 'gtk-doc-markdown',
install : true,
build_by_default : true,
)
diff --git a/test cases/frameworks/7 gnome/meson.build b/test cases/frameworks/7 gnome/meson.build
index 4d54e774b121..f75ca93a105e 100644
--- a/test cases/frameworks/7 gnome/meson.build
+++ b/test cases/frameworks/7 gnome/meson.build
@@ -1,4 +1,4 @@
-project('gobject-introspection', 'c')
+project('gobject-introspection', 'c', meson_version: '>= 1.2.0')
copyfile = find_program('copyfile.py')
copyfile_gen = generator(copyfile,
@@ -15,8 +15,8 @@ if not gir.found()
error('MESON_SKIP_TEST gobject-introspection not found.')
endif
-python3 = import('python3')
-py3 = python3.find_python()
+python3 = import('python')
+py3 = python3.find_installation()
if run_command(py3, '-c', 'import gi;', check: false).returncode() != 0
error('MESON_SKIP_TEST python3-gi not found')
endif
@@ -30,7 +30,6 @@ if cc.get_id() == 'intel'
add_global_arguments('-wd2282', language : 'c')
endif
-py3 = import('python3').find_python()
pycode = '''import os, sys
if "MESON_UNIT_TEST_PRETEND_GLIB_OLD" in os.environ:
sys.exit(0)
diff --git a/test cases/frameworks/7 gnome/mkenums/meson.build b/test cases/frameworks/7 gnome/mkenums/meson.build
index 284231f2f83c..36b73d24561f 100644
--- a/test cases/frameworks/7 gnome/mkenums/meson.build
+++ b/test cases/frameworks/7 gnome/mkenums/meson.build
@@ -140,12 +140,9 @@ test('enum test 5', enumexe5)
# Generate template then use as input to mkenums
-# Simple trick to copy the file without substitutions, can be
-# removed when https://github.com/mesonbuild/meson/pull/3383 is fixed
gen_h_template = configure_file(input: 'enums.h.in',
output: 'enums6.h.in',
- configuration: configuration_data(),
- format: 'cmake')
+ copy: true)
enums_h6 = gnome.mkenums('enums6',
sources : 'meson-sample.h',
diff --git a/test cases/frameworks/7 gnome/resources-data/meson.build b/test cases/frameworks/7 gnome/resources-data/meson.build
index 31a577b2ed93..bb251b7cad37 100644
--- a/test cases/frameworks/7 gnome/resources-data/meson.build
+++ b/test cases/frameworks/7 gnome/resources-data/meson.build
@@ -1,7 +1,5 @@
subdir('subdir')
-python3 = import('python3').find_python()
-
fake_generator_script = '''
import os, sys
assert os.path.exists(sys.argv[1]), "File %s not found" % sys.argv[1]
@@ -13,6 +11,6 @@ print("This is a generated resource.")
res3_txt = custom_target('res3',
input: 'res3.txt.in',
output: 'res3.txt',
- command: [python3, '-c', fake_generator_script, '@INPUT@'],
+ command: [py3, '-c', fake_generator_script, '@INPUT@'],
capture: true,
)
diff --git a/test cases/frameworks/7 gnome/test.json b/test cases/frameworks/7 gnome/test.json
index 77c9bdfb5216..0d3bead150fa 100644
--- a/test cases/frameworks/7 gnome/test.json
+++ b/test cases/frameworks/7 gnome/test.json
@@ -12,6 +12,7 @@
{"type": "file", "file": "usr/include/subdir-3/marshaller-3.h"},
{"type": "file", "file": "usr/include/subdir-4/marshaller-4.h"},
{"type": "file", "file": "usr/include/subdir-5/marshaller-5.h"},
+ {"type": "expr", "file": "usr/lib/?libpython_gir_lib.so"},
{"type": "expr", "file": "usr/lib/?libgir_lib.so"},
{"type": "file", "platform": "cygwin", "file": "usr/lib/libgir_lib.dll.a"},
{"type": "expr", "file": "usr/lib/?libgir_lib2.so"},
diff --git a/test cases/frameworks/8 flex/lexer.l b/test cases/frameworks/8 flex/lexer.l
index ca6513cb81c1..23a5f4869c6e 100644
--- a/test cases/frameworks/8 flex/lexer.l
+++ b/test cases/frameworks/8 flex/lexer.l
@@ -3,11 +3,11 @@
#include "parser.tab.h"
extern int yylex(void);
-extern int yyerror();
+extern int yyerror(char *s);
%}
%option noyywrap nounput noinput
%%
("true"|"false") {return BOOLEAN;}
-. { yyerror(); }
+. { yyerror("Invalid value"); }
diff --git a/test cases/frameworks/8 flex/parser.y b/test cases/frameworks/8 flex/parser.y
index 663f2f3cf186..ba8004efdf79 100644
--- a/test cases/frameworks/8 flex/parser.y
+++ b/test cases/frameworks/8 flex/parser.y
@@ -1,6 +1,6 @@
%{
extern int yylex(void);
-extern int yyerror();
+extern int yyerror(char *s);
%}
%token BOOLEAN
diff --git a/test cases/frameworks/8 flex/prog.c b/test cases/frameworks/8 flex/prog.c
index ae481d098327..840a0644ae81 100644
--- a/test cases/frameworks/8 flex/prog.c
+++ b/test cases/frameworks/8 flex/prog.c
@@ -24,7 +24,7 @@ int yywrap(void) {
return 0;
}
-int yyerror(void) {
- printf("Parse error\n");
+int yyerror(char* s) {
+ printf("Parse error: %s\n", s);
exit(1);
}
diff --git a/test cases/linuxlike/3 linker script/meson.build b/test cases/linuxlike/3 linker script/meson.build
index 660858792cb2..634b70b2cbc4 100644
--- a/test cases/linuxlike/3 linker script/meson.build
+++ b/test cases/linuxlike/3 linker script/meson.build
@@ -1,4 +1,4 @@
-project('linker script', 'c')
+project('linker script', 'c', default_options : {'prefix': '/tmp'})
# Solaris 11.4 ld supports --version-script only when you also specify
# -z gnu-version-script-compat
diff --git a/test cases/osx/3 has function xcode8/meson.build b/test cases/osx/3 has function xcode8/meson.build
index edd3688f23f7..8836c0ce852c 100644
--- a/test cases/osx/3 has function xcode8/meson.build
+++ b/test cases/osx/3 has function xcode8/meson.build
@@ -12,7 +12,7 @@ sdk_args = ['-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/M
args_10_12 = ['-mmacosx-version-min=10.13'] + sdk_args
# Test requires XCode 8 which has the MacOSX 10.12 SDK
-if cc.version().version_compare('>=8.0') and cc.version().version_compare('<8.1')
+if cc.version().version_compare('>=8.0', '<8.1')
if cc.has_function('clock_gettime', args : args_10_11, prefix : '#include ')
error('Should not have found clock_gettime via when targeting Mac OS X 10.11')
endif
diff --git a/test cases/python/4 custom target depends extmodule/blaster.py b/test cases/python/4 custom target depends extmodule/blaster.py
index 65b6493df890..939010080c09 100644
--- a/test cases/python/4 custom target depends extmodule/blaster.py
+++ b/test cases/python/4 custom target depends extmodule/blaster.py
@@ -7,8 +7,7 @@
from pathlib import Path
filedir = Path(os.path.dirname(__file__)).resolve()
-if list(filedir.glob('ext/*tachyon*')):
- sys.path.insert(0, (filedir / 'ext').as_posix())
+sys.path.insert(0, (filedir / 'ext').as_posix())
if hasattr(os, 'add_dll_directory'):
os.add_dll_directory(filedir / 'ext' / 'lib')
diff --git a/test cases/rewrite/7 prefix/meson.build b/test cases/rewrite/7 prefix/meson.build
new file mode 100644
index 000000000000..563d7d7c9547
--- /dev/null
+++ b/test cases/rewrite/7 prefix/meson.build
@@ -0,0 +1,5 @@
+project('lalala', 'cpp',
+ default_options : [
+ 'prefix=/export/doocs',
+ ],
+)
diff --git a/test cases/rust/22 cargo subproject/subprojects/bar-0.1-rs/Cargo.toml b/test cases/rust/22 cargo subproject/subprojects/bar-0.1-rs/Cargo.toml
index d60a5d8f1c76..d8b4eb26c172 100644
--- a/test cases/rust/22 cargo subproject/subprojects/bar-0.1-rs/Cargo.toml
+++ b/test cases/rust/22 cargo subproject/subprojects/bar-0.1-rs/Cargo.toml
@@ -1,6 +1,7 @@
[package]
name = "bar"
version = "0.1"
+flooob = "lolz"
# This dependency does not exist, it is required by default but this subproject
# is called with default-features=false.
@@ -8,6 +9,10 @@ version = "0.1"
optional = true
version = "1.0"
+[dependencies.common]
+version = "0.0.1"
+features = ["f2"]
+
[features]
default = ["f2"]
f1 = []
diff --git a/test cases/rust/22 cargo subproject/subprojects/common-0-rs.wrap b/test cases/rust/22 cargo subproject/subprojects/common-0-rs.wrap
new file mode 100644
index 000000000000..99686e90e78e
--- /dev/null
+++ b/test cases/rust/22 cargo subproject/subprojects/common-0-rs.wrap
@@ -0,0 +1,2 @@
+[wrap-file]
+method = cargo
diff --git a/test cases/rust/22 cargo subproject/subprojects/common-0-rs/Cargo.toml b/test cases/rust/22 cargo subproject/subprojects/common-0-rs/Cargo.toml
new file mode 100644
index 000000000000..b22e1accf288
--- /dev/null
+++ b/test cases/rust/22 cargo subproject/subprojects/common-0-rs/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "common"
+version = "0.0.1"
+edition = "2021"
+
+[lib]
+crate-type = ["rlib"]
+path = "lib.rs"
+
+[features]
+f1 = []
+f2 = []
diff --git a/test cases/rust/22 cargo subproject/subprojects/common-0-rs/lib.rs b/test cases/rust/22 cargo subproject/subprojects/common-0-rs/lib.rs
new file mode 100644
index 000000000000..a7adf8f62953
--- /dev/null
+++ b/test cases/rust/22 cargo subproject/subprojects/common-0-rs/lib.rs
@@ -0,0 +1,4 @@
+#[cfg(all(feature = "f1", feature = "f2"))]
+pub fn common_func() -> i32 {
+ 0
+}
diff --git a/test cases/rust/22 cargo subproject/subprojects/extra-dep-1-rs/Cargo.toml b/test cases/rust/22 cargo subproject/subprojects/extra-dep-1-rs/Cargo.toml
new file mode 100644
index 000000000000..4b6fa5777f45
--- /dev/null
+++ b/test cases/rust/22 cargo subproject/subprojects/extra-dep-1-rs/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "extra-deps"
+version = "1.0"
diff --git a/test cases/rust/22 cargo subproject/subprojects/extra-dep-1-rs/meson.build b/test cases/rust/22 cargo subproject/subprojects/extra-dep-1-rs/meson.build
index 40d109b2d0f8..b5ca439513dc 100644
--- a/test cases/rust/22 cargo subproject/subprojects/extra-dep-1-rs/meson.build
+++ b/test cases/rust/22 cargo subproject/subprojects/extra-dep-1-rs/meson.build
@@ -1,7 +1,5 @@
project('extra dep', 'c', version: '1.0')
-assert(get_option('feature-default') == true)
-
l = static_library('extra-dep', 'lib.c')
d = declare_dependency(link_with: l,
variables: {
diff --git a/test cases/rust/22 cargo subproject/subprojects/foo-0-rs/Cargo.toml b/test cases/rust/22 cargo subproject/subprojects/foo-0-rs/Cargo.toml
index 0f0225d06cc9..8c5351a77b2e 100644
--- a/test cases/rust/22 cargo subproject/subprojects/foo-0-rs/Cargo.toml
+++ b/test cases/rust/22 cargo subproject/subprojects/foo-0-rs/Cargo.toml
@@ -20,6 +20,13 @@ version = "1.0"
[dependencies]
mybar = { version = "0.1", package = "bar", default-features = false }
+[dependencies.common]
+version = "0.0.1"
+features = ["f1"]
+
+[dependencies.libname]
+version = "1"
+
[features]
default = ["f1"]
f1 = ["f2", "f3"]
diff --git a/test cases/rust/22 cargo subproject/subprojects/foo-0-rs/lib.rs b/test cases/rust/22 cargo subproject/subprojects/foo-0-rs/lib.rs
index 1c8cbc9d3fb5..4497dc4a4f3e 100644
--- a/test cases/rust/22 cargo subproject/subprojects/foo-0-rs/lib.rs
+++ b/test cases/rust/22 cargo subproject/subprojects/foo-0-rs/lib.rs
@@ -1,3 +1,6 @@
+extern crate common;
+extern crate libothername;
+
extern "C" {
fn extra_func() -> i32;
}
@@ -5,6 +8,8 @@ extern "C" {
#[cfg(feature = "foo")]
#[no_mangle]
pub extern "C" fn rust_func() -> i32 {
+ assert!(common::common_func() == 0);
+ assert!(libothername::stuff() == 42);
let v: i32;
unsafe {
v = extra_func();
diff --git a/test cases/rust/22 cargo subproject/subprojects/libname-1-rs.wrap b/test cases/rust/22 cargo subproject/subprojects/libname-1-rs.wrap
new file mode 100644
index 000000000000..99686e90e78e
--- /dev/null
+++ b/test cases/rust/22 cargo subproject/subprojects/libname-1-rs.wrap
@@ -0,0 +1,2 @@
+[wrap-file]
+method = cargo
diff --git a/test cases/rust/22 cargo subproject/subprojects/libname-1-rs/Cargo.toml b/test cases/rust/22 cargo subproject/subprojects/libname-1-rs/Cargo.toml
new file mode 100644
index 000000000000..1fbc87c166ff
--- /dev/null
+++ b/test cases/rust/22 cargo subproject/subprojects/libname-1-rs/Cargo.toml
@@ -0,0 +1,7 @@
+[package]
+name = "libname"
+version = "1"
+
+[lib]
+name="libothername"
+path = "lib.rs"
diff --git a/test cases/rust/22 cargo subproject/subprojects/libname-1-rs/lib.rs b/test cases/rust/22 cargo subproject/subprojects/libname-1-rs/lib.rs
new file mode 100644
index 000000000000..ff82dd1004be
--- /dev/null
+++ b/test cases/rust/22 cargo subproject/subprojects/libname-1-rs/lib.rs
@@ -0,0 +1,3 @@
+pub fn stuff() -> i32 {
+ 42
+}
diff --git a/test cases/rust/22 cargo subproject/test.json b/test cases/rust/22 cargo subproject/test.json
new file mode 100644
index 000000000000..c973b1c91f5c
--- /dev/null
+++ b/test cases/rust/22 cargo subproject/test.json
@@ -0,0 +1,8 @@
+{
+ "stdout": [
+ {
+ "line": "foo-0-rs| WARNING: Package entry bar has unexpected keys \"flooob\". This may (unlikely) be an error in the cargo manifest, or may be a missing implementation in Meson. If this issue can be reproduced with the latest version of Meson, please help us by opening an issue at https://github.com/mesonbuild/meson/issues. Please include the crate and version that is generating this warning if possible."
+ }
+ ]
+}
+
diff --git a/test cases/rust/26 cargo system deps/main.rs b/test cases/rust/26 cargo system deps/main.rs
new file mode 100644
index 000000000000..25e3b2fb896f
--- /dev/null
+++ b/test cases/rust/26 cargo system deps/main.rs
@@ -0,0 +1,5 @@
+extern crate sub;
+
+pub fn main() {
+ sub::func();
+}
diff --git a/test cases/rust/26 cargo system deps/meson.build b/test cases/rust/26 cargo system deps/meson.build
new file mode 100644
index 000000000000..9545bb452c16
--- /dev/null
+++ b/test cases/rust/26 cargo system deps/meson.build
@@ -0,0 +1,11 @@
+project('cargo system-deps', 'rust')
+
+glib = dependency('glib-2.0', required: false)
+if not glib.found()
+ error('MESON_SKIP_TEST: Need glib system dependency')
+endif
+
+sub_dep = dependency('sub-1-rs')
+exe = executable('main', 'main.rs', dependencies : sub_dep)
+test('main', exe)
+
diff --git a/test cases/rust/26 cargo system deps/subprojects/sub-1-rs.wrap b/test cases/rust/26 cargo system deps/subprojects/sub-1-rs.wrap
new file mode 100644
index 000000000000..99686e90e78e
--- /dev/null
+++ b/test cases/rust/26 cargo system deps/subprojects/sub-1-rs.wrap
@@ -0,0 +1,2 @@
+[wrap-file]
+method = cargo
diff --git a/test cases/rust/26 cargo system deps/subprojects/sub-1-rs/Cargo.toml b/test cases/rust/26 cargo system deps/subprojects/sub-1-rs/Cargo.toml
new file mode 100644
index 000000000000..88d5445f6696
--- /dev/null
+++ b/test cases/rust/26 cargo system deps/subprojects/sub-1-rs/Cargo.toml
@@ -0,0 +1,16 @@
+[package]
+name = 'sub'
+version = '1'
+
+[build-dependencies]
+system-deps = "6"
+
+[lib]
+name = "sub"
+path = "lib.rs"
+
+[package.metadata.system-deps]
+glib = { name = "glib-2.0", version=" 2.0 , 2.1 , <3 ", feature="default" }
+gobject = { name = "gobject-2.0", version=">=99", optional=true }
+notfound = { feature="notfound" }
+libffi = "1.0"
diff --git a/test cases/rust/26 cargo system deps/subprojects/sub-1-rs/lib.rs b/test cases/rust/26 cargo system deps/subprojects/sub-1-rs/lib.rs
new file mode 100644
index 000000000000..6e39d236532c
--- /dev/null
+++ b/test cases/rust/26 cargo system deps/subprojects/sub-1-rs/lib.rs
@@ -0,0 +1,15 @@
+extern "C" {
+ fn g_get_tmp_dir() -> *mut std::ffi::c_void;
+}
+
+#[cfg(system_deps_have_glib)]
+#[cfg(not(system_deps_have_gobject))]
+pub fn func() {
+ unsafe {
+ g_get_tmp_dir();
+ }
+}
+
+pub fn func1() {
+ func()
+}
diff --git a/test cases/rust/27 objects/lib1-dylib.rs b/test cases/rust/27 objects/lib1-dylib.rs
new file mode 100644
index 000000000000..1dbf61422ea4
--- /dev/null
+++ b/test cases/rust/27 objects/lib1-dylib.rs
@@ -0,0 +1,15 @@
+extern "C" {
+ fn from_lib1();
+}
+
+#[no_mangle]
+extern "C" fn from_lib2()
+{
+ println!("hello world from rust");
+}
+
+#[no_mangle]
+pub extern "C" fn c_func()
+{
+ unsafe { from_lib1(); }
+}
diff --git a/test cases/rust/27 objects/lib1.c b/test cases/rust/27 objects/lib1.c
new file mode 100644
index 000000000000..b463bffa3c6e
--- /dev/null
+++ b/test cases/rust/27 objects/lib1.c
@@ -0,0 +1,11 @@
+#include
+#include "lib1.h"
+#include "lib2.h"
+
+void from_lib2(void) {
+ printf("hello world from c\n");
+}
+
+void c_func(void) {
+ from_lib1();
+}
diff --git a/test cases/rust/27 objects/lib1.h b/test cases/rust/27 objects/lib1.h
new file mode 100644
index 000000000000..8bb18d4bbe4e
--- /dev/null
+++ b/test cases/rust/27 objects/lib1.h
@@ -0,0 +1,4 @@
+#pragma once
+
+void from_lib2(void);
+void c_func(void);
diff --git a/test cases/rust/27 objects/lib2.c b/test cases/rust/27 objects/lib2.c
new file mode 100644
index 000000000000..a61d5349f878
--- /dev/null
+++ b/test cases/rust/27 objects/lib2.c
@@ -0,0 +1,8 @@
+#include
+#include "lib1.h"
+#include "lib2.h"
+
+void from_lib1(void)
+{
+ from_lib2();
+}
diff --git a/test cases/rust/27 objects/lib2.h b/test cases/rust/27 objects/lib2.h
new file mode 100644
index 000000000000..08c4cd30ad1e
--- /dev/null
+++ b/test cases/rust/27 objects/lib2.h
@@ -0,0 +1,3 @@
+#pragma once
+
+void from_lib1(void);
diff --git a/test cases/rust/27 objects/main.rs b/test cases/rust/27 objects/main.rs
new file mode 100644
index 000000000000..538359943271
--- /dev/null
+++ b/test cases/rust/27 objects/main.rs
@@ -0,0 +1,9 @@
+extern "C" {
+ fn c_func();
+}
+
+fn main() {
+ unsafe {
+ c_func();
+ }
+}
diff --git a/test cases/rust/27 objects/meson.build b/test cases/rust/27 objects/meson.build
new file mode 100644
index 000000000000..78373e4b6535
--- /dev/null
+++ b/test cases/rust/27 objects/meson.build
@@ -0,0 +1,28 @@
+project('staticlib group', 'c', 'rust', meson_version: '>=1.8.0')
+
+lib1 = static_library('lib1', 'lib1.c')
+dep1 = declare_dependency(objects: lib1.extract_all_objects(recursive: false))
+lib2 = static_library('lib2', 'lib2.c')
+dep2 = declare_dependency(objects: lib2.extract_all_objects(recursive: false))
+executable('lib1objs', 'main.rs',
+ objects: lib1.extract_all_objects(recursive: false),
+ link_with: lib2)
+executable('lib2objs', 'main.rs',
+ objects: lib2.extract_all_objects(recursive: false),
+ link_with: lib1)
+executable('lib1objs_as_dep', 'main.rs',
+ dependencies: dep1,
+ link_with: lib2)
+executable('lib2objs_as_dep', 'main.rs',
+ dependencies: dep2,
+ link_with: lib1)
+
+lib12 = shared_library('dylib2objs', 'lib1-dylib.rs',
+ objects: lib2.extract_all_objects(recursive: false),
+ rust_abi: 'c')
+executable('dylib', 'main.rs', link_with: lib12)
+
+lib12 = shared_library('dylib2objs_as_dep', 'lib1-dylib.rs',
+ dependencies: dep2,
+ rust_abi: 'c')
+executable('dylib_as_dep', 'main.rs', link_with: lib12)
diff --git a/test cases/rust/9 unit tests/doctest1.rs b/test cases/rust/9 unit tests/doctest1.rs
new file mode 100644
index 000000000000..da42792b8be0
--- /dev/null
+++ b/test cases/rust/9 unit tests/doctest1.rs
@@ -0,0 +1,12 @@
+//! This is a doctest
+//!
+//! ```
+//! assert_eq!(2+2, 4)
+//! ```
+
+/// ```ignore
+/// this one will be skipped
+/// ```
+pub fn my_func()
+{
+}
diff --git a/test cases/rust/9 unit tests/meson.build b/test cases/rust/9 unit tests/meson.build
index b444271ae18c..0fa2fa80b304 100644
--- a/test cases/rust/9 unit tests/meson.build
+++ b/test cases/rust/9 unit tests/meson.build
@@ -1,4 +1,4 @@
-project('rust unit tests', 'rust')
+project('rust unit tests', 'rust', meson_version: '>=1.8.0')
t = executable(
'rust_test',
@@ -31,12 +31,22 @@ test(
suite : ['foo'],
)
+rust = import('rust')
+
+rustdoc = find_program('rustdoc', required: false)
+if rustdoc.found()
+ doclib = static_library('rust_doc_lib', ['doctest1.rs'], build_by_default : false)
+ rust.doctest('rust doctests', doclib,
+ protocol : 'rust',
+ suite : ['doctests'],
+ )
+endif
+
exe = executable('rust_exe', ['test2.rs', 'test.rs'], build_by_default : false)
-rust = import('unstable-rust')
rust.test('rust_test_from_exe', exe, should_fail : true)
-lib = static_library('rust_static', ['test.rs'], build_by_default : false, rust_crate_type : 'lib')
+lib = static_library('rust_static', ['test.rs'], build_by_default : false, rust_abi: 'c')
rust.test('rust_test_from_static', lib, args: ['--skip', 'test_add_intentional_fail'])
lib = shared_library('rust_shared', ['test.rs'], build_by_default : false)
diff --git a/test cases/swift/10 file name matches module name/Library.swift b/test cases/swift/10 file name matches module name/Library.swift
new file mode 100644
index 000000000000..617952f4ef7e
--- /dev/null
+++ b/test cases/swift/10 file name matches module name/Library.swift
@@ -0,0 +1 @@
+public func callMe() {}
diff --git a/test cases/swift/10 file name matches module name/Library2.swift b/test cases/swift/10 file name matches module name/Library2.swift
new file mode 100644
index 000000000000..7172b249b678
--- /dev/null
+++ b/test cases/swift/10 file name matches module name/Library2.swift
@@ -0,0 +1 @@
+public func callMe2() {}
diff --git a/test cases/swift/10 file name matches module name/main.swift b/test cases/swift/10 file name matches module name/main.swift
new file mode 100644
index 000000000000..d5e8a0e9c2b1
--- /dev/null
+++ b/test cases/swift/10 file name matches module name/main.swift
@@ -0,0 +1,4 @@
+import Library
+
+callMe()
+callMe2()
diff --git a/test cases/swift/10 file name matches module name/meson.build b/test cases/swift/10 file name matches module name/meson.build
new file mode 100644
index 000000000000..34af03313f81
--- /dev/null
+++ b/test cases/swift/10 file name matches module name/meson.build
@@ -0,0 +1,4 @@
+project('file name matches module name', 'swift')
+
+lib = static_library('Library', 'Library.swift', 'Library2.swift')
+executable('program', 'main.swift', link_with: [lib])
diff --git a/test cases/swift/9 sdk path from dep/foo.swift b/test cases/swift/9 sdk path from dep/foo.swift
new file mode 100644
index 000000000000..6ca38879f363
--- /dev/null
+++ b/test cases/swift/9 sdk path from dep/foo.swift
@@ -0,0 +1,4 @@
+// This import is needed for swiftc to implicitly import the FFI module
+// which will in turn conflict with the dependency's include path and error out
+// if we don't manually replace all SDK paths with the newest one.
+import Foundation
diff --git a/test cases/swift/9 sdk path from dep/meson.build b/test cases/swift/9 sdk path from dep/meson.build
new file mode 100644
index 000000000000..4cc44bc725a7
--- /dev/null
+++ b/test cases/swift/9 sdk path from dep/meson.build
@@ -0,0 +1,12 @@
+project('swift sdk include dir test', 'swift')
+
+bar_dep = declare_dependency(
+ # Simulates including 'libffi' from brew as a dep via pkg-config
+ # Without a workaround that replaces all SDK paths with the most recent one,
+ # a compile error will occur due to conflicting definitions of the FFI module.
+ compile_args: '-I/Library/Developer/CommandLineTools/SDKs/MacOSX12.sdk/usr/include/ffi',
+)
+
+foo = static_library('foo', 'foo.swift',
+ dependencies: [bar_dep],
+)
diff --git a/test cases/unit/10 build_rpath/meson.build b/test cases/unit/10 build_rpath/meson.build
index c0bc3bd2703f..f53c0f8bd685 100644
--- a/test cases/unit/10 build_rpath/meson.build
+++ b/test cases/unit/10 build_rpath/meson.build
@@ -8,6 +8,12 @@ executable('prog', 'prog.c',
install : true,
)
+executable('multibyte_rpath', 'prog.c',
+ link_with: l,
+ install_rpath: get_option('prefix') / '⢖⢖⢖⢖⢖',
+ install: true
+ )
+
executable('progcxx', 'prog.cc',
link_with : l,
build_rpath : '/foo/bar',
diff --git a/test cases/unit/106 underspecified mtest/main.c b/test cases/unit/106 underspecified mtest/main.c
new file mode 100644
index 000000000000..8842fc1226ef
--- /dev/null
+++ b/test cases/unit/106 underspecified mtest/main.c
@@ -0,0 +1 @@
+int main(void) { return 0 ; }
diff --git a/test cases/unit/106 underspecified mtest/meson.build b/test cases/unit/106 underspecified mtest/meson.build
new file mode 100644
index 000000000000..c0a88d6770c8
--- /dev/null
+++ b/test cases/unit/106 underspecified mtest/meson.build
@@ -0,0 +1,8 @@
+project('underspecified deps', 'c')
+
+runner = find_program('runner.py')
+exe1 = executable('main1', 'main.c')
+exe2 = executable('main2', 'main.c')
+
+test('runner-with-exedep', runner, args: exe1)
+test('runner-without-dep', runner, args: exe2.full_path())
diff --git a/test cases/unit/106 underspecified mtest/runner.py b/test cases/unit/106 underspecified mtest/runner.py
new file mode 100755
index 000000000000..9fb9ac40b94e
--- /dev/null
+++ b/test cases/unit/106 underspecified mtest/runner.py
@@ -0,0 +1,5 @@
+#!/usr/bin/env python3
+
+import sys, subprocess
+
+subprocess.run(sys.argv[1:], check=True)
diff --git a/test cases/unit/115 c cpp stds/meson.build b/test cases/unit/115 c cpp stds/meson.build
index 0b15efc08d27..fb68af610008 100644
--- a/test cases/unit/115 c cpp stds/meson.build
+++ b/test cases/unit/115 c cpp stds/meson.build
@@ -1,6 +1,17 @@
-project('c cpp stds', 'c', 'cpp',
- default_options: [
- 'c_std=gnu89,c89',
- 'cpp_std=gnu++98,vc++11',
- ],
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2024 Intel Corporation
+
+project(
+ 'c cpp stds',
+ default_options: [
+ 'c_std=gnu89,c89',
+ 'cpp_std=gnu++98,vc++11',
+ ],
)
+
+if get_option('with-c')
+ add_languages('c', 'cpp', native : false)
+endif
+if get_option('with-objc')
+ add_languages('objc', 'objcpp', native : false)
+endif
diff --git a/test cases/unit/115 c cpp stds/meson.options b/test cases/unit/115 c cpp stds/meson.options
new file mode 100644
index 000000000000..7040758810c1
--- /dev/null
+++ b/test cases/unit/115 c cpp stds/meson.options
@@ -0,0 +1,5 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2024 Intel Corporation
+
+option('with-c', type : 'boolean', value : false)
+option('with-objc', type : 'boolean', value : false)
diff --git a/test cases/unit/122 persp options/meson.build b/test cases/unit/122 persp options/meson.build
new file mode 100644
index 000000000000..2df4205e4884
--- /dev/null
+++ b/test cases/unit/122 persp options/meson.build
@@ -0,0 +1,24 @@
+project('toplevel', 'c')
+
+round = get_option('round')
+opt = get_option('optimization')
+if round == 1
+ assert(opt == '1')
+elif round == 2
+ assert(opt == '1')
+elif round == 3
+ assert(opt == '1')
+elif round == 4
+ assert(opt == '3')
+elif round == 5
+ assert(opt == '3')
+elif round == 6
+ assert(opt == '3', opt)
+else
+ assert(false, 'Invalid round number')
+endif
+
+executable('toplevel', 'toplevel.c')
+
+subproject('sub1')
+subproject('sub2')
diff --git a/test cases/unit/122 persp options/meson.options b/test cases/unit/122 persp options/meson.options
new file mode 100644
index 000000000000..2bfd08d362e3
--- /dev/null
+++ b/test cases/unit/122 persp options/meson.options
@@ -0,0 +1 @@
+option('round', type: 'integer', value: 1, description: 'The test round.')
diff --git a/test cases/unit/122 persp options/subprojects/sub1/meson.build b/test cases/unit/122 persp options/subprojects/sub1/meson.build
new file mode 100644
index 000000000000..5b176189ca1f
--- /dev/null
+++ b/test cases/unit/122 persp options/subprojects/sub1/meson.build
@@ -0,0 +1,22 @@
+project('sub1', 'c')
+
+round = get_option('round')
+opt = get_option('optimization')
+if round == 1
+ assert(opt == '1')
+elif round == 2
+ assert(opt == '1')
+elif round == 3
+ assert(opt == '1')
+elif round == 4
+ assert(opt == '1')
+elif round == 5
+ assert(opt == '1')
+elif round == 6
+ assert(opt == '2')
+else
+ assert(false, 'Invalid round number')
+endif
+
+
+executable('sub1', 'sub1.c')
diff --git a/test cases/unit/122 persp options/subprojects/sub1/meson.options b/test cases/unit/122 persp options/subprojects/sub1/meson.options
new file mode 100644
index 000000000000..ba5661a27c8b
--- /dev/null
+++ b/test cases/unit/122 persp options/subprojects/sub1/meson.options
@@ -0,0 +1 @@
+option('round', type: 'integer', value: 1, description: 'The test round.', yield: true)
diff --git a/test cases/unit/122 persp options/subprojects/sub1/sub1.c b/test cases/unit/122 persp options/subprojects/sub1/sub1.c
new file mode 100644
index 000000000000..4e4b87372ad6
--- /dev/null
+++ b/test cases/unit/122 persp options/subprojects/sub1/sub1.c
@@ -0,0 +1,6 @@
+#include
+
+int main(void) {
+ printf("This is subproject 1.\n");
+ return 0;
+}
diff --git a/test cases/unit/122 persp options/subprojects/sub2/meson.build b/test cases/unit/122 persp options/subprojects/sub2/meson.build
new file mode 100644
index 000000000000..e8935bc521b9
--- /dev/null
+++ b/test cases/unit/122 persp options/subprojects/sub2/meson.build
@@ -0,0 +1,21 @@
+project('sub2', 'c')
+
+round = get_option('round')
+opt = get_option('optimization')
+if round == 1
+ assert(opt == '1')
+elif round == 2
+ assert(opt == '3')
+elif round == 3
+ assert(opt == '2')
+elif round == 4
+ assert(opt == '2')
+elif round == 5
+ assert(opt == '1')
+elif round == 6
+ assert(opt == '2')
+else
+ assert(false, 'Invalid round number')
+endif
+
+executable('sub2', 'sub2.c')
diff --git a/test cases/unit/122 persp options/subprojects/sub2/meson.options b/test cases/unit/122 persp options/subprojects/sub2/meson.options
new file mode 100644
index 000000000000..ba5661a27c8b
--- /dev/null
+++ b/test cases/unit/122 persp options/subprojects/sub2/meson.options
@@ -0,0 +1 @@
+option('round', type: 'integer', value: 1, description: 'The test round.', yield: true)
diff --git a/test cases/unit/122 persp options/subprojects/sub2/sub2.c b/test cases/unit/122 persp options/subprojects/sub2/sub2.c
new file mode 100644
index 000000000000..4e4b87372ad6
--- /dev/null
+++ b/test cases/unit/122 persp options/subprojects/sub2/sub2.c
@@ -0,0 +1,6 @@
+#include
+
+int main(void) {
+ printf("This is subproject 1.\n");
+ return 0;
+}
diff --git a/test cases/unit/122 persp options/toplevel.c b/test cases/unit/122 persp options/toplevel.c
new file mode 100644
index 000000000000..5748d6b37aef
--- /dev/null
+++ b/test cases/unit/122 persp options/toplevel.c
@@ -0,0 +1,6 @@
+#include
+
+int main(void) {
+ printf("This is the top level project.\n");
+ return 0;
+}
diff --git a/test cases/unit/122 reconfigure base options/meson.build b/test cases/unit/123 reconfigure base options/meson.build
similarity index 100%
rename from test cases/unit/122 reconfigure base options/meson.build
rename to test cases/unit/123 reconfigure base options/meson.build
diff --git a/test cases/unit/122 reconfigure base options/subprojects/sub/meson.build b/test cases/unit/123 reconfigure base options/subprojects/sub/meson.build
similarity index 100%
rename from test cases/unit/122 reconfigure base options/subprojects/sub/meson.build
rename to test cases/unit/123 reconfigure base options/subprojects/sub/meson.build
diff --git a/test cases/unit/124 interactive tap/meson.build b/test cases/unit/124 interactive tap/meson.build
new file mode 100644
index 000000000000..30518db6f403
--- /dev/null
+++ b/test cases/unit/124 interactive tap/meson.build
@@ -0,0 +1,4 @@
+project('interactive TAP output')
+
+test_script = find_program('script.py')
+test('main', test_script, protocol: 'tap')
diff --git a/test cases/unit/124 interactive tap/script.py b/test cases/unit/124 interactive tap/script.py
new file mode 100755
index 000000000000..873a4ae8168b
--- /dev/null
+++ b/test cases/unit/124 interactive tap/script.py
@@ -0,0 +1,5 @@
+#!/usr/bin/env python3
+
+print('''1..2
+ok 1
+not ok 2''')
diff --git a/test cases/unit/125 declare_dep var/meson.build b/test cases/unit/125 declare_dep var/meson.build
new file mode 100644
index 000000000000..4909b590eff3
--- /dev/null
+++ b/test cases/unit/125 declare_dep var/meson.build
@@ -0,0 +1,7 @@
+project('foo')
+
+declare_dependency(
+ variables: {
+ 'dir': get_option('dir')
+ }
+)
diff --git a/test cases/unit/125 declare_dep var/meson_options.txt b/test cases/unit/125 declare_dep var/meson_options.txt
new file mode 100644
index 000000000000..eb15ffcdebbb
--- /dev/null
+++ b/test cases/unit/125 declare_dep var/meson_options.txt
@@ -0,0 +1 @@
+option('dir', type: 'string')
diff --git a/test cases/unit/125 pkgsubproj/meson.build b/test cases/unit/125 pkgsubproj/meson.build
new file mode 100644
index 000000000000..b4cf89fa0b6d
--- /dev/null
+++ b/test cases/unit/125 pkgsubproj/meson.build
@@ -0,0 +1,3 @@
+project('pkg_opt_test')
+
+subproject('sub')
diff --git a/test cases/unit/125 pkgsubproj/subprojects/sub/meson.build b/test cases/unit/125 pkgsubproj/subprojects/sub/meson.build
new file mode 100644
index 000000000000..99622b681cdd
--- /dev/null
+++ b/test cases/unit/125 pkgsubproj/subprojects/sub/meson.build
@@ -0,0 +1 @@
+project('subproject', default_options: 'pkgconfig.relocatable=true')
diff --git a/test cases/unit/126 test slice/meson.build b/test cases/unit/126 test slice/meson.build
new file mode 100644
index 000000000000..a41c2f62d7ff
--- /dev/null
+++ b/test cases/unit/126 test slice/meson.build
@@ -0,0 +1,12 @@
+project('test_slice')
+
+python = import('python').find_installation('python3')
+
+foreach i : range(10)
+ test('test-' + (i + 1).to_string(),
+ python,
+ args: [
+ meson.current_source_dir() / 'test.py'
+ ],
+ )
+endforeach
diff --git a/test cases/unit/126 test slice/test.py b/test cases/unit/126 test slice/test.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/test cases/unit/127 sanitizers/meson.build b/test cases/unit/127 sanitizers/meson.build
new file mode 100644
index 000000000000..b42fb35d4968
--- /dev/null
+++ b/test cases/unit/127 sanitizers/meson.build
@@ -0,0 +1,8 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2023-2024 Intel Corporation
+
+project('sanitizer', 'c', meson_version : '>= 1.8')
+
+summary({
+ 'value': get_option('b_sanitize'),
+}, section: 'summary')
diff --git a/test cases/unit/128 long opt vs D/meson.build b/test cases/unit/128 long opt vs D/meson.build
new file mode 100644
index 000000000000..e05d88d200d3
--- /dev/null
+++ b/test cases/unit/128 long opt vs D/meson.build
@@ -0,0 +1 @@
+project('empty test')
diff --git a/test cases/unit/128 long opt vs D/meson_options.txt b/test cases/unit/128 long opt vs D/meson_options.txt
new file mode 100644
index 000000000000..255bf1576e02
--- /dev/null
+++ b/test cases/unit/128 long opt vs D/meson_options.txt
@@ -0,0 +1 @@
+option('sysconfdir2', type: 'string', value: '')
diff --git a/test cases/unit/69 cross/crossfile.in b/test cases/unit/69 cross/crossfile.in
index 678e8d3a3abd..beab9bc45edc 100644
--- a/test cases/unit/69 cross/crossfile.in
+++ b/test cases/unit/69 cross/crossfile.in
@@ -3,3 +3,6 @@ system = '@system@'
cpu_family = '@cpu_family@'
cpu = '@cpu@'
endian = '@endian@'
+
+[built-in options]
+c_args = ['-funroll-loops']
diff --git a/test cases/unit/69 cross/meson.build b/test cases/unit/69 cross/meson.build
index acf4f0f177b2..645d453c5ba4 100644
--- a/test cases/unit/69 cross/meson.build
+++ b/test cases/unit/69 cross/meson.build
@@ -1,16 +1,25 @@
project('crosstest')
+add_languages('c', native: true)
if get_option('generate')
conf_data = configuration_data()
conf_data.set('system', build_machine.system())
conf_data.set('cpu', build_machine.cpu())
conf_data.set('cpu_family', build_machine.cpu_family())
conf_data.set('endian', build_machine.endian())
+ conf_data.set('c_args', '-pedantic')
configure_file(input: 'crossfile.in',
output: 'crossfile',
configuration: conf_data)
- message('Written cross file')
+ configure_file(input: 'nativefile.in',
+ output: 'nativefile',
+ configuration: conf_data)
+ message('Written native and cross file')
+
+ add_languages('c', native: false)
+ assert(get_option('build.c_args') == get_option('c_args'))
else
assert(meson.is_cross_build(), 'not setup as cross build')
+ assert(get_option('build.c_args') == ['-pedantic'])
endif
diff --git a/test cases/unit/69 cross/nativefile.in b/test cases/unit/69 cross/nativefile.in
new file mode 100644
index 000000000000..9a639999d355
--- /dev/null
+++ b/test cases/unit/69 cross/nativefile.in
@@ -0,0 +1,2 @@
+[built-in options]
+build.c_args = ['@c_args@']
diff --git a/test cases/vala/11 generated vapi/meson.build b/test cases/vala/11 generated vapi/meson.build
index d5f38cad6abd..9e1303d3a8d4 100644
--- a/test cases/vala/11 generated vapi/meson.build
+++ b/test cases/vala/11 generated vapi/meson.build
@@ -1,5 +1,9 @@
project('vapi-test', ['c', 'vala'])
+if host_machine.system() == 'cygwin'
+ error('MESON_SKIP_TEST Does not work with the Vala currently packaged in cygwin')
+endif
+
gnome = import('gnome')
subdir('libfoo')
subdir('libbar')
diff --git a/test cases/vala/11 generated vapi/test.json b/test cases/vala/11 generated vapi/test.json
index 1a742aa89f31..1407afffb2b8 100644
--- a/test cases/vala/11 generated vapi/test.json
+++ b/test cases/vala/11 generated vapi/test.json
@@ -9,5 +9,6 @@
{"type": "file", "file": "usr/share/vala/vapi/foo-1.0.deps"},
{"type": "file", "file": "usr/share/vala/vapi/bar-1.0.vapi"},
{"type": "file", "file": "usr/share/vala/vapi/bar-1.0.deps"}
- ]
+ ],
+ "expect_skip_on_jobname": ["cygwin"]
}
diff --git a/test cases/vala/31 generated ui file subdirectory/meson.build b/test cases/vala/31 generated ui file subdirectory/meson.build
new file mode 100644
index 000000000000..421058151828
--- /dev/null
+++ b/test cases/vala/31 generated ui file subdirectory/meson.build
@@ -0,0 +1,22 @@
+project('demo', 'c', 'vala')
+
+gnome = import('gnome', required: false)
+
+if not gnome.found()
+ error('MESON_SKIP_TEST: gnome module not supported')
+endif
+
+deps = [
+ dependency('glib-2.0', version : '>=2.50'),
+ dependency('gobject-2.0'),
+ dependency('gtk+-3.0'),
+]
+
+subdir('subdir')
+
+executable(
+ 'demo',
+ 'test.vala',
+ resources,
+ dependencies: deps,
+)
diff --git a/test cases/vala/31 generated ui file subdirectory/subdir/TestBox.ui.in b/test cases/vala/31 generated ui file subdirectory/subdir/TestBox.ui.in
new file mode 100644
index 000000000000..bf5c83178b65
--- /dev/null
+++ b/test cases/vala/31 generated ui file subdirectory/subdir/TestBox.ui.in
@@ -0,0 +1,6 @@
+
+
+
+
+
+
diff --git a/test cases/vala/31 generated ui file subdirectory/subdir/meson.build b/test cases/vala/31 generated ui file subdirectory/subdir/meson.build
new file mode 100644
index 000000000000..dbe9344a5fe8
--- /dev/null
+++ b/test cases/vala/31 generated ui file subdirectory/subdir/meson.build
@@ -0,0 +1,13 @@
+ui_tgt = custom_target(
+ input: 'TestBox.ui.in',
+ output: 'TestBox.ui',
+ command: [find_program('cat')],
+ feed: true,
+ capture: true,
+)
+
+resources = gnome.compile_resources('test-resources',
+ 'test.gresource.xml',
+ c_name: 'test_res',
+ dependencies: ui_tgt,
+)
diff --git a/test cases/vala/31 generated ui file subdirectory/subdir/test.gresource.xml b/test cases/vala/31 generated ui file subdirectory/subdir/test.gresource.xml
new file mode 100644
index 000000000000..382b95193a7c
--- /dev/null
+++ b/test cases/vala/31 generated ui file subdirectory/subdir/test.gresource.xml
@@ -0,0 +1,6 @@
+
+
+
+ TestBox.ui
+
+
diff --git a/test cases/vala/31 generated ui file subdirectory/test.vala b/test cases/vala/31 generated ui file subdirectory/test.vala
new file mode 100644
index 000000000000..36f565b63451
--- /dev/null
+++ b/test cases/vala/31 generated ui file subdirectory/test.vala
@@ -0,0 +1,7 @@
+[GtkTemplate (ui = "/com/mesonbuild/test/TestBox.ui")]
+class TestBox: Gtk.Box {
+}
+
+int main() {
+ return 0;
+}
diff --git a/test cases/windows/25 embed manifest/DPIAware.manifest b/test cases/windows/25 embed manifest/DPIAware.manifest
new file mode 100644
index 000000000000..f2708ecb1315
--- /dev/null
+++ b/test cases/windows/25 embed manifest/DPIAware.manifest
@@ -0,0 +1,9 @@
+
+
+
+
+ true
+ PerMonitorV2
+
+
+
diff --git a/test cases/windows/25 embed manifest/meson.build b/test cases/windows/25 embed manifest/meson.build
new file mode 100644
index 000000000000..0f4c9b43f072
--- /dev/null
+++ b/test cases/windows/25 embed manifest/meson.build
@@ -0,0 +1,11 @@
+project('can-manifests-be-embedded', 'c')
+
+cc = meson.get_compiler('c')
+
+if cc.get_linker_id() not in ['link', 'lld-link', 'xilink'] # cc.get_linker_argument_syntax() != 'link'
+ error('MESON_SKIP_TEST: test is only relevant for the Microsoft linker')
+endif
+
+# Ensure that the manifest can be embedded
+executable('prog', 'prog.c',
+ link_args: ['/MANIFEST:EMBED', '/MANIFESTINPUT:' + meson.project_source_root() / 'DPIAware.manifest'])
diff --git a/test cases/windows/25 embed manifest/prog.c b/test cases/windows/25 embed manifest/prog.c
new file mode 100644
index 000000000000..b1d9c2ce482b
--- /dev/null
+++ b/test cases/windows/25 embed manifest/prog.c
@@ -0,0 +1,3 @@
+int main(int argc, char *argv[]) {
+ return 0;
+}
diff --git a/tools/dircondenser.py b/tools/dircondenser.py
index fcdb1d5dc8d5..b8679a4c78bf 100755
--- a/tools/dircondenser.py
+++ b/tools/dircondenser.py
@@ -70,6 +70,7 @@ def condense(dirname: str) -> None:
os.chdir(curdir)
replace_source('run_unittests.py', replacements)
replace_source('run_project_tests.py', replacements)
+ replace_source('run_format_tests.py', replacements)
for f in glob('unittests/*.py'):
replace_source(f, replacements)
diff --git a/tools/run_with_cov.py b/tools/run_with_cov.py
deleted file mode 100755
index 0d3fba654f9f..000000000000
--- a/tools/run_with_cov.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python3
-# SPDX-License-Identifier: Apache-2.0
-# Copyright 2021 The Meson development team
-
-import subprocess
-import coverage
-import os
-import sys
-from pathlib import Path
-
-root_path = Path(__file__).parent.parent.absolute()
-
-# Python magic so we can import mesonlib
-sys.path.append(root_path.as_posix())
-from mesonbuild import mesonlib
-
-def generate_coveragerc() -> Path:
- i_file = (root_path / 'data' / '.coveragerc.in')
- o_file = (root_path / '.coveragerc')
- raw = i_file.read_text(encoding='utf-8')
- raw = raw.replace('@ROOT@', root_path.as_posix())
- o_file.write_text(raw, encoding='utf-8')
- return o_file
-
-def main() -> int:
- # Remove old run data
- out_dir = root_path / '.coverage'
- mesonlib.windows_proof_rmtree(out_dir.as_posix())
- out_dir.mkdir(parents=True, exist_ok=True)
-
- # Setup coverage
- python_path = (root_path / 'ci').as_posix()
- os.environ['PYTHONPATH'] = os.pathsep.join([python_path, os.environ.get('PYTHONPATH', '')])
- os.environ['COVERAGE_PROCESS_START'] = generate_coveragerc().as_posix()
- coverage.process_startup()
-
- # Run the actual command
- cmd = mesonlib.python_command + sys.argv[1:]
- return subprocess.run(cmd, env=os.environ.copy()).returncode
-
-if __name__ == '__main__':
- raise SystemExit(main())
diff --git a/unittests/allplatformstests.py b/unittests/allplatformstests.py
index aedf9feca134..c062b24b456e 100644
--- a/unittests/allplatformstests.py
+++ b/unittests/allplatformstests.py
@@ -1,5 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2016-2021 The Meson development team
+# Copyright © 2023-2025 Intel Corporation
import subprocess
import re
@@ -12,7 +13,7 @@
import pickle
import zipfile, tarfile
import sys
-from unittest import mock, SkipTest, skipIf, skipUnless
+from unittest import mock, SkipTest, skipIf, skipUnless, expectedFailure
from contextlib import contextmanager
from glob import glob
from pathlib import (PurePath, Path)
@@ -32,7 +33,7 @@
is_sunos, windows_proof_rmtree, python_command, version_compare, split_args, quote_arg,
relpath, is_linux, git, search_version, do_conf_file, do_conf_str, default_prefix,
MesonException, EnvironmentException,
- windows_proof_rm
+ windows_proof_rm, first
)
from mesonbuild.options import OptionKey
from mesonbuild.programs import ExternalProgram
@@ -279,27 +280,44 @@ def test_prefix_dependent_defaults(self):
testdir = os.path.join(self.common_test_dir, '1 trivial')
expected = {
'/opt': {'prefix': '/opt',
- 'bindir': 'bin', 'datadir': 'share', 'includedir': 'include',
+ 'bindir': 'bin',
+ 'datadir': 'share',
+ 'includedir': 'include',
'infodir': 'share/info',
- 'libexecdir': 'libexec', 'localedir': 'share/locale',
- 'localstatedir': 'var', 'mandir': 'share/man',
- 'sbindir': 'sbin', 'sharedstatedir': 'com',
- 'sysconfdir': 'etc'},
+ 'libexecdir': 'libexec',
+ 'localedir': 'share/locale',
+ 'localstatedir': 'var',
+ 'mandir': 'share/man',
+ 'sbindir': 'sbin',
+ 'sharedstatedir': 'com',
+ 'sysconfdir': 'etc',
+ },
'/usr': {'prefix': '/usr',
- 'bindir': 'bin', 'datadir': 'share', 'includedir': 'include',
+ 'bindir': 'bin',
+ 'datadir': 'share',
+ 'includedir': 'include',
'infodir': 'share/info',
- 'libexecdir': 'libexec', 'localedir': 'share/locale',
- 'localstatedir': '/var', 'mandir': 'share/man',
- 'sbindir': 'sbin', 'sharedstatedir': '/var/lib',
- 'sysconfdir': '/etc'},
+ 'libexecdir': 'libexec',
+ 'localedir': 'share/locale',
+ 'localstatedir': '/var',
+ 'mandir': 'share/man',
+ 'sbindir': 'sbin',
+ 'sharedstatedir': '/var/lib',
+ 'sysconfdir': '/etc',
+ },
'/usr/local': {'prefix': '/usr/local',
- 'bindir': 'bin', 'datadir': 'share',
- 'includedir': 'include', 'infodir': 'share/info',
+ 'bindir': 'bin',
+ 'datadir': 'share',
+ 'includedir': 'include',
+ 'infodir': 'share/info',
'libexecdir': 'libexec',
'localedir': 'share/locale',
- 'localstatedir': '/var/local', 'mandir': 'share/man',
- 'sbindir': 'sbin', 'sharedstatedir': '/var/local/lib',
- 'sysconfdir': 'etc'},
+ 'localstatedir': '/var/local',
+ 'mandir': 'share/man',
+ 'sbindir': 'sbin',
+ 'sharedstatedir': '/var/local/lib',
+ 'sysconfdir': 'etc',
+ },
# N.B. We don't check 'libdir' as it's platform dependent, see
# default_libdir():
}
@@ -317,7 +335,8 @@ def test_prefix_dependent_defaults(self):
name = opt['name']
value = opt['value']
if name in expected[prefix]:
- self.assertEqual(value, expected[prefix][name])
+ with self.subTest(prefix=prefix, option=name):
+ self.assertEqual(value, expected[prefix][name], f'For option {name} and prefix {prefix}.')
self.wipe()
def test_default_options_prefix_dependent_defaults(self):
@@ -338,25 +357,27 @@ def test_default_options_prefix_dependent_defaults(self):
'sysconfdir': '/etc',
'localstatedir': '/var',
'sharedstatedir': '/sharedstate'},
+
'--sharedstatedir=/var/state':
{'prefix': '/usr',
'sysconfdir': '/etc',
'localstatedir': '/var',
'sharedstatedir': '/var/state'},
+
'--sharedstatedir=/var/state --prefix=/usr --sysconfdir=sysconf':
{'prefix': '/usr',
'sysconfdir': 'sysconf',
'localstatedir': '/var',
'sharedstatedir': '/var/state'},
}
- for args in expected:
- self.init(testdir, extra_args=args.split(), default_args=False)
+ for argument_string, expected_values in expected.items():
+ self.init(testdir, extra_args=argument_string.split(), default_args=False)
opts = self.introspect('--buildoptions')
for opt in opts:
name = opt['name']
value = opt['value']
- if name in expected[args]:
- self.assertEqual(value, expected[args][name])
+ if name in expected_values:
+ self.assertEqual(value, expected_values[name], f'For option {name}, Meson arg: {argument_string}')
self.wipe()
def test_clike_get_library_dirs(self):
@@ -508,7 +529,8 @@ def test_install_introspection(self):
if self.backend is not Backend.ninja:
raise SkipTest(f'{self.backend.name!r} backend can\'t install files')
testdir = os.path.join(self.common_test_dir, '8 install')
- self.init(testdir)
+ # sneak in a test that covers backend options...
+ self.init(testdir, extra_args=['-Dbackend_max_links=4'])
intro = self.introspect('--targets')
if intro[0]['type'] == 'executable':
intro = intro[::-1]
@@ -889,17 +911,17 @@ def test_mtest_reconfigure(self):
self.init(testdir)
self.utime(os.path.join(testdir, 'meson.build'))
o = self._run(self.mtest_command + ['--list'])
- self.assertIn('Regenerating build files.', o)
+ self.assertIn('Regenerating build files', o)
self.assertIn('test_features / xfail', o)
o = self._run(self.mtest_command + ['--list'])
- self.assertNotIn('Regenerating build files.', o)
+ self.assertNotIn('Regenerating build files', o)
# no real targets should have been built
tester = os.path.join(self.builddir, 'tester' + exe_suffix)
self.assertPathDoesNotExist(tester)
# check that we don't reconfigure if --no-rebuild is passed
self.utime(os.path.join(testdir, 'meson.build'))
o = self._run(self.mtest_command + ['--list', '--no-rebuild'])
- self.assertNotIn('Regenerating build files.', o)
+ self.assertNotIn('Regenerating build files', o)
def test_unexisting_test_name(self):
testdir = os.path.join(self.unit_test_dir, '4 suite selection')
@@ -1077,110 +1099,144 @@ def test_compiler_detection(self):
for lang, evar in langs:
# Detect with evar and do sanity checks on that
if evar in os.environ:
- ecc = compiler_from_language(env, lang, MachineChoice.HOST)
- self.assertTrue(ecc.version)
- elinker = detect_static_linker(env, ecc)
- # Pop it so we don't use it for the next detection
- evalue = os.environ.pop(evar)
- # Very rough/strict heuristics. Would never work for actual
- # compiler detection, but should be ok for the tests.
- ebase = os.path.basename(evalue)
- if ebase.startswith('g') or ebase.endswith(('-gcc', '-g++')):
- self.assertIsInstance(ecc, gnu)
- self.assertIsInstance(elinker, ar)
- elif 'clang-cl' in ebase:
- self.assertIsInstance(ecc, clangcl)
- self.assertIsInstance(elinker, lib)
- elif 'clang' in ebase:
- self.assertIsInstance(ecc, clang)
- self.assertIsInstance(elinker, ar)
- elif ebase.startswith('ic'):
- self.assertIsInstance(ecc, intel)
- self.assertIsInstance(elinker, ar)
- elif ebase.startswith('cl'):
- self.assertIsInstance(ecc, msvc)
- self.assertIsInstance(elinker, lib)
- else:
- raise AssertionError(f'Unknown compiler {evalue!r}')
- # Check that we actually used the evalue correctly as the compiler
- self.assertEqual(ecc.get_exelist(), split_args(evalue))
+ with self.subTest(lang=lang, evar=evar):
+ try:
+ ecc = compiler_from_language(env, lang, MachineChoice.HOST)
+ except EnvironmentException:
+ # always raise in ci, we expect to have a valid ObjC and ObjC++ compiler of some kind
+ if is_ci():
+ self.fail(f'Could not find a compiler for {lang}')
+ if sys.version_info < (3, 11):
+ continue
+ self.skipTest(f'No valid compiler for {lang}.')
+ finally:
+ # Pop it so we don't use it for the next detection
+ evalue = os.environ.pop(evar)
+ assert ecc is not None, "Something went really wrong"
+ self.assertTrue(ecc.version)
+ elinker = detect_static_linker(env, ecc)
+ # Very rough/strict heuristics. Would never work for actual
+ # compiler detection, but should be ok for the tests.
+ ebase = os.path.basename(evalue)
+ if ebase.startswith('g') or ebase.endswith(('-gcc', '-g++')):
+ self.assertIsInstance(ecc, gnu)
+ self.assertIsInstance(elinker, ar)
+ elif 'clang-cl' in ebase:
+ self.assertIsInstance(ecc, clangcl)
+ self.assertIsInstance(elinker, lib)
+ elif 'clang' in ebase:
+ self.assertIsInstance(ecc, clang)
+ self.assertIsInstance(elinker, ar)
+ elif ebase.startswith('ic'):
+ self.assertIsInstance(ecc, intel)
+ self.assertIsInstance(elinker, ar)
+ elif ebase.startswith('cl'):
+ self.assertIsInstance(ecc, msvc)
+ self.assertIsInstance(elinker, lib)
+ else:
+ self.fail(f'Unknown compiler {evalue!r}')
+ # Check that we actually used the evalue correctly as the compiler
+ self.assertEqual(ecc.get_exelist(), split_args(evalue))
+
# Do auto-detection of compiler based on platform, PATH, etc.
- cc = compiler_from_language(env, lang, MachineChoice.HOST)
- self.assertTrue(cc.version)
- linker = detect_static_linker(env, cc)
- # Check compiler type
- if isinstance(cc, gnu):
- self.assertIsInstance(linker, ar)
- if is_osx():
- self.assertIsInstance(cc.linker, linkers.AppleDynamicLinker)
- elif is_sunos():
- self.assertIsInstance(cc.linker, (linkers.SolarisDynamicLinker, linkers.GnuLikeDynamicLinkerMixin))
- else:
- self.assertIsInstance(cc.linker, linkers.GnuLikeDynamicLinkerMixin)
- if isinstance(cc, clangcl):
- self.assertIsInstance(linker, lib)
- self.assertIsInstance(cc.linker, linkers.ClangClDynamicLinker)
- if isinstance(cc, clang):
- self.assertIsInstance(linker, ar)
- if is_osx():
- self.assertIsInstance(cc.linker, linkers.AppleDynamicLinker)
- elif is_windows():
- # This is clang, not clang-cl. This can be either an
- # ld-like linker of link.exe-like linker (usually the
- # former for msys2, the latter otherwise)
- self.assertIsInstance(cc.linker, (linkers.MSVCDynamicLinker, linkers.GnuLikeDynamicLinkerMixin))
- elif is_sunos():
- self.assertIsInstance(cc.linker, (linkers.SolarisDynamicLinker, linkers.GnuLikeDynamicLinkerMixin))
- else:
- self.assertIsInstance(cc.linker, linkers.GnuLikeDynamicLinkerMixin)
- if isinstance(cc, intel):
- self.assertIsInstance(linker, ar)
- if is_osx():
- self.assertIsInstance(cc.linker, linkers.AppleDynamicLinker)
- elif is_windows():
- self.assertIsInstance(cc.linker, linkers.XilinkDynamicLinker)
- else:
- self.assertIsInstance(cc.linker, linkers.GnuDynamicLinker)
- if isinstance(cc, msvc):
- self.assertTrue(is_windows())
- self.assertIsInstance(linker, lib)
- self.assertEqual(cc.id, 'msvc')
- self.assertTrue(hasattr(cc, 'is_64'))
- self.assertIsInstance(cc.linker, linkers.MSVCDynamicLinker)
- # If we're on Windows CI, we know what the compiler will be
- if 'arch' in os.environ:
- if os.environ['arch'] == 'x64':
- self.assertTrue(cc.is_64)
+ with self.subTest(lang=lang):
+ try:
+ cc = compiler_from_language(env, lang, MachineChoice.HOST)
+ except EnvironmentException:
+ # always raise in ci, we expect to have a valid ObjC and ObjC++ compiler of some kind
+ if is_ci():
+ self.fail(f'Could not find a compiler for {lang}')
+ if sys.version_info < (3, 11):
+ continue
+ self.skipTest(f'No valid compiler for {lang}.')
+ assert cc is not None, "Something went really wrong"
+ self.assertTrue(cc.version)
+ linker = detect_static_linker(env, cc)
+ # Check compiler type
+ if isinstance(cc, gnu):
+ self.assertIsInstance(linker, ar)
+ if is_osx():
+ self.assertIsInstance(cc.linker, linkers.AppleDynamicLinker)
+ elif is_sunos():
+ self.assertIsInstance(cc.linker, (linkers.SolarisDynamicLinker, linkers.GnuLikeDynamicLinkerMixin))
else:
- self.assertFalse(cc.is_64)
+ self.assertIsInstance(cc.linker, linkers.GnuLikeDynamicLinkerMixin)
+ if isinstance(cc, clangcl):
+ self.assertIsInstance(linker, lib)
+ self.assertIsInstance(cc.linker, linkers.ClangClDynamicLinker)
+ if isinstance(cc, clang):
+ self.assertIsInstance(linker, ar)
+ if is_osx():
+ self.assertIsInstance(cc.linker, linkers.AppleDynamicLinker)
+ elif is_windows():
+ # This is clang, not clang-cl. This can be either an
+ # ld-like linker of link.exe-like linker (usually the
+ # former for msys2, the latter otherwise)
+ self.assertIsInstance(cc.linker, (linkers.MSVCDynamicLinker, linkers.GnuLikeDynamicLinkerMixin))
+ elif is_sunos():
+ self.assertIsInstance(cc.linker, (linkers.SolarisDynamicLinker, linkers.GnuLikeDynamicLinkerMixin))
+ else:
+ self.assertIsInstance(cc.linker, linkers.GnuLikeDynamicLinkerMixin)
+ if isinstance(cc, intel):
+ self.assertIsInstance(linker, ar)
+ if is_osx():
+ self.assertIsInstance(cc.linker, linkers.AppleDynamicLinker)
+ elif is_windows():
+ self.assertIsInstance(cc.linker, linkers.XilinkDynamicLinker)
+ else:
+ self.assertIsInstance(cc.linker, linkers.GnuDynamicLinker)
+ if isinstance(cc, msvc):
+ self.assertTrue(is_windows())
+ self.assertIsInstance(linker, lib)
+ self.assertEqual(cc.id, 'msvc')
+ self.assertTrue(hasattr(cc, 'is_64'))
+ self.assertIsInstance(cc.linker, linkers.MSVCDynamicLinker)
+ # If we're on Windows CI, we know what the compiler will be
+ if 'arch' in os.environ:
+ if os.environ['arch'] == 'x64':
+ self.assertTrue(cc.is_64)
+ else:
+ self.assertFalse(cc.is_64)
+
# Set evar ourselves to a wrapper script that just calls the same
# exelist + some argument. This is meant to test that setting
# something like `ccache gcc -pipe` or `distcc ccache gcc` works.
- wrapper = os.path.join(testdir, 'compiler wrapper.py')
- wrappercc = python_command + [wrapper] + cc.get_exelist() + ['-DSOME_ARG']
- os.environ[evar] = ' '.join(quote_arg(w) for w in wrappercc)
-
- # Check static linker too
- wrapperlinker = python_command + [wrapper] + linker.get_exelist() + linker.get_always_args()
- os.environ['AR'] = ' '.join(quote_arg(w) for w in wrapperlinker)
-
- # Need a new env to re-run environment loading
- env = get_fake_env(testdir, self.builddir, self.prefix)
-
- wcc = compiler_from_language(env, lang, MachineChoice.HOST)
- wlinker = detect_static_linker(env, wcc)
- # Pop it so we don't use it for the next detection
- os.environ.pop('AR')
- # Must be the same type since it's a wrapper around the same exelist
- self.assertIs(type(cc), type(wcc))
- self.assertIs(type(linker), type(wlinker))
- # Ensure that the exelist is correct
- self.assertEqual(wcc.get_exelist(), wrappercc)
- self.assertEqual(wlinker.get_exelist(), wrapperlinker)
- # Ensure that the version detection worked correctly
- self.assertEqual(cc.version, wcc.version)
- if hasattr(cc, 'is_64'):
- self.assertEqual(cc.is_64, wcc.is_64)
+ with self.subTest('wrapper script', lang=lang):
+ wrapper = os.path.join(testdir, 'compiler wrapper.py')
+ wrappercc = python_command + [wrapper] + cc.get_exelist() + ['-DSOME_ARG']
+ os.environ[evar] = ' '.join(quote_arg(w) for w in wrappercc)
+
+ # Check static linker too
+ wrapperlinker = python_command + [wrapper] + linker.get_exelist() + linker.get_always_args()
+ os.environ['AR'] = ' '.join(quote_arg(w) for w in wrapperlinker)
+
+ # Need a new env to re-run environment loading
+ env = get_fake_env(testdir, self.builddir, self.prefix)
+
+ try:
+ wcc = compiler_from_language(env, lang, MachineChoice.HOST)
+ except EnvironmentException:
+ # always raise in ci, we expect to have a valid ObjC and ObjC++ compiler of some kind
+ if is_ci():
+ self.fail(f'Could not find a compiler for {lang}')
+ if sys.version_info < (3, 11):
+ continue
+ self.skipTest(f'No valid compiler for {lang}.')
+ wlinker = detect_static_linker(env, wcc)
+ del os.environ['AR']
+
+ # Must be the same type since it's a wrapper around the same exelist
+ self.assertIs(type(cc), type(wcc))
+ self.assertIs(type(linker), type(wlinker))
+
+ # Ensure that the exelist is correct
+ self.assertEqual(wcc.get_exelist(), wrappercc)
+ self.assertEqual(wlinker.get_exelist(), wrapperlinker)
+
+ # Ensure that the version detection worked correctly
+ self.assertEqual(cc.version, wcc.version)
+ if hasattr(cc, 'is_64'):
+ self.assertEqual(cc.is_64, wcc.is_64)
def test_always_prefer_c_compiler_for_asm(self):
testdir = os.path.join(self.common_test_dir, '133 c cpp and asm')
@@ -1340,7 +1396,36 @@ def test_source_generator_program_cause_rebuild(self):
self.utime(os.path.join(testdir, 'srcgen.py'))
self.assertRebuiltTarget('basic')
- @skipIf(is_ci() and is_cygwin(), 'A GCC update on 2024-07-21 has broken LTO and is being investigated')
+ def test_long_opt_vs_D(self):
+ '''
+ Test that conflicts between -D for builtin options and the corresponding
+ long option are detected without false positives or negatives.
+ '''
+ testdir = os.path.join(self.unit_test_dir, '128 long opt vs D')
+
+ for opt in ['-Dsysconfdir=/etc', '-Dsysconfdir2=/etc']:
+ exception_raised = False
+ try:
+ self.init(testdir, extra_args=[opt, '--sysconfdir=/etc'])
+ except subprocess.CalledProcessError:
+ exception_raised = True
+ if 'sysconfdir2' in opt:
+ self.assertFalse(exception_raised, f'{opt} --sysconfdir raised an exception')
+ else:
+ self.assertTrue(exception_raised, f'{opt} --sysconfdir did not raise an exception')
+
+ exception_raised = False
+ try:
+ self.init(testdir, extra_args=['--sysconfdir=/etc', opt])
+ except subprocess.CalledProcessError:
+ exception_raised = True
+ if 'sysconfdir2' in opt:
+ self.assertFalse(exception_raised, f'--sysconfdir {opt} raised an exception')
+ else:
+ self.assertTrue(exception_raised, f'--sysconfdir {opt} did not raise an exception')
+
+ self.wipe()
+
def test_static_library_lto(self):
'''
Test that static libraries can be built with LTO and linked to
@@ -1768,7 +1853,7 @@ def build_shared_lib(self, compiler, source, objectfile, outfile, impfile, extra
def test_prebuilt_shared_lib(self):
(cc, _, object_suffix, shared_suffix) = self.detect_prebuild_env()
- tdir = os.path.join(self.unit_test_dir, '17 prebuilt shared')
+ tdir = self.copy_srcdir(os.path.join(self.unit_test_dir, '17 prebuilt shared'))
source = os.path.join(tdir, 'alexandria.c')
objectfile = os.path.join(tdir, 'alexandria.' + object_suffix)
impfile = os.path.join(tdir, 'alexandria.lib')
@@ -2629,7 +2714,7 @@ def test_command_line(self):
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.optstore.get_value('default_library'), 'static')
self.assertEqual(obj.optstore.get_value('warning_level'), '1')
- self.assertEqual(obj.optstore.get_value('set_sub_opt'), True)
+ self.assertEqual(obj.optstore.get_value(OptionKey('set_sub_opt', '')), True)
self.assertEqual(obj.optstore.get_value(OptionKey('subp_opt', 'subp')), 'default3')
self.wipe()
@@ -2739,7 +2824,7 @@ def test_command_line(self):
self.init(testdir, extra_args=['-Dset_percent_opt=myoption%', '--fatal-meson-warnings'])
obj = mesonbuild.coredata.load(self.builddir)
- self.assertEqual(obj.optstore.get_value('set_percent_opt'), 'myoption%')
+ self.assertEqual(obj.optstore.get_value(OptionKey('set_percent_opt', '')), 'myoption%')
self.wipe()
# Setting a 2nd time the same option should override the first value
@@ -2752,7 +2837,7 @@ def test_command_line(self):
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.optstore.get_value('bindir'), 'bar')
self.assertEqual(obj.optstore.get_value('buildtype'), 'release')
- self.assertEqual(obj.optstore.get_value('b_sanitize'), 'thread')
+ self.assertEqual(obj.optstore.get_value('b_sanitize'), ['thread'])
self.assertEqual(obj.optstore.get_value(OptionKey('c_args')), ['-Dbar'])
self.setconf(['--bindir=bar', '--bindir=foo',
'-Dbuildtype=release', '-Dbuildtype=plain',
@@ -2761,7 +2846,7 @@ def test_command_line(self):
obj = mesonbuild.coredata.load(self.builddir)
self.assertEqual(obj.optstore.get_value('bindir'), 'foo')
self.assertEqual(obj.optstore.get_value('buildtype'), 'plain')
- self.assertEqual(obj.optstore.get_value('b_sanitize'), 'address')
+ self.assertEqual(obj.optstore.get_value('b_sanitize'), ['address'])
self.assertEqual(obj.optstore.get_value(OptionKey('c_args')), ['-Dfoo'])
self.wipe()
except KeyError:
@@ -2953,7 +3038,10 @@ def test_reconfigure(self):
self.assertRegex(out, 'opt2 val2')
self.assertRegex(out, 'opt3 val3')
self.assertRegex(out, 'opt4 default4')
- self.assertRegex(out, 'sub1:werror true')
+ # Per-subproject options are stored in augments,
+ # not in the options themselves, so these status
+ # messages are no longer printed.
+ #self.assertRegex(out, 'sub1:werror true')
self.build()
self.run_tests()
@@ -2967,7 +3055,7 @@ def test_reconfigure(self):
self.assertRegex(out, 'opt2 val2')
self.assertRegex(out, 'opt3 val3')
self.assertRegex(out, 'opt4 val4')
- self.assertRegex(out, 'sub1:werror true')
+ #self.assertRegex(out, 'sub1:werror true')
self.assertTrue(Path(self.builddir, '.gitignore').exists())
self.build()
self.run_tests()
@@ -3189,14 +3277,20 @@ def test_clang_tidy_fix(self):
def test_identity_cross(self):
testdir = os.path.join(self.unit_test_dir, '69 cross')
# Do a build to generate a cross file where the host is this target
- self.init(testdir, extra_args=['-Dgenerate=true'])
+ # build.c_args is ignored here.
+ self.init(testdir, extra_args=['-Dgenerate=true', '-Dc_args=-funroll-loops',
+ '-Dbuild.c_args=-pedantic'])
+ self.meson_native_files = [os.path.join(self.builddir, "nativefile")]
+ self.assertTrue(os.path.exists(self.meson_native_files[0]))
self.meson_cross_files = [os.path.join(self.builddir, "crossfile")]
self.assertTrue(os.path.exists(self.meson_cross_files[0]))
- # Now verify that this is detected as cross
+ # Now verify that this is detected as cross and build options are
+ # processed correctly
self.new_builddir()
self.init(testdir)
- def test_introspect_buildoptions_without_configured_build(self):
+ # Disabled for now as the introspection format needs to change to add augments.
+ def DISABLED_test_introspect_buildoptions_without_configured_build(self):
testdir = os.path.join(self.unit_test_dir, '58 introspect buildoptions')
testfile = os.path.join(testdir, 'meson.build')
res_nb = self.introspect_directory(testfile, ['--buildoptions'] + self.meson_args)
@@ -3335,7 +3429,7 @@ def assertKeyTypes(key_type_list, obj, strict: bool = True):
('win_subsystem', (str, None)),
]
- targets_sources_typelist = [
+ targets_sources_unknown_lang_typelist = [
('language', str),
('compiler', list),
('parameters', list),
@@ -3344,6 +3438,10 @@ def assertKeyTypes(key_type_list, obj, strict: bool = True):
('unity_sources', (list, None)),
]
+ targets_sources_typelist = targets_sources_unknown_lang_typelist + [
+ ('machine', str),
+ ]
+
target_sources_linker_typelist = [
('linker', list),
('parameters', list),
@@ -3372,97 +3470,100 @@ def assertKeyTypes(key_type_list, obj, strict: bool = True):
src_to_id.update({os.path.relpath(src, testdir): i['id']
for src in group.get('sources', [])})
- # Check Tests and benchmarks
- tests_to_find = ['test case 1', 'test case 2', 'benchmark 1']
- deps_to_find = {'test case 1': [src_to_id['t1.cpp']],
- 'test case 2': [src_to_id['t2.cpp'], src_to_id['t3.cpp']],
- 'benchmark 1': [out_to_id['file2'], out_to_id['file3'], out_to_id['file4'], src_to_id['t3.cpp']]}
- for i in res['benchmarks'] + res['tests']:
- assertKeyTypes(test_keylist, i)
- if i['name'] in tests_to_find:
- tests_to_find.remove(i['name'])
- self.assertEqual(sorted(i['depends']),
- sorted(deps_to_find[i['name']]))
- self.assertListEqual(tests_to_find, [])
-
- # Check buildoptions
- buildopts_to_find = {'cpp_std': 'c++11'}
- for i in res['buildoptions']:
- assertKeyTypes(buildoptions_keylist, i)
- valid_type = False
- for j in buildoptions_typelist:
- if i['type'] == j[0]:
- self.assertIsInstance(i['value'], j[1])
- assertKeyTypes(j[2], i, strict=False)
- valid_type = True
- break
-
- self.assertIn(i['section'], buildoptions_sections)
- self.assertIn(i['machine'], buildoptions_machines)
- self.assertTrue(valid_type)
- if i['name'] in buildopts_to_find:
- self.assertEqual(i['value'], buildopts_to_find[i['name']])
- buildopts_to_find.pop(i['name'], None)
- self.assertDictEqual(buildopts_to_find, {})
-
- # Check buildsystem_files
- bs_files = ['meson.build', 'meson_options.txt', 'sharedlib/meson.build', 'staticlib/meson.build']
- bs_files = [os.path.join(testdir, x) for x in bs_files]
- self.assertPathListEqual(list(sorted(res['buildsystem_files'])), list(sorted(bs_files)))
-
- # Check dependencies
- dependencies_to_find = ['threads']
- for i in res['dependencies']:
- assertKeyTypes(dependencies_typelist, i)
- if i['name'] in dependencies_to_find:
- dependencies_to_find.remove(i['name'])
- self.assertListEqual(dependencies_to_find, [])
-
- # Check projectinfo
- self.assertDictEqual(res['projectinfo'], {
- 'version': '1.2.3',
- 'license': ['unknown'],
- 'license_files': [],
- 'descriptive_name': 'introspection',
- 'subproject_dir': 'subprojects',
- 'subprojects': []
- })
-
- # Check targets
- targets_to_find = {
- 'sharedTestLib': ('shared library', True, False, 'sharedlib/meson.build',
- [os.path.join(testdir, 'sharedlib', 'shared.cpp')]),
- 'staticTestLib': ('static library', True, False, 'staticlib/meson.build',
- [os.path.join(testdir, 'staticlib', 'static.c')]),
- 'custom target test 1': ('custom', False, False, 'meson.build',
- [os.path.join(testdir, 'cp.py')]),
- 'custom target test 2': ('custom', False, False, 'meson.build',
- name_to_out['custom target test 1']),
- 'test1': ('executable', True, True, 'meson.build',
- [os.path.join(testdir, 't1.cpp')]),
- 'test2': ('executable', True, False, 'meson.build',
- [os.path.join(testdir, 't2.cpp')]),
- 'test3': ('executable', True, False, 'meson.build',
- [os.path.join(testdir, 't3.cpp')]),
- 'custom target test 3': ('custom', False, False, 'meson.build',
- name_to_out['test3']),
- }
- for i in res['targets']:
- assertKeyTypes(targets_typelist, i)
- if i['name'] in targets_to_find:
- tgt = targets_to_find[i['name']]
- self.assertEqual(i['type'], tgt[0])
- self.assertEqual(i['build_by_default'], tgt[1])
- self.assertEqual(i['installed'], tgt[2])
- self.assertPathEqual(i['defined_in'], os.path.join(testdir, tgt[3]))
- targets_to_find.pop(i['name'], None)
- for j in i['target_sources']:
- if 'compiler' in j:
- assertKeyTypes(targets_sources_typelist, j)
- self.assertEqual(j['sources'], [os.path.normpath(f) for f in tgt[4]])
- else:
- assertKeyTypes(target_sources_linker_typelist, j)
- self.assertDictEqual(targets_to_find, {})
+ with self.subTest('Check Tests and Benchmarks'):
+ tests_to_find = ['test case 1', 'test case 2', 'benchmark 1']
+ deps_to_find = {'test case 1': [src_to_id['t1.cpp']],
+ 'test case 2': [src_to_id['t2.cpp'], src_to_id['t3.cpp']],
+ 'benchmark 1': [out_to_id['file2'], out_to_id['file3'], out_to_id['file4'], src_to_id['t3.cpp']]}
+ for i in res['benchmarks'] + res['tests']:
+ assertKeyTypes(test_keylist, i)
+ if i['name'] in tests_to_find:
+ tests_to_find.remove(i['name'])
+ self.assertEqual(sorted(i['depends']),
+ sorted(deps_to_find[i['name']]))
+ self.assertListEqual(tests_to_find, [])
+
+ with self.subTest('Check buildoptions'):
+ buildopts_to_find = {'cpp_std': 'c++11'}
+ for i in res['buildoptions']:
+ assertKeyTypes(buildoptions_keylist, i)
+ valid_type = False
+ for j in buildoptions_typelist:
+ if i['type'] == j[0]:
+ self.assertIsInstance(i['value'], j[1])
+ assertKeyTypes(j[2], i, strict=False)
+ valid_type = True
+ break
+
+ self.assertIn(i['section'], buildoptions_sections)
+ self.assertIn(i['machine'], buildoptions_machines)
+ self.assertTrue(valid_type)
+ if i['name'] in buildopts_to_find:
+ self.assertEqual(i['value'], buildopts_to_find[i['name']])
+ buildopts_to_find.pop(i['name'], None)
+ self.assertDictEqual(buildopts_to_find, {})
+
+ with self.subTest('Check buildsystem_files'):
+ bs_files = ['meson.build', 'meson_options.txt', 'sharedlib/meson.build', 'staticlib/meson.build']
+ bs_files = [os.path.join(testdir, x) for x in bs_files]
+ self.assertPathListEqual(list(sorted(res['buildsystem_files'])), list(sorted(bs_files)))
+
+ with self.subTest('Check dependencies'):
+ dependencies_to_find = ['threads']
+ for i in res['dependencies']:
+ assertKeyTypes(dependencies_typelist, i)
+ if i['name'] in dependencies_to_find:
+ dependencies_to_find.remove(i['name'])
+ self.assertListEqual(dependencies_to_find, [])
+
+ with self.subTest('Check projectinfo'):
+ self.assertDictEqual(res['projectinfo'], {
+ 'version': '1.2.3',
+ 'license': ['unknown'],
+ 'license_files': [],
+ 'descriptive_name': 'introspection',
+ 'subproject_dir': 'subprojects',
+ 'subprojects': []
+ })
+
+ with self.subTest('Check targets'):
+ targets_to_find = {
+ 'sharedTestLib': ('shared library', True, False, 'sharedlib/meson.build',
+ [os.path.join(testdir, 'sharedlib', 'shared.cpp')]),
+ 'staticTestLib': ('static library', True, False, 'staticlib/meson.build',
+ [os.path.join(testdir, 'staticlib', 'static.c')]),
+ 'custom target test 1': ('custom', False, False, 'meson.build',
+ [os.path.join(testdir, 'cp.py')]),
+ 'custom target test 2': ('custom', False, False, 'meson.build',
+ name_to_out['custom target test 1']),
+ 'test1': ('executable', True, True, 'meson.build',
+ [os.path.join(testdir, 't1.cpp')]),
+ 'test2': ('executable', True, False, 'meson.build',
+ [os.path.join(testdir, 't2.cpp')]),
+ 'test3': ('executable', True, False, 'meson.build',
+ [os.path.join(testdir, 't3.cpp')]),
+ 'custom target test 3': ('custom', False, False, 'meson.build',
+ name_to_out['test3']),
+ }
+ for i in res['targets']:
+ assertKeyTypes(targets_typelist, i)
+ if i['name'] in targets_to_find:
+ tgt = targets_to_find[i['name']]
+ self.assertEqual(i['type'], tgt[0])
+ self.assertEqual(i['build_by_default'], tgt[1])
+ self.assertEqual(i['installed'], tgt[2])
+ self.assertPathEqual(i['defined_in'], os.path.join(testdir, tgt[3]))
+ targets_to_find.pop(i['name'], None)
+ for j in i['target_sources']:
+ if 'compiler' in j:
+ if j['language'] == 'unknown':
+ assertKeyTypes(targets_sources_unknown_lang_typelist, j)
+ else:
+ assertKeyTypes(targets_sources_typelist, j)
+ self.assertEqual(j['sources'], [os.path.normpath(f) for f in tgt[4]])
+ else:
+ assertKeyTypes(target_sources_linker_typelist, j)
+ self.assertDictEqual(targets_to_find, {})
def test_introspect_file_dump_equals_all(self):
testdir = os.path.join(self.unit_test_dir, '56 introspection')
@@ -3508,7 +3609,8 @@ def test_introspect_meson_info(self):
self.assertEqual(res1['error'], False)
self.assertEqual(res1['build_files_updated'], True)
- def test_introspect_config_update(self):
+ # Disabled for now as the introspection file format needs to change to have augments.
+ def DISABLE_test_introspect_config_update(self):
testdir = os.path.join(self.unit_test_dir, '56 introspection')
introfile = os.path.join(self.builddir, 'meson-info', 'intro-buildoptions.json')
self.init(testdir)
@@ -3560,6 +3662,7 @@ def test_introspect_targets_from_source(self):
sources += j.get('sources', [])
i['target_sources'] = [{
'language': 'unknown',
+ 'machine': 'host',
'compiler': [],
'parameters': [],
'sources': sources,
@@ -4438,7 +4541,10 @@ def test_custom_target_implicit_include(self):
matches += 1
self.assertEqual(matches, 1)
- def test_env_flags_to_linker(self) -> None:
+ # This test no longer really makes sense. Linker flags are set in options
+ # when it is set up. Changing the compiler after the fact does not really
+ # make sense and is not supported.
+ def DISABLED_test_env_flags_to_linker(self) -> None:
# Compilers that act as drivers should add their compiler flags to the
# linker, those that do not shouldn't
with mock.patch.dict(os.environ, {'CFLAGS': '-DCFLAG', 'LDFLAGS': '-flto'}):
@@ -4448,17 +4554,20 @@ def test_env_flags_to_linker(self) -> None:
cc = detect_compiler_for(env, 'c', MachineChoice.HOST, True, '')
cc_type = type(cc)
- # Test a compiler that acts as a linker
+ # The compiler either invokes the linker or doesn't. Act accordingly.
with mock.patch.object(cc_type, 'INVOKES_LINKER', True):
+ env.coredata.get_external_link_args.cache_clear()
cc = detect_compiler_for(env, 'c', MachineChoice.HOST, True, '')
link_args = env.coredata.get_external_link_args(cc.for_machine, cc.language)
self.assertEqual(sorted(link_args), sorted(['-DCFLAG', '-flto']))
- # And one that doesn't
- with mock.patch.object(cc_type, 'INVOKES_LINKER', False):
- cc = detect_compiler_for(env, 'c', MachineChoice.HOST, True, '')
- link_args = env.coredata.get_external_link_args(cc.for_machine, cc.language)
- self.assertEqual(sorted(link_args), sorted(['-flto']))
+
+ ## And one that doesn't
+ #with mock.patch.object(cc_type, 'INVOKES_LINKER', False):
+ # env.coredata.get_external_link_args.cache_clear()
+ # cc = detect_compiler_for(env, 'c', MachineChoice.HOST, True, '')
+ # link_args = env.coredata.get_external_link_args(cc.for_machine, cc.language)
+ # self.assertEqual(sorted(link_args), sorted(['-flto']))
def test_install_tag(self) -> None:
testdir = os.path.join(self.unit_test_dir, '98 install all targets')
@@ -4878,9 +4987,45 @@ def output_name(name, type_):
with self.subTest(key='{}.{}'.format(data_type, file)):
self.assertEqual(res[data_type][file], details)
+ @skip_if_not_language('rust')
+ @unittest.skipIf(not shutil.which('rustdoc'), 'Test requires rustdoc')
+ def test_rustdoc(self) -> None:
+ if self.backend is not Backend.ninja:
+ raise unittest.SkipTest('Rust is only supported with ninja currently')
+ try:
+ with tempfile.TemporaryDirectory() as tmpdir:
+ testdir = os.path.join(tmpdir, 'a')
+ shutil.copytree(os.path.join(self.rust_test_dir, '9 unit tests'),
+ testdir)
+ self.init(testdir)
+ self.build('rustdoc')
+ except PermissionError:
+ # When run under Windows CI, something (virus scanner?)
+ # holds on to the git files so cleaning up the dir
+ # fails sometimes.
+ pass
+
@skip_if_not_language('rust')
@unittest.skipIf(not shutil.which('clippy-driver'), 'Test requires clippy-driver')
def test_rust_clippy(self) -> None:
+ if self.backend is not Backend.ninja:
+ raise unittest.SkipTest('Rust is only supported with ninja currently')
+ # When clippy is used with --werror, configuration/build should fail since a
+ # variable named "foo" is used, but it is on clippy's denylist
+ testdir = os.path.join(self.rust_test_dir, '1 basic')
+ self.init(testdir)
+ self.build('clippy')
+
+ self.wipe()
+ self.init(testdir, extra_args=['--werror', '-Db_colorout=never'])
+ with self.assertRaises(subprocess.CalledProcessError) as cm:
+ self.build('clippy')
+ self.assertTrue('error: use of a blacklisted/placeholder name `foo`' in cm.exception.stdout or
+ 'error: use of a disallowed/placeholder name `foo`' in cm.exception.stdout)
+
+ @skip_if_not_language('rust')
+ @unittest.skipIf(not shutil.which('clippy-driver'), 'Test requires clippy-driver')
+ def test_rust_clippy_as_rustc(self) -> None:
if self.backend is not Backend.ninja:
raise unittest.SkipTest('Rust is only supported with ninja currently')
# When clippy is used, we should get an exception since a variable named
@@ -4892,6 +5037,13 @@ def test_rust_clippy(self) -> None:
self.assertTrue('error: use of a blacklisted/placeholder name `foo`' in cm.exception.stdout or
'error: use of a disallowed/placeholder name `foo`' in cm.exception.stdout)
+ @skip_if_not_language('rust')
+ def test_rust_test_warnings(self) -> None:
+ if self.backend is not Backend.ninja:
+ raise unittest.SkipTest('Rust is only supported with ninja currently')
+ testdir = os.path.join(self.rust_test_dir, '9 unit tests')
+ self.init(testdir, extra_args=['--fatal-meson-warnings'])
+
@skip_if_not_language('rust')
def test_rust_rlib_linkage(self) -> None:
if self.backend is not Backend.ninja:
@@ -5012,9 +5164,11 @@ def test_configure_same_noop(self):
olddata = newdata
oldmtime = newmtime
- def test_c_cpp_stds(self):
+ def __test_multi_stds(self, test_c: bool = True, test_objc: bool = False) -> None:
+ assert test_c or test_objc, 'must test something'
testdir = os.path.join(self.unit_test_dir, '115 c cpp stds')
- self.init(testdir)
+ self.init(testdir, extra_args=[f'-Dwith-c={str(test_c).lower()}',
+ f'-Dwith-objc={str(test_objc).lower()}'])
# Invalid values should fail whatever compiler we have
with self.assertRaises(subprocess.CalledProcessError):
self.setconf('-Dc_std=invalid')
@@ -5023,8 +5177,20 @@ def test_c_cpp_stds(self):
with self.assertRaises(subprocess.CalledProcessError):
self.setconf('-Dc_std=c++11')
env = get_fake_env()
- cc = detect_c_compiler(env, MachineChoice.HOST)
- if cc.get_id() == 'msvc':
+ if test_c:
+ cc = detect_c_compiler(env, MachineChoice.HOST)
+ if test_objc:
+ objc = detect_compiler_for(env, 'objc', MachineChoice.HOST, True, '')
+ assert objc is not None
+ if test_c and cc.get_argument_syntax() != objc.get_argument_syntax():
+ # The test doesn't work correctly in this case because we can
+ # end up with incompatible stds, like gnu89 with cl.exe for C
+ # and clang.exe for ObjC
+ return
+ if not test_c:
+ cc = objc
+
+ if cc.get_id() in {'msvc', 'clang-cl'}:
# default_option should have selected those
self.assertEqual(self.getconf('c_std'), 'c89')
self.assertEqual(self.getconf('cpp_std'), 'vc++11')
@@ -5037,7 +5203,7 @@ def test_c_cpp_stds(self):
# The first supported std should be selected
self.setconf('-Dcpp_std=gnu++11,vc++11,c++11')
self.assertEqual(self.getconf('cpp_std'), 'vc++11')
- elif cc.get_id() == 'gcc':
+ elif cc.get_id() in {'gcc', 'clang'}:
# default_option should have selected those
self.assertEqual(self.getconf('c_std'), 'gnu89')
self.assertEqual(self.getconf('cpp_std'), 'gnu++98')
@@ -5045,6 +5211,48 @@ def test_c_cpp_stds(self):
self.setconf('-Dcpp_std=c++11,gnu++11,vc++11')
self.assertEqual(self.getconf('cpp_std'), 'c++11')
+ def test_c_cpp_stds(self) -> None:
+ self.__test_multi_stds()
+
+ @skip_if_not_language('objc')
+ @skip_if_not_language('objcpp')
+ def test_objc_objcpp_stds(self) -> None:
+ self.__test_multi_stds(test_c=False, test_objc=True)
+
+ @skip_if_not_language('objc')
+ @skip_if_not_language('objcpp')
+ def test_c_cpp_objc_objcpp_stds(self) -> None:
+ self.__test_multi_stds(test_objc=True)
+
+ def test_slice(self):
+ testdir = os.path.join(self.unit_test_dir, '126 test slice')
+ self.init(testdir)
+ self.build()
+
+ for arg, expectation in {'1/1': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
+ '1/2': [1, 3, 5, 7, 9],
+ '2/2': [2, 4, 6, 8, 10],
+ '1/10': [1],
+ '2/10': [2],
+ '10/10': [10],
+ }.items():
+ output = self._run(self.mtest_command + ['--slice=' + arg])
+ tests = sorted([ int(x) for x in re.findall(r'\n[ 0-9]+/[0-9]+ test-([0-9]*)', output) ])
+ self.assertEqual(tests, expectation)
+
+ for arg, expectation in {'': 'error: argument --slice: value does not conform to format \'SLICE/NUM_SLICES\'',
+ '0': 'error: argument --slice: value does not conform to format \'SLICE/NUM_SLICES\'',
+ '0/1': 'error: argument --slice: SLICE is not a positive integer',
+ 'a/1': 'error: argument --slice: SLICE is not an integer',
+ '1/0': 'error: argument --slice: NUM_SLICES is not a positive integer',
+ '1/a': 'error: argument --slice: NUM_SLICES is not an integer',
+ '2/1': 'error: argument --slice: SLICE exceeds NUM_SLICES',
+ '1/11': 'ERROR: number of slices (11) exceeds number of tests (10)',
+ }.items():
+ with self.assertRaises(subprocess.CalledProcessError) as cm:
+ self._run(self.mtest_command + ['--slice=' + arg])
+ self.assertIn(expectation, cm.exception.output)
+
def test_rsp_support(self):
env = get_fake_env()
cc = detect_c_compiler(env, MachineChoice.HOST)
@@ -5053,3 +5261,75 @@ def test_rsp_support(self):
'link', 'lld-link', 'mwldarm', 'mwldeppc', 'optlink', 'xilink',
}
self.assertEqual(cc.linker.get_accepts_rsp(), has_rsp)
+
+ def test_nonexisting_bargs(self):
+ testdir = os.path.join(self.unit_test_dir, '116 empty project')
+ args = ['-Db_ndebug=if_release']
+ self.init(testdir, extra_args=args)
+
+ def test_wipe_with_args(self):
+ testdir = os.path.join(self.common_test_dir, '1 trivial')
+ self.init(testdir, extra_args=['-Dc_args=-DSOMETHING'])
+ self.init(testdir, extra_args=['--wipe'])
+
+ def test_interactive_tap(self):
+ testdir = os.path.join(self.unit_test_dir, '124 interactive tap')
+ self.init(testdir, extra_args=['--wrap-mode=forcefallback'])
+ output = self._run(self.mtest_command + ['--interactive'])
+ self.assertRegex(output, r'Ok:\s*0')
+ self.assertRegex(output, r'Fail:\s*0')
+ self.assertRegex(output, r'Ignored:\s*1')
+
+ @skip_if_not_language('fortran')
+ def test_fortran_cross_target_module_dep(self) -> None:
+ if self.backend is not Backend.ninja:
+ raise SkipTest('Test is only relevant on the ninja backend')
+ testdir = os.path.join(self.fortran_test_dir, '8 module names')
+ self.init(testdir, extra_args=['-Dunittest=true'])
+
+ # Find the correct output to compile, regardless of what compiler is being used
+ comp = self.get_compdb()
+ entry = first(comp, lambda e: e['file'].endswith('lib.f90'))
+ assert entry is not None, 'for mypy'
+ output = entry['output']
+
+ self.build(output, extra_args=['-j1'])
+
+ @skip_if_not_language('fortran')
+ def test_fortran_new_module_in_dep(self) -> None:
+ if self.backend is not Backend.ninja:
+ raise SkipTest('Test is only relevant on the ninja backend')
+ testdir = self.copy_srcdir(os.path.join(self.fortran_test_dir, '8 module names'))
+ self.init(testdir, extra_args=['-Dunittest=true'])
+ self.build()
+
+ with open(os.path.join(testdir, 'mod1.f90'), 'a', encoding='utf-8') as f:
+ f.write(textwrap.dedent("""\
+ module MyMod3
+ implicit none
+
+ integer, parameter :: myModVal3 =1
+
+ end module MyMod3
+ """))
+
+ with open(os.path.join(testdir, 'test.f90'), 'w', encoding='utf-8') as f:
+ f.write(textwrap.dedent("""\
+ program main
+ use MyMod2
+ use MyMod3
+ implicit none
+
+ call showvalues()
+ print*, "myModVal3 = ", myModVal3
+
+ end program
+ """))
+
+ # Find the correct output to compile, regardless of what compiler is being used
+ comp = self.get_compdb()
+ entry = first(comp, lambda e: e['file'].endswith('lib.f90'))
+ assert entry is not None, 'for mypy'
+ output = entry['output']
+
+ self.build(output, extra_args=['-j1'])
diff --git a/unittests/baseplatformtests.py b/unittests/baseplatformtests.py
index 3770321925fa..f9bb58135318 100644
--- a/unittests/baseplatformtests.py
+++ b/unittests/baseplatformtests.py
@@ -42,7 +42,6 @@
# e.g. for assertXXX helpers.
__unittest = True
-@mock.patch.dict(os.environ)
class BasePlatformTests(TestCase):
prefix = '/usr'
libdir = 'lib'
@@ -79,6 +78,7 @@ def setUpClass(cls) -> None:
cls.objc_test_dir = os.path.join(src_root, 'test cases/objc')
cls.objcpp_test_dir = os.path.join(src_root, 'test cases/objcpp')
cls.darwin_test_dir = os.path.join(src_root, 'test cases/darwin')
+ cls.fortran_test_dir = os.path.join(src_root, 'test cases/fortran')
# Misc stuff
if cls.backend is Backend.ninja:
@@ -87,9 +87,18 @@ def setUpClass(cls) -> None:
# VS doesn't have a stable output when no changes are done
# XCode backend is untested with unit tests, help welcome!
cls.no_rebuild_stdout = [f'UNKNOWN BACKEND {cls.backend.name!r}']
+
+ cls.env_patch = mock.patch.dict(os.environ)
+ cls.env_patch.start()
+
os.environ['COLUMNS'] = '80'
os.environ['PYTHONIOENCODING'] = 'utf8'
+ @classmethod
+ def tearDownClass(cls) -> None:
+ super().tearDownClass()
+ cls.env_patch.stop()
+
def setUp(self):
super().setUp()
self.meson_native_files = []
@@ -117,18 +126,6 @@ def new_builddir(self):
newdir = os.path.realpath(newdir)
self.change_builddir(newdir)
- def new_builddir_in_tempdir(self):
- # Can't keep the builddir inside the source tree for the umask tests:
- # https://github.com/mesonbuild/meson/pull/5546#issuecomment-509666523
- # And we can't do this for all tests because it causes the path to be
- # a short-path which breaks other tests:
- # https://github.com/mesonbuild/meson/pull/9497
- newdir = tempfile.mkdtemp()
- # In case the directory is inside a symlinked directory, find the real
- # path otherwise we might not find the srcdir from inside the builddir.
- newdir = os.path.realpath(newdir)
- self.change_builddir(newdir)
-
def _open_meson_log(self) -> io.TextIOWrapper:
log = os.path.join(self.logdir, 'meson-log.txt')
return open(log, encoding='utf-8')
@@ -291,6 +288,8 @@ def setconf(self, arg: T.Sequence[str], will_build: bool = True) -> None:
else:
arg = list(arg)
self._run(self.mconf_command + arg + [self.builddir])
+ if will_build:
+ self.build()
def getconf(self, optname: str):
opts = self.introspect('--buildoptions')
diff --git a/unittests/datatests.py b/unittests/datatests.py
index cb6542db8f71..bd83b81f86de 100644
--- a/unittests/datatests.py
+++ b/unittests/datatests.py
@@ -138,9 +138,15 @@ def test_builtin_options_documented(self):
self.assertEqual(len(found_entries & options), 0)
found_entries |= options
+ # TODO: put the module name back in the OptionKey
+ def remove_module_name(key: OptionKey) -> OptionKey:
+ if '.' in key.name:
+ return key.evolve(name=key.name.split('.', 1)[1])
+ return key
+
self.assertEqual(found_entries, {
- *(str(k.without_module_prefix()) for k in mesonbuild.options.BUILTIN_OPTIONS),
- *(str(k.without_module_prefix()) for k in mesonbuild.options.BUILTIN_OPTIONS_PER_MACHINE),
+ *(str(remove_module_name(k)) for k in mesonbuild.options.BUILTIN_OPTIONS),
+ *(str(remove_module_name(k)) for k in mesonbuild.options.BUILTIN_OPTIONS_PER_MACHINE),
})
# Check that `buildtype` table inside `Core options` matches how
@@ -240,5 +246,5 @@ def test_all_functions_defined_in_ast_interpreter(self):
del os.environ['MESON_RUNNING_IN_PROJECT_TESTS']
env = get_fake_env()
interp = Interpreter(FakeBuild(env))
- astint = AstInterpreter('.', '', '')
+ astint = AstInterpreter('.', '', '', '', env)
self.assertEqual(set(interp.funcs.keys()), set(astint.funcs.keys()))
diff --git a/unittests/failuretests.py b/unittests/failuretests.py
index 8a802120b6f3..18d0c5e70b47 100644
--- a/unittests/failuretests.py
+++ b/unittests/failuretests.py
@@ -1,11 +1,13 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2016-2021 The Meson development team
+from __future__ import annotations
import subprocess
import tempfile
import os
import shutil
import unittest
+import typing as T
from contextlib import contextmanager
from mesonbuild.mesonlib import (
@@ -75,12 +77,13 @@ def tearDown(self):
super().tearDown()
windows_proof_rmtree(self.srcdir)
- def assertMesonRaises(self, contents, match, *,
- extra_args=None,
- langs=None,
- meson_version=None,
- options=None,
- override_envvars=None):
+ def assertMesonRaises(self, contents: str,
+ match: T.Union[str, T.Pattern[str]], *,
+ extra_args: T.Optional[T.List[str]] = None,
+ langs: T.Optional[T.List[str]] = None,
+ meson_version: T.Optional[str] = None,
+ options: T.Optional[str] = None,
+ override_envvars: T.Optional[T.MutableMapping[str, str]] = None) -> None:
'''
Assert that running meson configure on the specified @contents raises
a error message matching regex @match.
@@ -238,19 +241,26 @@ def test_dependency_invalid_method(self):
'''
self.assertMesonRaises(code, ".* is not a config-tool dependency")
- def test_objc_cpp_detection(self):
+ def test_objc_detection(self) -> None:
'''
Test that when we can't detect objc or objcpp, we fail gracefully.
'''
env = get_fake_env()
try:
detect_objc_compiler(env, MachineChoice.HOST)
+ except EnvironmentException as e:
+ self.assertRegex(str(e), r"(Unknown compiler|GCC was not built with support)")
+ else:
+ raise unittest.SkipTest('Working objective-c Compiler found, cannot test error.')
+
+ def test_objcpp_detection(self) -> None:
+ env = get_fake_env()
+ try:
detect_objcpp_compiler(env, MachineChoice.HOST)
- except EnvironmentException:
- code = "add_languages('objc')\nadd_languages('objcpp')"
- self.assertMesonRaises(code, "Unknown compiler")
- return
- raise unittest.SkipTest("objc and objcpp found, can't test detection failure")
+ except EnvironmentException as e:
+ self.assertRegex(str(e), r"(Unknown compiler|GCC was not built with support)")
+ else:
+ raise unittest.SkipTest('Working objective-c++ Compiler found, cannot test error.')
def test_subproject_variables(self):
'''
@@ -381,3 +391,8 @@ def test_override_resolved_dependency(self):
def test_error_func(self):
self.assertMesonRaises("error('a', 'b', ['c', ['d', {'e': 'f'}]], 'g')",
r"Problem encountered: a b \['c', \['d', {'e' : 'f'}\]\] g")
+
+ def test_compiler_cache_without_compiler(self):
+ self.assertMesonRaises('',
+ 'Compiler cache specified without compiler: ccache',
+ override_envvars={'CC': 'ccache'})
diff --git a/unittests/internaltests.py b/unittests/internaltests.py
index 69f52a413627..d7994ee085f9 100644
--- a/unittests/internaltests.py
+++ b/unittests/internaltests.py
@@ -35,7 +35,7 @@
from mesonbuild.interpreterbase import typed_pos_args, InvalidArguments, typed_kwargs, ContainerTypeInfo, KwargInfo
from mesonbuild.mesonlib import (
LibType, MachineChoice, PerMachine, Version, is_windows, is_osx,
- is_cygwin, is_openbsd, search_version, MesonException,
+ is_cygwin, is_openbsd, search_version, MesonException, python_command,
)
from mesonbuild.options import OptionKey
from mesonbuild.interpreter.type_checking import in_set_validator, NoneType
@@ -674,6 +674,34 @@ def _call_pkgbin(self, args, env=None):
for lib in ('pthread', 'm', 'c', 'dl', 'rt'):
self.assertNotIn(f'lib{lib}.a', link_arg, msg=link_args)
+ def test_program_version(self):
+ with tempfile.TemporaryDirectory() as tmpdir:
+ script_path = Path(tmpdir) / 'script.py'
+ script_path.write_text('import sys\nprint(sys.argv[1])\n', encoding='utf-8')
+ script_path.chmod(0o755)
+
+ for output, expected in {
+ '': None,
+ '1': None,
+ '1.2.4': '1.2.4',
+ '1 1.2.4': '1.2.4',
+ 'foo version 1.2.4': '1.2.4',
+ 'foo 1.2.4.': '1.2.4',
+ 'foo 1.2.4': '1.2.4',
+ 'foo 1.2.4 bar': '1.2.4',
+ 'foo 10.0.0': '10.0.0',
+ '50 5.4.0': '5.4.0',
+ 'This is perl 5, version 40, subversion 0 (v5.40.0)': '5.40.0',
+ 'git version 2.48.0.rc1': '2.48.0',
+ }.items():
+ prog = ExternalProgram('script', command=[python_command, str(script_path), output], silent=True)
+
+ if expected is None:
+ with self.assertRaisesRegex(MesonException, 'Could not find a version number'):
+ prog.get_version()
+ else:
+ self.assertEqual(prog.get_version(), expected)
+
def test_version_compare(self):
comparefunc = mesonbuild.mesonlib.version_compare_many
for (a, b, result) in [
diff --git a/unittests/linuxcrosstests.py b/unittests/linuxcrosstests.py
index a35633cdd79b..910429b0e9c7 100644
--- a/unittests/linuxcrosstests.py
+++ b/unittests/linuxcrosstests.py
@@ -146,7 +146,7 @@ def test_exe_wrapper_behaviour(self):
self.meson_cross_files = [os.path.join(testdir, 'broken-cross.txt')]
# Force tracebacks so we can detect them properly
env = {'MESON_FORCE_BACKTRACE': '1'}
- error_message = "An exe_wrapper is needed but was not found. Please define one in cross file and check the command and/or add it to PATH."
+ error_message = "An exe_wrapper is needed for " + self.builddir + "/prog.exe but was not found. Please define one in cross file and check the command and/or add it to PATH."
with self.assertRaises(MesonException) as cm:
# Must run in-process or we'll get a generic CalledProcessError
diff --git a/unittests/linuxliketests.py b/unittests/linuxliketests.py
index 55e83750af34..c25449c98a4b 100644
--- a/unittests/linuxliketests.py
+++ b/unittests/linuxliketests.py
@@ -43,7 +43,7 @@
from run_tests import (
- get_fake_env
+ get_fake_env, Backend,
)
from .baseplatformtests import BasePlatformTests
@@ -446,6 +446,24 @@ def test_installed_soname(self):
libdir = self.installdir + os.path.join(self.prefix, self.libdir)
self._test_soname_impl(libdir, True)
+ @skip_if_not_base_option('b_sanitize')
+ def test_c_link_args_and_env(self):
+ '''
+ Test that the CFLAGS / CXXFLAGS environment variables are
+ included on the linker command line when c_link_args is
+ set but c_args is not.
+ '''
+ if is_cygwin():
+ raise SkipTest('asan not available on Cygwin')
+ if is_openbsd():
+ raise SkipTest('-fsanitize=address is not supported on OpenBSD')
+
+ testdir = os.path.join(self.common_test_dir, '1 trivial')
+ env = {'CFLAGS': '-fsanitize=address'}
+ self.init(testdir, extra_args=['-Dc_link_args="-L/usr/lib"'],
+ override_envvars=env)
+ self.build()
+
def test_compiler_check_flags_order(self):
'''
Test that compiler check flags override all other flags. This can't be
@@ -590,8 +608,6 @@ def test_installed_modes(self):
Test that files installed by these tests have the correct permissions.
Can't be an ordinary test because our installed_files.txt is very basic.
'''
- if is_cygwin():
- self.new_builddir_in_tempdir()
# Test file modes
testdir = os.path.join(self.common_test_dir, '12 data')
self.init(testdir)
@@ -644,8 +660,6 @@ def test_installed_modes_extended(self):
'''
Test that files are installed with correct permissions using install_mode.
'''
- if is_cygwin():
- self.new_builddir_in_tempdir()
testdir = os.path.join(self.common_test_dir, '190 install_mode')
self.init(testdir)
self.build()
@@ -684,8 +698,6 @@ def test_install_umask(self):
install umask of 022, regardless of the umask at time the worktree
was checked out or the build was executed.
'''
- if is_cygwin():
- self.new_builddir_in_tempdir()
# Copy source tree to a temporary directory and change permissions
# there to simulate a checkout with umask 002.
orig_testdir = os.path.join(self.unit_test_dir, '26 install umask')
@@ -991,6 +1003,22 @@ def test_global_rpath(self):
got_rpath = get_rpath(os.path.join(yonder_prefix, 'bin/rpathified'))
self.assertEqual(got_rpath, yonder_libdir, rpath_format)
+ @skip_if_not_base_option('b_sanitize')
+ def test_env_cflags_ldflags(self):
+ if is_cygwin():
+ raise SkipTest('asan not available on Cygwin')
+ if is_openbsd():
+ raise SkipTest('-fsanitize=address is not supported on OpenBSD')
+
+ testdir = os.path.join(self.common_test_dir, '1 trivial')
+ env = {'CFLAGS': '-fsanitize=address', 'LDFLAGS': '-I.'}
+ self.init(testdir, override_envvars=env)
+ self.build()
+ compdb = self.get_compdb()
+ for i in compdb:
+ self.assertIn("-fsanitize=address", i["command"])
+ self.wipe()
+
@skip_if_not_base_option('b_sanitize')
def test_pch_with_address_sanitizer(self):
if is_cygwin():
@@ -1141,6 +1169,42 @@ def test_pkgconfig_duplicate_path_entries(self):
pkg_config_path = env.coredata.optstore.get_value('pkg_config_path')
self.assertEqual(pkg_config_path, [pkg_dir])
+ def test_pkgconfig_uninstalled_env_added(self):
+ '''
+ Checks that the meson-uninstalled dir is added to PKG_CONFIG_PATH
+ '''
+ testdir = os.path.join(self.unit_test_dir, '111 pkgconfig duplicate path entries')
+ meson_uninstalled_dir = os.path.join(self.builddir, 'meson-uninstalled')
+
+ env = get_fake_env(testdir, self.builddir, self.prefix)
+
+ newEnv = PkgConfigInterface.setup_env({}, env, MachineChoice.HOST, uninstalled=True)
+
+ pkg_config_path_dirs = newEnv['PKG_CONFIG_PATH'].split(os.pathsep)
+
+ self.assertEqual(len(pkg_config_path_dirs), 1)
+ self.assertEqual(pkg_config_path_dirs[0], meson_uninstalled_dir)
+
+ def test_pkgconfig_uninstalled_env_prepended(self):
+ '''
+ Checks that the meson-uninstalled dir is prepended to PKG_CONFIG_PATH
+ '''
+ testdir = os.path.join(self.unit_test_dir, '111 pkgconfig duplicate path entries')
+ meson_uninstalled_dir = os.path.join(self.builddir, 'meson-uninstalled')
+ external_pkg_config_path_dir = os.path.join('usr', 'local', 'lib', 'pkgconfig')
+
+ env = get_fake_env(testdir, self.builddir, self.prefix)
+
+ env.coredata.set_options({OptionKey('pkg_config_path'): external_pkg_config_path_dir},
+ subproject='')
+
+ newEnv = PkgConfigInterface.setup_env({}, env, MachineChoice.HOST, uninstalled=True)
+
+ pkg_config_path_dirs = newEnv['PKG_CONFIG_PATH'].split(os.pathsep)
+
+ self.assertEqual(pkg_config_path_dirs[0], meson_uninstalled_dir)
+ self.assertEqual(pkg_config_path_dirs[1], external_pkg_config_path_dir)
+
@skipIfNoPkgconfig
def test_pkgconfig_internal_libraries(self):
'''
@@ -1829,5 +1893,102 @@ def test_complex_link_cases(self):
self.assertIn('build t13-e1: c_LINKER t13-e1.p/main.c.o | libt12-s1.a libt13-s3.a\n', content)
def test_top_options_in_sp(self):
- testdir = os.path.join(self.unit_test_dir, '123 pkgsubproj')
+ testdir = os.path.join(self.unit_test_dir, '125 pkgsubproj')
self.init(testdir)
+
+ def test_unreadable_dir_in_declare_dep(self):
+ testdir = os.path.join(self.unit_test_dir, '125 declare_dep var')
+ tmpdir = Path(tempfile.mkdtemp())
+ self.addCleanup(windows_proof_rmtree, tmpdir)
+ declaredepdir = tmpdir / 'test'
+ declaredepdir.mkdir()
+ try:
+ tmpdir.chmod(0o444)
+ self.init(testdir, extra_args=f'-Ddir={declaredepdir}')
+ finally:
+ tmpdir.chmod(0o755)
+
+ def check_has_flag(self, compdb, src, argument):
+ for i in compdb:
+ if src in i['file']:
+ self.assertIn(argument, i['command'])
+ return
+ self.assertTrue(False, f'Source {src} not found in compdb')
+
+ def test_persp_options(self):
+ if self.backend is not Backend.ninja:
+ raise SkipTest(f'{self.backend.name!r} backend can\'t install files')
+
+ testdir = os.path.join(self.unit_test_dir, '122 persp options')
+
+ with self.subTest('init'):
+ self.init(testdir, extra_args='-Doptimization=1')
+ compdb = self.get_compdb()
+ mainsrc = 'toplevel.c'
+ sub1src = 'sub1.c'
+ sub2src = 'sub2.c'
+ self.check_has_flag(compdb, mainsrc, '-O1')
+ self.check_has_flag(compdb, sub1src, '-O1')
+ self.check_has_flag(compdb, sub2src, '-O1')
+
+ # Set subproject option to O2
+ with self.subTest('set subproject option'):
+ self.setconf(['-Dround=2', '-D', 'sub2:optimization=3'])
+ compdb = self.get_compdb()
+ self.check_has_flag(compdb, mainsrc, '-O1')
+ self.check_has_flag(compdb, sub1src, '-O1')
+ self.check_has_flag(compdb, sub2src, '-O3')
+
+ # Change an already set override.
+ with self.subTest('change subproject option'):
+ self.setconf(['-Dround=3', '-D', 'sub2:optimization=2'])
+ compdb = self.get_compdb()
+ self.check_has_flag(compdb, mainsrc, '-O1')
+ self.check_has_flag(compdb, sub1src, '-O1')
+ self.check_has_flag(compdb, sub2src, '-O2')
+
+ # Set top level option to O3
+ with self.subTest('change main project option'):
+ self.setconf(['-Dround=4', '-D:optimization=3'])
+ compdb = self.get_compdb()
+ self.check_has_flag(compdb, mainsrc, '-O3')
+ self.check_has_flag(compdb, sub1src, '-O1')
+ self.check_has_flag(compdb, sub2src, '-O2')
+
+ # Unset subproject
+ with self.subTest('unset subproject option'):
+ self.setconf(['-Dround=5', '-U', 'sub2:optimization'])
+ compdb = self.get_compdb()
+ self.check_has_flag(compdb, mainsrc, '-O3')
+ self.check_has_flag(compdb, sub1src, '-O1')
+ self.check_has_flag(compdb, sub2src, '-O1')
+
+ # Set global value
+ with self.subTest('set global option'):
+ self.setconf(['-Dround=6', '-D', 'optimization=2'])
+ compdb = self.get_compdb()
+ self.check_has_flag(compdb, mainsrc, '-O3')
+ self.check_has_flag(compdb, sub1src, '-O2')
+ self.check_has_flag(compdb, sub2src, '-O2')
+
+ def test_sanitizers(self):
+ testdir = os.path.join(self.unit_test_dir, '127 sanitizers')
+
+ with self.subTest('no b_sanitize value'):
+ try:
+ out = self.init(testdir)
+ self.assertRegex(out, 'value *: *none')
+ finally:
+ self.wipe()
+
+ for value, expected in { '': 'none',
+ 'none': 'none',
+ 'address': 'address',
+ 'undefined,address': 'address,undefined',
+ 'address,undefined': 'address,undefined' }.items():
+ with self.subTest('b_sanitize=' + value):
+ try:
+ out = self.init(testdir, extra_args=['-Db_sanitize=' + value])
+ self.assertRegex(out, 'value *: *' + expected)
+ finally:
+ self.wipe()
diff --git a/unittests/machinefiletests.py b/unittests/machinefiletests.py
index ba9cb11530dd..b2839e6b289a 100644
--- a/unittests/machinefiletests.py
+++ b/unittests/machinefiletests.py
@@ -23,7 +23,7 @@
import mesonbuild.environment
import mesonbuild.coredata
import mesonbuild.modules.gnome
-
+from mesonbuild import mesonlib
from mesonbuild import machinefile
from mesonbuild.mesonlib import (
@@ -275,7 +275,12 @@ def cb(comp):
if not is_real_gnu_compiler(shutil.which('gcc')):
raise SkipTest('Only one compiler found, cannot test.')
return 'gcc', 'gcc'
- self.helper_for_compiler('objc', cb)
+ try:
+ self.helper_for_compiler('objc', cb)
+ except mesonlib.EnvironmentException as e:
+ if 'GCC was not built with support for objective-c' in str(e):
+ raise unittest.SkipTest("GCC doesn't support objective-c, test cannot run")
+ raise
@skip_if_not_language('objcpp')
@skip_if_env_set('OBJCXX')
@@ -288,7 +293,12 @@ def cb(comp):
if not is_real_gnu_compiler(shutil.which('g++')):
raise SkipTest('Only one compiler found, cannot test.')
return 'g++', 'gcc'
- self.helper_for_compiler('objcpp', cb)
+ try:
+ self.helper_for_compiler('objcpp', cb)
+ except mesonlib.EnvironmentException as e:
+ if 'GCC was not built with support for objective-c++' in str(e):
+ raise unittest.SkipTest("G++ doesn't support objective-c++, test cannot run")
+ raise
@skip_if_not_language('d')
@skip_if_env_set('DC')
@@ -536,7 +546,9 @@ def test_builtin_options_subprojects(self):
elif each['name'] == 'sub:default_library':
self.assertEqual(each['value'], 'static')
found += 1
- self.assertEqual(found, 4, 'Did not find all three sections')
+ # FIXME: check that the subproject option has been added
+ # into augments.
+ self.assertEqual(found, 2, 'Did not find all two sections')
def test_builtin_options_subprojects_overrides_buildfiles(self):
# If the buildfile says subproject(... default_library: shared), ensure that's overwritten
diff --git a/unittests/optiontests.py b/unittests/optiontests.py
new file mode 100644
index 000000000000..5758a2d5c8ac
--- /dev/null
+++ b/unittests/optiontests.py
@@ -0,0 +1,246 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2024 Meson project contributors
+
+from mesonbuild.options import *
+
+import unittest
+
+
+def num_options(store: OptionStore) -> int:
+ return len(store.options)
+
+
+class OptionTests(unittest.TestCase):
+
+ def test_basic(self):
+ optstore = OptionStore(False)
+ name = 'someoption'
+ default_value = 'somevalue'
+ new_value = 'new_value'
+ vo = UserStringOption(name, 'An option of some sort', default_value)
+ optstore.add_system_option(name, vo)
+ self.assertEqual(optstore.get_value_for(name), default_value)
+ optstore.set_option(OptionKey.from_string(name), new_value)
+ self.assertEqual(optstore.get_value_for(name), new_value)
+
+ def test_toplevel_project(self):
+ optstore = OptionStore(False)
+ name = 'someoption'
+ default_value = 'somevalue'
+ new_value = 'new_value'
+ k = OptionKey(name)
+ vo = UserStringOption(k.name, 'An option of some sort', default_value)
+ optstore.add_system_option(k.name, vo)
+ self.assertEqual(optstore.get_value_for(k), default_value)
+ optstore.initialize_from_top_level_project_call({OptionKey('someoption'): new_value}, {}, {})
+ self.assertEqual(optstore.get_value_for(k), new_value)
+
+ def test_machine_vs_project(self):
+ optstore = OptionStore(False)
+ name = 'backend'
+ default_value = 'ninja'
+ proj_value = 'xcode'
+ mfile_value = 'vs2010'
+ k = OptionKey(name)
+ prefix = UserStringOption('prefix', 'This is needed by OptionStore', '/usr')
+ optstore.add_system_option('prefix', prefix)
+ vo = UserStringOption(k.name, 'You know what this is', default_value)
+ optstore.add_system_option(k.name, vo)
+ self.assertEqual(optstore.get_value_for(k), default_value)
+ optstore.initialize_from_top_level_project_call({OptionKey(name): proj_value}, {},
+ {OptionKey(name): mfile_value})
+ self.assertEqual(optstore.get_value_for(k), mfile_value)
+
+ def test_subproject_system_option(self):
+ """Test that subproject system options get their default value from the global
+ option (e.g. "sub:b_lto" can be initialized from "b_lto")."""
+ optstore = OptionStore(False)
+ name = 'someoption'
+ default_value = 'somevalue'
+ new_value = 'new_value'
+ k = OptionKey(name)
+ subk = k.evolve(subproject='sub')
+ optstore.initialize_from_top_level_project_call({}, {}, {OptionKey(name): new_value})
+ vo = UserStringOption(k.name, 'An option of some sort', default_value)
+ optstore.add_system_option(subk, vo)
+ self.assertEqual(optstore.get_value_for(subk), new_value)
+
+ def test_parsing(self):
+ with self.subTest('subproject'):
+ s1 = OptionKey.from_string('sub:optname')
+ s1_expected = OptionKey('optname', 'sub', MachineChoice.HOST)
+ self.assertEqual(s1, s1_expected)
+ self.assertEqual(str(s1), 'sub:optname')
+
+ with self.subTest('plain name'):
+ s2 = OptionKey.from_string('optname')
+ s2_expected = OptionKey('optname', None, MachineChoice.HOST)
+ self.assertEqual(s2, s2_expected)
+ self.assertEqual(str(s2), 'optname')
+
+ with self.subTest('root project'):
+ s3 = OptionKey.from_string(':optname')
+ s3_expected = OptionKey('optname', '', MachineChoice.HOST)
+ self.assertEqual(s3, s3_expected)
+ self.assertEqual(str(s3), ':optname')
+
+ def test_subproject_for_system(self):
+ optstore = OptionStore(False)
+ name = 'someoption'
+ default_value = 'somevalue'
+ vo = UserStringOption(name, 'An option of some sort', default_value)
+ optstore.add_system_option(name, vo)
+ self.assertEqual(optstore.get_value_for(name, 'somesubproject'), default_value)
+
+ def test_reset(self):
+ optstore = OptionStore(False)
+ name = 'someoption'
+ original_value = 'original'
+ reset_value = 'reset'
+ vo = UserStringOption(name, 'An option set twice', original_value)
+ optstore.add_system_option(name, vo)
+ self.assertEqual(optstore.get_value_for(name), original_value)
+ self.assertEqual(num_options(optstore), 1)
+ vo2 = UserStringOption(name, 'An option set twice', reset_value)
+ optstore.add_system_option(name, vo2)
+ self.assertEqual(optstore.get_value_for(name), original_value)
+ self.assertEqual(num_options(optstore), 1)
+
+ def test_project_nonyielding(self):
+ optstore = OptionStore(False)
+ name = 'someoption'
+ top_value = 'top'
+ sub_value = 'sub'
+ vo = UserStringOption(name, 'A top level option', top_value, False)
+ optstore.add_project_option(OptionKey(name, ''), vo)
+ self.assertEqual(optstore.get_value_for(name, ''), top_value, False)
+ self.assertEqual(num_options(optstore), 1)
+ vo2 = UserStringOption(name, 'A subproject option', sub_value)
+ optstore.add_project_option(OptionKey(name, 'sub'), vo2)
+ self.assertEqual(optstore.get_value_for(name, ''), top_value)
+ self.assertEqual(optstore.get_value_for(name, 'sub'), sub_value)
+ self.assertEqual(num_options(optstore), 2)
+
+ def test_project_yielding(self):
+ optstore = OptionStore(False)
+ name = 'someoption'
+ top_value = 'top'
+ sub_value = 'sub'
+ vo = UserStringOption(name, 'A top level option', top_value)
+ optstore.add_project_option(OptionKey(name, ''), vo)
+ self.assertEqual(optstore.get_value_for(name, ''), top_value)
+ self.assertEqual(num_options(optstore), 1)
+ vo2 = UserStringOption(name, 'A subproject option', sub_value, True)
+ optstore.add_project_option(OptionKey(name, 'sub'), vo2)
+ self.assertEqual(optstore.get_value_for(name, ''), top_value)
+ self.assertEqual(optstore.get_value_for(name, 'sub'), top_value)
+ self.assertEqual(num_options(optstore), 2)
+
+ def test_project_yielding_not_defined_in_top_project(self):
+ optstore = OptionStore(False)
+ top_name = 'a_name'
+ top_value = 'top'
+ sub_name = 'different_name'
+ sub_value = 'sub'
+ vo = UserStringOption(top_name, 'A top level option', top_value)
+ optstore.add_project_option(OptionKey(top_name, ''), vo)
+ self.assertEqual(optstore.get_value_for(top_name, ''), top_value)
+ self.assertEqual(num_options(optstore), 1)
+ vo2 = UserStringOption(sub_name, 'A subproject option', sub_value, True)
+ optstore.add_project_option(OptionKey(sub_name, 'sub'), vo2)
+ self.assertEqual(optstore.get_value_for(top_name, ''), top_value)
+ self.assertEqual(optstore.get_value_for(sub_name, 'sub'), sub_value)
+ self.assertEqual(num_options(optstore), 2)
+
+ def test_augments(self):
+ optstore = OptionStore(False)
+ name = 'cpp_std'
+ sub_name = 'sub'
+ sub2_name = 'sub2'
+ top_value = 'c++11'
+ aug_value = 'c++23'
+
+ co = UserComboOption(name,
+ 'C++ language standard to use',
+ top_value,
+ choices=['c++98', 'c++11', 'c++14', 'c++17', 'c++20', 'c++23'])
+ optstore.add_system_option(name, co)
+ self.assertEqual(optstore.get_value_for(name), top_value)
+ self.assertEqual(optstore.get_value_for(name, sub_name), top_value)
+ self.assertEqual(optstore.get_value_for(name, sub2_name), top_value)
+
+ # First augment a subproject
+ with self.subTest('set subproject override'):
+ optstore.set_from_configure_command([f'{sub_name}:{name}={aug_value}'], [])
+ self.assertEqual(optstore.get_value_for(name), top_value)
+ self.assertEqual(optstore.get_value_for(name, sub_name), aug_value)
+ self.assertEqual(optstore.get_value_for(name, sub2_name), top_value)
+
+ with self.subTest('unset subproject override'):
+ optstore.set_from_configure_command([], [f'{sub_name}:{name}'])
+ self.assertEqual(optstore.get_value_for(name), top_value)
+ self.assertEqual(optstore.get_value_for(name, sub_name), top_value)
+ self.assertEqual(optstore.get_value_for(name, sub2_name), top_value)
+
+ # And now augment the top level option
+ optstore.set_from_configure_command([f':{name}={aug_value}'], [])
+ self.assertEqual(optstore.get_value_for(name, None), top_value)
+ self.assertEqual(optstore.get_value_for(name, ''), aug_value)
+ self.assertEqual(optstore.get_value_for(name, sub_name), top_value)
+ self.assertEqual(optstore.get_value_for(name, sub2_name), top_value)
+
+ optstore.set_from_configure_command([], [f':{name}'])
+ self.assertEqual(optstore.get_value_for(name), top_value)
+ self.assertEqual(optstore.get_value_for(name, sub_name), top_value)
+ self.assertEqual(optstore.get_value_for(name, sub2_name), top_value)
+
+ def test_augment_set_sub(self):
+ optstore = OptionStore(False)
+ name = 'cpp_std'
+ sub_name = 'sub'
+ sub2_name = 'sub2'
+ top_value = 'c++11'
+ aug_value = 'c++23'
+ set_value = 'c++20'
+
+ co = UserComboOption(name,
+ 'C++ language standard to use',
+ top_value,
+ choices=['c++98', 'c++11', 'c++14', 'c++17', 'c++20', 'c++23'],
+ )
+ optstore.add_system_option(name, co)
+ optstore.set_from_configure_command([f'{sub_name}:{name}={aug_value}'], [])
+ optstore.set_from_configure_command([f'{sub_name}:{name}={set_value}'], [])
+ self.assertEqual(optstore.get_value_for(name), top_value)
+ self.assertEqual(optstore.get_value_for(name, sub_name), set_value)
+
+ def test_b_default(self):
+ optstore = OptionStore(False)
+ value = optstore.get_default_for_b_option(OptionKey('b_vscrt'))
+ self.assertEqual(value, 'from_buildtype')
+
+ def test_b_nonexistent(self):
+ optstore = OptionStore(False)
+ assert optstore.accept_as_pending_option(OptionKey('b_ndebug'))
+ assert not optstore.accept_as_pending_option(OptionKey('b_whatever'))
+
+ def test_reconfigure_b_nonexistent(self):
+ optstore = OptionStore(False)
+ optstore.set_from_configure_command(['b_ndebug=true'], [])
+
+ def test_subproject_nonexistent(self):
+ optstore = OptionStore(False)
+ subprojects = {'found'}
+ assert not optstore.accept_as_pending_option(OptionKey('foo', subproject='found'), subprojects)
+ assert optstore.accept_as_pending_option(OptionKey('foo', subproject='whatisthis'), subprojects)
+
+ def test_deprecated_nonstring_value(self):
+ # TODO: add a lot more deprecated option tests
+ optstore = OptionStore(False)
+ name = 'deprecated'
+ do = UserStringOption(name, 'An option with some deprecation', '0',
+ deprecated={'true': '1'})
+ optstore.add_system_option(name, do)
+ optstore.set_option(OptionKey(name), True)
+ value = optstore.get_value(name)
+ self.assertEqual(value, '1')
diff --git a/unittests/platformagnostictests.py b/unittests/platformagnostictests.py
index 228c04d4b68e..75071d9da55d 100644
--- a/unittests/platformagnostictests.py
+++ b/unittests/platformagnostictests.py
@@ -1,11 +1,12 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2021 The Meson development team
-# Copyright © 2024 Intel Corporation
+# Copyright © 2024-2025 Intel Corporation
from __future__ import annotations
import json
import os
import pickle
+import subprocess
import tempfile
import subprocess
import textwrap
@@ -36,7 +37,7 @@ def test_relative_find_program(self):
self.init(testdir, workdir=testdir)
def test_invalid_option_names(self):
- store = OptionStore()
+ store = OptionStore(False)
interp = OptionInterpreter(store, '')
def write_file(code: str):
@@ -70,7 +71,7 @@ def write_file(code: str):
def test_option_validation(self):
"""Test cases that are not catch by the optinterpreter itself."""
- store = OptionStore()
+ store = OptionStore(False)
interp = OptionInterpreter(store, '')
def write_file(code: str):
@@ -166,21 +167,25 @@ def test_change_backend(self):
self.init(testdir)
# no-op change works
- self.setconf(f'--backend=ninja')
- self.init(testdir, extra_args=['--reconfigure', '--backend=ninja'])
+ with self.subTest('set the option to the same value'):
+ self.setconf('--backend=ninja')
+ self.init(testdir, extra_args=['--reconfigure', '--backend=ninja'])
# Change backend option is not allowed
- with self.assertRaises(subprocess.CalledProcessError) as cm:
- self.setconf('-Dbackend=none')
- self.assertIn("ERROR: Tried modify read only option 'backend'", cm.exception.stdout)
+ with self.subTest('Changing the backend'):
+ with self.assertRaises(subprocess.CalledProcessError) as cm:
+ self.setconf('-Dbackend=none')
+ self.assertIn("ERROR: Tried to modify read only option 'backend'", cm.exception.stdout)
- # Reconfigure with a different backend is not allowed
- with self.assertRaises(subprocess.CalledProcessError) as cm:
- self.init(testdir, extra_args=['--reconfigure', '--backend=none'])
- self.assertIn("ERROR: Tried modify read only option 'backend'", cm.exception.stdout)
+ # Check that the new value was not written in the store.
+ with self.subTest('option is stored correctly'):
+ self.assertEqual(self.getconf('backend'), 'ninja')
# Wipe with a different backend is allowed
- self.init(testdir, extra_args=['--wipe', '--backend=none'])
+ with self.subTest('Changing the backend with wipe'):
+ self.init(testdir, extra_args=['--wipe', '--backend=none'])
+
+ self.assertEqual(self.getconf('backend'), 'none')
def test_validate_dirs(self):
testdir = os.path.join(self.common_test_dir, '1 trivial')
@@ -198,10 +203,10 @@ def test_validate_dirs(self):
# Reconfigure of not empty builddir should work
self.new_builddir()
Path(self.builddir, 'dummy').touch()
- self.init(testdir, extra_args=['--reconfigure'])
+ self.init(testdir, extra_args=['--reconfigure', '--buildtype=custom'])
# Setup a valid builddir should update options but not reconfigure
- self.assertEqual(self.getconf('buildtype'), 'debug')
+ self.assertEqual(self.getconf('buildtype'), 'custom')
o = self.init(testdir, extra_args=['-Dbuildtype=release'])
self.assertIn('Directory already configured', o)
self.assertNotIn('The Meson build system', o)
@@ -375,14 +380,14 @@ def test_format_empty_file(self) -> None:
for code in ('', '\n'):
formatted = formatter.format(code, Path())
self.assertEqual('\n', formatted)
-
+
def test_format_indent_comment_in_brackets(self) -> None:
"""Ensure comments in arrays and dicts are correctly indented"""
formatter = Formatter(None, use_editor_config=False, fetch_subdirs=False)
code = 'a = [\n # comment\n]\n'
formatted = formatter.format(code, Path())
self.assertEqual(code, formatted)
-
+
code = 'a = [\n # comment\n 1,\n]\n'
formatted = formatter.format(code, Path())
self.assertEqual(code, formatted)
@@ -390,7 +395,7 @@ def test_format_indent_comment_in_brackets(self) -> None:
code = 'a = {\n # comment\n}\n'
formatted = formatter.format(code, Path())
self.assertEqual(code, formatted)
-
+
def test_error_configuring_subdir(self):
testdir = os.path.join(self.common_test_dir, '152 index customtarget')
out = self.init(os.path.join(testdir, 'subdir'), allow_fail=True)
@@ -400,23 +405,29 @@ def test_error_configuring_subdir(self):
self.assertIn(f'Did you mean to run meson from the directory: "{testdir}"?', out)
def test_reconfigure_base_options(self):
- testdir = os.path.join(self.unit_test_dir, '122 reconfigure base options')
+ testdir = os.path.join(self.unit_test_dir, '123 reconfigure base options')
out = self.init(testdir, extra_args=['-Db_ndebug=true'])
self.assertIn('\nMessage: b_ndebug: true\n', out)
self.assertIn('\nMessage: c_std: c89\n', out)
out = self.init(testdir, extra_args=['--reconfigure', '-Db_ndebug=if-release', '-Dsub:b_ndebug=false', '-Dc_std=c99', '-Dsub:c_std=c11'])
- self.assertIn('\nMessage: b_ndebug: if-release\n', out)
- self.assertIn('\nMessage: c_std: c99\n', out)
- self.assertIn('\nsub| Message: b_ndebug: false\n', out)
- self.assertIn('\nsub| Message: c_std: c11\n', out)
+ self.assertIn('\n b_ndebug : if-release\n', out)
+ self.assertIn('\n c_std : c99\n', out)
+ self.assertIn('\n sub:b_ndebug: false\n', out)
+ self.assertIn('\n sub:c_std : c11\n', out)
def test_setup_with_unknown_option(self):
testdir = os.path.join(self.common_test_dir, '1 trivial')
- for option in ('not_an_option', 'b_not_an_option'):
- out = self.init(testdir, extra_args=['--wipe', f'-D{option}=1'], allow_fail=True)
- self.assertIn(f'ERROR: Unknown options: "{option}"', out)
+ with self.subTest('unknown user option'):
+ out = self.init(testdir, extra_args=['-Dnot_an_option=1'], allow_fail=True)
+ self.assertIn('ERROR: Unknown options: "not_an_option"', out)
+
+ with self.subTest('unknown builtin option'):
+ self.new_builddir()
+ out = self.init(testdir, extra_args=['-Db_not_an_option=1'], allow_fail=True)
+ self.assertIn('ERROR: Unknown options: "b_not_an_option"', out)
+
def test_configure_new_option(self) -> None:
"""Adding a new option without reconfiguring should work."""
@@ -440,7 +451,17 @@ def test_configure_removed_option(self) -> None:
f.write(line)
with self.assertRaises(subprocess.CalledProcessError) as e:
self.setconf('-Dneg_int_opt=0')
- self.assertIn('Unknown options: "neg_int_opt"', e.exception.stdout)
+ self.assertIn('Unknown options: ":neg_int_opt"', e.exception.stdout)
+
+ def test_reconfigure_option(self) -> None:
+ testdir = self.copy_srcdir(os.path.join(self.common_test_dir, '40 options'))
+ self.init(testdir)
+ self.assertEqual(self.getconf('neg_int_opt'), -3)
+ with self.assertRaises(subprocess.CalledProcessError) as e:
+ self.init(testdir, extra_args=['--reconfigure', '-Dneg_int_opt=0'])
+ self.assertEqual(self.getconf('neg_int_opt'), -3)
+ self.init(testdir, extra_args=['--reconfigure', '-Dneg_int_opt=-2'])
+ self.assertEqual(self.getconf('neg_int_opt'), -2)
def test_configure_option_changed_constraints(self) -> None:
"""Changing the constraints of an option without reconfiguring should work."""
@@ -480,7 +501,7 @@ def test_configure_options_file_deleted(self) -> None:
os.unlink(os.path.join(testdir, 'meson_options.txt'))
with self.assertRaises(subprocess.CalledProcessError) as e:
self.setconf('-Dneg_int_opt=0')
- self.assertIn('Unknown options: "neg_int_opt"', e.exception.stdout)
+ self.assertIn('Unknown options: ":neg_int_opt"', e.exception.stdout)
def test_configure_options_file_added(self) -> None:
"""A new project option file should be detected."""
@@ -508,3 +529,17 @@ def test_configure_new_option_subproject(self) -> None:
f.write("option('new_option', type : 'boolean', value : false)")
self.setconf('-Dsubproject:new_option=true')
self.assertEqual(self.getconf('subproject:new_option'), True)
+
+ def test_mtest_rebuild_deps(self):
+ testdir = os.path.join(self.unit_test_dir, '106 underspecified mtest')
+ self.init(testdir)
+
+ with self.assertRaises(subprocess.CalledProcessError):
+ self._run(self.mtest_command)
+ self.clean()
+
+ with self.assertRaises(subprocess.CalledProcessError):
+ self._run(self.mtest_command + ['runner-without-dep'])
+ self.clean()
+
+ self._run(self.mtest_command + ['runner-with-exedep'])
diff --git a/unittests/pythontests.py b/unittests/pythontests.py
index aaea906ea829..c4926c83c450 100644
--- a/unittests/pythontests.py
+++ b/unittests/pythontests.py
@@ -1,7 +1,7 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2016-2021 The Meson development team
-import glob, os, pathlib, shutil, subprocess, unittest
+import glob, os, pathlib, shutil, subprocess, sys, unittest
from run_tests import (
Backend
@@ -64,7 +64,12 @@ def _test_bytecompile(self, py2=False):
for file in files:
realfile = os.path.join(root, file)
if file.endswith('.py'):
- cached = glob.glob(realfile+'?') + glob.glob(os.path.join(root, '__pycache__', os.path.splitext(file)[0] + '*.pyc'))
+ # FIXME: relpath must be adjusted for windows path behaviour
+ if getattr(sys, "pycache_prefix", None) is not None:
+ root = os.path.join(sys.pycache_prefix, os.path.relpath(root, '/'))
+ else:
+ root = os.path.join(root, '__pycache__')
+ cached = glob.glob(realfile+'?') + glob.glob(os.path.join(root, os.path.splitext(file)[0] + '*.pyc'))
if py2 and cc.get_id() == 'msvc':
# MSVC python installs python2/python3 into the same directory
self.assertLength(cached, 4)
diff --git a/unittests/rewritetests.py b/unittests/rewritetests.py
index 7fad513f5271..57a6782dd383 100644
--- a/unittests/rewritetests.py
+++ b/unittests/rewritetests.py
@@ -407,3 +407,17 @@ def test_raw_printer_is_idempotent(self):
# Do it line per line because it is easier to debug like that
for orig_line, new_line in zip_longest(original_contents.splitlines(), new_contents.splitlines()):
self.assertEqual(orig_line, new_line)
+
+ def test_rewrite_prefix(self) -> None:
+ self.prime('7 prefix')
+ out = self.rewrite_raw(self.builddir, ['kwargs', 'info', 'project', '/'])
+ expected = {
+ 'kwargs': {
+ 'project#/': {
+ "default_options": [
+ 'prefix=/export/doocs'
+ ]
+ }
+ }
+ }
+ self.assertDictEqual(out, expected)
diff --git a/unittests/taptests.py b/unittests/taptests.py
index 26d96eafdec4..e91194cb564c 100644
--- a/unittests/taptests.py
+++ b/unittests/taptests.py
@@ -163,10 +163,57 @@ def test_one_test_late_plan(self):
self.assert_plan(events, num_tests=1, late=True)
self.assert_last(events)
+ def test_low_max_early_plan(self):
+ events = self.parse_tap('1..2\nok 1\nok 1')
+ self.assert_plan(events, num_tests=2, late=False)
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_error(events) # incorrect high test number
+ self.assert_last(events)
+
+ def test_low_max_late_plan(self):
+ events = self.parse_tap('ok 1\nok 1\n1..2')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_plan(events, num_tests=2, late=True)
+ self.assert_error(events) # incorrect high test number
+ self.assert_last(events)
+
+ def test_high_max_early_plan(self):
+ events = self.parse_tap('1..2\nok 2\nok 3')
+ self.assert_plan(events, num_tests=2, late=False)
+ self.assert_test(events, number=2, name='', result=TestResult.OK)
+ self.assert_error(events) # high id
+ self.assert_test(events, number=3, name='', result=TestResult.OK)
+ self.assert_error(events) # incorrect high test number
+ self.assert_last(events)
+
+ def test_high_max_late_plan(self):
+ events = self.parse_tap('ok 2\nok 3\n1..2')
+ self.assert_test(events, number=2, name='', result=TestResult.OK)
+ self.assert_test(events, number=3, name='', result=TestResult.OK)
+ self.assert_plan(events, num_tests=2, late=True)
+ self.assert_error(events)
+ self.assert_last(events)
+
def test_out_of_order(self):
+ events = self.parse_tap('1..2\nok 2\nok 1')
+ self.assert_plan(events, num_tests=2, late=False)
+ self.assert_test(events, number=2, name='', result=TestResult.OK)
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ def test_out_of_order_no_plan(self):
events = self.parse_tap('ok 2')
+ self.assert_test(events, number=2, name='', result=TestResult.OK)
self.assert_error(events)
+
+ def test_out_of_order_missing_numbers(self):
+ events = self.parse_tap('1..3\nok 2\nok\nok 1')
+ self.assert_plan(events, num_tests=3, late=False)
self.assert_test(events, number=2, name='', result=TestResult.OK)
+ self.assert_test(events, number=3, name='', result=TestResult.OK)
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_last(events)
def test_middle_plan(self):
@@ -184,7 +231,7 @@ def test_too_many_plans(self):
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_last(events)
- def test_too_many(self):
+ def test_too_many_late_plan(self):
events = self.parse_tap('ok 1\nnot ok 2\n1..1')
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_test(events, number=2, name='', result=TestResult.FAIL)
@@ -192,14 +239,16 @@ def test_too_many(self):
self.assert_error(events)
self.assert_last(events)
+ def test_too_many_early_plan(self):
events = self.parse_tap('1..1\nok 1\nnot ok 2')
self.assert_plan(events, num_tests=1, late=False)
self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_error(events) # test number too high
self.assert_test(events, number=2, name='', result=TestResult.FAIL)
- self.assert_error(events)
+ self.assert_error(events) # too many tests run
self.assert_last(events)
- def test_too_few(self):
+ def test_too_few_late_plan(self):
events = self.parse_tap('ok 1\nnot ok 2\n1..3')
self.assert_test(events, number=1, name='', result=TestResult.OK)
self.assert_test(events, number=2, name='', result=TestResult.FAIL)
@@ -207,6 +256,7 @@ def test_too_few(self):
self.assert_error(events)
self.assert_last(events)
+ def test_too_few_early_plan(self):
events = self.parse_tap('1..3\nok 1\nnot ok 2')
self.assert_plan(events, num_tests=3, late=False)
self.assert_test(events, number=1, name='', result=TestResult.OK)
diff --git a/unittests/windowstests.py b/unittests/windowstests.py
index 79114a04708f..e3fdcc185128 100644
--- a/unittests/windowstests.py
+++ b/unittests/windowstests.py
@@ -251,9 +251,15 @@ def test_genvslite(self):
env=current_env)
# Check this has actually built the appropriate exes
- output_debug = subprocess.check_output(str(os.path.join(self.builddir+'_debug', 'genvslite.exe')))
- self.assertEqual( output_debug, b'Debug\r\n' )
- output_release = subprocess.check_output(str(os.path.join(self.builddir+'_release', 'genvslite.exe')))
+ exe_path = str(os.path.join(self.builddir+'_debug', 'genvslite.exe'))
+ self.assertTrue(os.path.exists(exe_path))
+ rc = subprocess.run([exe_path], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ self.assertEqual(rc.returncode, 0, rc.stdout + rc.stderr)
+ output_debug = rc.stdout
+ self.assertEqual(output_debug, b'Debug\r\n' )
+ exe_path = str(os.path.join(self.builddir+'_release', 'genvslite.exe'))
+ self.assertTrue(os.path.exists(exe_path))
+ output_release = subprocess.check_output([exe_path])
self.assertEqual( output_release, b'Non-debug\r\n' )
finally: