diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml index 449504096a77..3b726315537a 100644 --- a/.github/workflows/macos.yml +++ b/.github/workflows/macos.yml @@ -96,7 +96,7 @@ jobs: find /opt/homebrew/Cellar/python* -name EXTERNALLY-MANAGED -print0 | xargs -0 rm -vf # use python3 from homebrew because it is a valid framework, unlike the actions one: # https://github.com/actions/setup-python/issues/58 - - run: brew install pkg-config ninja llvm qt@5 boost ldc hdf5 openmpi lapack scalapack sdl2 boost-python3 gtk-doc zstd ncurses objfw + - run: brew install pkg-config ninja llvm qt@5 boost ldc hdf5 openmpi lapack scalapack sdl2 boost-python3 gtk-doc zstd ncurses objfw libomp - run: | python3 -m pip install --upgrade setuptools python3 -m pip install --upgrade pip diff --git a/.github/workflows/os_comp.yml b/.github/workflows/os_comp.yml index 1d779842e27a..4fa4a87ed15d 100644 --- a/.github/workflows/os_comp.yml +++ b/.github/workflows/os_comp.yml @@ -26,6 +26,12 @@ on: - ".github/workflows/os_comp.yml" - "run*tests.py" +# make GHA actions use node16 which still works with bionic +# See https://github.blog/changelog/2024-03-07-github-actions-all-actions-will-run-on-node20-instead-of-node16-by-default/ +# Unclear how long this will work though +env: + ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true + permissions: contents: read diff --git a/.pylintrc b/.pylintrc index b457544a6226..64316fe6e70e 100644 --- a/.pylintrc +++ b/.pylintrc @@ -17,6 +17,7 @@ disable= cell-var-from-loop, consider-using-f-string, consider-using-with, + contextmanager-generator-missing-cleanup, cyclic-import, deprecated-decorator, duplicate-code, @@ -47,6 +48,7 @@ disable= not-an-iterable, not-callable, pointless-string-statement, + possibly-used-before-assignment, protected-access, raise-missing-from, redeclared-assigned-name, @@ -75,6 +77,7 @@ disable= unsubscriptable-object, unused-argument, unused-variable, + used-before-assignment, useless-super-delegation, wrong-import-order, wrong-import-position, diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 76440de7a8a7..ea511f33f94e 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -26,7 +26,7 @@ trigger: - 'test cases' - 'unittests' - 'azure-pipelines.yml' - - 'ci/azure-steps.yml' + - 'ci/run.ps1' - 'run_project_tests.py' - 'run_tests.py' - 'run_unittests.py' @@ -41,7 +41,7 @@ pr: - 'test cases' - 'unittests' - 'azure-pipelines.yml' - - 'ci/azure-steps.yml' + - 'ci/run.ps1' - 'run_project_tests.py' - 'run_tests.py' - 'run_unittests.py' diff --git a/ci/ciimage/build.py b/ci/ciimage/build.py index b355c47a78a9..b9d318158411 100755 --- a/ci/ciimage/build.py +++ b/ci/ciimage/build.py @@ -80,11 +80,6 @@ def gen_bashrc(self) -> None: fi ''' - if self.data_dir.name == 'gentoo': - out_data += ''' - source /etc/profile - ''' - out_file.write_text(out_data, encoding='utf-8') # make it executable diff --git a/ci/ciimage/cuda/install.sh b/ci/ciimage/cuda/install.sh index 7c79d28ec807..6c4fd3b096a6 100755 --- a/ci/ciimage/cuda/install.sh +++ b/ci/ciimage/cuda/install.sh @@ -18,4 +18,4 @@ install_minimal_python_packages # Manually remove cache to avoid GitHub space restrictions rm -rf /var/cache/pacman -echo "source /etc/profile.d/cuda.sh" >> /ci/env_vars.sh +echo "source /etc/profile" >> /ci/env_vars.sh diff --git a/ci/ciimage/gentoo/install.sh b/ci/ciimage/gentoo/install.sh index b2a697fcbc1e..8f7aa33f5d17 100755 --- a/ci/ciimage/gentoo/install.sh +++ b/ci/ciimage/gentoo/install.sh @@ -156,3 +156,5 @@ rm /usr/lib/python/EXTERNALLY-MANAGED 
python3 -m ensurepip install_python_packages python3 -m pip install "${base_python_pkgs[@]}" + +echo "source /etc/profile" >> /ci/env_vars.sh diff --git a/ci/run.ps1 b/ci/run.ps1 index 05bb6b69e216..596253fe2d62 100644 --- a/ci/run.ps1 +++ b/ci/run.ps1 @@ -8,21 +8,22 @@ if ($LastExitCode -ne 0) { $env:Path = ($env:Path.Split(';') | Where-Object { $_ -notmatch 'mingw|Strawberry|Chocolatey|PostgreSQL' }) -join ';' if ($env:arch -eq 'x64') { + rustup default 1.77 # Rust puts its shared stdlib in a secret place, but it is needed to run tests. - $env:Path += ";$HOME/.rustup/toolchains/stable-x86_64-pc-windows-msvc/bin" + $env:Path += ";$HOME/.rustup/toolchains/1.77-x86_64-pc-windows-msvc/bin" } elseif ($env:arch -eq 'x86') { # Switch to the x86 Rust toolchain - rustup default stable-i686-pc-windows-msvc - - # Also install clippy - rustup component add clippy + rustup default 1.77-i686-pc-windows-msvc # Rust puts its shared stdlib in a secret place, but it is needed to run tests. - $env:Path += ";$HOME/.rustup/toolchains/stable-i686-pc-windows-msvc/bin" + $env:Path += ";$HOME/.rustup/toolchains/1.77-i686-pc-windows-msvc/bin" # Need 32-bit Python for tests that need the Python dependency $env:Path = "C:\hostedtoolcache\windows\Python\3.7.9\x86;C:\hostedtoolcache\windows\Python\3.7.9\x86\Scripts;$env:Path" } +# Also install clippy +rustup component add clippy + # Set the CI env var for the meson test framework $env:CI = '1' diff --git a/data/shell-completions/bash/meson b/data/shell-completions/bash/meson index dc437f10fd66..88dc15ec3225 100644 --- a/data/shell-completions/bash/meson +++ b/data/shell-completions/bash/meson @@ -580,6 +580,7 @@ _meson-test() { quiet timeout-multiplier setup + max-lines test-args ) diff --git a/data/shell-completions/zsh/_meson b/data/shell-completions/zsh/_meson index 402539f1ba3a..7d6d89b7ef73 100644 --- a/data/shell-completions/zsh/_meson +++ b/data/shell-completions/zsh/_meson @@ -196,6 +196,7 @@ local -a meson_commands=( '(--quiet -q)'{'--quiet','-q'}'[produce less output to the terminal]' '(--timeout-multiplier -t)'{'--timeout-multiplier','-t'}'[a multiplier for test timeouts]:Python floating-point number: ' '--setup[which test setup to use]:test setup: ' + '--max-lines[Maximum number of lines to show from a long test log]:Python integer number: ' '--test-args[arguments to pass to the tests]: : ' '*:Meson tests:__meson_test_names' ) diff --git a/docs/markdown/Builtin-options.md b/docs/markdown/Builtin-options.md index b4039d646926..6adc4218bda1 100644 --- a/docs/markdown/Builtin-options.md +++ b/docs/markdown/Builtin-options.md @@ -408,7 +408,8 @@ interpreter directly, even if it is a venv. Setting to `venv` will instead use the paths for the virtualenv the python found installation comes from (or fail if it is not a virtualenv). Setting to `auto` will check if the found installation is a virtualenv, and use `venv` or `system` as appropriate (but -never `prefix`). This option is mutually exclusive with the `platlibdir`/`purelibdir`. +never `prefix`). Note that Conda environments are treated as `system`. +This option is mutually exclusive with the `platlibdir`/`purelibdir`. For backwards compatibility purposes, the default `install_env` is `prefix`. 
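To make the `install_env` behaviour documented in the hunk above concrete, here is a minimal sketch of how the option is typically selected on the command line; the build directory name is illustrative:

```console
$ meson setup builddir -Dpython.install_env=auto
$ meson configure builddir -Dpython.install_env=venv
```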
diff --git a/docs/markdown/Configuration.md b/docs/markdown/Configuration.md index b5875e55e6d9..48f071e6c9e4 100644 --- a/docs/markdown/Configuration.md +++ b/docs/markdown/Configuration.md @@ -39,7 +39,7 @@ use a single `configuration_data` object as many times as you like, but it becomes immutable after being passed to the `configure_file` function. That is, after it has been used once to generate output the `set` function becomes unusable and trying to call it causes an error. -*Since 1.5.0* Copy of immutable `configuration_data` is however mutable. +A copy of an immutable `configuration_data` is still immutable. For more complex configuration file generation Meson provides a second form. To use it, put a line like this in your configuration file. diff --git a/docs/markdown/FAQ.md b/docs/markdown/FAQ.md index ffc9e1718eb6..810e9da23e95 100644 --- a/docs/markdown/FAQ.md +++ b/docs/markdown/FAQ.md @@ -695,3 +695,16 @@ directory. It glob ignores ```"*"```, since all generated files should not be checked into git. Users of older versions of Meson may need to set up ignore files themselves. + +## How to add preprocessor defines to a target? + +Just add `-DFOO` to `c_args` or `cpp_args`. This works for all known compilers. + +```meson +mylib = library('mylib', 'mysource.c', c_args: ['-DFOO']) +``` + +Even though [MSVC documentation](https://learn.microsoft.com/en-us/cpp/build/reference/d-preprocessor-definitions) +uses `/D` for preprocessor defines, its [command-line syntax](https://learn.microsoft.com/en-us/cpp/build/reference/compiler-command-line-syntax) +accepts `-` instead of `/`. +It's not necessary to treat preprocessor defines specially in Meson ([GH-6269](https://github.com/mesonbuild/meson/issues/6269#issuecomment-560003922)). diff --git a/docs/markdown/Python-module.md b/docs/markdown/Python-module.md index 05ae57de233a..c02eed91d596 100644 --- a/docs/markdown/Python-module.md +++ b/docs/markdown/Python-module.md @@ -12,6 +12,15 @@ authors: This module provides support for finding and building extensions against python installations, be they python 2 or 3. +If you want to build and package Python extension modules using tools +compatible with [PEP-517](https://peps.python.org/pep-0517/), check out +[meson-python](https://mesonbuild.com/meson-python/index.html). + +If you are building Python extension modules against a Python interpreter +located in a venv or Conda environment, you probably want to set +`python.install_env=auto`; +see [Python module options](Builtin-options.md#python-module) for details. + *Added 0.46.0* ## Functions diff --git a/docs/markdown/Release-notes-for-1.5.0.md b/docs/markdown/Release-notes-for-1.5.0.md new file mode 100644 index 000000000000..7dfea9af2b13 --- /dev/null +++ b/docs/markdown/Release-notes-for-1.5.0.md @@ -0,0 +1,214 @@ +--- +title: Release 1.5.0 +short-description: Release notes for 1.5.0 +... + +# New features + +Meson 1.5.0 was released on 10 July 2024 +## Support for `bztar` in `meson dist` + +The `bztar` format is now supported in `meson dist`. This format is also known +as `bzip2`. + +## Cargo dependency names now include the API version + +Cargo dependency names are now in the format `<package_name>-<version>-rs`: +- `package_name` is defined in `[package] name = ...` section of the `Cargo.toml`. +- `version` is the API version deduced from `[package] version = ...` as follows: + * `x.y.z` -> 'x' + * `0.x.y` -> '0.x' + * `0.0.x` -> '0' + This makes it possible to have different dependencies for incompatible versions of the same + crate.
+- `-rs` suffix is added to distinguish from regular system dependencies, for + example `gstreamer-1.0` is a system pkg-config dependency and `gstreamer-0.22-rs` + is a Cargo dependency. + +That means the `.wrap` file should have `dependency_names = foo-1-rs` in its +`[provide]` section when `Cargo.toml` has package name `foo` and version `1.2`. + +This is a breaking change (Cargo subprojects are still experimental), previous +versions were using the `<package_name>-rs` format. + +## Added support for `Cargo.lock` file + +When a (sub)project has a `Cargo.lock` file at its root, it is loaded to provide +an automatic fallback for dependencies it defines, fetching code from +https://crates.io or git. This is identical to providing `subprojects/*.wrap`; +see [cargo wraps](Wrap-dependency-system-manual.md#cargo-wraps) for the dependency naming convention. + +## Meson now propagates its build type to CMake + +When the CMake build type variable, `CMAKE_BUILD_TYPE`, is not set via the +`add_cmake_defines` method of the [`cmake options` object](CMake-module.md#cmake-options-object), +it is inferred from the [Meson build type](Builtin-options.md#details-for-buildtype). +Build types of the two build systems do not match perfectly. The mapping from +Meson build type to CMake build type is as follows: + +- `debug` -> `Debug` +- `debugoptimized` -> `RelWithDebInfo` +- `release` -> `Release` +- `minsize` -> `MinSizeRel` + +No CMake build type is set for the `plain` Meson build type. The inferred CMake +build type overrides any `CMAKE_BUILD_TYPE` environment variable. + +## compiler.run() method is now available for all languages + +It used to be implemented only for C-like and D languages, but it is now available +for all languages. + +## dependencies created by compiler.find_library implement the `name()` method + +Previously, for a [[@dep]] that might be returned by either [[dependency]] or +[[compiler.find_library]], the method might or might not exist with no way +of telling. + +## New version_argument kwarg for find_program + +When finding an external program with `find_program`, the `version_argument` +can be used to override the default `--version` argument when trying to parse +the version of the program. + +For example, if the following is used: +```meson +foo = find_program('foo', version_argument: '-version') +``` + +meson will internally run `foo -version` when trying to find the version of `foo`. + +## Meson configure handles changes to options in more cases + +Meson configure now correctly handles updates to the options file without a full +reconfigure. This allows making a change to the `meson.options` or +`meson_options.txt` file without a reconfigure. + +For example, this now works: +```sh +meson setup builddir +git pull +meson configure builddir -Doption-added-by-pull=value +``` + +## New meson format command + +This command is similar to `muon fmt` and allows formatting a `meson.build` +document. + +## Added support for GCC's `null_terminated_string_arg` function attribute + +You can now check if a compiler supports the `null_terminated_string_arg` +function attribute via the `has_function_attribute()` method on the +[[@compiler]] object. + +```meson +cc = meson.get_compiler('c') + +if cc.has_function_attribute('null_terminated_string_arg') + # We have it...
+endif +``` + +## A new dependency for ObjFW is now supported + +For example, you can create a simple application written using ObjFW like this: + +```meson +project('SimpleApp', 'objc') + +objfw_dep = dependency('objfw', version: '>= 1.0') + +executable('SimpleApp', 'SimpleApp.m', + dependencies: [objfw_dep]) +``` + +Modules are also supported. A test case using ObjFWTest can be created like +this: + +```meson +project('Tests', 'objc') + +objfwtest_dep = dependency('objfw', version: '>= 1.1', modules: ['ObjFWTest']) + +executable('Tests', ['FooTest.m', 'BarTest.m'], + dependencies: [objfwtest_dep]) +``` + +## Support for indexed `@PLAINNAME@` and `@BASENAME@` + +In `custom_target()` and `configure_file()` with multiple inputs, +it is now possible to specify an index for `@PLAINNAME@` and `@BASENAME@` +macros in `output`: +``` +custom_target('target_name', + output: '@PLAINNAME0@.dl', + input: [dep1, dep2], + command: cmd) +``` + +## Required kwarg on more `compiler` methods + +The following `compiler` methods now support the `required` keyword argument: + +- `compiler.compiles()` +- `compiler.links()` +- `compiler.run()` + +```meson +cc.compiles(valid, name: 'valid', required : true) +cc.links(valid, name: 'valid', required : true) +cc.run(valid, name: 'valid', required : true) + +assert(not cc.compiles(valid, name: 'valid', required : opt)) +assert(not cc.links(valid, name: 'valid', required : opt)) +res = cc.run(valid, name: 'valid', required : opt) +assert(res.compiled()) +assert(res.returncode() == 0) +assert(res.stdout() == '') +assert(res.stderr() == '') +``` + +## The Meson test program supports a new "--interactive" argument + +`meson test --interactive` invokes tests with stdout, stdin and stderr +connected directly to the calling terminal. This can be useful when running +integration tests that run in containers or virtual machines which can spawn a +debug shell if a test fails. + +## meson test now sets the `MESON_TEST_ITERATION` environment variable + +`meson test` will now set the `MESON_TEST_ITERATION` environment variable to the +current iteration of the test. This will always be `1` unless `--repeat` is used +to run the same test multiple times. + +## The Meson test program supports a new "--max-lines" argument + +By default `meson test` only shows the last 100 lines of test output from tests +that produce large amounts of output. This default can now be changed with the +new `--max-lines` option. For example, `--max-lines=1000` will increase the +maximum number of log output lines from 100 to 1000. + +## Basic support for TI Arm Clang (tiarmclang) + +Meson now has basic support for TI's newer [Clang-based ARM toolchain](https://www.ti.com/tool/ARM-CGT). + +## Support for Texas Instruments C6000 C/C++ compiler + +Meson now supports the TI C6000 C/C++ compiler used for the C6000 CPU family. +The example cross file is available in `cross/ti-c6000.txt`. + +## Wayland stable protocols can be versioned + +The wayland module now accepts a version number for stable protocols. + +```meson +wl_mod = import('unstable-wayland') + +wl_mod.find_protocol( + 'linux-dmabuf', + state: 'stable', + version: 1 +) +``` + diff --git a/docs/markdown/Unit-tests.md b/docs/markdown/Unit-tests.md index 6fda0f5f6861..b5d3a1b81831 100644 --- a/docs/markdown/Unit-tests.md +++ b/docs/markdown/Unit-tests.md @@ -274,6 +274,14 @@ other useful information as the environmental variables. This is useful, for example, when you run the tests on Travis-CI, Jenkins and the like.
+By default, the output from tests will be limited to the last 100 lines. The +maximum number of lines to show can be configured with the `--max-lines` option +*(added 1.5.0)*: + +```console +$ meson test --max-lines=1000 testname +``` + **Timeout** In the test case options, the `timeout` option is specified in a number of seconds. diff --git a/docs/markdown/Users.md b/docs/markdown/Users.md index fe353a05977a..70060ab05247 100644 --- a/docs/markdown/Users.md +++ b/docs/markdown/Users.md @@ -52,7 +52,7 @@ topic](https://github.com/topics/meson). - [fwupd](https://github.com/hughsie/fwupd), a simple daemon to allow session software to update firmware - [GameMode](https://github.com/FeralInteractive/gamemode), a daemon/lib combo for Linux that allows games to request a set of optimisations be temporarily applied to the host OS - [Geary](https://wiki.gnome.org/Apps/Geary), an email application built around conversations, for the GNOME 3 desktop. - - [GIMP](https://gitlab.gnome.org/GNOME/gimp), an image manipulation program (experimental replacing autoconf) + - [GIMP](https://gitlab.gnome.org/GNOME/gimp), an image manipulation program (master branch) - [GLib](https://gitlab.gnome.org/GNOME/glib), cross-platform C library used by GTK+ and GStreamer - [Glorytun](https://github.com/angt/glorytun), a multipath UDP tunnel - [GNOME Boxes](https://gitlab.gnome.org/GNOME/gnome-boxes), a GNOME hypervisor @@ -168,18 +168,7 @@ format files - [ThorVG](https://www.thorvg.org/), vector-based scenes and animations library - [Tilix](https://github.com/gnunn1/tilix), a tiling terminal emulator for Linux using GTK+ 3 - [Tizonia](https://github.com/tizonia/tizonia-openmax-il), a command-line cloud music player for Linux with support for Spotify, Google Play Music, YouTube, SoundCloud, TuneIn, Plex servers and Chromecast devices - - [FSCL XTest](https://github.com/fossil-lib/tscl-xtest-c), a framework for testing C/C++ code - - [FSCL XMock](https://github.com/fossil-lib/tscl-xmock-c), a framework for mocking C/C++ code - - [FSCL XCore](https://github.com/fossil-lib/tscl-xcore-c), essintal compoments for C code - - [FSCL XTool](https://github.com/fossil-lib/tscl-xtool-c), essintal tool for low-level related task - - [FSCL XString](https://github.com/fossil-lib/tscl-xstring-c), string and char types in C - - [FSCL XToFu](https://github.com/fossil-lib/tscl-xtofu-c), a framework for generic types in C - - [FSCL XJellyfish](https://github.com/fossil-lib/tscl-xfish-c), a framework for AI development using JellyFish in C - - [FSCL XStructure](https://github.com/fossil-lib/tscl-xstructure-c), a framework for data structures in C - - [FSCL XAlgorithm](https://github.com/fossil-lib/tscl-xalgorithm-c), a framework for algorithms C - - [FSCL XPattern](https://github.com/fossil-lib/tscl-xpattern-c), a framework for design patterns C - - [FSCL XScience](https://github.com/fossil-lib/tscl-xscience-c), a framework for scientific projects in C - - [FSCL XCube](https://github.com/fossil-lib/tscl-xcube-c), a framework for creating a portable curses TUI in C + - [Fossil Logic](https://github.com/fossillogic), Fossil Logic is a cutting-edge software development company specializing in C/C++, Python, programming, Android development using Kotlin, and SQL solutions. 
- [UFJF-MLTK](https://github.com/mateus558/UFJF-Machine-Learning-Toolkit), A C++ cross-platform framework for machine learning algorithms development and testing - [Vala Language Server](https://github.com/benwaffle/vala-language-server), code intelligence engine for the Vala and Genie programming languages - [Valum](https://github.com/valum-framework/valum), a micro web framework written in Vala diff --git a/docs/markdown/Wrap-dependency-system-manual.md b/docs/markdown/Wrap-dependency-system-manual.md index 5f0b473e7a2b..3983d28771e9 100644 --- a/docs/markdown/Wrap-dependency-system-manual.md +++ b/docs/markdown/Wrap-dependency-system-manual.md @@ -377,6 +377,10 @@ Some naming conventions need to be respected: - The `extra_deps` variable is pre-defined and can be used to add extra dependencies. This is typically used as `extra_deps += dependency('foo')`. +Since *1.5.0*, Cargo wraps can also be provided with a `Cargo.lock` file at the root +of the (sub)project source tree. Meson will automatically load that file and convert +it into a series of wrap definitions. + ## Using wrapped projects Wraps provide a convenient way of obtaining a project into your diff --git a/docs/markdown/snippets/bztar_support.md b/docs/markdown/snippets/bztar_support.md deleted file mode 100644 index 3ee4a91add30..000000000000 --- a/docs/markdown/snippets/bztar_support.md +++ /dev/null @@ -1,4 +0,0 @@ -## Support for `bztar` in `meson dist` - -The `bztar` format is now supported in `meson dist`. This format is also known -as `bzip2`. diff --git a/docs/markdown/snippets/cargo_dep_name.md b/docs/markdown/snippets/cargo_dep_name.md deleted file mode 100644 index b769f2b9034f..000000000000 --- a/docs/markdown/snippets/cargo_dep_name.md +++ /dev/null @@ -1,19 +0,0 @@ -## Cargo dependencies names now include the API version - -Cargo dependencies names are now in the format `<package_name>-<version>-rs`: -- `package_name` is defined in `[package] name = ...` section of the `Cargo.toml`. -- `version` is the API version deduced from `[package] version = ...` as follow: - * `x.y.z` -> 'x' - * `0.x.y` -> '0.x' - * `0.0.x` -> '0' - It allows to make different dependencies for uncompatible versions of the same - crate. -- `-rs` suffix is added to distinguish from regular system dependencies, for - example `gstreamer-1.0` is a system pkg-config dependency and `gstreamer-0.22-rs` - is a Cargo dependency. - -That means the `.wrap` file should have `dependency_names = foo-1-rs` in their -`[provide]` section when `Cargo.toml` has package name `foo` and version `1.2`. - -This is a breaking change (Cargo subprojects are still experimental), previous -versions were using `<package_name>-rs` format. diff --git a/docs/markdown/snippets/cmake_build_type.md b/docs/markdown/snippets/cmake_build_type.md deleted file mode 100644 index af9e84dc8e85..000000000000 --- a/docs/markdown/snippets/cmake_build_type.md +++ /dev/null @@ -1,15 +0,0 @@ -## Meson now propagates its build type to CMake - -When the CMake build type variable, `CMAKE_BUILD_TYPE`, is not set via the -`add_cmake_defines` method of the [`cmake options` object](CMake-module.md#cmake-options-object), -it is inferred from the [Meson build type](Builtin-options.md#details-for-buildtype). -Build types of the two build systems do not match perfectly. The mapping from -Meson build type to CMake build type is as follows: - -- `debug` -> `Debug` -- `debugoptimized` -> `RelWithDebInfo` -- `release` -> `Release` -- `minsize` -> `MinSizeRel` - -No CMake build type is set for the `plain` Meson build type.
The inferred CMake -build type overrides any `CMAKE_BUILD_TYPE` environment variable. diff --git a/docs/markdown/snippets/compiler_run.md b/docs/markdown/snippets/compiler_run.md deleted file mode 100644 index f4b0847a5dc4..000000000000 --- a/docs/markdown/snippets/compiler_run.md +++ /dev/null @@ -1,4 +0,0 @@ -## compiler.run() method is now available for all languages - -It used to be only implemented for C-like and D languages, but it is now available -for all languages. diff --git a/docs/markdown/snippets/find_library_name.md b/docs/markdown/snippets/find_library_name.md deleted file mode 100644 index e7b0e1a5bab2..000000000000 --- a/docs/markdown/snippets/find_library_name.md +++ /dev/null @@ -1,5 +0,0 @@ -## dependencies created by compiler.find_library implement the `name()` method - -Previously, for a [[@dep]] that might be returned by either [[dependency]] or -[[compiler.find_library]], the method might or might not exist with no way -of telling. diff --git a/docs/markdown/snippets/meson_configure_options_changes.md b/docs/markdown/snippets/meson_configure_options_changes.md deleted file mode 100644 index c86792ceb52a..000000000000 --- a/docs/markdown/snippets/meson_configure_options_changes.md +++ /dev/null @@ -1,12 +0,0 @@ -## Meson configure handles changes to options in more cases - -Meson configure now correctly handles updates to the options file without a full -reconfigure. This allows making a change to the `meson.options` or -`meson_options.txt` file without a reconfigure. - -For example, this now works: -```sh -meson setup builddir -git pull -meson configure builddir -Doption-added-by-pull=value -``` diff --git a/docs/markdown/snippets/meson_format_cmd.md b/docs/markdown/snippets/meson_format_cmd.md deleted file mode 100644 index 390f15d581c7..000000000000 --- a/docs/markdown/snippets/meson_format_cmd.md +++ /dev/null @@ -1,4 +0,0 @@ -## New meson format command - -This command is similar to `muon fmt` and allows to format a `meson.build` -document. diff --git a/docs/markdown/snippets/null_terminated_string_arg.md b/docs/markdown/snippets/null_terminated_string_arg.md deleted file mode 100644 index 2ba1755758f8..000000000000 --- a/docs/markdown/snippets/null_terminated_string_arg.md +++ /dev/null @@ -1,13 +0,0 @@ -## Added support for GCC's `null_terminated_string_arg` function attribute - -You can now check if a compiler support the `null_terminated_string_arg` -function attribute via the `has_function_attribute()` method on the -[[@compiler]] object. - -```meson -cc = meson.get_compiler('c') - -if cc.has_function_attribute('null_terminated_string_arg') - # We have it... -endif -``` diff --git a/docs/markdown/snippets/objfw_dep.md b/docs/markdown/snippets/objfw_dep.md deleted file mode 100644 index e65da2885b4a..000000000000 --- a/docs/markdown/snippets/objfw_dep.md +++ /dev/null @@ -1,24 +0,0 @@ -## A new dependency for ObjFW is now supported - -For example, you can create a simple application written using ObjFW like this: - -```meson -project('SimpleApp', 'objc') - -objfw_dep = dependency('objfw', version: '>= 1.0') - -executable('SimpleApp', 'SimpleApp.m', - dependencies: [objfw_dep]) -``` - -Modules are also supported. 
A test case using ObjFWTest can be created like -this: - -```meson -project('Tests', 'objc') - -objfwtest_dep = dependency('objfw', version: '>= 1.1', modules: ['ObjFWTest']) - -executable('Tests', ['FooTest.m', 'BarTest.m'], - dependencies: [objfwtest_dep]) -``` diff --git a/docs/markdown/snippets/pln_bsn_support.md b/docs/markdown/snippets/pln_bsn_support.md deleted file mode 100644 index 394339f1499d..000000000000 --- a/docs/markdown/snippets/pln_bsn_support.md +++ /dev/null @@ -1,11 +0,0 @@ -## Support of indexed `@PLAINNAME@` and `@BASENAME@` - -In `custom_target()` and `configure_file()` with multiple inputs, -it is now possible to specify index for `@PLAINNAME@` and `@BASENAME@` -macros in `output`: -``` -custom_target('target_name', - output: '@PLAINNAME0@.dl', - input: [dep1, dep2], - command: cmd) -``` diff --git a/docs/markdown/snippets/requires_kwarg_on_more_compiler_methods.md b/docs/markdown/snippets/requires_kwarg_on_more_compiler_methods.md deleted file mode 100644 index 693313c31413..000000000000 --- a/docs/markdown/snippets/requires_kwarg_on_more_compiler_methods.md +++ /dev/null @@ -1,21 +0,0 @@ -## Required kwarg on more `compiler` methods - -The following `compiler` methods now support the `required` keyword argument: - -- `compiler.compiles()` -- `compiler.links()` -- `compiler.runs()` - -```meson -cc.compiles(valid, name: 'valid', required : true) -cc.links(valid, name: 'valid', required : true) -cc.run(valid, name: 'valid', required : true) - -assert(not cc.compiles(valid, name: 'valid', required : opt)) -assert(not cc.links(valid, name: 'valid', required : opt)) -res = cc.run(valid, name: 'valid', required : opt) -assert(res.compiled()) -assert(res.returncode() == 0) -assert(res.stdout() == '') -assert(res.stderr() == '') -``` diff --git a/docs/markdown/snippets/test_interactive.md b/docs/markdown/snippets/test_interactive.md deleted file mode 100644 index 907147fd9ca9..000000000000 --- a/docs/markdown/snippets/test_interactive.md +++ /dev/null @@ -1,6 +0,0 @@ -## The Meson test program supports a new "--interactive" argument - -`meson test --interactive` invokes tests with stdout, stdin and stderr -connected directly to the calling terminal. This can be useful when running -integration tests that run in containers or virtual machines which can spawn a -debug shell if a test fails. diff --git a/docs/markdown/snippets/test_iteration.md b/docs/markdown/snippets/test_iteration.md deleted file mode 100644 index 67daf278129d..000000000000 --- a/docs/markdown/snippets/test_iteration.md +++ /dev/null @@ -1,5 +0,0 @@ -## meson test now sets the `MESON_TEST_ITERATION` environment variable - -`meson test` will now set the `MESON_TEST_ITERATION` environment variable to the -current iteration of the test. This will always be `1` unless `--repeat` is used -to run the same test multiple times. diff --git a/docs/markdown/snippets/ti_armclang.md b/docs/markdown/snippets/ti_armclang.md deleted file mode 100644 index 7f0f912c0ed8..000000000000 --- a/docs/markdown/snippets/ti_armclang.md +++ /dev/null @@ -1,3 +0,0 @@ -## Basic support for TI Arm Clang (tiarmclang) - -Support for TI's newer [Clang-based ARM toolchain](https://www.ti.com/tool/ARM-CGT). 
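The `required` kwarg examples in the deleted snippet above use an `opt` variable without showing where it comes from; a minimal sketch of the implied feature-option pattern, assuming a hypothetical `fast-path` option declared in `meson.options`:

```meson
cc = meson.get_compiler('c')
valid = 'int main(void) { return 0; }'
# Hypothetical feature option; only its use with `required:` is the point here.
opt = get_option('fast-path')
if cc.compiles(valid, name: 'valid', required: opt)
  add_project_arguments('-DHAVE_FAST_PATH', language: 'c')
endif
```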
diff --git a/docs/markdown/snippets/ti_c6000_compiler_support.md b/docs/markdown/snippets/ti_c6000_compiler_support.md deleted file mode 100644 index 03533e6091cd..000000000000 --- a/docs/markdown/snippets/ti_c6000_compiler_support.md +++ /dev/null @@ -1,4 +0,0 @@ -## Support for Texas Instruments C6000 C/C++ compiler - -Meson now supports the TI C6000 C/C++ compiler use for the C6000 cpu family. -The example cross file is available in `cross/ti-c6000.txt`. diff --git a/docs/markdown/snippets/wayland_stable_prot_version.md b/docs/markdown/snippets/wayland_stable_prot_version.md deleted file mode 100644 index 78d0a50d5c89..000000000000 --- a/docs/markdown/snippets/wayland_stable_prot_version.md +++ /dev/null @@ -1,13 +0,0 @@ -## Wayland stable protocols can be versioned - -The wayland module now accepts a version number for stable protocols. - -```meson -wl_mod = import('unstable-wayland') - -wl_mod.find_protocol( - 'linux-dmabuf', - state: 'stable' - version: 1 -) -``` diff --git a/docs/sitemap.txt b/docs/sitemap.txt index 858eb38c0101..218d1a634230 100644 --- a/docs/sitemap.txt +++ b/docs/sitemap.txt @@ -89,6 +89,7 @@ index.md Wrap-best-practices-and-tips.md Shipping-prebuilt-binaries-as-wraps.md Release-notes.md + Release-notes-for-1.5.0.md Release-notes-for-1.4.0.md Release-notes-for-1.3.0.md Release-notes-for-1.2.0.md diff --git a/docs/yaml/functions/find_program.yaml b/docs/yaml/functions/find_program.yaml index 4a17e8637364..1899941ab0e8 100644 --- a/docs/yaml/functions/find_program.yaml +++ b/docs/yaml/functions/find_program.yaml @@ -102,13 +102,20 @@ kwargs: since: 0.52.0 description: | Specifies the required version, see - [[dependency]] for argument format. The version of the program + [[dependency]] for argument format. By default, the version of the program is determined by running `program_name --version` command. If stdout is empty it fallbacks to stderr. If the output contains more text than simply a version number, only the first occurrence of numbers separated by dots is kept. If the output is more complicated than that, the version checking will have to be done manually using [[run_command]]. + version_argument: + type: str + since: 1.5.0 + description: | + Specifies the argument to pass when trying to find the version of the program. + If this is unspecified, `program_name --version` will be used. + dirs: type: list[str] since: 0.53.0 diff --git a/docs/yaml/objects/cfg_data.yaml b/docs/yaml/objects/cfg_data.yaml index 069cadbf6eb1..03abb170970e 100644 --- a/docs/yaml/objects/cfg_data.yaml +++ b/docs/yaml/objects/cfg_data.yaml @@ -1,14 +1,10 @@ name: cfg_data long_name: Configuration data object description: | - This object encapsulates configuration values to be used for generating - configuration files. A more in-depth description can be found in the - [the configuration page](Configuration.md). - - This object becomes immutable after first use. This means that - calling set() or merge_from() will cause an error if this object has - already been used in any function arguments. However, assignment creates a - mutable copy. + This object encapsulates + configuration values to be used for generating configuration files. A + more in-depth description can be found in the [the configuration wiki + page](Configuration.md). methods: - name: set diff --git a/docs/yaml/objects/env.yaml b/docs/yaml/objects/env.yaml index 3b2e2a851b24..714da4fe422a 100644 --- a/docs/yaml/objects/env.yaml +++ b/docs/yaml/objects/env.yaml @@ -9,11 +9,6 @@ description: | on the same `varname`. 
Earlier Meson versions would warn and only the last operation took effect. - *Since 1.5.0* This object becomes immutable after first use. This means that - calling append(), prepend() or set() will cause a deprecation warning if this - object has already been used in any function arguments. However, assignment - creates a mutable copy. - example: | ```meson env = environment() @@ -23,14 +18,6 @@ example: | env.append('MY_PATH', '2') env.append('MY_PATH', '3') env.prepend('MY_PATH', '0') - - # Deprecated since 1.5.0 - run_command('script.py', env: env) - env.append('MY_PATH', '4') - - # Allowed and only env2 is modified - env2 = env - env2.append('MY_PATH', '4') ``` methods: diff --git a/man/meson.1 b/man/meson.1 index 0221faad13df..5c929bc1adf9 100644 --- a/man/meson.1 +++ b/man/meson.1 @@ -1,4 +1,4 @@ -.TH MESON "1" "March 2024" "meson 1.4.0" "User Commands" +.TH MESON "1" "September 2024" "meson 1.5.2" "User Commands" .SH NAME meson - a high productivity build system .SH DESCRIPTION @@ -83,6 +83,93 @@ To set values, use the \-D command line argument like this. .B meson configure \-Dopt1=value1 \-Dopt2=value2 +.SH The dist command + +.B meson dist +generates a release archive. + +.B meson dist [ +.I options +.B ] + +.SS "options:" +.TP +\fB\-h, \-\-help\fR +show this help message and exit + +.TP +\fB\-C WD\fR +directory to cd into before running + +.TP +\fB\-\-allow-dirty\fR +Allow even when repository contains uncommitted changes. + +.TP +\fB\-\-formats FORMATS\fR +Comma separated list of archive types to create. Supports xztar +(default), gztar, and zip. + +.TP +\fB\-\-include\-subprojects\fR +Include source code of subprojects that have been used for the build. + +.TP +\fB\-\-no\-tests\fR +Do not build and test generated packages. + +.SH The install command + +.B meson install +installs the project. + +.B meson install [ +.I options +.B ] + +.SS "options:" + +.TP +\fB\-h, \-\-help\fR +show this help message and exit + +.TP +\fB\-C WD\fR +directory to cd into before running + +.TP +\fB\-\-no-rebuild\fR +Do not rebuild before installing. + +.TP +\fB\-\-only\-changed\fR +Only overwrite files that are older than the copied file. + +.TP +\fB\-\-quiet\fR +Do not print every file that was installed. + +.TP +\fB\-\-destdir DESTDIR\fR +Sets or overrides DESTDIR environment. (Since 0.57.0) + +.TP +\fB\-\-dry\-run, \-n\fR +Doesn't actually install, but print logs. (Since 0.57.0) + +.TP +\fB\-\-skip\-subprojects [SKIP_SUBPROJECTS]\fR +Do not install files from given subprojects. (Since 0.58.0) + +.TP +\fB\-\-tags TAGS\fR +Install only targets having one of the given tags. (Since 0.60.0) + +.TP +\fB\-\-strip\fR +Strip targets even if strip option was not set during +configure. (Since 0.62.0) + .SH The introspect command Meson introspect is a command designed to make it simple to integrate with @@ -113,6 +200,68 @@ print all unit tests \fB\-\-help\fR print command line help +.SH The init command + +.B meson init +creates a new project + +.B meson init [ +.I options +.B ] [ +.I sourcefile... +.B ] + +.SS "positional arguments:" +.TP +sourcefile... +source files. default: all recognized files in current directory + +.SS "options:" +.TP +\fB\-h, \-\-help\fR +show this help message and exit + +.TP +\fB\-C WD\fR +directory to cd into before running + +.TP +\fB\-n NAME, \-\-name NAME\fR +project name. default: name of current directory + +.TP +\fB\-e EXECUTABLE, \-\-executable EXECUTABLE\fR +executable name. 
default: project name + +.TP +\fB\-d DEPS, \-\-deps DEPS\fR +dependencies, comma-separated + +.TP +\fB\-l {c,cpp,cs,cuda,d,fortran,java,objc,objcpp,rust,vala}, \ +\-\-language {c,cpp,cs,cuda,d,fortran,java,objc,objcpp,rust,vala}\fR +project language. default: autodetected based on source files + +.TP +\fB\-b, \-\-build +build after generation + +.TP +\fB\-\-builddir BUILDDIR\fR +directory for build + +.TP +\fB\-f, \-\-force\fR +force overwrite of existing files and directories. + +.TP +\fB\-\-type {executable,library}\fR +project type. default: executable based project + +.TP +\fB\-\-version VERSION\fR +project version. default: 0.1 + .SH The test command .B meson test @@ -214,6 +363,298 @@ show available versions of the specified project \fBstatus\fR show installed and available versions of currently used subprojects +.SH The subprojects command + +.B meson subprojects +is used to manage subprojects. + +.B meson subprojects [ +.I options +.B ] [ +.I command +.B ] + +.SS "options:" +.TP +\fB\-h, \-\-help\fR +show this help message and exit + +.SS "commands:" +.TP +\fBupdate\fR +Update all subprojects from wrap files + +.TP +\fBcheckout\fR +Checkout a branch (git only) + +.TP +\fBdownload\fR +Ensure subprojects are fetched, even if not in use. Already downloaded +subprojects are not modified. This can be used to pre-fetch all +subprojects and avoid downloads during configure. + +.TP +\fBforeach\fR +Execute a command in each subproject directory. + +.TP +\fBpurge\fR +Remove all wrap-based subproject artifacts + +.TP +\fBpackagefiles\fR +Manage the packagefiles overlay + +.SH The rewrite command + +.B meson rewrite +modifies the project definition. + +.B meson rewrite [ +.I options +.B ] [ +.I command +.B ] + +.SS "options:" + +.TP +\fB\-h, \-\-help\fR +show this help message and exit + +.TP +\fB\-s SRCDIR, \-\-sourcedir SRCDIR\fR +Path to source directory. + +.TP +\fB\-V, \-\-verbose\fR +Enable verbose output + +.TP +\fB\-S, \-\-skip\-errors\fR +Skip errors instead of aborting + +.SS "commands:" + +.TP +\fBtarget (tgt)\fR +Modify a target + +.TP +\fBkwargs\fR +Modify keyword arguments + +.TP +\fBdefault-options (def)\fR +Modify the project default options + +.TP +\fBcommand (cmd)\fR +Execute a JSON array of commands + +.SH The compile command + +.B meson compile +builds the project. + +.B meson compile [ +.I options +.B ] [ +.I TARGET... +.B ] + +.SS "positional arguments:" +.TP +\fBTARGET\fR +Targets to build. Target has the following format: +[PATH_TO_TARGET/]TARGET_NAME.TARGET_SUFFIX[:TARGET_TYPE]. + +.SS "options:" + +.TP +\fB\-h, \-\-help\fR +show this help message and exit + +.TP +\fB\-\-clean\fR +Clean the build directory. + +.TP +\fB\-C WD\fR +directory to cd into before running + +.TP +\fB\-j JOBS, \-\-jobs JOBS\fR +The number of worker jobs to run (if supported). If the value is less +than 1 the build program will guess. + +.TP +\fB\-l LOAD_AVERAGE, \-\-load-average LOAD_AVERAGE\fR +The system load average to try to maintain (if supported). + +.TP +\fB\-v, \-\-verbose\fR +Show more verbose output. + +.TP +\fB\-\-ninja\-args NINJA_ARGS\fR +Arguments to pass to `ninja` (applied only on `ninja` backend). + +.TP +\fB\-\-vs\-args VS_ARGS\fR +Arguments to pass to `msbuild` (applied only on `vs` backend). + +.TP +\fB\-\-xcode\-args XCODE_ARGS\fR +Arguments to pass to `xcodebuild` (applied only on `xcode` backend). + +.SH The devenv command + +.B meson devenv +runs commands in the developer environment. 
+ +.B meson devenv [ +.I options +.B ] [ +.I command +.B ] + +.SS "positional arguments:" + +.TP +\fBcommand\fR +Command to run in developer environment (default: interactive shell) + +.SS "options:" + +.TP +\fB\-h, \-\-help\fR +show this help message and exit + +.TP +\fB\-C BUILDDIR\fR +Path to build directory + +.TP +\fB\-\-workdir WORKDIR, \-w WORKDIR\fR +Directory to cd into before running (default: builddir, Since 1.0.0) + +.TP +\fB\-\-dump [DUMP]\fR +Only print required environment (Since 0.62.0) Takes an optional file +path (Since 1.1.0) + +.TP +\fB\-\-dump-format {sh,export,vscode}\fR +Format used with --dump (Since 1.1.0) + +.SH The env2mfile command + +.B meson env2mfile +converts the current environment to a cross or native file. + +.B meson env2mfile [ +.I options +.B ] + +.SS "options:" + +.TP +\fB\-h, \-\-help\fR +show this help message and exit + +.TP +\fB\-\-debarch DEBARCH\fR +The dpkg architecture to generate. + +.TP +\fB\-\-gccsuffix GCCSUFFIX\fR +A particular gcc version suffix if necessary. + +.TP +\fB\-o OUTFILE\fR +The output file. + +.TP +\fB\-\-cross\fR +Generate a cross compilation file. + +.TP +\fB\-\-native\fR +Generate a native compilation file. + +.TP +\fB\-\-system SYSTEM\fR +Define system for cross compilation. + +.TP +\fB\-\-subsystem SUBSYSTEM\fR +Define subsystem for cross compilation. + +.TP +\fB\-\-kernel KERNEL\fR +Define kernel for cross compilation. + +.TP +\fB\-\-cpu CPU\fR +Define cpu for cross compilation. + +.TP +\fB\-\-cpu-family CPU_FAMILY\fR +Define cpu family for cross compilation. + +.TP +\fB\-\-endian {big,little}\fR +Define endianness for cross compilation. + +.SH The format command + +.B meson format +formats a meson source file. + +.B meson format [ +.I options +.B ] [ +.I sources... +.B ] + +.SS "positional arguments:" + +.TP +\fBsources...\fR +meson source files + +.SS "options:" + +.TP +\fB-h, --help\fR +show this help message and exit + +.TP +\fB-q, --check-only\fR +exit with 1 if files would be modified by meson format + +.TP +\fB-i, --inplace\fR +format files in-place + +.TP +\fB-r, --recursive\fR +recurse subdirs (requires --check-only or --inplace option) + +.TP +\fB-c meson.format, --configuration meson.format\fR +read configuration from meson.format + +.TP +\fB-e, --editor-config\fR +try to read configuration from .editorconfig + +.TP +\fB-o OUTPUT, --output OUTPUT\fR +output file (implies having exactly one input) + .SH EXIT STATUS .TP diff --git a/mesonbuild/_typing.py b/mesonbuild/_typing.py index 05ff2b3da948..8336c46bd06d 100644 --- a/mesonbuild/_typing.py +++ b/mesonbuild/_typing.py @@ -1,6 +1,6 @@ # SPDX-License-Identifier: Apache-2.0 # Copyright 2020 The Meson development team -# Copyright © 2020-2023 Intel Corporation +# Copyright © 2020-2024 Intel Corporation """Meson specific typing helpers. diff --git a/mesonbuild/ast/introspection.py b/mesonbuild/ast/introspection.py index 5bf7e051a168..11975109d480 100644 --- a/mesonbuild/ast/introspection.py +++ b/mesonbuild/ast/introspection.py @@ -10,7 +10,7 @@ import os import typing as T -from .. import compilers, environment, mesonlib, optinterpreter +from .. import compilers, environment, mesonlib, optinterpreter, options from .. 
import coredata as cdata from ..build import Executable, Jar, SharedLibrary, SharedModule, StaticLibrary from ..compilers import detect_compiler_for @@ -145,13 +145,13 @@ def do_subproject(self, dirname: SubProject) -> None: subi.project_data['name'] = dirname self.project_data['subprojects'] += [subi.project_data] except (mesonlib.MesonException, RuntimeError): - return + pass def func_add_languages(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> None: kwargs = self.flatten_kwargs(kwargs) required = kwargs.get('required', True) - assert isinstance(required, (bool, cdata.UserFeatureOption)), 'for mypy' - if isinstance(required, cdata.UserFeatureOption): + assert isinstance(required, (bool, options.UserFeatureOption)), 'for mypy' + if isinstance(required, options.UserFeatureOption): required = required.is_enabled() if 'native' in kwargs: native = kwargs.get('native', False) @@ -182,7 +182,7 @@ def _add_languages(self, raw_langs: T.List[TYPE_var], required: bool, for_machin if self.subproject: options = {} for k in comp.get_options(): - v = copy.copy(self.coredata.options[k]) + v = copy.copy(self.coredata.optstore.get_value_object(k)) k = k.evolve(subproject=self.subproject) options[k] = v self.coredata.add_compiler_options(options, lang, for_machine, self.environment, self.subproject) diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index 740f349e4433..4c7faa5aa380 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -568,7 +568,8 @@ def get_executable_serialisation( else: extra_paths = [] - if self.environment.need_exe_wrapper(exe_for_machine): + is_cross_built = not self.environment.machines.matches_build_machine(exe_for_machine) + if is_cross_built and self.environment.need_exe_wrapper(): if not self.environment.has_exe_wrapper(): msg = 'An exe_wrapper is needed but was not found. Please define one ' \ 'in cross file and check the command and/or add it to PATH.' @@ -627,7 +628,7 @@ def as_meson_exe_cmdline(self, exe: T.Union[str, mesonlib.File, build.BuildTarge # It's also overridden for a few conditions that can't be handled # inside a command line - can_use_env = not force_serialize + can_use_env = env.can_use_env and not force_serialize force_serialize = force_serialize or bool(reasons) if capture: @@ -962,7 +963,7 @@ def create_msvc_pch_implementation(self, target: build.BuildTarget, lang: str, p def target_uses_pch(self, target: build.BuildTarget) -> bool: try: return T.cast('bool', target.get_option(OptionKey('b_pch'))) - except KeyError: + except (KeyError, AttributeError): return False @staticmethod @@ -1169,7 +1170,7 @@ def extract_dll_paths(cls, target: build.BuildTarget) -> T.Set[str]: def determine_windows_extra_paths( self, target: T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex, programs.ExternalProgram, mesonlib.File, str], - extra_bdeps: T.Sequence[T.Union[build.BuildTarget, build.CustomTarget]]) -> T.List[str]: + extra_bdeps: T.Sequence[T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex]]) -> T.List[str]: """On Windows there is no such thing as an rpath. 
We must determine all locations of DLLs that this exe @@ -1229,7 +1230,7 @@ def create_test_serialisation(self, tests: T.List['Test']) -> T.List[TestSeriali exe_wrapper = self.environment.get_exe_wrapper() machine = self.environment.machines[exe.for_machine] if machine.is_windows() or machine.is_cygwin(): - extra_bdeps: T.List[T.Union[build.BuildTarget, build.CustomTarget]] = [] + extra_bdeps: T.List[T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex]] = [] if isinstance(exe, build.CustomTarget): extra_bdeps = list(exe.get_transitive_build_target_deps()) extra_paths = self.determine_windows_extra_paths(exe, extra_bdeps) @@ -1263,12 +1264,16 @@ def create_test_serialisation(self, tests: T.List['Test']) -> T.List[TestSeriali t_env = copy.deepcopy(t.env) if not machine.is_windows() and not machine.is_cygwin() and not machine.is_darwin(): - ld_lib_path: T.Set[str] = set() + ld_lib_path_libs: T.Set[build.SharedLibrary] = set() for d in depends: if isinstance(d, build.BuildTarget): for l in d.get_all_link_deps(): if isinstance(l, build.SharedLibrary): - ld_lib_path.add(os.path.join(self.environment.get_build_dir(), l.get_subdir())) + ld_lib_path_libs.add(l) + + env_build_dir = self.environment.get_build_dir() + ld_lib_path: T.Set[str] = set(os.path.join(env_build_dir, l.get_subdir()) for l in ld_lib_path_libs) + if ld_lib_path: t_env.prepend('LD_LIBRARY_PATH', list(ld_lib_path), ':') @@ -1422,7 +1427,7 @@ def get_testlike_targets(self, benchmark: bool = False) -> T.OrderedDict[str, T. continue result[arg.get_id()] = arg for dep in t.depends: - assert isinstance(dep, (build.CustomTarget, build.BuildTarget)) + assert isinstance(dep, (build.CustomTarget, build.BuildTarget, build.CustomTargetIndex)) result[dep.get_id()] = dep return result @@ -1574,9 +1579,9 @@ def eval_custom_target_command( dfilename = os.path.join(outdir, target.depfile) i = i.replace('@DEPFILE@', dfilename) if '@PRIVATE_DIR@' in i: - if target.absolute_paths: - pdir = self.get_target_private_dir_abs(target) - else: + pdir = self.get_target_private_dir_abs(target) + os.makedirs(pdir, exist_ok=True) + if not target.absolute_paths: pdir = self.get_target_private_dir(target) i = i.replace('@PRIVATE_DIR@', pdir) else: diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 28f5d532fca8..9c6932884e4a 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -98,10 +98,11 @@ def get_rsp_threshold() -> int: # and that has a limit of 8k. limit = 8192 else: - # On Linux, ninja always passes the commandline as a single - # big string to /bin/sh, and the kernel limits the size of a - # single argument; see MAX_ARG_STRLEN - limit = 131072 + # Unix-like OSes usually have very large command line limits (on Linux, + # for example, this is limited by the kernel's MAX_ARG_STRLEN). However, + # some programs place much lower limits, notably Wine which enforces a + # 32k limit like Windows. Therefore, we limit the command line to 32k.
+ limit = 32768 # Be conservative limit = limit // 2 return int(os.environ.get('MESON_RSP_THRESHOLD', limit)) @@ -291,7 +292,7 @@ def length_estimate(self, infiles: str, outfiles: str, estimate = len(command) for m in re.finditer(r'(\${\w+}|\$\w+)?[^$]*', command): if m.start(1) != -1: - estimate -= m.end(1) - m.start(1) + 1 + estimate -= m.end(1) - m.start(1) chunk = m.group(1) if chunk[1] == '{': chunk = chunk[2:-1] @@ -502,13 +503,12 @@ def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Inter # hence we disable them if 'cuda' is enabled globally. See also # - https://github.com/mesonbuild/meson/pull/9453 # - https://github.com/mesonbuild/meson/issues/9479#issuecomment-953485040 - self._allow_thin_archives = PerMachine[bool]( - 'cuda' not in self.environment.coredata.compilers.build, - 'cuda' not in self.environment.coredata.compilers.host) if self.environment else PerMachine[bool](True, True) - if not self._allow_thin_archives.build: - mlog.debug('cuda enabled globally, disabling thin archives for build machine, since nvcc/nvlink cannot handle thin archives natively') - if not self._allow_thin_archives.host: - mlog.debug('cuda enabled globally, disabling thin archives for host machine, since nvcc/nvlink cannot handle thin archives natively') + self.allow_thin_archives = PerMachine[bool](True, True) + if self.environment: + for for_machine in MachineChoice: + if 'cuda' in self.environment.coredata.compilers[for_machine]: + mlog.debug('cuda enabled globally, disabling thin archives for {}, since nvcc/nvlink cannot handle thin archives natively'.format(for_machine)) + self.allow_thin_archives[for_machine] = False def create_phony_target(self, dummy_outfile: str, rulename: str, phony_infilename: str) -> NinjaBuildElement: ''' @@ -624,7 +624,7 @@ def generate(self, capture: bool = False, vslite_ctx: T.Optional[T.Dict] = None) outfile.write('# Do not edit by hand.\n\n') outfile.write('ninja_required_version = 1.8.2\n\n') - num_pools = self.environment.coredata.options[OptionKey('backend_max_links')].value + num_pools = self.environment.coredata.optstore.get_value('backend_max_links') if num_pools > 0: outfile.write(f'''pool link_pool depth = {num_pools} @@ -657,8 +657,8 @@ def generate(self, capture: bool = False, vslite_ctx: T.Optional[T.Dict] = None) self.generate_dist() mlog.log_timestamp("Dist generated") key = OptionKey('b_coverage') - if (key in self.environment.coredata.options and - self.environment.coredata.options[key].value): + if (key in self.environment.coredata.optstore and + self.environment.coredata.optstore.get_value(key)): gcovr_exe, gcovr_version, lcov_exe, lcov_version, genhtml_exe, llvm_cov_exe = environment.find_coverage_tools(self.environment.coredata) mlog.debug(f'Using {gcovr_exe} ({gcovr_version}), {lcov_exe} and {llvm_cov_exe} for code coverage') if gcovr_exe or (lcov_exe and genhtml_exe): @@ -700,10 +700,11 @@ def generate_rust_project_json(self) -> None: return with open(os.path.join(self.environment.get_build_dir(), 'rust-project.json'), 'w', encoding='utf-8') as f: + sysroot = self.environment.coredata.compilers.host['rust'].get_sysroot() json.dump( { - "sysroot_src": os.path.join(self.environment.coredata.compilers.host['rust'].get_sysroot(), - 'lib/rustlib/src/rust/library/'), + "sysroot": sysroot, + "sysroot_src": os.path.join(sysroot, 'lib/rustlib/src/rust/library/'), "crates": [c.to_json() for c in self.rust_crates.values()], }, f, indent=4) @@ -865,11 +866,8 @@ def create_target_linker_introspection(self, target: build.Target, 
linker: T.Uni tgt[lnk_hash] = lnk_block def generate_target(self, target): - try: - if isinstance(target, build.BuildTarget): - os.makedirs(self.get_target_private_dir_abs(target)) - except FileExistsError: - pass + if isinstance(target, build.BuildTarget): + os.makedirs(self.get_target_private_dir_abs(target), exist_ok=True) if isinstance(target, build.CustomTarget): self.generate_custom_target(target) if isinstance(target, build.RunTarget): @@ -1413,7 +1411,6 @@ def generate_jar_target(self, target: build.Jar): outname_rel = os.path.join(self.get_target_dir(target), fname) src_list = target.get_sources() resources = target.get_java_resources() - class_list = [] compiler = target.compilers['java'] c = 'c' m = 'm' @@ -1431,10 +1428,8 @@ def generate_jar_target(self, target: build.Jar): if rel_src.endswith('.java'): gen_src_list.append(raw_src) - compile_args = self.determine_single_java_compile_args(target, compiler) - for src in src_list + gen_src_list: - plain_class_path = self.generate_single_java_compile(src, target, compiler, compile_args) - class_list.append(plain_class_path) + compile_args = self.determine_java_compile_args(target, compiler) + class_list = self.generate_java_compile(src_list + gen_src_list, target, compiler, compile_args) class_dep_list = [os.path.join(self.get_target_private_dir(target), i) for i in class_list] manifest_path = os.path.join(self.get_target_private_dir(target), 'META-INF', 'MANIFEST.MF') manifest_fullpath = os.path.join(self.environment.get_build_dir(), manifest_path) @@ -1531,7 +1526,8 @@ def generate_cs_target(self, target: build.BuildTarget): self.generate_generator_list_rules(target) self.create_target_source_introspection(target, compiler, commands, rel_srcs, generated_rel_srcs) - def determine_single_java_compile_args(self, target, compiler): + def determine_java_compile_args(self, target, compiler): + args = [] args = self.generate_basic_compiler_args(target, compiler) args += target.get_java_args() args += compiler.get_output_args(self.get_target_private_dir(target)) @@ -1545,20 +1541,30 @@ def determine_single_java_compile_args(self, target, compiler): args += ['-sourcepath', sourcepath] return args - def generate_single_java_compile(self, src, target, compiler, args): + def generate_java_compile(self, srcs, target, compiler, args): deps = [os.path.join(self.get_target_dir(l), l.get_filename()) for l in target.link_targets] generated_sources = self.get_target_generated_sources(target) for rel_src in generated_sources.keys(): if rel_src.endswith('.java'): deps.append(rel_src) - rel_src = src.rel_to_builddir(self.build_to_src) - plain_class_path = src.fname[:-4] + 'class' - rel_obj = os.path.join(self.get_target_private_dir(target), plain_class_path) - element = NinjaBuildElement(self.all_outputs, rel_obj, self.compiler_to_rule_name(compiler), rel_src) + + rel_srcs = [] + plain_class_paths = [] + rel_objs = [] + for src in srcs: + rel_src = src.rel_to_builddir(self.build_to_src) + rel_srcs.append(rel_src) + + plain_class_path = src.fname[:-4] + 'class' + plain_class_paths.append(plain_class_path) + rel_obj = os.path.join(self.get_target_private_dir(target), plain_class_path) + rel_objs.append(rel_obj) + element = NinjaBuildElement(self.all_outputs, rel_objs, self.compiler_to_rule_name(compiler), rel_srcs) element.add_dep(deps) element.add_item('ARGS', args) + element.add_item('FOR_JAR', self.get_target_filename(target)) self.add_build(element) - return plain_class_path + return plain_class_paths def generate_java_link(self): rule = 
'java_LINKER' @@ -1984,7 +1990,7 @@ def generate_rust_target(self, target: build.BuildTarget) -> None: buildtype = target.get_option(OptionKey('buildtype')) crt = target.get_option(OptionKey('b_vscrt')) args += rustc.get_crt_link_args(crt, buildtype) - except KeyError: + except (KeyError, AttributeError): pass if mesonlib.version_compare(rustc.version, '>= 1.67.0'): @@ -2288,7 +2294,7 @@ def _rsp_options(self, tool: T.Union['Compiler', 'StaticLinker', 'DynamicLinker' return options def generate_static_link_rules(self): - num_pools = self.environment.coredata.options[OptionKey('backend_max_links')].value + num_pools = self.environment.coredata.optstore.get_value('backend_max_links') if 'java' in self.environment.coredata.compilers.host: self.generate_java_link() for for_machine in MachineChoice: @@ -2336,7 +2342,7 @@ def generate_static_link_rules(self): self.add_rule(NinjaRule(rule, cmdlist, args, description, **options, extra=pool)) def generate_dynamic_link_rules(self): - num_pools = self.environment.coredata.options[OptionKey('backend_max_links')].value + num_pools = self.environment.coredata.optstore.get_value('backend_max_links') for for_machine in MachineChoice: complist = self.environment.coredata.compilers[for_machine] for langname, compiler in complist.items(): @@ -2377,7 +2383,7 @@ def generate_dynamic_link_rules(self): def generate_java_compile_rule(self, compiler): rule = self.compiler_to_rule_name(compiler) command = compiler.get_exelist() + ['$ARGS', '$in'] - description = 'Compiling Java object $in' + description = 'Compiling Java sources for $FOR_JAR' self.add_rule(NinjaRule(rule, command, [], description)) def generate_cs_compile_rule(self, compiler: 'CsCompiler') -> None: @@ -3268,7 +3274,7 @@ def get_target_type_link_args(self, target, linker): if target.import_filename: commands += linker.gen_import_library_args(self.get_import_filename(target)) elif isinstance(target, build.StaticLibrary): - produce_thin_archive = self._allow_thin_archives[target.for_machine] and not target.should_install() + produce_thin_archive = self.allow_thin_archives[target.for_machine] and not target.should_install() commands += linker.get_std_link_args(self.environment, produce_thin_archive) else: raise RuntimeError('Unknown build target type.') @@ -3598,7 +3604,7 @@ def generate_gcov_clean(self) -> None: def get_user_option_args(self): cmds = [] - for (k, v) in self.environment.coredata.options.items(): + for k, v in self.environment.coredata.optstore.items(): if k.is_project(): cmds.append('-D' + str(k) + '=' + (v.value if isinstance(v.value, str) else str(v.value).lower())) # The order of these arguments must be the same between runs of Meson @@ -3727,8 +3733,8 @@ def generate_ending(self) -> None: if ctlist: elem.add_dep(self.generate_custom_target_clean(ctlist)) - if OptionKey('b_coverage') in self.environment.coredata.options and \ - self.environment.coredata.options[OptionKey('b_coverage')].value: + if OptionKey('b_coverage') in self.environment.coredata.optstore and \ + self.environment.coredata.optstore.get_value('b_coverage'): self.generate_gcov_clean() elem.add_dep('clean-gcda') elem.add_dep('clean-gcno') diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py index 91275c77e7b9..496e8ffed404 100644 --- a/mesonbuild/backend/vs2010backend.py +++ b/mesonbuild/backend/vs2010backend.py @@ -532,7 +532,7 @@ def generate_solution(self, sln_filename: str, projlist: T.List[Project]) -> Non replace_if_different(sln_filename, sln_filename_tmp) def 
generate_projects(self, vslite_ctx: dict = None) -> T.List[Project]: - startup_project = self.environment.coredata.options[OptionKey('backend_startup_project')].value + startup_project = self.environment.coredata.optstore.get_value('backend_startup_project') projlist: T.List[Project] = [] startup_idx = 0 for (i, (name, target)) in enumerate(self.build.targets.items()): @@ -627,6 +627,8 @@ def create_basic_project(self, target_name, *, target_platform = self.platform multi_config_buildtype_list = coredata.get_genvs_default_buildtype_list() if self.gen_lite else [self.buildtype] + if "debug" not in multi_config_buildtype_list: + multi_config_buildtype_list.append('debug') for buildtype in multi_config_buildtype_list: prjconf = ET.SubElement(confitems, 'ProjectConfiguration', {'Include': buildtype + '|' + target_platform}) diff --git a/mesonbuild/backend/xcodebackend.py b/mesonbuild/backend/xcodebackend.py index c801b5aae260..69a544b240a0 100644 --- a/mesonbuild/backend/xcodebackend.py +++ b/mesonbuild/backend/xcodebackend.py @@ -3,7 +3,7 @@ from __future__ import annotations -import functools, uuid, os, operator +import functools, uuid, os, operator, re import typing as T from . import backends @@ -58,6 +58,35 @@ } BOOL2XCODEBOOL = {True: 'YES', False: 'NO'} LINKABLE_EXTENSIONS = {'.o', '.a', '.obj', '.so', '.dylib'} +XCODEVERSIONS = {'1500': ('Xcode 15.0', 60), + '1400': ('Xcode 14.0', 56), + '1300': ('Xcode 13.0', 55), + '1200': ('Xcode 12.0', 54), + '1140': ('Xcode 11.4', 53), + '1100': ('Xcode 11.0', 52), + '1000': ('Xcode 10.0', 51), + '930': ('Xcode 9.3', 50), + '800': ('Xcode 8.0', 48), + '630': ('Xcode 6.3', 47), + '320': ('Xcode 3.2', 46), + '310': ('Xcode 3.1', 45) + } + +def autodetect_xcode_version() -> T.Tuple[str, int]: + try: + pc, stdout, stderr = mesonlib.Popen_safe(['xcodebuild', '-version']) + except FileNotFoundError: + raise MesonException('Could not detect Xcode. Please install it if you wish to use the Xcode backend.') + if pc.returncode != 0: + raise MesonException(f'An error occurred while detecting Xcode: {stderr}') + version = int(''.join(re.search(r'\d*\.\d*\.*\d*', stdout).group(0).split('.'))) + # If the version number does not have two decimal points, pretend it does. + if stdout.count('.') < 2: + version *= 10 + for v, r in XCODEVERSIONS.items(): + if int(v) <= version: + return r + raise MesonException('Your Xcode installation is too old and is not supported.') class FileTreeEntry: @@ -203,6 +232,7 @@ def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Inter self.arch = self.build.environment.machines.host.cpu if self.arch == 'aarch64': self.arch = 'arm64' + self.xcodeversion, self.objversion = autodetect_xcode_version() # In Xcode files are not accessed via their file names, but rather every one of them # gets an unique id. More precisely they get one unique id per target they are used # in. 
If you generate only one id per file and use them, compilation will work but the @@ -266,7 +296,8 @@ def generate(self, capture: bool = False, vslite_ctx: T.Optional[T.Dict] = None) self.build_targets = self.build.get_build_targets() self.custom_targets = self.build.get_custom_targets() self.generate_filemap() - self.generate_buildstylemap() + if self.objversion < 50: + self.generate_buildstylemap() self.generate_build_phase_map() self.generate_build_configuration_map() self.generate_build_configurationlist_map() @@ -298,9 +329,10 @@ def generate(self, capture: bool = False, vslite_ctx: T.Optional[T.Dict] = None) self.generate_pbx_build_rule(objects_dict) objects_dict.add_comment(PbxComment('End PBXBuildRule section')) objects_dict.add_comment(PbxComment('Begin PBXBuildStyle section')) - self.generate_pbx_build_style(objects_dict) - objects_dict.add_comment(PbxComment('End PBXBuildStyle section')) - objects_dict.add_comment(PbxComment('Begin PBXContainerItemProxy section')) + if self.objversion < 50: + self.generate_pbx_build_style(objects_dict) + objects_dict.add_comment(PbxComment('End PBXBuildStyle section')) + objects_dict.add_comment(PbxComment('Begin PBXContainerItemProxy section')) self.generate_pbx_container_item_proxy(objects_dict) objects_dict.add_comment(PbxComment('End PBXContainerItemProxy section')) objects_dict.add_comment(PbxComment('Begin PBXFileReference section')) @@ -728,8 +760,8 @@ def create_generator_shellphase(self, objects_dict, tname, generator_id) -> None odict.add_item('isa', 'PBXBuildFile') odict.add_item('fileRef', ref_id) + # This is skipped if Xcode 9 or above is installed, as PBXBuildStyle was removed on that version. def generate_pbx_build_style(self, objects_dict: PbxDict) -> None: - # FIXME: Xcode 9 and later does not uses PBXBuildStyle and it gets removed. Maybe we can remove this part. 
for name, idval in self.buildstylemap.items(): styledict = PbxDict() objects_dict.add_item(idval, styledict, name) @@ -1237,11 +1269,12 @@ def generate_pbx_project(self, objects_dict: PbxDict) -> None: attr_dict.add_item('BuildIndependentTargetsInParallel', 'YES') project_dict.add_item('buildConfigurationList', self.project_conflist, f'Build configuration list for PBXProject "{self.build.project_name}"') project_dict.add_item('buildSettings', PbxDict()) - style_arr = PbxArray() - project_dict.add_item('buildStyles', style_arr) - for name, idval in self.buildstylemap.items(): - style_arr.add_item(idval, name) - project_dict.add_item('compatibilityVersion', '"Xcode 3.2"') + if self.objversion < 50: + style_arr = PbxArray() + project_dict.add_item('buildStyles', style_arr) + for name, idval in self.buildstylemap.items(): + style_arr.add_item(idval, name) + project_dict.add_item('compatibilityVersion', f'"{self.xcodeversion}"') project_dict.add_item('hasScannedForEncodings', 0) project_dict.add_item('mainGroup', self.maingroup_id) project_dict.add_item('projectDirPath', '"' + self.environment.get_source_dir() + '"') @@ -1840,7 +1873,7 @@ def generate_xc_configurationList(self, objects_dict: PbxDict) -> None: def generate_prefix(self, pbxdict: PbxDict) -> PbxDict: pbxdict.add_item('archiveVersion', '1') pbxdict.add_item('classes', PbxDict()) - pbxdict.add_item('objectVersion', '46') + pbxdict.add_item('objectVersion', self.objversion) objects_dict = PbxDict() pbxdict.add_item('objects', objects_dict) diff --git a/mesonbuild/build.py b/mesonbuild/build.py index 743398757f72..02d2bb94df83 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -46,7 +46,6 @@ from .mesonlib import ExecutableSerialisation, FileMode, FileOrString from .modules import ModuleState from .mparser import BaseNode - from .wrap import WrapMode GeneratedTypes = T.Union['CustomTarget', 'CustomTargetIndex', 'GeneratedList'] LibTypes = T.Union['SharedLibrary', 'StaticLibrary', 'CustomTarget', 'CustomTargetIndex'] @@ -534,7 +533,7 @@ def __post_init__(self, overrides: T.Optional[T.Dict[OptionKey, str]]) -> None: for k, v in overrides.items()} else: ovr = {} - self.options = coredata.OptionsView(self.environment.coredata.options, self.subproject, ovr) + self.options = coredata.OptionsView(self.environment.coredata.optstore, self.subproject, ovr) # XXX: this should happen in the interpreter if has_path_sep(self.name): # Fix failing test 53 when this becomes an error. @@ -662,12 +661,10 @@ def set_option_overrides(self, option_overrides: T.Dict[OptionKey, str]) -> None def get_options(self) -> coredata.OptionsView: return self.options - def get_option(self, key: 'OptionKey') -> T.Union[str, int, bool, 'WrapMode']: - # We don't actually have wrapmode here to do an assert, so just do a - # cast, we know what's in coredata anyway. 
+ def get_option(self, key: 'OptionKey') -> T.Union[str, int, bool]: # TODO: if it's possible to annotate get_option or validate_option_value # in the future we might be able to remove the cast here - return T.cast('T.Union[str, int, bool, WrapMode]', self.options[key].value) + return T.cast('T.Union[str, int, bool]', self.options.get_value(key)) @staticmethod def parse_overrides(kwargs: T.Dict[str, T.Any]) -> T.Dict[OptionKey, str]: @@ -1245,8 +1242,8 @@ def _extract_pic_pie(self, kwargs: T.Dict[str, T.Any], arg: str, option: str) -> k = OptionKey(option) if kwargs.get(arg) is not None: val = T.cast('bool', kwargs[arg]) - elif k in self.environment.coredata.options: - val = self.environment.coredata.options[k].value + elif k in self.environment.coredata.optstore: + val = self.environment.coredata.optstore.get_value(k) else: val = False @@ -1283,10 +1280,10 @@ def get_dependencies(self) -> OrderedSet[BuildTargetTypes]: if t not in result: result.add(t) if isinstance(t, StaticLibrary): - t.get_dependencies_recurse(result) + t.get_dependencies_recurse(result, include_proc_macros = self.uses_rust()) return result - def get_dependencies_recurse(self, result: OrderedSet[BuildTargetTypes], include_internals: bool = True) -> None: + def get_dependencies_recurse(self, result: OrderedSet[BuildTargetTypes], include_internals: bool = True, include_proc_macros: bool = False) -> None: # self is always a static library because we don't need to pull dependencies # of shared libraries. If self is installed (not internal) it already # include objects extracted from all its internal dependencies so we can @@ -1295,12 +1292,14 @@ def get_dependencies_recurse(self, result: OrderedSet[BuildTargetTypes], include for t in self.link_targets: if t in result: continue + if not include_proc_macros and t.rust_crate_type == 'proc-macro': + continue if include_internals or not t.is_internal(): result.add(t) if isinstance(t, StaticLibrary): - t.get_dependencies_recurse(result, include_internals) + t.get_dependencies_recurse(result, include_internals, include_proc_macros) for t in self.link_whole_targets: - t.get_dependencies_recurse(result, include_internals) + t.get_dependencies_recurse(result, include_internals, include_proc_macros) def get_source_subdir(self): return self.subdir @@ -1950,8 +1949,8 @@ def __init__( compilers: T.Dict[str, 'Compiler'], kwargs): key = OptionKey('b_pie') - if 'pie' not in kwargs and key in environment.coredata.options: - kwargs['pie'] = environment.coredata.options[key].value + if 'pie' not in kwargs and key in environment.coredata.optstore: + kwargs['pie'] = environment.coredata.optstore.get_value(key) super().__init__(name, subdir, subproject, for_machine, sources, structured_sources, objects, environment, compilers, kwargs) self.win_subsystem = kwargs.get('win_subsystem') or 'console' diff --git a/mesonbuild/cargo/__init__.py b/mesonbuild/cargo/__init__.py index 0007b9d6469b..10cb0be103c0 100644 --- a/mesonbuild/cargo/__init__.py +++ b/mesonbuild/cargo/__init__.py @@ -1,5 +1,6 @@ __all__ = [ - 'interpret' + 'interpret', + 'load_wraps', ] -from .interpreter import interpret +from .interpreter import interpret, load_wraps diff --git a/mesonbuild/cargo/interpreter.py b/mesonbuild/cargo/interpreter.py index e1b0928978f8..029e5a1b8006 100644 --- a/mesonbuild/cargo/interpreter.py +++ b/mesonbuild/cargo/interpreter.py @@ -18,20 +18,22 @@ import os import shutil import collections +import urllib.parse import typing as T from . import builder from . 
import version from ..mesonlib import MesonException, Popen_safe, OptionKey -from .. import coredata +from .. import coredata, options, mlog +from ..wrap.wrap import PackageDefinition if T.TYPE_CHECKING: from types import ModuleType + from typing import Any from . import manifest from .. import mparser from ..environment import Environment - from ..coredata import KeyedOptionDictType # tomllib is present in python 3.11, before that it is a pypi module called tomli, # we try to import tomllib, then tomli, @@ -52,13 +54,17 @@ toml2json = shutil.which('toml2json') +class TomlImplementationMissing(MesonException): + pass + + def load_toml(filename: str) -> T.Dict[object, object]: if tomllib: with open(filename, 'rb') as f: raw = tomllib.load(f) else: if toml2json is None: - raise MesonException('Could not find an implementation of tomllib, nor toml2json') + raise TomlImplementationMissing('Could not find an implementation of tomllib, nor toml2json') p, out, err = Popen_safe([toml2json, filename]) if p.returncode != 0: @@ -700,7 +706,7 @@ def _create_lib(cargo: Manifest, build: builder.Builder, crate_type: manifest.CR ] -def interpret(subp_name: str, subdir: str, env: Environment) -> T.Tuple[mparser.CodeBlockNode, KeyedOptionDictType]: +def interpret(subp_name: str, subdir: str, env: Environment) -> T.Tuple[mparser.CodeBlockNode, dict[OptionKey, options.UserOption[Any]]]: # subp_name should be in the form "foo-0.1-rs" package_name = subp_name.rsplit('-', 2)[0] manifests = _load_manifests(os.path.join(env.source_dir, subdir)) @@ -712,11 +718,11 @@ def interpret(subp_name: str, subdir: str, env: Environment) -> T.Tuple[mparser. build = builder.Builder(filename) # Generate project options - options: T.Dict[OptionKey, coredata.UserOption] = {} + project_options: T.Dict[OptionKey, options.UserOption] = {} for feature in cargo.features: key = OptionKey(_option_name(feature), subproject=subp_name) enabled = feature == 'default' - options[key] = coredata.UserBooleanOption(key.name, f'Cargo {feature} feature', enabled) + project_options[key] = options.UserBooleanOption(key.name, f'Cargo {feature} feature', enabled) ast = _create_project(cargo, build) ast += [build.assign(build.function('import', [build.string('rust')]), 'rust')] @@ -730,4 +736,53 @@ def interpret(subp_name: str, subdir: str, env: Environment) -> T.Tuple[mparser. for crate_type in cargo.lib.crate_type: ast.extend(_create_lib(cargo, build, crate_type)) - return build.block(ast), options + return build.block(ast), project_options + + +def load_wraps(source_dir: str, subproject_dir: str) -> T.List[PackageDefinition]: + """ Convert Cargo.lock into a list of wraps """ + + wraps: T.List[PackageDefinition] = [] + filename = os.path.join(source_dir, 'Cargo.lock') + if os.path.exists(filename): + try: + cargolock = T.cast('manifest.CargoLock', load_toml(filename)) + except TomlImplementationMissing as e: + mlog.warning('Failed to load Cargo.lock:', str(e), fatal=False) + return wraps + for package in cargolock['package']: + name = package['name'] + version = package['version'] + subp_name = _dependency_name(name, _version_to_api(version)) + source = package.get('source') + if source is None: + # This is project's package, or one of its workspace members. 
+ pass + elif source == 'registry+https://github.com/rust-lang/crates.io-index': + checksum = package.get('checksum') + if checksum is None: + checksum = cargolock['metadata'][f'checksum {name} {version} ({source})'] + url = f'https://crates.io/api/v1/crates/{name}/{version}/download' + directory = f'{name}-{version}' + wraps.append(PackageDefinition.from_values(subp_name, subproject_dir, 'file', { + 'directory': directory, + 'source_url': url, + 'source_filename': f'{directory}.tar.gz', + 'source_hash': checksum, + 'method': 'cargo', + })) + elif source.startswith('git+'): + parts = urllib.parse.urlparse(source[4:]) + query = urllib.parse.parse_qs(parts.query) + branch = query['branch'][0] if 'branch' in query else '' + revision = parts.fragment or branch + url = urllib.parse.urlunparse(parts._replace(params='', query='', fragment='')) + wraps.append(PackageDefinition.from_values(subp_name, subproject_dir, 'git', { + 'directory': name, + 'url': url, + 'revision': revision, + 'method': 'cargo', + })) + else: + mlog.warning(f'Unsupported source URL in {filename}: {source}') + return wraps diff --git a/mesonbuild/cargo/manifest.py b/mesonbuild/cargo/manifest.py index e6192d03cd98..50c048991333 100644 --- a/mesonbuild/cargo/manifest.py +++ b/mesonbuild/cargo/manifest.py @@ -225,3 +225,21 @@ class VirtualManifest(TypedDict): """ workspace: Workspace + +class CargoLockPackage(TypedDict, total=False): + + """A description of a package in the Cargo.lock file format.""" + + name: str + version: str + source: str + checksum: str + + +class CargoLock(TypedDict, total=False): + + """A description of the Cargo.lock file format.""" + + version: str + package: T.List[CargoLockPackage] + metadata: T.Dict[str, str] diff --git a/mesonbuild/cmake/common.py b/mesonbuild/cmake/common.py index f117a9f6b562..ad4ec6b1a002 100644 --- a/mesonbuild/cmake/common.py +++ b/mesonbuild/cmake/common.py @@ -51,9 +51,9 @@ ] def cmake_is_debug(env: 'Environment') -> bool: - if OptionKey('b_vscrt') in env.coredata.options: + if OptionKey('b_vscrt') in env.coredata.optstore: is_debug = env.coredata.get_option(OptionKey('buildtype')) == 'debug' - if env.coredata.options[OptionKey('b_vscrt')].value in {'mdd', 'mtd'}: + if env.coredata.optstore.get_value('b_vscrt') in {'mdd', 'mtd'}: is_debug = True return is_debug else: diff --git a/mesonbuild/cmake/executor.py b/mesonbuild/cmake/executor.py index dd43cc04ac17..392063d58590 100644 --- a/mesonbuild/cmake/executor.py +++ b/mesonbuild/cmake/executor.py @@ -51,7 +51,7 @@ def __init__(self, environment: 'Environment', version: str, for_machine: Machin self.cmakebin = None return - self.prefix_paths = self.environment.coredata.options[OptionKey('cmake_prefix_path', machine=self.for_machine)].value + self.prefix_paths = self.environment.coredata.optstore.get_value(OptionKey('cmake_prefix_path', machine=self.for_machine)) if self.prefix_paths: self.extra_cmake_args += ['-DCMAKE_PREFIX_PATH={}'.format(';'.join(self.prefix_paths))] diff --git a/mesonbuild/cmake/interpreter.py b/mesonbuild/cmake/interpreter.py index 4c42dbb095f3..f67591f68b98 100644 --- a/mesonbuild/cmake/interpreter.py +++ b/mesonbuild/cmake/interpreter.py @@ -532,7 +532,7 @@ def _all_source_suffixes(self) -> 'ImmutableListProtocol[str]': @lru_cache(maxsize=None) def _all_lang_stds(self, lang: str) -> 'ImmutableListProtocol[str]': try: - res = self.env.coredata.options[OptionKey('std', machine=MachineChoice.BUILD, lang=lang)].choices + res = self.env.coredata.optstore.get_value_object(OptionKey('std', 
machine=MachineChoice.BUILD, lang=lang)).choices except KeyError: return [] diff --git a/mesonbuild/cmake/toolchain.py b/mesonbuild/cmake/toolchain.py index 1aad0bc3b5bc..89d5d84449e0 100644 --- a/mesonbuild/cmake/toolchain.py +++ b/mesonbuild/cmake/toolchain.py @@ -188,6 +188,8 @@ def make_abs(exe: str) -> str: defaults[prefix + 'COMPILER'] = exe_list if comp_obj.get_id() == 'clang-cl': defaults['CMAKE_LINKER'] = comp_obj.get_linker_exelist() + if lang.startswith('objc') and comp_obj.get_id().startswith('clang'): + defaults[f'{prefix}FLAGS'] = ['-D__STDC__=1'] return defaults diff --git a/mesonbuild/cmake/tracetargets.py b/mesonbuild/cmake/tracetargets.py index aee67ea595ce..5a9d35284e29 100644 --- a/mesonbuild/cmake/tracetargets.py +++ b/mesonbuild/cmake/tracetargets.py @@ -137,9 +137,9 @@ def resolve_cmake_trace_targets(target_name: str, elif 'IMPORTED_IMPLIB' in tgt.properties: res.libraries += [x for x in tgt.properties['IMPORTED_IMPLIB'] if x] elif f'IMPORTED_LOCATION_{cfg}' in tgt.properties: - res.libraries += [x for x in tgt.properties[f'IMPORTED_LOCATION_{cfg}'] if x] + targets += [x for x in tgt.properties[f'IMPORTED_LOCATION_{cfg}'] if x] elif 'IMPORTED_LOCATION' in tgt.properties: - res.libraries += [x for x in tgt.properties['IMPORTED_LOCATION'] if x] + targets += [x for x in tgt.properties['IMPORTED_LOCATION'] if x] if 'LINK_LIBRARIES' in tgt.properties: targets += [x for x in tgt.properties['LINK_LIBRARIES'] if x] diff --git a/mesonbuild/compilers/asm.py b/mesonbuild/compilers/asm.py index bfe436b0b256..e25f18d6746d 100644 --- a/mesonbuild/compilers/asm.py +++ b/mesonbuild/compilers/asm.py @@ -75,7 +75,7 @@ def get_output_args(self, outputname: str) -> T.List[str]: def unix_args_to_native(self, args: T.List[str]) -> T.List[str]: outargs: T.List[str] = [] for arg in args: - if arg == '-pthread': + if arg in {'-mms-bitfields', '-pthread'}: continue outargs.append(arg) return outargs diff --git a/mesonbuild/compilers/c.py b/mesonbuild/compilers/c.py index 7e2146111563..cbc1bea95d6e 100644 --- a/mesonbuild/compilers/c.py +++ b/mesonbuild/compilers/c.py @@ -6,10 +6,11 @@ import os.path import typing as T -from .. import coredata +from .. import options from .. 
import mlog -from ..mesonlib import MesonException, version_compare, OptionKey +from ..mesonlib import MesonException, version_compare from .c_function_attributes import C_FUNC_ATTRIBUTES +from .mixins.apple import AppleCompilerMixin from .mixins.clike import CLikeCompiler from .mixins.ccrx import CcrxCompiler from .mixins.xc16 import Xc16Compiler @@ -94,9 +95,9 @@ def has_header_symbol(self, hname: str, symbol: str, prefix: str, def get_options(self) -> 'MutableKeyedOptionDictType': opts = super().get_options() - key = OptionKey('std', machine=self.for_machine, lang=self.language) + key = self.form_langopt_key('std') opts.update({ - key: coredata.UserStdOption('C', _ALL_STDS), + key: options.UserStdOption('C', _ALL_STDS), }) return opts @@ -127,8 +128,9 @@ def get_options(self) -> 'MutableKeyedOptionDictType': stds += ['c2x'] if version_compare(self.version, self._C23_VERSION): stds += ['c23'] - std_opt = opts[OptionKey('std', machine=self.for_machine, lang=self.language)] - assert isinstance(std_opt, coredata.UserStdOption), 'for mypy' + key = self.form_langopt_key('std') + std_opt = opts[key] + assert isinstance(std_opt, options.UserStdOption), 'for mypy' std_opt.set_versions(stds, gnu=True) return opts @@ -154,8 +156,8 @@ def get_options(self) -> 'MutableKeyedOptionDictType': if self.info.is_windows() or self.info.is_cygwin(): self.update_options( opts, - self.create_option(coredata.UserArrayOption, - OptionKey('winlibs', machine=self.for_machine, lang=self.language), + self.create_option(options.UserArrayOption, + self.form_langopt_key('winlibs'), 'Standard Win libraries to link against', gnu_winlibs), ) @@ -163,15 +165,17 @@ def get_options(self) -> 'MutableKeyedOptionDictType': def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args = [] - std = options[OptionKey('std', machine=self.for_machine, lang=self.language)] - if std.value != 'none': - args.append('-std=' + std.value) + key = self.form_langopt_key('std') + std = options.get_value(key) + if std != 'none': + args.append('-std=' + std) return args def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: if self.info.is_windows() or self.info.is_cygwin(): # without a typedict mypy can't understand this. - libs = options[OptionKey('winlibs', machine=self.for_machine, lang=self.language)].value.copy() + key = self.form_langopt_key('winlibs') + libs = options.get_value(key).copy() assert isinstance(libs, list) for l in libs: assert isinstance(l, str) @@ -184,7 +188,7 @@ class ArmLtdClangCCompiler(ClangCCompiler): id = 'armltdclang' -class AppleClangCCompiler(ClangCCompiler): +class AppleClangCCompiler(AppleCompilerMixin, ClangCCompiler): """Handle the differences between Apple Clang and Vanilla Clang. @@ -201,6 +205,16 @@ class EmscriptenCCompiler(EmscriptenMixin, ClangCCompiler): id = 'emscripten' + # Emscripten uses different version numbers than Clang; `emcc -v` will show + # the Clang version number used as well (but `emcc --version` does not). + # See https://github.com/pyodide/pyodide/discussions/4762 for more on + # emcc <--> clang versions. Note that c17/c18/c2x are always available, since + # the lowest supported Emscripten version used a new-enough Clang version. 
+ _C17_VERSION = '>=1.38.35' + _C18_VERSION = '>=1.38.35' + _C2X_VERSION = '>=1.38.35' # 1.38.35 used Clang 9.0.0 + _C23_VERSION = '>=3.0.0' # 3.0.0 used Clang 18.0.0 + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, info: 'MachineInfo', linker: T.Optional['DynamicLinker'] = None, @@ -236,16 +250,18 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_ def get_options(self) -> 'MutableKeyedOptionDictType': opts = CCompiler.get_options(self) - std_opt = opts[OptionKey('std', machine=self.for_machine, lang=self.language)] - assert isinstance(std_opt, coredata.UserStdOption), 'for mypy' + key = self.form_langopt_key('std') + std_opt = opts[key] + assert isinstance(std_opt, options.UserStdOption), 'for mypy' std_opt.set_versions(['c90', 'c99', 'c11'], gnu=True) return opts def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args = [] - std = options[OptionKey('std', machine=self.for_machine, lang=self.language)] - if std.value != 'none': - args.append('-std=' + std.value) + key = self.form_langopt_key('std') + std = options.get_value(key) + if std != 'none': + args.append('-std=' + std) return args def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: @@ -286,14 +302,14 @@ def get_options(self) -> 'MutableKeyedOptionDictType': stds += ['c2x'] if version_compare(self.version, self._C23_VERSION): stds += ['c23'] - key = OptionKey('std', machine=self.for_machine, lang=self.language) + key = self.form_langopt_key('std') std_opt = opts[key] - assert isinstance(std_opt, coredata.UserStdOption), 'for mypy' + assert isinstance(std_opt, options.UserStdOption), 'for mypy' std_opt.set_versions(stds, gnu=True) if self.info.is_windows() or self.info.is_cygwin(): self.update_options( opts, - self.create_option(coredata.UserArrayOption, + self.create_option(options.UserArrayOption, key.evolve('winlibs'), 'Standard Win libraries to link against', gnu_winlibs), @@ -302,15 +318,17 @@ def get_options(self) -> 'MutableKeyedOptionDictType': def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args = [] - std = options[OptionKey('std', lang=self.language, machine=self.for_machine)] - if std.value != 'none': - args.append('-std=' + std.value) + key = self.form_langopt_key('std') + std = options.get_value(key) + if std != 'none': + args.append('-std=' + std) return args def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: if self.info.is_windows() or self.info.is_cygwin(): # without a typeddict mypy can't figure this out - libs: T.List[str] = options[OptionKey('winlibs', lang=self.language, machine=self.for_machine)].value.copy() + key = self.form_langopt_key('winlibs') + libs: T.List[str] = options.get_value(key).copy() assert isinstance(libs, list) for l in libs: assert isinstance(l, str) @@ -366,8 +384,9 @@ def get_options(self) -> 'MutableKeyedOptionDictType': stds += ['c90', 'c1x', 'gnu90', 'gnu1x', 'iso9899:2011'] if version_compare(self.version, '>=1.26.00'): stds += ['c17', 'c18', 'iso9899:2017', 'iso9899:2018', 'gnu17', 'gnu18'] - std_opt = opts[OptionKey('std', machine=self.for_machine, lang=self.language)] - assert isinstance(std_opt, coredata.UserStdOption), 'for mypy' + key = self.form_langopt_key('std') + std_opt = opts[key] + assert isinstance(std_opt, options.UserStdOption), 'for mypy' std_opt.set_versions(stds) return opts @@ -405,16 +424,18 @@ def get_options(self) -> 
'MutableKeyedOptionDictType': stds = ['c89', 'c99'] if version_compare(self.version, '>=16.0.0'): stds += ['c11'] - std_opt = opts[OptionKey('std', machine=self.for_machine, lang=self.language)] - assert isinstance(std_opt, coredata.UserStdOption), 'for mypy' + key = self.form_langopt_key('std') + std_opt = opts[key] + assert isinstance(std_opt, options.UserStdOption), 'for mypy' std_opt.set_versions(stds, gnu=True) return opts def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args = [] - std = options[OptionKey('std', machine=self.for_machine, lang=self.language)] - if std.value != 'none': - args.append('-std=' + std.value) + key = self.form_langopt_key('std') + std = options.get_value(key) + if std != 'none': + args.append('-std=' + std) return args @@ -431,8 +452,8 @@ def get_options(self) -> MutableKeyedOptionDictType: return self.update_options( super().get_options(), self.create_option( - coredata.UserArrayOption, - OptionKey('winlibs', machine=self.for_machine, lang=self.language), + options.UserArrayOption, + self.form_langopt_key('winlibs'), 'Windows libs to link against.', msvc_winlibs, ), @@ -440,8 +461,8 @@ def get_options(self) -> MutableKeyedOptionDictType: def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: # need a TypeDict to make this work - key = OptionKey('winlibs', machine=self.for_machine, lang=self.language) - libs = options[key].value.copy() + key = self.form_langopt_key('winlibs') + libs = options.get_value(key).copy() assert isinstance(libs, list) for l in libs: assert isinstance(l, str) @@ -469,18 +490,20 @@ def get_options(self) -> 'MutableKeyedOptionDictType': stds += ['c11'] if version_compare(self.version, self._C17_VERSION): stds += ['c17', 'c18'] - std_opt = opts[OptionKey('std', machine=self.for_machine, lang=self.language)] - assert isinstance(std_opt, coredata.UserStdOption), 'for mypy' + key = self.form_langopt_key('std') + std_opt = opts[key] + assert isinstance(std_opt, options.UserStdOption), 'for mypy' std_opt.set_versions(stds, gnu=True, gnu_deprecated=True) return opts def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args = [] - std = options[OptionKey('std', machine=self.for_machine, lang=self.language)] + key = self.form_langopt_key('std') + std = options.get_value(key) # As of MVSC 16.8, /std:c11 and /std:c17 are the only valid C standard options. - if std.value in {'c11'}: + if std == 'c11': args.append('/std:c11') - elif std.value in {'c17', 'c18'}: + elif std in {'c17', 'c18'}: args.append('/std:c17') return args @@ -496,8 +519,8 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic ClangClCompiler.__init__(self, target) def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: - key = OptionKey('std', machine=self.for_machine, lang=self.language) - std = options[key].value + key = self.form_langopt_key('std') + std = options.get_value(key) if std != "none": return [f'/clang:-std={std}'] return [] @@ -518,19 +541,23 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic def get_options(self) -> 'MutableKeyedOptionDictType': opts = super().get_options() - std_opt = opts[OptionKey('std', machine=self.for_machine, lang=self.language)] - assert isinstance(std_opt, coredata.UserStdOption), 'for mypy' + key = self.form_langopt_key('std') + # To shut up mypy. + if isinstance(opts, dict): + raise RuntimeError('This is a transitory issue that should not happen. 
Please report with full backtrace.') + std_opt = opts.get_value_object(key) + assert isinstance(std_opt, options.UserStdOption), 'for mypy' std_opt.set_versions(['c89', 'c99', 'c11']) return opts def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args = [] - key = OptionKey('std', machine=self.for_machine, lang=self.language) - std = options[key] - if std.value == 'c89': + key = self.form_langopt_key('std') + std = options.get_value(key) + if std == 'c89': mlog.log("ICL doesn't explicitly implement c89, setting the standard to 'none', which is close.", once=True) - elif std.value != 'none': - args.append('/Qstd:' + std.value) + elif std != 'none': + args.append('/Qstd:' + std) return args @@ -551,17 +578,18 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_ def get_options(self) -> 'MutableKeyedOptionDictType': opts = CCompiler.get_options(self) - std_opt = opts[OptionKey('std', machine=self.for_machine, lang=self.language)] - assert isinstance(std_opt, coredata.UserStdOption), 'for mypy' + key = self.form_langopt_key('std') + std_opt = opts[key] + assert isinstance(std_opt, options.UserStdOption), 'for mypy' std_opt.set_versions(['c89', 'c99', 'c11']) return opts def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args = [] - key = OptionKey('std', machine=self.for_machine, lang=self.language) - std = options[key] - if std.value != 'none': - args.append('--' + std.value) + key = self.form_langopt_key('std') + std = options.get_value(key) + if std != 'none': + args.append('--' + std) return args @@ -580,8 +608,9 @@ def get_always_args(self) -> T.List[str]: def get_options(self) -> 'MutableKeyedOptionDictType': opts = CCompiler.get_options(self) - std_opt = opts[OptionKey('std', machine=self.for_machine, lang=self.language)] - assert isinstance(std_opt, coredata.UserStdOption), 'for mypy' + key = self.form_langopt_key('std') + std_opt = opts[key] + assert isinstance(std_opt, options.UserStdOption), 'for mypy' std_opt.set_versions(['c89', 'c99']) return opts @@ -590,11 +619,11 @@ def get_no_stdinc_args(self) -> T.List[str]: def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args = [] - key = OptionKey('std', machine=self.for_machine, lang=self.language) - std = options[key] - if std.value == 'c89': + key = self.form_langopt_key('std') + std = options.get_value(key) + if std == 'c89': args.append('-lang=c') - elif std.value == 'c99': + elif std == 'c99': args.append('-lang=c99') return args @@ -627,8 +656,9 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_ def get_options(self) -> 'MutableKeyedOptionDictType': opts = CCompiler.get_options(self) - std_opt = opts[OptionKey('std', machine=self.for_machine, lang=self.language)] - assert isinstance(std_opt, coredata.UserStdOption), 'for mypy' + key = self.form_langopt_key('std') + std_opt = opts[key] + assert isinstance(std_opt, options.UserStdOption), 'for mypy' std_opt.set_versions(['c89', 'c99'], gnu=True) return opts @@ -637,11 +667,11 @@ def get_no_stdinc_args(self) -> T.List[str]: def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args = [] - key = OptionKey('std', machine=self.for_machine, lang=self.language) - std = options[key] - if std.value != 'none': + key = self.form_langopt_key('std') + std = options.get_value(key) + if std != 'none': args.append('-ansi') - args.append('-std=' + std.value) + args.append('-std=' + std) return args def 
get_compile_only_args(self) -> T.List[str]: @@ -672,8 +702,9 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_ def get_options(self) -> 'MutableKeyedOptionDictType': opts = CCompiler.get_options(self) - std_opt = opts[OptionKey('std', machine=self.for_machine, lang=self.language)] - assert isinstance(std_opt, coredata.UserStdOption), 'for mypy' + key = self.form_langopt_key('std') + std_opt = opts[key] + assert isinstance(std_opt, options.UserStdOption), 'for mypy' std_opt.set_versions(['c89', 'c99']) return opts @@ -709,8 +740,9 @@ def get_always_args(self) -> T.List[str]: def get_options(self) -> 'MutableKeyedOptionDictType': opts = CCompiler.get_options(self) - std_opt = opts[OptionKey('std', machine=self.for_machine, lang=self.language)] - assert isinstance(std_opt, coredata.UserStdOption), 'for mypy' + key = self.form_langopt_key('std') + std_opt = opts[key] + assert isinstance(std_opt, options.UserStdOption), 'for mypy' std_opt.set_versions(['c89', 'c99', 'c11']) return opts @@ -719,10 +751,10 @@ def get_no_stdinc_args(self) -> T.List[str]: def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args = [] - key = OptionKey('std', machine=self.for_machine, lang=self.language) - std = options[key] - if std.value != 'none': - args.append('--' + std.value) + key = self.form_langopt_key('std') + std = options.get_value(key) + if std != 'none': + args.append('--' + std) return args class C2000CCompiler(TICCompiler): @@ -749,15 +781,17 @@ def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[st def get_options(self) -> 'MutableKeyedOptionDictType': opts = CCompiler.get_options(self) c_stds = ['c99'] - opts[OptionKey('std', machine=self.for_machine, lang=self.language)].choices = ['none'] + c_stds + key = self.form_langopt_key('std') + opts[key].choices = ['none'] + c_stds return opts def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args = [] - std = options[OptionKey('std', machine=self.for_machine, lang=self.language)] - if std.value != 'none': + key = self.form_langopt_key('std') + std = options.get_value(key) + if std != 'none': args.append('-lang') - args.append(std.value) + args.append(std) return args class MetrowerksCCompilerEmbeddedPowerPC(MetrowerksCompiler, CCompiler): @@ -777,12 +811,14 @@ def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[st def get_options(self) -> 'MutableKeyedOptionDictType': opts = CCompiler.get_options(self) c_stds = ['c99'] - opts[OptionKey('std', machine=self.for_machine, lang=self.language)].choices = ['none'] + c_stds + key = self.form_langopt_key('std') + opts[key].choices = ['none'] + c_stds return opts def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args = [] - std = options[OptionKey('std', machine=self.for_machine, lang=self.language)] - if std.value != 'none': - args.append('-lang ' + std.value) + key = self.form_langopt_key('std') + std = options.get_value(key) + if std != 'none': + args.append('-lang ' + std) return args diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index 44b998a67cda..08a596c7078c 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -1,6 +1,6 @@ # SPDX-License-Identifier: Apache-2.0 # Copyright 2012-2022 The Meson development team -# Copyright © 2023 Intel Corporation +# Copyright © 2023-2024 Intel Corporation from __future__ import annotations @@ -15,6 +15,7 @@ from .. 
import coredata from .. import mlog from .. import mesonlib +from .. import options from ..mesonlib import ( HoldableObject, EnvironmentException, MesonException, @@ -24,6 +25,7 @@ from ..arglist import CompilerArgs if T.TYPE_CHECKING: + from typing import Any from ..build import BuildTarget, DFeatures from ..coredata import MutableKeyedOptionDictType, KeyedOptionDictType from ..envconfig import MachineInfo @@ -35,7 +37,7 @@ CompilerType = T.TypeVar('CompilerType', bound='Compiler') _T = T.TypeVar('_T') - UserOptionType = T.TypeVar('UserOptionType', bound=coredata.UserOption) + UserOptionType = T.TypeVar('UserOptionType', bound=options.UserOption) """This file contains the data files of all compilers Meson knows about. To support a new compiler, add its information below. @@ -50,7 +52,7 @@ # First suffix is the language's default. lang_suffixes = { 'c': ('c',), - 'cpp': ('cpp', 'cc', 'cxx', 'c++', 'hh', 'hpp', 'ipp', 'hxx', 'ino', 'ixx', 'C', 'H'), + 'cpp': ('cpp', 'cppm', 'cc', 'cxx', 'c++', 'hh', 'hpp', 'ipp', 'hxx', 'ino', 'ixx', 'C', 'H'), 'cuda': ('cu',), # f90, f95, f03, f08 are for free-form fortran ('f90' recommended) # f, for, ftn, fpp are for fixed-form fortran ('f' or 'for' recommended) @@ -209,51 +211,51 @@ class CompileCheckMode(enum.Enum): MSCRT_VALS = ['none', 'md', 'mdd', 'mt', 'mtd'] @dataclass -class BaseOption(T.Generic[coredata._T, coredata._U]): - opt_type: T.Type[coredata._U] +class BaseOption(T.Generic[options._T, options._U]): + opt_type: T.Type[options._U] description: str default: T.Any = None choices: T.Any = None - def init_option(self, name: OptionKey) -> coredata._U: + def init_option(self, name: OptionKey) -> options._U: keywords = {'value': self.default} if self.choices: keywords['choices'] = self.choices return self.opt_type(name.name, self.description, **keywords) BASE_OPTIONS: T.Mapping[OptionKey, BaseOption] = { - OptionKey('b_pch'): BaseOption(coredata.UserBooleanOption, 'Use precompiled headers', True), - OptionKey('b_lto'): BaseOption(coredata.UserBooleanOption, 'Use link time optimization', False), - OptionKey('b_lto_threads'): BaseOption(coredata.UserIntegerOption, 'Use multiple threads for Link Time Optimization', (None, None, 0)), - OptionKey('b_lto_mode'): BaseOption(coredata.UserComboOption, 'Select between different LTO modes.', 'default', + OptionKey('b_pch'): BaseOption(options.UserBooleanOption, 'Use precompiled headers', True), + OptionKey('b_lto'): BaseOption(options.UserBooleanOption, 'Use link time optimization', False), + OptionKey('b_lto_threads'): BaseOption(options.UserIntegerOption, 'Use multiple threads for Link Time Optimization', (None, None, 0)), + OptionKey('b_lto_mode'): BaseOption(options.UserComboOption, 'Select between different LTO modes.', 'default', choices=['default', 'thin']), - OptionKey('b_thinlto_cache'): BaseOption(coredata.UserBooleanOption, 'Use LLVM ThinLTO caching for faster incremental builds', False), - OptionKey('b_thinlto_cache_dir'): BaseOption(coredata.UserStringOption, 'Directory to store ThinLTO cache objects', ''), - OptionKey('b_sanitize'): BaseOption(coredata.UserComboOption, 'Code sanitizer to use', 'none', + OptionKey('b_thinlto_cache'): BaseOption(options.UserBooleanOption, 'Use LLVM ThinLTO caching for faster incremental builds', False), + OptionKey('b_thinlto_cache_dir'): BaseOption(options.UserStringOption, 'Directory to store ThinLTO cache objects', ''), + OptionKey('b_sanitize'): BaseOption(options.UserComboOption, 'Code sanitizer to use', 'none', choices=['none', 'address', 'thread', 
'undefined', 'memory', 'leak', 'address,undefined']), - OptionKey('b_lundef'): BaseOption(coredata.UserBooleanOption, 'Use -Wl,--no-undefined when linking', True), - OptionKey('b_asneeded'): BaseOption(coredata.UserBooleanOption, 'Use -Wl,--as-needed when linking', True), - OptionKey('b_pgo'): BaseOption(coredata.UserComboOption, 'Use profile guided optimization', 'off', + OptionKey('b_lundef'): BaseOption(options.UserBooleanOption, 'Use -Wl,--no-undefined when linking', True), + OptionKey('b_asneeded'): BaseOption(options.UserBooleanOption, 'Use -Wl,--as-needed when linking', True), + OptionKey('b_pgo'): BaseOption(options.UserComboOption, 'Use profile guided optimization', 'off', choices=['off', 'generate', 'use']), - OptionKey('b_coverage'): BaseOption(coredata.UserBooleanOption, 'Enable coverage tracking.', False), - OptionKey('b_colorout'): BaseOption(coredata.UserComboOption, 'Use colored output', 'always', + OptionKey('b_coverage'): BaseOption(options.UserBooleanOption, 'Enable coverage tracking.', False), + OptionKey('b_colorout'): BaseOption(options.UserComboOption, 'Use colored output', 'always', choices=['auto', 'always', 'never']), - OptionKey('b_ndebug'): BaseOption(coredata.UserComboOption, 'Disable asserts', 'false', choices=['true', 'false', 'if-release']), - OptionKey('b_staticpic'): BaseOption(coredata.UserBooleanOption, 'Build static libraries as position independent', True), - OptionKey('b_pie'): BaseOption(coredata.UserBooleanOption, 'Build executables as position independent', False), - OptionKey('b_bitcode'): BaseOption(coredata.UserBooleanOption, 'Generate and embed bitcode (only macOS/iOS/tvOS)', False), - OptionKey('b_vscrt'): BaseOption(coredata.UserComboOption, 'VS run-time library type to use.', 'from_buildtype', + OptionKey('b_ndebug'): BaseOption(options.UserComboOption, 'Disable asserts', 'false', choices=['true', 'false', 'if-release']), + OptionKey('b_staticpic'): BaseOption(options.UserBooleanOption, 'Build static libraries as position independent', True), + OptionKey('b_pie'): BaseOption(options.UserBooleanOption, 'Build executables as position independent', False), + OptionKey('b_bitcode'): BaseOption(options.UserBooleanOption, 'Generate and embed bitcode (only macOS/iOS/tvOS)', False), + OptionKey('b_vscrt'): BaseOption(options.UserComboOption, 'VS run-time library type to use.', 'from_buildtype', choices=MSCRT_VALS + ['from_buildtype', 'static_from_buildtype']), } -base_options: KeyedOptionDictType = {key: base_opt.init_option(key) for key, base_opt in BASE_OPTIONS.items()} +base_options = {key: base_opt.init_option(key) for key, base_opt in BASE_OPTIONS.items()} def option_enabled(boptions: T.Set[OptionKey], options: 'KeyedOptionDictType', option: OptionKey) -> bool: try: if option not in boptions: return False - ret = options[option].value + ret = options.get_value(option) assert isinstance(ret, bool), 'must return bool' # could also be str return ret except KeyError: @@ -263,8 +265,8 @@ def option_enabled(boptions: T.Set[OptionKey], options: 'KeyedOptionDictType', def get_option_value(options: 'KeyedOptionDictType', opt: OptionKey, fallback: '_T') -> '_T': """Get the value of an option, or the fallback value.""" try: - v: '_T' = options[opt].value - except KeyError: + v: '_T' = options.get_value(opt) + except (KeyError, AttributeError): return fallback assert isinstance(v, type(fallback)), f'Should have {type(fallback)!r} but was {type(v)!r}' @@ -278,52 +280,52 @@ def are_asserts_disabled(options: KeyedOptionDictType) -> bool: :param options: 
OptionDictionary :return: whether to disable assertions or not """ - return (options[OptionKey('b_ndebug')].value == 'true' or - (options[OptionKey('b_ndebug')].value == 'if-release' and - options[OptionKey('buildtype')].value in {'release', 'plain'})) + return (options.get_value('b_ndebug') == 'true' or + (options.get_value('b_ndebug') == 'if-release' and + options.get_value('buildtype') in {'release', 'plain'})) def get_base_compile_args(options: 'KeyedOptionDictType', compiler: 'Compiler', env: 'Environment') -> T.List[str]: args: T.List[str] = [] try: - if options[OptionKey('b_lto')].value: + if options.get_value(OptionKey('b_lto')): args.extend(compiler.get_lto_compile_args( threads=get_option_value(options, OptionKey('b_lto_threads'), 0), mode=get_option_value(options, OptionKey('b_lto_mode'), 'default'))) - except KeyError: + except (KeyError, AttributeError): pass try: - args += compiler.get_colorout_args(options[OptionKey('b_colorout')].value) - except KeyError: + args += compiler.get_colorout_args(options.get_value(OptionKey('b_colorout'))) + except (KeyError, AttributeError): pass try: - args += compiler.sanitizer_compile_args(options[OptionKey('b_sanitize')].value) - except KeyError: + args += compiler.sanitizer_compile_args(options.get_value(OptionKey('b_sanitize'))) + except (KeyError, AttributeError): pass try: - pgo_val = options[OptionKey('b_pgo')].value + pgo_val = options.get_value(OptionKey('b_pgo')) if pgo_val == 'generate': args.extend(compiler.get_profile_generate_args()) elif pgo_val == 'use': args.extend(compiler.get_profile_use_args()) - except KeyError: + except (KeyError, AttributeError): pass try: - if options[OptionKey('b_coverage')].value: + if options.get_value(OptionKey('b_coverage')): args += compiler.get_coverage_args() - except KeyError: + except (KeyError, AttributeError): pass try: args += compiler.get_assert_args(are_asserts_disabled(options), env) - except KeyError: + except (KeyError, AttributeError): pass # This does not need a try...except if option_enabled(compiler.base_options, options, OptionKey('b_bitcode')): args.append('-fembed-bitcode') try: - crt_val = options[OptionKey('b_vscrt')].value - buildtype = options[OptionKey('buildtype')].value try: + crt_val = options.get_value(OptionKey('b_vscrt')) + buildtype = options.get_value(OptionKey('buildtype')) args += compiler.get_crt_compile_args(crt_val, buildtype) except AttributeError: pass @@ -335,8 +337,8 @@ def get_base_link_args(options: 'KeyedOptionDictType', linker: 'Compiler', is_shared_module: bool, build_dir: str) -> T.List[str]: args: T.List[str] = [] try: - if options[OptionKey('b_lto')].value: - if options[OptionKey('werror')].value: + if options.get_value('b_lto'): + if options.get_value('werror'): args.extend(linker.get_werror_args()) thinlto_cache_dir = None @@ -348,24 +350,24 @@ def get_base_link_args(options: 'KeyedOptionDictType', linker: 'Compiler', threads=get_option_value(options, OptionKey('b_lto_threads'), 0), mode=get_option_value(options, OptionKey('b_lto_mode'), 'default'), thinlto_cache_dir=thinlto_cache_dir)) - except KeyError: + except (KeyError, AttributeError): pass try: - args += linker.sanitizer_link_args(options[OptionKey('b_sanitize')].value) - except KeyError: + args += linker.sanitizer_link_args(options.get_value('b_sanitize')) + except (KeyError, AttributeError): pass try: - pgo_val = options[OptionKey('b_pgo')].value + pgo_val = options.get_value('b_pgo') if pgo_val == 'generate': args.extend(linker.get_profile_generate_args()) elif pgo_val == 'use': 
args.extend(linker.get_profile_use_args()) - except KeyError: + except (KeyError, AttributeError): pass try: - if options[OptionKey('b_coverage')].value: + if options.get_value('b_coverage'): args += linker.get_coverage_link_args() - except KeyError: + except (KeyError, AttributeError): pass as_needed = option_enabled(linker.base_options, options, OptionKey('b_asneeded')) @@ -389,9 +391,9 @@ def get_base_link_args(options: 'KeyedOptionDictType', linker: 'Compiler', args.extend(linker.get_allow_undefined_link_args()) try: - crt_val = options[OptionKey('b_vscrt')].value - buildtype = options[OptionKey('buildtype')].value try: + crt_val = options.get_value(OptionKey('b_vscrt')) + buildtype = options.get_value(OptionKey('buildtype')) args += linker.get_crt_link_args(crt_val, buildtype) except AttributeError: pass @@ -934,11 +936,11 @@ def thread_flags(self, env: 'Environment') -> T.List[str]: def thread_link_flags(self, env: 'Environment') -> T.List[str]: return self.linker.thread_flags(env) - def openmp_flags(self) -> T.List[str]: + def openmp_flags(self, env: Environment) -> T.List[str]: raise EnvironmentException('Language %s does not support OpenMP flags.' % self.get_display_language()) - def openmp_link_flags(self) -> T.List[str]: - return self.openmp_flags() + def openmp_link_flags(self, env: Environment) -> T.List[str]: + return self.openmp_flags(env) def language_stdlib_only_link_flags(self, env: 'Environment') -> T.List[str]: return [] @@ -1046,7 +1048,7 @@ def get_dependency_compile_args(self, dep: 'Dependency') -> T.List[str]: return dep.get_compile_args() def get_dependency_link_args(self, dep: 'Dependency') -> T.List[str]: - return dep.get_link_args() + return dep.get_link_args(self.get_language()) @classmethod def use_linker_args(cls, linker: str, version: str) -> T.List[str]: @@ -1350,10 +1352,13 @@ def get_preprocessor(self) -> Compiler: """ raise EnvironmentException(f'{self.get_id()} does not support preprocessor') + def form_langopt_key(self, basename: str) -> OptionKey: + return OptionKey(basename, machine=self.for_machine, lang=self.language) + def get_global_options(lang: str, comp: T.Type[Compiler], for_machine: MachineChoice, - env: 'Environment') -> 'KeyedOptionDictType': + env: 'Environment') -> 'dict[OptionKey, options.UserOption[Any]]': """Retrieve options that apply to all compilers for a given language.""" description = f'Extra arguments passed to the {lang}' argkey = OptionKey('args', lang=lang, machine=for_machine) @@ -1365,12 +1370,12 @@ def get_global_options(lang: str, comp_options = env.options.get(comp_key, []) link_options = env.options.get(largkey, []) - cargs = coredata.UserArrayOption( + cargs = options.UserArrayOption( f'{lang}_{argkey.name}', description + ' compiler', comp_options, split_args=True, allow_dups=True) - largs = coredata.UserArrayOption( + largs = options.UserArrayOption( f'{lang}_{largkey.name}', description + ' linker', link_options, split_args=True, allow_dups=True) @@ -1383,6 +1388,6 @@ def get_global_options(lang: str, # autotools compatibility. largs.extend_value(comp_options) - opts: 'KeyedOptionDictType' = {argkey: cargs, largkey: largs} + opts: 'dict[OptionKey, options.UserOption[Any]]' = {argkey: cargs, largkey: largs} return opts diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py index 525c9fcdf378..044cd2b378a4 100644 --- a/mesonbuild/compilers/cpp.py +++ b/mesonbuild/compilers/cpp.py @@ -8,9 +8,9 @@ import os.path import typing as T -from .. import coredata +from .. import options from .. 
import mlog -from ..mesonlib import MesonException, version_compare, OptionKey +from ..mesonlib import MesonException, version_compare from .compilers import ( gnu_winlibs, @@ -19,6 +19,7 @@ CompileCheckMode, ) from .c_function_attributes import CXX_FUNC_ATTRIBUTES, C_FUNC_ATTRIBUTES +from .mixins.apple import AppleCompilerMixin from .mixins.clike import CLikeCompiler from .mixins.ccrx import CcrxCompiler from .mixins.ti import TICompiler @@ -172,9 +173,9 @@ def _find_best_cpp_std(self, cpp_std: str) -> str: def get_options(self) -> 'MutableKeyedOptionDictType': opts = super().get_options() - key = OptionKey('std', machine=self.for_machine, lang=self.language) + key = self.form_langopt_key('std') opts.update({ - key: coredata.UserStdOption('C++', _ALL_STDS), + key: options.UserStdOption('C++', _ALL_STDS), }) return opts @@ -185,7 +186,7 @@ class _StdCPPLibMixin(CompilerMixinBase): def language_stdlib_provider(self, env: Environment) -> str: # https://stackoverflow.com/a/31658120 - header = 'version' if self.has_header('', '', env) else 'ciso646' + header = 'version' if self.has_header('version', '', env)[0] else 'ciso646' is_libcxx = self.has_header_symbol(header, '_LIBCPP_VERSION', '', env)[0] lib = 'c++' if is_libcxx else 'stdc++' return lib @@ -239,20 +240,19 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_ def get_options(self) -> 'MutableKeyedOptionDictType': opts = CPPCompiler.get_options(self) - key = OptionKey('key', machine=self.for_machine, lang=self.language) self.update_options( opts, - self.create_option(coredata.UserComboOption, - key.evolve('eh'), + self.create_option(options.UserComboOption, + self.form_langopt_key('eh'), 'C++ exception handling type.', ['none', 'default', 'a', 's', 'sc'], 'default'), - self.create_option(coredata.UserBooleanOption, - key.evolve('rtti'), + self.create_option(options.UserBooleanOption, + self.form_langopt_key('rtti'), 'Enable RTTI', True), - self.create_option(coredata.UserBooleanOption, - key.evolve('debugstl'), + self.create_option(options.UserBooleanOption, + self.form_langopt_key('debugstl'), 'STL debug mode', False), ) @@ -263,14 +263,14 @@ def get_options(self) -> 'MutableKeyedOptionDictType': cppstd_choices.append('c++23') if version_compare(self.version, self._CPP26_VERSION): cppstd_choices.append('c++26') - std_opt = opts[key.evolve('std')] - assert isinstance(std_opt, coredata.UserStdOption), 'for mypy' + std_opt = opts[self.form_langopt_key('std')] + assert isinstance(std_opt, options.UserStdOption), 'for mypy' std_opt.set_versions(cppstd_choices, gnu=True) if self.info.is_windows() or self.info.is_cygwin(): self.update_options( opts, - self.create_option(coredata.UserArrayOption, - key.evolve('winlibs'), + self.create_option(options.UserArrayOption, + self.form_langopt_key('winlibs'), 'Standard Win libraries to link against', gnu_winlibs), ) @@ -278,14 +278,16 @@ def get_options(self) -> 'MutableKeyedOptionDictType': def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args: T.List[str] = [] - key = OptionKey('std', machine=self.for_machine, lang=self.language) - std = options[key] - if std.value != 'none': - args.append(self._find_best_cpp_std(std.value)) + key = self.form_langopt_key('std') + std = options.get_value(key) + if std != 'none': + args.append(self._find_best_cpp_std(std)) - non_msvc_eh_options(options[key.evolve('eh')].value, args) + key = self.form_langopt_key('eh') + non_msvc_eh_options(options.get_value(key), args) - if 
options[key.evolve('debugstl')].value: + key = self.form_langopt_key('debugstl') + if options.get_value(key): args.append('-D_GLIBCXX_DEBUG=1') # We can't do _LIBCPP_DEBUG because it's unreliable unless libc++ was built with it too: @@ -294,7 +296,8 @@ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str] if version_compare(self.version, '>=18'): args.append('-D_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_DEBUG') - if not options[key.evolve('rtti')].value: + key = self.form_langopt_key('rtti') + if not options.get_value(key): args.append('-fno-rtti') return args @@ -302,8 +305,8 @@ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str] def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: if self.info.is_windows() or self.info.is_cygwin(): # without a typedict mypy can't understand this. - key = OptionKey('winlibs', machine=self.for_machine, lang=self.language) - libs = options[key].value.copy() + key = self.form_langopt_key('winlibs') + libs = options.get_value(key).copy() assert isinstance(libs, list) for l in libs: assert isinstance(l, str) @@ -335,7 +338,7 @@ class ArmLtdClangCPPCompiler(ClangCPPCompiler): id = 'armltdclang' -class AppleClangCPPCompiler(ClangCPPCompiler): +class AppleClangCPPCompiler(AppleCompilerMixin, ClangCPPCompiler): _CPP23_VERSION = '>=13.0.0' # TODO: We don't know which XCode version will include LLVM 17 yet, so @@ -362,10 +365,10 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args: T.List[str] = [] - key = OptionKey('std', machine=self.for_machine, lang=self.language) - std = options[key] - if std.value != 'none': - args.append(self._find_best_cpp_std(std.value)) + key = self.form_langopt_key('std') + std = options.get_value(key) + if std != 'none': + args.append(self._find_best_cpp_std(std)) return args @@ -390,28 +393,29 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_ def get_options(self) -> 'MutableKeyedOptionDictType': opts = CPPCompiler.get_options(self) - key = OptionKey('std', machine=self.for_machine, lang=self.language) + key = self.form_langopt_key('std') self.update_options( opts, - self.create_option(coredata.UserComboOption, + self.create_option(options.UserComboOption, key.evolve('eh'), 'C++ exception handling type.', ['none', 'default', 'a', 's', 'sc'], 'default'), ) std_opt = opts[key] - assert isinstance(std_opt, coredata.UserStdOption), 'for mypy' + assert isinstance(std_opt, options.UserStdOption), 'for mypy' std_opt.set_versions(['c++98', 'c++03', 'c++11', 'c++14', 'c++17'], gnu=True) return opts def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args: T.List[str] = [] - key = OptionKey('std', machine=self.for_machine, lang=self.language) - std = options[key] - if std.value != 'none': - args.append('-std=' + std.value) + key = self.form_langopt_key('std') + std = options.get_value(key) + if std != 'none': + args.append('-std=' + std) - non_msvc_eh_options(options[key.evolve('eh')].value, args) + key = self.form_langopt_key('eh') + non_msvc_eh_options(options.get_value(key), args) return args @@ -438,21 +442,21 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_ self.supported_warn_args(gnu_cpp_warning_args))} def get_options(self) -> 'MutableKeyedOptionDictType': - key = OptionKey('std', machine=self.for_machine, lang=self.language) + key = 
self.form_langopt_key('std') opts = CPPCompiler.get_options(self) self.update_options( opts, - self.create_option(coredata.UserComboOption, - key.evolve('eh'), + self.create_option(options.UserComboOption, + self.form_langopt_key('eh'), 'C++ exception handling type.', ['none', 'default', 'a', 's', 'sc'], 'default'), - self.create_option(coredata.UserBooleanOption, - key.evolve('rtti'), + self.create_option(options.UserBooleanOption, + self.form_langopt_key('rtti'), 'Enable RTTI', True), - self.create_option(coredata.UserBooleanOption, - key.evolve('debugstl'), + self.create_option(options.UserBooleanOption, + self.form_langopt_key('debugstl'), 'STL debug mode', False), ) @@ -465,12 +469,12 @@ def get_options(self) -> 'MutableKeyedOptionDictType': if version_compare(self.version, '>=14.0.0'): cppstd_choices.append('c++26') std_opt = opts[key] - assert isinstance(std_opt, coredata.UserStdOption), 'for mypy' + assert isinstance(std_opt, options.UserStdOption), 'for mypy' std_opt.set_versions(cppstd_choices, gnu=True) if self.info.is_windows() or self.info.is_cygwin(): self.update_options( opts, - self.create_option(coredata.UserArrayOption, + self.create_option(options.UserArrayOption, key.evolve('winlibs'), 'Standard Win libraries to link against', gnu_winlibs), @@ -479,25 +483,25 @@ def get_options(self) -> 'MutableKeyedOptionDictType': def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args: T.List[str] = [] - key = OptionKey('std', machine=self.for_machine, lang=self.language) - std = options[key] - if std.value != 'none': - args.append(self._find_best_cpp_std(std.value)) + key = self.form_langopt_key('std') + std = options.get_value(key) + if std != 'none': + args.append(self._find_best_cpp_std(std)) - non_msvc_eh_options(options[key.evolve('eh')].value, args) + non_msvc_eh_options(options.get_value(key.evolve('eh')), args) - if not options[key.evolve('rtti')].value: + if not options.get_value(key.evolve('rtti')): args.append('-fno-rtti') - if options[key.evolve('debugstl')].value: + if options.get_value(key.evolve('debugstl')): args.append('-D_GLIBCXX_DEBUG=1') return args def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: if self.info.is_windows() or self.info.is_cygwin(): # without a typedict mypy can't understand this. 
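A recurring change across these hunks is replacing hand-built OptionKey('std', machine=..., lang=...) lookups and options[key].value reads with the new form_langopt_key() helper and options.get_value(key). A minimal self-contained sketch of that shape, with simplified stand-ins rather than Meson's real classes:

from dataclasses import dataclass

@dataclass(frozen=True)
class OptionKey:
    name: str
    lang: str = ''
    machine: str = 'host'

class OptionStore(dict):
    def get_value(self, key):
        # replaces the older ``options[key].value`` spelling
        return self[key]

class CompilerSketch:
    language = 'cpp'
    for_machine = 'host'

    def form_langopt_key(self, basename):
        # same shape as the helper introduced on Compiler in this diff
        return OptionKey(basename, machine=self.for_machine, lang=self.language)

store = OptionStore({OptionKey('std', lang='cpp'): 'c++17'})
print(store.get_value(CompilerSketch().form_langopt_key('std')))  # c++17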
- key = OptionKey('winlibs', machine=self.for_machine, lang=self.language) - libs = options[key].value.copy() + key = self.form_langopt_key('winlibs') + libs = options.get_value(key).copy() assert isinstance(libs, list) for l in libs: assert isinstance(l, str) @@ -579,21 +583,21 @@ def get_options(self) -> 'MutableKeyedOptionDictType': if version_compare(self.version, '>=1.26.00'): cpp_stds += ['c++20'] - key = OptionKey('std', machine=self.for_machine, lang=self.language) + key = self.form_langopt_key('std') self.update_options( opts, - self.create_option(coredata.UserComboOption, - key.evolve('eh'), + self.create_option(options.UserComboOption, + self.form_langopt_key('eh'), 'C++ exception handling type.', ['none', 'default', 'a', 's', 'sc'], 'default'), - self.create_option(coredata.UserBooleanOption, - key.evolve('debugstl'), + self.create_option(options.UserBooleanOption, + self.form_langopt_key('debugstl'), 'STL debug mode', False), ) std_opt = opts[key] - assert isinstance(std_opt, coredata.UserStdOption), 'for mypy' + assert isinstance(std_opt, options.UserStdOption), 'for mypy' std_opt.set_versions(cpp_stds, gnu=True) return opts @@ -612,14 +616,16 @@ def has_function(self, funcname: str, prefix: str, env: 'Environment', *, # Elbrus C++ compiler does not support RTTI, so don't check for it. def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args: T.List[str] = [] - key = OptionKey('std', machine=self.for_machine, lang=self.language) - std = options[key] - if std.value != 'none': - args.append(self._find_best_cpp_std(std.value)) + key = self.form_langopt_key('std') + std = options.get_value(key) + if std != 'none': + args.append(self._find_best_cpp_std(std)) - non_msvc_eh_options(options[key.evolve('eh')].value, args) + key = self.form_langopt_key('eh') + non_msvc_eh_options(options.get_value(key), args) - if options[key.evolve('debugstl')].value: + key = self.form_langopt_key('debugstl') + if options.get_value(key): args.append('-D_GLIBCXX_DEBUG=1') return args @@ -658,43 +664,43 @@ def get_options(self) -> 'MutableKeyedOptionDictType': c_stds += ['c++2a'] g_stds += ['gnu++2a'] - key = OptionKey('std', machine=self.for_machine, lang=self.language) + key = self.form_langopt_key('std') self.update_options( opts, - self.create_option(coredata.UserComboOption, - key.evolve('eh'), + self.create_option(options.UserComboOption, + self.form_langopt_key('eh'), 'C++ exception handling type.', ['none', 'default', 'a', 's', 'sc'], 'default'), - self.create_option(coredata.UserBooleanOption, - key.evolve('rtti'), + self.create_option(options.UserBooleanOption, + self.form_langopt_key('rtti'), 'Enable RTTI', True), - self.create_option(coredata.UserBooleanOption, - key.evolve('debugstl'), + self.create_option(options.UserBooleanOption, + self.form_langopt_key('debugstl'), 'STL debug mode', False), ) std_opt = opts[key] - assert isinstance(std_opt, coredata.UserStdOption), 'for mypy' + assert isinstance(std_opt, options.UserStdOption), 'for mypy' std_opt.set_versions(c_stds + g_stds) return opts def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args: T.List[str] = [] - key = OptionKey('std', machine=self.for_machine, lang=self.language) - std = options[key] - if std.value != 'none': + key = self.form_langopt_key('std') + std = options.get_value(key) + if std != 'none': remap_cpp03 = { 'c++03': 'c++98', 'gnu++03': 'gnu++98' } - args.append('-std=' + remap_cpp03.get(std.value, std.value)) - if options[key.evolve('eh')].value == 'none': 
+ args.append('-std=' + remap_cpp03.get(std, std)) + if options.get_value(key.evolve('eh')) == 'none': args.append('-fno-exceptions') - if not options[key.evolve('rtti')].value: + if not options.get_value(key.evolve('rtti')): args.append('-fno-rtti') - if options[key.evolve('debugstl')].value: + if options.get_value(key.evolve('debugstl')): args.append('-D_GLIBCXX_DEBUG=1') return args @@ -727,48 +733,48 @@ class VisualStudioLikeCPPCompilerMixin(CompilerMixinBase): def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: # need a typeddict for this - key = OptionKey('winlibs', machine=self.for_machine, lang=self.language) - return T.cast('T.List[str]', options[key].value[:]) + key = self.form_langopt_key('winlibs') + return T.cast('T.List[str]', options.get_value(key)[:]) def _get_options_impl(self, opts: 'MutableKeyedOptionDictType', cpp_stds: T.List[str]) -> 'MutableKeyedOptionDictType': - key = OptionKey('std', machine=self.for_machine, lang=self.language) + key = self.form_langopt_key('std') self.update_options( opts, - self.create_option(coredata.UserComboOption, - key.evolve('eh'), + self.create_option(options.UserComboOption, + self.form_langopt_key('eh'), 'C++ exception handling type.', ['none', 'default', 'a', 's', 'sc'], 'default'), - self.create_option(coredata.UserBooleanOption, - key.evolve('rtti'), + self.create_option(options.UserBooleanOption, + self.form_langopt_key('rtti'), 'Enable RTTI', True), - self.create_option(coredata.UserArrayOption, - key.evolve('winlibs'), + self.create_option(options.UserArrayOption, + self.form_langopt_key('winlibs'), 'Windows libs to link against.', msvc_winlibs), ) std_opt = opts[key] - assert isinstance(std_opt, coredata.UserStdOption), 'for mypy' + assert isinstance(std_opt, options.UserStdOption), 'for mypy' std_opt.set_versions(cpp_stds) return opts def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args: T.List[str] = [] - key = OptionKey('std', machine=self.for_machine, lang=self.language) + key = self.form_langopt_key('std') - eh = options[key.evolve('eh')] - if eh.value == 'default': + eh = options.get_value(self.form_langopt_key('eh')) + if eh == 'default': args.append('/EHsc') - elif eh.value == 'none': + elif eh == 'none': args.append('/EHs-c-') else: - args.append('/EH' + eh.value) + args.append('/EH' + eh) - if not options[key.evolve('rtti')].value: + if not options.get_value(self.form_langopt_key('rtti')): args.append('/GR-') - permissive, ver = self.VC_VERSION_MAP[options[key].value] + permissive, ver = self.VC_VERSION_MAP[options.get_value(key)] if ver is not None: args.append(f'/std:c++{ver}') @@ -795,8 +801,8 @@ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str] # which means setting the C++ standard version to C++14, in compilers that support it # (i.e., after VS2015U3) # if one is using anything before that point, one cannot set the standard. - key = OptionKey('std', machine=self.for_machine, lang=self.language) - if options[key].value in {'vc++11', 'c++11'}: + key = self.form_langopt_key('std') + if options.get_value(key) in {'vc++11', 'c++11'}: mlog.warning(self.id, 'does not support C++11;', 'attempting best effort; setting the standard to C++14', once=True, fatal=False) @@ -804,10 +810,10 @@ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str] # deepcopy since we're messing with members, and we can't simply # copy the members because the option proxy doesn't support it. 
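The fallback handled just below can be read as: if an unsupported C++11 standard was requested on an old MSVC toolset, rewrite it to C++14 on a copy of the options rather than mutating the caller's view. A rough stand-alone sketch using a plain dict in place of the real option store:

import copy

def downgrade_cpp11(opts, key='std'):
    # pre-VS2015U3 toolsets cannot do C++11; best effort is C++14
    if opts[key] in {'vc++11', 'c++11'}:
        opts = copy.deepcopy(opts)  # keep the caller's option view untouched
        opts[key] = 'vc++14' if opts[key] == 'vc++11' else 'c++14'
    return opts

print(downgrade_cpp11({'std': 'vc++11'}))  # {'std': 'vc++14'}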
options = copy.deepcopy(options) - if options[key].value == 'vc++11': - options[key].value = 'vc++14' + if options.get_value(key) == 'vc++11': + options.set_value(key, 'vc++14') else: - options[key].value = 'c++14' + options.set_value(key, 'c++14') return super().get_option_compile_args(options) @@ -842,11 +848,11 @@ def get_options(self) -> 'MutableKeyedOptionDictType': return self._get_options_impl(super().get_options(), cpp_stds) def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: - key = OptionKey('std', machine=self.for_machine, lang=self.language) - if options[key].value != 'none' and version_compare(self.version, '<19.00.24210'): + key = self.form_langopt_key('std') + if options.get_value(key) != 'none' and version_compare(self.version, '<19.00.24210'): mlog.warning('This version of MSVC does not support cpp_std arguments', fatal=False) options = copy.copy(options) - options[key].value = 'none' + options.set_value(key, 'none') args = super().get_option_compile_args(options) @@ -911,18 +917,18 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_ def get_options(self) -> 'MutableKeyedOptionDictType': opts = CPPCompiler.get_options(self) - std_opt = opts[OptionKey('std', machine=self.for_machine, lang=self.language)] - assert isinstance(std_opt, coredata.UserStdOption), 'for mypy' + std_opt = self.form_langopt_key('std') + assert isinstance(std_opt, options.UserStdOption), 'for mypy' std_opt.set_versions(['c++03', 'c++11']) return opts def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args: T.List[str] = [] - key = OptionKey('std', machine=self.for_machine, lang=self.language) - std = options[key] - if std.value == 'c++11': + key = self.form_langopt_key('std') + std = options.get_value(key) + if std == 'c++11': args.append('--cpp11') - elif std.value == 'c++03': + elif std == 'c++03': args.append('--cpp') return args @@ -972,17 +978,18 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_ def get_options(self) -> 'MutableKeyedOptionDictType': opts = CPPCompiler.get_options(self) - std_opt = opts[OptionKey('std', machine=self.for_machine, lang=self.language)] - assert isinstance(std_opt, coredata.UserStdOption), 'for mypy' + key = self.form_langopt_key('std') + std_opt = opts[key] + assert isinstance(std_opt, options.UserStdOption), 'for mypy' std_opt.set_versions(['c++03']) return opts def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args: T.List[str] = [] - key = OptionKey('std', machine=self.for_machine, lang=self.language) - std = options[key] - if std.value != 'none': - args.append('--' + std.value) + key = self.form_langopt_key('std') + std = options.get_value(key) + if std != 'none': + args.append('--' + std) return args def get_always_args(self) -> T.List[str]: @@ -1014,16 +1021,17 @@ def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[st def get_options(self) -> 'MutableKeyedOptionDictType': opts = CPPCompiler.get_options(self) - key = OptionKey('std', machine=self.for_machine, lang=self.language) + key = self.form_langopt_key('std') opts[key].choices = ['none'] return opts def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args: T.List[str] = [] - std = options[OptionKey('std', machine=self.for_machine, lang=self.language)] - if std.value != 'none': + key = self.form_langopt_key('std') + std = options.get_value(key) + if std != 'none': args.append('-lang') - 
args.append(std.value) + args.append(std) return args class MetrowerksCPPCompilerEmbeddedPowerPC(MetrowerksCompiler, CPPCompiler): @@ -1042,13 +1050,14 @@ def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[st def get_options(self) -> 'MutableKeyedOptionDictType': opts = CPPCompiler.get_options(self) - key = OptionKey('std', machine=self.for_machine, lang=self.language) + key = self.form_langopt_key('std') opts[key].choices = ['none'] return opts def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args: T.List[str] = [] - std = options[OptionKey('std', machine=self.for_machine, lang=self.language)] - if std.value != 'none': - args.append('-lang ' + std.value) + key = self.form_langopt_key('std') + std = options.get_value(key) + if std != 'none': + args.append('-lang ' + std) return args diff --git a/mesonbuild/compilers/cuda.py b/mesonbuild/compilers/cuda.py index 3761019b9945..a6049a8439cd 100644 --- a/mesonbuild/compilers/cuda.py +++ b/mesonbuild/compilers/cuda.py @@ -9,10 +9,11 @@ import typing as T from .. import coredata +from .. import options from .. import mlog from ..mesonlib import ( EnvironmentException, Popen_safe, - is_windows, LibType, OptionKey, version_compare, + is_windows, LibType, version_compare, OptionKey ) from .compilers import Compiler @@ -548,10 +549,10 @@ def sanity_check(self, work_dir: str, env: 'Environment') -> None: # Use the -ccbin option, if available, even during sanity checking. # Otherwise, on systems where CUDA does not support the default compiler, # NVCC becomes unusable. - flags += self.get_ccbin_args(env.coredata.options) + flags += self.get_ccbin_args(env.coredata.optstore) # If cross-compiling, we can't run the sanity check, only compile it. - if env.need_exe_wrapper(self.for_machine) and not env.has_exe_wrapper(): + if self.is_cross and not env.has_exe_wrapper(): # Linking cross built apps is painful. You can't really # tell if you should use -nostdlib or not and for example # on OSX the compiler binary is the same but you need @@ -573,7 +574,7 @@ def sanity_check(self, work_dir: str, env: 'Environment') -> None: raise EnvironmentException(f'Compiler {self.name_string()} cannot compile programs.') # Run sanity check (if possible) - if env.need_exe_wrapper(self.for_machine): + if self.is_cross: if not env.has_exe_wrapper(): return else: @@ -643,27 +644,28 @@ def get_options(self) -> 'MutableKeyedOptionDictType': return self.update_options( super().get_options(), - self.create_option(coredata.UserComboOption, - OptionKey('std', machine=self.for_machine, lang=self.language), + self.create_option(options.UserComboOption, + self.form_langopt_key('std'), 'C++ language standard to use with CUDA', cpp_stds, 'none'), - self.create_option(coredata.UserStringOption, - OptionKey('ccbindir', machine=self.for_machine, lang=self.language), + self.create_option(options.UserStringOption, + self.form_langopt_key('ccbindir'), 'CUDA non-default toolchain directory to use (-ccbin)', ''), ) - def _to_host_compiler_options(self, options: 'KeyedOptionDictType') -> 'KeyedOptionDictType': + def _to_host_compiler_options(self, master_options: 'KeyedOptionDictType') -> 'KeyedOptionDictType': """ Convert an NVCC Option set to a host compiler's option set. """ # We must strip the -std option from the host compiler option set, as NVCC has # its own -std flag that may not agree with the host compiler's. 
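The conversion just below copies the host compiler's option set and forces its std entry to 'none', so only NVCC's own --std= flag ever reaches the command line. An illustrative dict-based approximation (the real code wraps the result in coredata.OptionsView):

def to_host_compiler_options(master_options, host_defaults, std_key):
    # take the host compiler's defaults, overlay whatever the master option
    # set provides, then mask the host std so it cannot fight NVCC's --std=
    host_options = {key: master_options.get(key, default)
                    for key, default in host_defaults.items()}
    host_options[std_key] = 'none'
    return host_options

print(to_host_compiler_options({'std': 'c++17', 'args': ['-O2']},
                               {'std': 'none', 'args': []}, 'std'))
# {'std': 'none', 'args': ['-O2']}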
- host_options = {key: options.get(key, opt) for key, opt in self.host_compiler.get_options().items()} + host_options = {key: master_options.get(key, opt) for key, opt in self.host_compiler.get_options().items()} std_key = OptionKey('std', machine=self.for_machine, lang=self.host_compiler.language) overrides = {std_key: 'none'} + # To shut up mypy. return coredata.OptionsView(host_options, overrides=overrides) def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: @@ -672,10 +674,10 @@ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str] # the combination of CUDA version and MSVC version; the --std= is thus ignored # and attempting to use it will result in a warning: https://stackoverflow.com/a/51272091/741027 if not is_windows(): - key = OptionKey('std', machine=self.for_machine, lang=self.language) - std = options[key] - if std.value != 'none': - args.append('--std=' + std.value) + key = self.form_langopt_key('std') + std = options.get_value(key) + if std != 'none': + args.append('--std=' + std) return args + self._to_host_flags(self.host_compiler.get_option_compile_args(self._to_host_compiler_options(options))) @@ -768,7 +770,7 @@ def get_std_exe_link_args(self) -> T.List[str]: def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str], libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True) -> T.Optional[T.List[str]]: - return ['-l' + libname] # FIXME + return self.host_compiler.find_library(libname, env, extra_dirs, libtype, lib_prefix_warning) def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]: return self._to_host_flags(self.host_compiler.get_crt_compile_args(crt_val, buildtype)) @@ -791,9 +793,9 @@ def get_dependency_compile_args(self, dep: 'Dependency') -> T.List[str]: def get_dependency_link_args(self, dep: 'Dependency') -> T.List[str]: return self._to_host_flags(super().get_dependency_link_args(dep), _Phase.LINKER) - def get_ccbin_args(self, options: 'KeyedOptionDictType') -> T.List[str]: - key = OptionKey('ccbindir', machine=self.for_machine, lang=self.language) - ccbindir = options[key].value + def get_ccbin_args(self, ccoptions: 'KeyedOptionDictType') -> T.List[str]: + key = self.form_langopt_key('ccbindir') + ccbindir = ccoptions.get_value(key) if isinstance(ccbindir, str) and ccbindir != '': return [self._shield_nvcc_list_arg('-ccbin='+ccbindir, False)] else: diff --git a/mesonbuild/compilers/cython.py b/mesonbuild/compilers/cython.py index 30cec81e369f..7c1128692e40 100644 --- a/mesonbuild/compilers/cython.py +++ b/mesonbuild/compilers/cython.py @@ -6,8 +6,8 @@ import typing as T -from .. import coredata -from ..mesonlib import EnvironmentException, OptionKey, version_compare +from .. 
import options +from ..mesonlib import EnvironmentException, version_compare from .compilers import Compiler if T.TYPE_CHECKING: @@ -69,13 +69,13 @@ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], def get_options(self) -> 'MutableKeyedOptionDictType': return self.update_options( super().get_options(), - self.create_option(coredata.UserComboOption, - OptionKey('version', machine=self.for_machine, lang=self.language), + self.create_option(options.UserComboOption, + self.form_langopt_key('version'), 'Python version to target', ['2', '3'], '3'), - self.create_option(coredata.UserComboOption, - OptionKey('language', machine=self.for_machine, lang=self.language), + self.create_option(options.UserComboOption, + self.form_langopt_key('language'), 'Output C or C++ files', ['c', 'cpp'], 'c'), @@ -83,9 +83,11 @@ def get_options(self) -> 'MutableKeyedOptionDictType': def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args: T.List[str] = [] - key = options[OptionKey('version', machine=self.for_machine, lang=self.language)] - args.append(f'-{key.value}') - lang = options[OptionKey('language', machine=self.for_machine, lang=self.language)] - if lang.value == 'cpp': + key = self.form_langopt_key('version') + version = options.get_value(key) + args.append(f'-{version}') + key = self.form_langopt_key('language') + lang = options.get_value(key) + if lang == 'cpp': args.append('--cplus') return args diff --git a/mesonbuild/compilers/d.py b/mesonbuild/compilers/d.py index de344c05781a..c478c040ba9c 100644 --- a/mesonbuild/compilers/d.py +++ b/mesonbuild/compilers/d.py @@ -443,11 +443,19 @@ def sanity_check(self, work_dir: str, environment: 'Environment') -> None: output_name = os.path.join(work_dir, 'dtest') with open(source_name, 'w', encoding='utf-8') as ofile: ofile.write('''void main() { }''') - pc = subprocess.Popen(self.exelist + self.get_output_args(output_name) + self._get_target_arch_args() + [source_name], cwd=work_dir) + + compile_cmdlist = self.exelist + self.get_output_args(output_name) + self._get_target_arch_args() + [source_name] + + # If cross-compiling, we can't run the sanity check, only compile it. + if self.is_cross and not environment.has_exe_wrapper(): + compile_cmdlist += self.get_compile_only_args() + + pc = subprocess.Popen(compile_cmdlist, cwd=work_dir) pc.wait() if pc.returncode != 0: raise EnvironmentException('D compiler %s cannot compile programs.' % self.name_string()) - if environment.need_exe_wrapper(self.for_machine): + + if self.is_cross: if not environment.has_exe_wrapper(): # Can't check if the binaries run so we have to assume they do return @@ -545,7 +553,9 @@ def _get_target_arch_args(self) -> T.List[str]: # LDC2 on Windows targets to current OS architecture, but # it should follow the target specified by the MSVC toolchain. if self.info.is_windows(): - if self.arch == 'x86_64': + if self.is_cross: + return [f'-mtriple={self.arch}-windows-msvc'] + elif self.arch == 'x86_64': return ['-m64'] return ['-m32'] return [] diff --git a/mesonbuild/compilers/detect.py b/mesonbuild/compilers/detect.py index 90a3ac597ebb..c8b67f479471 100644 --- a/mesonbuild/compilers/detect.py +++ b/mesonbuild/compilers/detect.py @@ -46,8 +46,8 @@ # There is currently no pgc++ for Windows, only for Mac and Linux. 
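The D and CUDA sanity-check hunks above converge on one pattern: when cross compiling without an exe wrapper, only compile the test program and assume it would run; otherwise build it and execute it. A simplified stand-alone sketch of that flow, not the actual Compiler methods:

import subprocess

def sanity_check(compile_cmd, is_cross, has_exe_wrapper, compile_only_flag='-c'):
    if is_cross and not has_exe_wrapper:
        compile_cmd = compile_cmd + [compile_only_flag]  # cannot link or run the result
    if subprocess.call(compile_cmd) != 0:
        raise RuntimeError('compiler cannot compile programs')
    if is_cross and not has_exe_wrapper:
        return  # cannot execute the binary, assume it works
    # ...otherwise the produced binary is executed (through the wrapper when cross)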
defaults['cpp'] = ['icl', 'cl', 'c++', 'g++', 'clang++', 'clang-cl'] defaults['fortran'] = ['ifort', 'gfortran', 'flang', 'pgfortran', 'g95'] - defaults['objc'] = ['clang-cl', 'gcc'] - defaults['objcpp'] = ['clang-cl', 'g++'] + defaults['objc'] = ['clang', 'clang-cl', 'gcc'] + defaults['objcpp'] = ['clang-cl', 'clang-cl', 'g++'] defaults['cs'] = ['csc', 'mcs'] else: if platform.machine().lower() == 'e2k': @@ -179,7 +179,7 @@ def detect_static_linker(env: 'Environment', compiler: Compiler) -> StaticLinker else: trials = default_linkers elif compiler.id == 'intel-cl' and compiler.language == 'c': # why not cpp? Is this a bug? - # Intel has it's own linker that acts like microsoft's lib + # Intel has its own linker that acts like microsoft's lib trials = [['xilib']] elif is_windows() and compiler.id == 'pgi': # this handles cpp / nvidia HPC, in addition to just c/fortran trials = [['ar']] # For PGI on Windows, "ar" is just a wrapper calling link/lib. @@ -340,7 +340,7 @@ def sanitize(p: T.Optional[str]) -> T.Optional[str]: guess_gcc_or_lcc = None if guess_gcc_or_lcc: - defines = _get_gnu_compiler_defines(compiler) + defines = _get_gnu_compiler_defines(compiler, lang) if not defines: popen_exceptions[join_args(compiler)] = 'no pre-processor defines' continue @@ -449,7 +449,7 @@ def sanitize(p: T.Optional[str]) -> T.Optional[str]: if 'clang' in out or 'Clang' in out: linker = None - defines = _get_clang_compiler_defines(compiler) + defines = _get_clang_compiler_defines(compiler, lang) # Even if the for_machine is darwin, we could be using vanilla # clang. @@ -676,7 +676,7 @@ def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> C guess_gcc_or_lcc = 'lcc' if guess_gcc_or_lcc: - defines = _get_gnu_compiler_defines(compiler) + defines = _get_gnu_compiler_defines(compiler, 'fortran') if not defines: popen_exceptions[join_args(compiler)] = 'no pre-processor defines' continue @@ -843,7 +843,7 @@ def _detect_objc_or_objcpp_compiler(env: 'Environment', lang: str, for_machine: continue version = search_version(out) if 'Free Software Foundation' in out: - defines = _get_gnu_compiler_defines(compiler) + defines = _get_gnu_compiler_defines(compiler, lang) if not defines: popen_exceptions[join_args(compiler)] = 'no pre-processor defines' continue @@ -855,7 +855,7 @@ def _detect_objc_or_objcpp_compiler(env: 'Environment', lang: str, for_machine: defines, linker=linker) if 'clang' in out: linker = None - defines = _get_clang_compiler_defines(compiler) + defines = _get_clang_compiler_defines(compiler, lang) if not defines: popen_exceptions[join_args(compiler)] = 'no pre-processor defines' continue @@ -1143,11 +1143,15 @@ def detect_d_compiler(env: 'Environment', for_machine: MachineChoice) -> Compile try: if info.is_windows() or info.is_cygwin(): objfile = os.path.basename(f)[:-1] + 'obj' + extra_args = [f] + if is_cross: + extra_args.append(f'-mtriple={info.cpu}-windows') + linker = guess_win_linker(env, exelist, cls, full_version, for_machine, use_linker_prefix=True, invoked_directly=False, - extra_args=[f]) + extra_args=extra_args) else: # LDC writes an object file to the current working directory. # Clean it up. 
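For the LDC changes above, the Windows target-arch selection now distinguishes cross from native builds: cross builds get an explicit target triple while native builds keep the old -m64/-m32 behaviour. A small sketch of that decision, with attribute names assumed from the hunk:

def ldc_target_arch_args(is_windows, is_cross, arch):
    if not is_windows:
        return []
    if is_cross:
        # follow the MSVC toolchain's target rather than the host OS
        return [f'-mtriple={arch}-windows-msvc']
    return ['-m64'] if arch == 'x86_64' else ['-m32']

print(ldc_target_arch_args(True, True, 'aarch64'))   # ['-mtriple=aarch64-windows-msvc']
print(ldc_target_arch_args(True, False, 'x86_64'))   # ['-m64']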
@@ -1161,7 +1165,8 @@ def detect_d_compiler(env: 'Environment', for_machine: MachineChoice) -> Compile return cls( exelist, version, for_machine, info, arch, - full_version=full_version, linker=linker, version_output=out) + full_version=full_version, linker=linker, + is_cross=is_cross, version_output=out) elif 'gdc' in out: cls = d.GnuDCompiler linker = guess_nix_linker(env, exelist, cls, version, for_machine) @@ -1324,19 +1329,43 @@ def detect_masm_compiler(env: 'Environment', for_machine: MachineChoice) -> Comp # GNU/Clang defines and version # ============================= -def _get_gnu_compiler_defines(compiler: T.List[str]) -> T.Dict[str, str]: +def _get_gnu_compiler_defines(compiler: T.List[str], lang: str) -> T.Dict[str, str]: """ - Detect GNU compiler platform type (Apple, MinGW, Unix) + Get the list of GCC pre-processor defines """ + from .mixins.gnu import gnu_lang_map + + def _try_obtain_compiler_defines(args: T.List[str]) -> str: + mlog.debug(f'Running command: {join_args(args)}') + p, output, error = Popen_safe(compiler + args, write='', stdin=subprocess.PIPE) + if p.returncode != 0: + raise EnvironmentException('Unable to get gcc pre-processor defines:\n' + f'Compiler stdout:\n{output}\n-----\n' + f'Compiler stderr:\n{error}\n-----\n') + return output + # Arguments to output compiler pre-processor defines to stdout # gcc, g++, and gfortran all support these arguments - args = compiler + ['-E', '-dM', '-'] - mlog.debug(f'Running command: {join_args(args)}') - p, output, error = Popen_safe(args, write='', stdin=subprocess.PIPE) - if p.returncode != 0: - raise EnvironmentException('Unable to detect GNU compiler type:\n' - f'Compiler stdout:\n{output}\n-----\n' - f'Compiler stderr:\n{error}\n-----\n') + baseline_test_args = ['-E', '-dM', '-'] + try: + # We assume that when _get_gnu_compiler_defines is called, it's + # close enough to a GCCish compiler so we reuse the _LANG_MAP + # from the GCC mixin. This isn't a dangerous assumption because + # we fallback if the detection fails anyway. + + # We might not have a match for Fortran, so fallback to detection + # based on the driver. + lang = gnu_lang_map[lang] + + # The compiler may not infer the target language based on the driver name + # so first, try with '-cpp -x lang', then fallback without given it's less + # portable. We try with '-cpp' as GCC needs it for Fortran at least, and + # it seems to do no harm. 
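The define-extraction rework in this hunk boils down to: preprocess an empty input with -E -dM -, preferring an explicit '-cpp -x <lang>', and retrying the bare form when the driver rejects that. A self-contained approximation; it assumes a GCC/Clang-style driver on PATH and 'cc' is only a placeholder:

import subprocess

def probe_defines(compiler=('cc',), lang='c'):
    base = ['-E', '-dM', '-']
    for extra in (['-cpp', '-x', lang], []):
        p = subprocess.run(list(compiler) + extra + base, input='',
                           text=True, capture_output=True)
        if p.returncode == 0:
            # lines look like ``#define NAME VALUE``
            return {parts[1]: ' '.join(parts[2:])
                    for parts in (line.split(' ') for line in p.stdout.splitlines())
                    if len(parts) >= 2 and parts[0] == '#define'}
    raise RuntimeError('unable to get pre-processor defines')

# e.g. probe_defines(('gcc',), 'c').get('__GNUC__')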
+ output = _try_obtain_compiler_defines(['-cpp', '-x', lang] + baseline_test_args) + except (EnvironmentException, KeyError): + mlog.debug(f'pre-processor extraction using -cpp -x {lang} failed, falling back w/o lang') + output = _try_obtain_compiler_defines(baseline_test_args) + # Parse several lines of the type: # `#define ___SOME_DEF some_value` # and extract `___SOME_DEF` @@ -1353,17 +1382,42 @@ def _get_gnu_compiler_defines(compiler: T.List[str]) -> T.Dict[str, str]: defines[rest[0]] = rest[1] return defines -def _get_clang_compiler_defines(compiler: T.List[str]) -> T.Dict[str, str]: +def _get_clang_compiler_defines(compiler: T.List[str], lang: str) -> T.Dict[str, str]: """ Get the list of Clang pre-processor defines """ - args = compiler + ['-E', '-dM', '-'] - mlog.debug(f'Running command: {join_args(args)}') - p, output, error = Popen_safe(args, write='', stdin=subprocess.PIPE) - if p.returncode != 0: - raise EnvironmentException('Unable to get clang pre-processor defines:\n' - f'Compiler stdout:\n{output}\n-----\n' - f'Compiler stderr:\n{error}\n-----\n') + from .mixins.clang import clang_lang_map + + def _try_obtain_compiler_defines(args: T.List[str]) -> str: + mlog.debug(f'Running command: {join_args(args)}') + p, output, error = Popen_safe(compiler + args, write='', stdin=subprocess.PIPE) + if p.returncode != 0: + raise EnvironmentException('Unable to get clang pre-processor defines:\n' + f'Compiler stdout:\n{output}\n-----\n' + f'Compiler stderr:\n{error}\n-----\n') + return output + + # Arguments to output compiler pre-processor defines to stdout + baseline_test_args = ['-E', '-dM', '-'] + try: + # We assume that when _get_clang_compiler_defines is called, it's + # close enough to a Clangish compiler so we reuse the _LANG_MAP + # from the Clang mixin. This isn't a dangerous assumption because + # we fallback if the detection fails anyway. + + # We might not have a match for Fortran, so fallback to detection + # based on the driver. + lang = clang_lang_map[lang] + + # The compiler may not infer the target language based on the driver name + # so first, try with '-cpp -x lang', then fallback without given it's less + # portable. We try with '-cpp' as GCC needs it for Fortran at least, and + # it seems to do no harm. + output = _try_obtain_compiler_defines(['-cpp', '-x', lang] + baseline_test_args) + except (EnvironmentException, KeyError): + mlog.debug(f'pre-processor extraction using -cpp -x {lang} failed, falling back w/o lang') + output = _try_obtain_compiler_defines(baseline_test_args) + defines: T.Dict[str, str] = {} for line in output.split('\n'): if not line: diff --git a/mesonbuild/compilers/fortran.py b/mesonbuild/compilers/fortran.py index 428251560535..3e332381d53b 100644 --- a/mesonbuild/compilers/fortran.py +++ b/mesonbuild/compilers/fortran.py @@ -6,7 +6,7 @@ import typing as T import os -from .. import coredata +from .. 
import options from .compilers import ( clike_debug_args, Compiler, @@ -21,7 +21,7 @@ from mesonbuild.mesonlib import ( version_compare, MesonException, - LibType, OptionKey, + LibType, ) if T.TYPE_CHECKING: @@ -59,8 +59,8 @@ def _get_basic_compiler_args(self, env: 'Environment', mode: CompileCheckMode) - return cargs, largs def sanity_check(self, work_dir: str, environment: 'Environment') -> None: - source_name = 'sanitycheckf.f90' - code = 'program main; print *, "Fortran compilation is working."; end program\n' + source_name = 'sanitycheckf.f' + code = ' PROGRAM MAIN\n PRINT *, "Fortran compilation is working."\n END\n' return self._sanity_check_impl(work_dir, environment, source_name, code) def get_optimization_args(self, optimization_level: str) -> T.List[str]: @@ -114,8 +114,8 @@ def has_multi_link_arguments(self, args: T.List[str], env: 'Environment') -> T.T def get_options(self) -> 'MutableKeyedOptionDictType': return self.update_options( super().get_options(), - self.create_option(coredata.UserComboOption, - OptionKey('std', machine=self.for_machine, lang=self.language), + self.create_option(options.UserComboOption, + self.form_langopt_key('std'), 'Fortran language standard to use', ['none'], 'none'), @@ -147,16 +147,16 @@ def get_options(self) -> 'MutableKeyedOptionDictType': fortran_stds += ['f2008'] if version_compare(self.version, '>=8.0.0'): fortran_stds += ['f2018'] - key = OptionKey('std', machine=self.for_machine, lang=self.language) + key = self.form_langopt_key('std') opts[key].choices = ['none'] + fortran_stds return opts def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args: T.List[str] = [] - key = OptionKey('std', machine=self.for_machine, lang=self.language) - std = options[key] - if std.value != 'none': - args.append('-std=' + std.value) + key = self.form_langopt_key('std') + std = options.get_value(key) + if std != 'none': + args.append('-std=' + std) return args def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: @@ -205,7 +205,7 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic def get_options(self) -> 'MutableKeyedOptionDictType': opts = FortranCompiler.get_options(self) fortran_stds = ['f95', 'f2003', 'f2008', 'gnu', 'legacy', 'f2008ts'] - key = OptionKey('std', machine=self.for_machine, lang=self.language) + key = self.form_langopt_key('std') opts[key].choices = ['none'] + fortran_stds return opts @@ -256,7 +256,7 @@ def get_module_incdir_args(self) -> T.Tuple[str, ...]: def get_module_outdir_args(self, path: str) -> T.List[str]: return ['-moddir=' + path] - def openmp_flags(self) -> T.List[str]: + def openmp_flags(self, env: Environment) -> T.List[str]: return ['-xopenmp'] @@ -284,17 +284,17 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic def get_options(self) -> 'MutableKeyedOptionDictType': opts = FortranCompiler.get_options(self) - key = OptionKey('std', machine=self.for_machine, lang=self.language) + key = self.form_langopt_key('std') opts[key].choices = ['none', 'legacy', 'f95', 'f2003', 'f2008', 'f2018'] return opts def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args: T.List[str] = [] - key = OptionKey('std', machine=self.for_machine, lang=self.language) - std = options[key] + key = self.form_langopt_key('std') + std = options.get_value(key) stds = {'legacy': 'none', 'f95': 'f95', 'f2003': 'f03', 'f2008': 'f08', 'f2018': 'f18'} - if std.value != 'none': - args.append('-stand=' + 
stds[std.value]) + if std != 'none': + args.append('-stand=' + stds[std]) return args def get_preprocess_only_args(self) -> T.List[str]: @@ -339,17 +339,17 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic def get_options(self) -> 'MutableKeyedOptionDictType': opts = FortranCompiler.get_options(self) - key = OptionKey('std', machine=self.for_machine, lang=self.language) + key = self.form_langopt_key('std') opts[key].choices = ['none', 'legacy', 'f95', 'f2003', 'f2008', 'f2018'] return opts def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args: T.List[str] = [] - key = OptionKey('std', machine=self.for_machine, lang=self.language) - std = options[key] + key = self.form_langopt_key('std') + std = options.get_value(key) stds = {'legacy': 'none', 'f95': 'f95', 'f2003': 'f03', 'f2008': 'f08', 'f2018': 'f18'} - if std.value != 'none': - args.append('/stand:' + stds[std.value]) + if std != 'none': + args.append('/stand:' + stds[std]) return args def get_werror_args(self) -> T.List[str]: @@ -381,7 +381,7 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic '3': default_warn_args, 'everything': default_warn_args} - def openmp_flags(self) -> T.List[str]: + def openmp_flags(self, env: Environment) -> T.List[str]: return ['-mp'] @@ -482,7 +482,7 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic '3': default_warn_args, 'everything': default_warn_args} - def openmp_flags(self) -> T.List[str]: + def openmp_flags(self, env: Environment) -> T.List[str]: return ['-mp'] @@ -525,5 +525,5 @@ def get_preprocess_only_args(self) -> T.List[str]: def get_std_exe_link_args(self) -> T.List[str]: return self.get_always_args() - def openmp_flags(self) -> T.List[str]: + def openmp_flags(self, env: Environment) -> T.List[str]: return ['-openmp'] diff --git a/mesonbuild/compilers/mixins/apple.py b/mesonbuild/compilers/mixins/apple.py new file mode 100644 index 000000000000..98c4bfa1a18b --- /dev/null +++ b/mesonbuild/compilers/mixins/apple.py @@ -0,0 +1,57 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright © 2024 Intel Corporation + +"""Provides mixins for Apple compilers.""" + +from __future__ import annotations +import typing as T + +from ...mesonlib import MesonException + +if T.TYPE_CHECKING: + from ..._typing import ImmutableListProtocol + from ...environment import Environment + from ..compilers import Compiler +else: + # This is a bit clever, for mypy we pretend that these mixins descend from + # Compiler, so we get all of the methods and attributes defined for us, but + # for runtime we make them descend from object (which all classes normally + # do). This gives up DRYer type checking, with no runtime impact + Compiler = object + + +class AppleCompilerMixin(Compiler): + + """Handle differences between Vanilla Clang and the Clang shipped with XCode.""" + + __BASE_OMP_FLAGS: ImmutableListProtocol[str] = ['-Xpreprocessor', '-fopenmp'] + + def openmp_flags(self, env: Environment) -> T.List[str]: + """Flags required to compile with OpenMP on Apple. + + The Apple Clang Compiler doesn't have builtin support for OpenMP, it + must be provided separately. As such, we need to add the -Xpreprocessor + argument so that an external OpenMP can be found. 
+ + :return: A list of arguments + """ + m = env.machines[self.for_machine] + assert m is not None, 'for mypy' + if m.cpu_family.startswith('x86'): + root = '/usr/local' + else: + root = '/opt/homebrew' + return self.__BASE_OMP_FLAGS + [f'-I{root}/opt/libomp/include'] + + def openmp_link_flags(self, env: Environment) -> T.List[str]: + m = env.machines[self.for_machine] + assert m is not None, 'for mypy' + if m.cpu_family.startswith('x86'): + root = '/usr/local' + else: + root = '/opt/homebrew' + + link = self.find_library('omp', env, [f'{root}/opt/libomp/lib']) + if not link: + raise MesonException("Couldn't find libomp") + return self.__BASE_OMP_FLAGS + link diff --git a/mesonbuild/compilers/mixins/clang.py b/mesonbuild/compilers/mixins/clang.py index f982509f436b..d99dc3abf9f9 100644 --- a/mesonbuild/compilers/mixins/clang.py +++ b/mesonbuild/compilers/mixins/clang.py @@ -36,6 +36,13 @@ 's': ['-Oz'], } +clang_lang_map = { + 'c': 'c', + 'cpp': 'c++', + 'objc': 'objective-c', + 'objcpp': 'objective-c++', +} + class ClangCompiler(GnuLikeCompiler): id = 'clang' @@ -51,6 +58,8 @@ def __init__(self, defines: T.Optional[T.Dict[str, str]]): # linkers don't have base_options. if isinstance(self.linker, AppleDynamicLinker): self.base_options.add(OptionKey('b_bitcode')) + elif isinstance(self.linker, MSVCDynamicLinker): + self.base_options.add(OptionKey('b_vscrt')) # All Clang backends can also do LLVM IR self.can_compile_suffixes.add('ll') @@ -77,9 +86,23 @@ def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: # Clang is different than GCC, it will return True when a symbol isn't - # defined in a header. Specifically this seems to have something to do - # with functions that may be in a header on some systems, but not all of - # them. `strlcat` specifically with can trigger this. + # defined in a header. Specifically this is caused by a functionality + # both GCC and clang have: for some "well known" functions, arbitrarily + # chosen, they provide fixit suggestions for the header you should try + # including. + # + # - With GCC, this is a note appended to the prexisting diagnostic + # "error: undeclared identifier" + # + # - With clang, the error is converted to a c89'ish implicit function + # declaration instead, which can be disabled with -Wno-error and on + # clang < 16, simply passes compilation by default. + # + # One example of a clang fixit suggestion is for `strlcat`, which + # triggers this. + # + # This was reported in 2017 and promptly fixed. Just kidding! + # https://github.com/llvm/llvm-project/issues/33905 myargs: T.List[str] = ['-Werror=implicit-function-declaration'] if mode is CompileCheckMode.COMPILE: myargs.extend(['-Werror=unknown-warning-option', '-Werror=unused-command-line-argument']) @@ -102,7 +125,7 @@ def has_function(self, funcname: str, prefix: str, env: 'Environment', *, return super().has_function(funcname, prefix, env, extra_args=extra_args, dependencies=dependencies) - def openmp_flags(self) -> T.List[str]: + def openmp_flags(self, env: Environment) -> T.List[str]: if mesonlib.version_compare(self.version, '>=3.8.0'): return ['-fopenmp'] elif mesonlib.version_compare(self.version, '>=3.7.0'): diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py index d273015bcae7..174104b079ee 100644 --- a/mesonbuild/compilers/mixins/clike.py +++ b/mesonbuild/compilers/mixins/clike.py @@ -26,7 +26,7 @@ from ... import mesonlib from ... 
import mlog from ...linkers.linkers import GnuLikeDynamicLinkerMixin, SolarisDynamicLinker, CompCertDynamicLinker -from ...mesonlib import LibType, OptionKey +from ...mesonlib import LibType from .. import compilers from ..compilers import CompileCheckMode from .visualstudio import VisualStudioLikeCompiler @@ -98,12 +98,12 @@ def to_native(self, copy: bool = False) -> T.List[str]: continue # Remove the -isystem and the path if the path is a default path - if (each == '-isystem' and - i < (len(new) - 1) and - self._cached_realpath(new[i + 1]) in real_default_dirs): - bad_idx_list += [i, i + 1] - elif each.startswith('-isystem=') and self._cached_realpath(each[9:]) in real_default_dirs: - bad_idx_list += [i] + if each == '-isystem': + if i < (len(new) - 1) and self._cached_realpath(new[i + 1]) in real_default_dirs: + bad_idx_list += [i, i + 1] + elif each.startswith('-isystem='): + if self._cached_realpath(each[9:]) in real_default_dirs: + bad_idx_list += [i] elif self._cached_realpath(each[8:]) in real_default_dirs: bad_idx_list += [i] for i in reversed(bad_idx_list): @@ -278,7 +278,7 @@ def _sanity_check_impl(self, work_dir: str, environment: 'Environment', mode = CompileCheckMode.LINK if self.is_cross: binname += '_cross' - if environment.need_exe_wrapper(self.for_machine) and not environment.has_exe_wrapper(): + if not environment.has_exe_wrapper(): # Linking cross built C/C++ apps is painful. You can't really # tell if you should use -nostdlib or not and for example # on OSX the compiler binary is the same but you need @@ -308,7 +308,7 @@ def _sanity_check_impl(self, work_dir: str, environment: 'Environment', if pc.returncode != 0: raise mesonlib.EnvironmentException(f'Compiler {self.name_string()} cannot compile programs.') # Run sanity check - if environment.need_exe_wrapper(self.for_machine): + if self.is_cross: if not environment.has_exe_wrapper(): # Can't check if the binaries run so we have to assume they do return @@ -376,8 +376,8 @@ def _get_basic_compiler_args(self, env: 'Environment', mode: CompileCheckMode) - # linking with static libraries since MSVC won't select a CRT for # us in that case and will error out asking us to pick one. 
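The try/except that follows fetches b_vscrt and buildtype through the new option store and silently skips CRT flags when either option is unset or the compiler has no CRT handling. The shape, with stand-in objects rather than the real store and compiler:

def crt_compile_args(compiler, optstore):
    try:
        crt_val = optstore.get_value('b_vscrt')
        buildtype = optstore.get_value('buildtype')
        return compiler.get_crt_compile_args(crt_val, buildtype)
    except (KeyError, AttributeError):
        return []  # option unset, or compiler has no CRT hook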
try: - crt_val = env.coredata.options[OptionKey('b_vscrt')].value - buildtype = env.coredata.options[OptionKey('buildtype')].value + crt_val = env.coredata.optstore.get_value('b_vscrt') + buildtype = env.coredata.optstore.get_value('buildtype') cargs += self.get_crt_compile_args(crt_val, buildtype) except (KeyError, AttributeError): pass @@ -417,7 +417,7 @@ def build_wrapper_args(self, env: 'Environment', else: # TODO: we want to do this in the caller extra_args = mesonlib.listify(extra_args) - extra_args = mesonlib.listify([e(mode.value) if callable(e) else e for e in extra_args]) + extra_args = mesonlib.listify([e(mode) if callable(e) else e for e in extra_args]) if dependencies is None: dependencies = [] @@ -870,11 +870,12 @@ def has_members(self, typename: str, membernames: T.List[str], if extra_args is None: extra_args = [] # Create code that accesses all members - members = ''.join(f'foo.{member};\n' for member in membernames) + members = ''.join(f'(void) ( foo.{member} );\n' for member in membernames) t = f'''{prefix} void bar(void) {{ {typename} foo; {members} + (void) foo; }}''' return self.compiles(t, env, extra_args=extra_args, dependencies=dependencies) @@ -884,7 +885,7 @@ def has_type(self, typename: str, prefix: str, env: 'Environment', dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: t = f'''{prefix} void bar(void) {{ - sizeof({typename}); + (void) sizeof({typename}); }}''' return self.compiles(t, env, extra_args=extra_args, dependencies=dependencies) diff --git a/mesonbuild/compilers/mixins/elbrus.py b/mesonbuild/compilers/mixins/elbrus.py index 27cba803c9fc..71cf722c8192 100644 --- a/mesonbuild/compilers/mixins/elbrus.py +++ b/mesonbuild/compilers/mixins/elbrus.py @@ -1,5 +1,5 @@ # SPDX-License-Identifier: Apache-2.0 -# Copyright © 2023 Intel Corporation +# Copyright © 2023-2024 Intel Corporation from __future__ import annotations @@ -84,10 +84,10 @@ def get_pch_suffix(self) -> str: def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args: T.List[str] = [] - std = options[OptionKey('std', lang=self.language, machine=self.for_machine)] - if std.value != 'none': - args.append('-std=' + std.value) + std = options.get_value(OptionKey('std', lang=self.language, machine=self.for_machine)) + if std != 'none': + args.append('-std=' + std) return args - def openmp_flags(self) -> T.List[str]: + def openmp_flags(self, env: Environment) -> T.List[str]: return ['-fopenmp'] diff --git a/mesonbuild/compilers/mixins/emscripten.py b/mesonbuild/compilers/mixins/emscripten.py index bb8a520546d4..6b7f087ba6e1 100644 --- a/mesonbuild/compilers/mixins/emscripten.py +++ b/mesonbuild/compilers/mixins/emscripten.py @@ -9,6 +9,7 @@ import typing as T from ... import coredata +from ... import options from ... 
import mesonlib from ...mesonlib import OptionKey from ...mesonlib import LibType @@ -50,7 +51,7 @@ def _get_compile_output(self, dirname: str, mode: CompileCheckMode) -> str: def thread_link_flags(self, env: 'Environment') -> T.List[str]: args = ['-pthread'] - count: int = env.coredata.options[OptionKey('thread_count', lang=self.language, machine=self.for_machine)].value + count: int = env.coredata.optstore.get_value(OptionKey('thread_count', lang=self.language, machine=self.for_machine)) if count: args.append(f'-sPTHREAD_POOL_SIZE={count}') return args @@ -59,7 +60,7 @@ def get_options(self) -> coredata.MutableKeyedOptionDictType: return self.update_options( super().get_options(), self.create_option( - coredata.UserIntegerOption, + options.UserIntegerOption, OptionKey('thread_count', machine=self.for_machine, lang=self.language), 'Number of threads to use in web assembly, set to 0 to disable', (0, None, 4), # Default was picked at random diff --git a/mesonbuild/compilers/mixins/gnu.py b/mesonbuild/compilers/mixins/gnu.py index 79f271607434..4a9eb8848489 100644 --- a/mesonbuild/compilers/mixins/gnu.py +++ b/mesonbuild/compilers/mixins/gnu.py @@ -309,7 +309,7 @@ ], } -_LANG_MAP = { +gnu_lang_map = { 'c': 'c', 'cpp': 'c++', 'objc': 'objective-c', @@ -318,9 +318,9 @@ @functools.lru_cache(maxsize=None) def gnulike_default_include_dirs(compiler: T.Tuple[str, ...], lang: str) -> 'ImmutableListProtocol[str]': - if lang not in _LANG_MAP: + if lang not in gnu_lang_map: return [] - lang = _LANG_MAP[lang] + lang = gnu_lang_map[lang] env = os.environ.copy() env["LC_ALL"] = 'C' cmd = list(compiler) + [f'-x{lang}', '-E', '-v', '-'] @@ -402,7 +402,7 @@ def get_default_include_dirs(self) -> T.List[str]: return gnulike_default_include_dirs(tuple(self.get_exelist(ccache=False)), self.language).copy() @abc.abstractmethod - def openmp_flags(self) -> T.List[str]: + def openmp_flags(self, env: Environment) -> T.List[str]: pass def gnu_symbol_visibility_args(self, vistype: str) -> T.List[str]: @@ -534,7 +534,7 @@ def get_preprocess_to_file_args(self) -> T.List[str]: # We want to allow preprocessing files with any extension, such as # foo.c.in. In that case we need to tell GCC/CLANG to treat them as # assembly file. - lang = _LANG_MAP.get(self.language, 'assembler-with-cpp') + lang = gnu_lang_map.get(self.language, 'assembler-with-cpp') return self.get_preprocess_only_args() + [f'-x{lang}'] @@ -585,7 +585,7 @@ def get_optimization_args(self, optimization_level: str) -> T.List[str]: def get_pch_suffix(self) -> str: return 'gch' - def openmp_flags(self) -> T.List[str]: + def openmp_flags(self, env: Environment) -> T.List[str]: return ['-fopenmp'] def has_arguments(self, args: T.List[str], env: 'Environment', code: str, diff --git a/mesonbuild/compilers/mixins/intel.py b/mesonbuild/compilers/mixins/intel.py index d38a42ebbf52..902cc748145f 100644 --- a/mesonbuild/compilers/mixins/intel.py +++ b/mesonbuild/compilers/mixins/intel.py @@ -19,6 +19,9 @@ from .gnu import GnuLikeCompiler from .visualstudio import VisualStudioLikeCompiler +if T.TYPE_CHECKING: + from ...environment import Environment + # XXX: avoid circular dependencies # TODO: this belongs in a posix compiler class # NOTE: the default Intel optimization is -O2, unlike GNU which defaults to -O0. @@ -78,7 +81,7 @@ def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: def get_pch_name(self, name: str) -> str: return os.path.basename(name) + '.' 
+ self.get_pch_suffix() - def openmp_flags(self) -> T.List[str]: + def openmp_flags(self, env: Environment) -> T.List[str]: if mesonlib.version_compare(self.version, '>=15.0.0'): return ['-qopenmp'] else: @@ -154,7 +157,7 @@ def get_toolset_version(self) -> T.Optional[str]: version = int(v1 + v2) return self._calculate_toolset_version(version) - def openmp_flags(self) -> T.List[str]: + def openmp_flags(self, env: Environment) -> T.List[str]: return ['/Qopenmp'] def get_debug_args(self, is_debug: bool) -> T.List[str]: diff --git a/mesonbuild/compilers/mixins/pgi.py b/mesonbuild/compilers/mixins/pgi.py index 0d8245a21540..71ad81f38a01 100644 --- a/mesonbuild/compilers/mixins/pgi.py +++ b/mesonbuild/compilers/mixins/pgi.py @@ -51,7 +51,7 @@ def get_pic_args(self) -> T.List[str]: return ['-fPIC'] return [] - def openmp_flags(self) -> T.List[str]: + def openmp_flags(self, env: Environment) -> T.List[str]: return ['-mp'] def get_optimization_args(self, optimization_level: str) -> T.List[str]: diff --git a/mesonbuild/compilers/mixins/visualstudio.py b/mesonbuild/compilers/mixins/visualstudio.py index 4e2ce099fef4..bdf293bd010f 100644 --- a/mesonbuild/compilers/mixins/visualstudio.py +++ b/mesonbuild/compilers/mixins/visualstudio.py @@ -204,10 +204,10 @@ def gen_pch_args(self, header: str, source: str, pchname: str) -> T.Tuple[str, T objname = os.path.splitext(source)[0] + '.obj' return objname, ['/Yc' + header, '/Fp' + pchname, '/Fo' + objname] - def openmp_flags(self) -> T.List[str]: + def openmp_flags(self, env: Environment) -> T.List[str]: return ['/openmp'] - def openmp_link_flags(self) -> T.List[str]: + def openmp_link_flags(self, env: Environment) -> T.List[str]: return [] # FIXME, no idea what these should be. @@ -381,6 +381,8 @@ def symbols_have_underscore_prefix(self, env: 'Environment') -> bool: # As a last resort, try search in a compiled binary return self._symbols_have_underscore_prefix_searchbin(env) + def get_pie_args(self) -> T.List[str]: + return [] class MSVCCompiler(VisualStudioLikeCompiler): @@ -483,3 +485,10 @@ def get_dependency_compile_args(self, dep: 'Dependency') -> T.List[str]: return converted else: return dep.get_compile_args() + + def openmp_link_flags(self, env: Environment) -> T.List[str]: + # see https://github.com/mesonbuild/meson/issues/5298 + libs = self.find_library('libomp', env, []) + if libs is None: + raise mesonlib.MesonBugException('Could not find libomp') + return super().openmp_link_flags(env) + libs diff --git a/mesonbuild/compilers/objc.py b/mesonbuild/compilers/objc.py index 7c19c1b7d591..c63f288e314a 100644 --- a/mesonbuild/compilers/objc.py +++ b/mesonbuild/compilers/objc.py @@ -6,6 +6,7 @@ import typing as T from .. import coredata +from .. 
import options from ..mesonlib import OptionKey from .compilers import Compiler @@ -80,7 +81,7 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_ def get_options(self) -> 'coredata.MutableKeyedOptionDictType': return self.update_options( super().get_options(), - self.create_option(coredata.UserComboOption, + self.create_option(options.UserComboOption, OptionKey('std', machine=self.for_machine, lang='c'), 'C language standard to use', ['none', 'c89', 'c99', 'c11', 'c17', 'gnu89', 'gnu99', 'gnu11', 'gnu17'], @@ -89,9 +90,9 @@ def get_options(self) -> 'coredata.MutableKeyedOptionDictType': def get_option_compile_args(self, options: 'coredata.KeyedOptionDictType') -> T.List[str]: args = [] - std = options[OptionKey('std', machine=self.for_machine, lang='c')] - if std.value != 'none': - args.append('-std=' + std.value) + std = options.get_value(OptionKey('std', machine=self.for_machine, lang='c')) + if std != 'none': + args.append('-std=' + std) return args class AppleClangObjCCompiler(ClangObjCCompiler): diff --git a/mesonbuild/compilers/objcpp.py b/mesonbuild/compilers/objcpp.py index 46eaa504904d..e24406c32cc5 100644 --- a/mesonbuild/compilers/objcpp.py +++ b/mesonbuild/compilers/objcpp.py @@ -6,6 +6,7 @@ import typing as T from .. import coredata +from .. import options from ..mesonlib import OptionKey from .mixins.clike import CLikeCompiler @@ -80,7 +81,7 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_ def get_options(self) -> coredata.MutableKeyedOptionDictType: return self.update_options( super().get_options(), - self.create_option(coredata.UserComboOption, + self.create_option(options.UserComboOption, OptionKey('std', machine=self.for_machine, lang='cpp'), 'C++ language standard to use', ['none', 'c++98', 'c++11', 'c++14', 'c++17', 'c++20', 'c++2b', @@ -91,9 +92,9 @@ def get_options(self) -> coredata.MutableKeyedOptionDictType: def get_option_compile_args(self, options: 'coredata.KeyedOptionDictType') -> T.List[str]: args = [] - std = options[OptionKey('std', machine=self.for_machine, lang='cpp')] - if std.value != 'none': - args.append('-std=' + std.value) + std = options.get_value(OptionKey('std', machine=self.for_machine, lang='cpp')) + if std != 'none': + args.append('-std=' + std) return args diff --git a/mesonbuild/compilers/rust.py b/mesonbuild/compilers/rust.py index ce1079190d98..0ac07a8be763 100644 --- a/mesonbuild/compilers/rust.py +++ b/mesonbuild/compilers/rust.py @@ -9,7 +9,7 @@ import re import typing as T -from .. import coredata +from .. import options from ..mesonlib import EnvironmentException, MesonException, Popen_safe_logged, OptionKey from .compilers import Compiler, clike_debug_args @@ -86,7 +86,7 @@ def sanity_check(self, work_dir: str, environment: 'Environment') -> None: if pc.returncode != 0: raise EnvironmentException(f'Rust compiler {self.name_string()} cannot compile programs.') self._native_static_libs(work_dir, source_name) - if environment.need_exe_wrapper(self.for_machine): + if self.is_cross: if not environment.has_exe_wrapper(): # Can't check if the binaries run so we have to assume they do return @@ -158,8 +158,8 @@ def use_linker_args(cls, linker: str, version: str) -> T.List[str]: # use_linker_args method instead. 
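In the Rust hunk immediately below, the per-language 'std' option holds an edition and anything other than 'none' is forwarded as an --edition flag. A rough sketch of that mapping as a plain function, not the real UserComboOption machinery:

def rust_edition_args(edition):
    allowed = {'none', '2015', '2018', '2021'}
    if edition not in allowed:
        raise ValueError(f'unknown Rust edition {edition!r}')
    return [] if edition == 'none' else [f'--edition={edition}']

print(rust_edition_args('2021'))  # ['--edition=2021']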
def get_options(self) -> MutableKeyedOptionDictType: - return dict((self.create_option(coredata.UserComboOption, - OptionKey('std', machine=self.for_machine, lang=self.language), + return dict((self.create_option(options.UserComboOption, + self.form_langopt_key('std'), 'Rust edition to use', ['none', '2015', '2018', '2021'], 'none'),)) @@ -172,10 +172,10 @@ def get_dependency_compile_args(self, dep: 'Dependency') -> T.List[str]: def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: args = [] - key = OptionKey('std', machine=self.for_machine, lang=self.language) - std = options[key] - if std.value != 'none': - args.append('--edition=' + std.value) + key = self.form_langopt_key('std') + std = options.get_value(key) + if std != 'none': + args.append('--edition=' + std) return args def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]: diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 7213115603a2..8c27c3ae1fc9 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -6,33 +6,33 @@ import copy -from . import mlog, mparser +from . import mlog, options import pickle, os, uuid import sys from itertools import chain from pathlib import PurePath from collections import OrderedDict, abc -from dataclasses import dataclass +import dataclasses from .mesonlib import ( - HoldableObject, MesonBugException, - MesonException, EnvironmentException, MachineChoice, PerMachine, - PerMachineDefaultable, default_libdir, default_libexecdir, - default_prefix, default_datadir, default_includedir, default_infodir, - default_localedir, default_mandir, default_sbindir, default_sysconfdir, - listify_array_value, OptionKey, OptionType, stringlistify, + MesonBugException, + MesonException, MachineChoice, PerMachine, + PerMachineDefaultable, + OptionKey, OptionType, stringlistify, pickle_load ) -from .wrap import WrapMode + +from .machinefile import CmdLineFileParser + import ast import argparse -import configparser import enum import shlex import typing as T if T.TYPE_CHECKING: from typing_extensions import Protocol + from typing import Any from . import dependencies from .compilers.compilers import Compiler, CompileResult, RunResult, CompileCheckMode @@ -41,6 +41,7 @@ from .mesonlib import FileOrString from .cmake.traceparser import CMakeCacheEntry from .interpreterbase import SubProject + from .options import UserOption class SharedCMDOptions(Protocol): @@ -57,9 +58,9 @@ class SharedCMDOptions(Protocol): cross_file: T.List[str] native_file: T.List[str] - OptionDictType = T.Union[T.Dict[str, 'UserOption[T.Any]'], 'OptionsView'] - MutableKeyedOptionDictType = T.Dict['OptionKey', 'UserOption[T.Any]'] - KeyedOptionDictType = T.Union[MutableKeyedOptionDictType, 'OptionsView'] + OptionDictType = T.Union[T.Dict[str, 'options.UserOption[T.Any]'], 'OptionsView'] + MutableKeyedOptionDictType = T.Dict['OptionKey', 'options.UserOption[T.Any]'] + KeyedOptionDictType = T.Union['options.OptionStore', 'OptionsView'] CompilerCheckCacheKey = T.Tuple[T.Tuple[str, ...], str, FileOrString, T.Tuple[str, ...], CompileCheckMode] # code, args RunCheckCacheKey = T.Tuple[str, T.Tuple[str, ...]] @@ -71,7 +72,7 @@ class SharedCMDOptions(Protocol): # # Pip requires that RCs are named like this: '0.1.0.rc1' # But the corresponding Git tag needs to be '0.1.0rc1' -version = '1.4.99' +version = '1.5.2' # The next stable version when we are in dev. This is used to allow projects to # require meson version >=1.2.0 when using 1.1.99. 
FeatureNew won't warn when @@ -83,20 +84,11 @@ class SharedCMDOptions(Protocol): stable_version_array[-2] = str(int(stable_version_array[-2]) + 1) stable_version = '.'.join(stable_version_array) -backendlist = ['ninja', 'vs', 'vs2010', 'vs2012', 'vs2013', 'vs2015', 'vs2017', 'vs2019', 'vs2022', 'xcode', 'none'] -genvslitelist = ['vs2022'] -buildtypelist = ['plain', 'debug', 'debugoptimized', 'release', 'minsize', 'custom'] - -DEFAULT_YIELDING = False - -# Can't bind this near the class method it seems, sadly. -_T = T.TypeVar('_T') - def get_genvs_default_buildtype_list() -> list[str]: # just debug, debugoptimized, and release for now # but this should probably be configurable through some extra option, alongside --genvslite. - return buildtypelist[1:-2] + return options.buildtypelist[1:-2] class MesonVersionMismatchException(MesonException): @@ -109,312 +101,6 @@ def __init__(self, old_version: str, current_version: str, extra_msg: str = '') self.current_version = current_version -class UserOption(T.Generic[_T], HoldableObject): - def __init__(self, name: str, description: str, choices: T.Optional[T.Union[str, T.List[_T]]], - yielding: bool, - deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False): - super().__init__() - self.name = name - self.choices = choices - self.description = description - if not isinstance(yielding, bool): - raise MesonException('Value of "yielding" must be a boolean.') - self.yielding = yielding - self.deprecated = deprecated - self.readonly = False - - def listify(self, value: T.Any) -> T.List[T.Any]: - return [value] - - def printable_value(self) -> T.Union[str, int, bool, T.List[T.Union[str, int, bool]]]: - assert isinstance(self.value, (str, int, bool, list)) - return self.value - - # Check that the input is a valid value and return the - # "cleaned" or "native" version. For example the Boolean - # option could take the string "true" and return True. 
- def validate_value(self, value: T.Any) -> _T: - raise RuntimeError('Derived option class did not override validate_value.') - - def set_value(self, newvalue: T.Any) -> bool: - oldvalue = getattr(self, 'value', None) - self.value = self.validate_value(newvalue) - return self.value != oldvalue - -class UserStringOption(UserOption[str]): - def __init__(self, name: str, description: str, value: T.Any, yielding: bool = DEFAULT_YIELDING, - deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False): - super().__init__(name, description, None, yielding, deprecated) - self.set_value(value) - - def validate_value(self, value: T.Any) -> str: - if not isinstance(value, str): - raise MesonException(f'The value of option "{self.name}" is "{value}", which is not a string.') - return value - -class UserBooleanOption(UserOption[bool]): - def __init__(self, name: str, description: str, value: bool, yielding: bool = DEFAULT_YIELDING, - deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False): - super().__init__(name, description, [True, False], yielding, deprecated) - self.set_value(value) - - def __bool__(self) -> bool: - return self.value - - def validate_value(self, value: T.Any) -> bool: - if isinstance(value, bool): - return value - if not isinstance(value, str): - raise MesonException(f'Option "{self.name}" value {value} cannot be converted to a boolean') - if value.lower() == 'true': - return True - if value.lower() == 'false': - return False - raise MesonException(f'Option "{self.name}" value {value} is not boolean (true or false).') - -class UserIntegerOption(UserOption[int]): - def __init__(self, name: str, description: str, value: T.Any, yielding: bool = DEFAULT_YIELDING, - deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False): - min_value, max_value, default_value = value - self.min_value = min_value - self.max_value = max_value - c: T.List[str] = [] - if min_value is not None: - c.append('>=' + str(min_value)) - if max_value is not None: - c.append('<=' + str(max_value)) - choices = ', '.join(c) - super().__init__(name, description, choices, yielding, deprecated) - self.set_value(default_value) - - def validate_value(self, value: T.Any) -> int: - if isinstance(value, str): - value = self.toint(value) - if not isinstance(value, int): - raise MesonException(f'Value {value!r} for option "{self.name}" is not an integer.') - if self.min_value is not None and value < self.min_value: - raise MesonException(f'Value {value} for option "{self.name}" is less than minimum value {self.min_value}.') - if self.max_value is not None and value > self.max_value: - raise MesonException(f'Value {value} for option "{self.name}" is more than maximum value {self.max_value}.') - return value - - def toint(self, valuestring: str) -> int: - try: - return int(valuestring) - except ValueError: - raise MesonException(f'Value string "{valuestring}" for option "{self.name}" is not convertible to an integer.') - -class OctalInt(int): - # NinjaBackend.get_user_option_args uses str() to converts it to a command line option - # UserUmaskOption.toint() uses int(str, 8) to convert it to an integer - # So we need to use oct instead of dec here if we do not want values to be misinterpreted. 
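The comment above explains why the umask value is kept as an int subclass whose str() form is octal: the stringified value is later re-parsed with int(s, 8), so a decimal rendering would silently change it. A stand-alone illustration of that round-trip (just the idea, not Meson code):

class OctalInt(int):
    # str() yields an octal literal so that a later int(value, 8) round-trips.
    def __str__(self) -> str:
        return oct(int(self))

mode = OctalInt(0o644)
assert int(str(mode), 8) == mode         # '0o644' parsed as base 8 gives 420 again
assert int(str(int(mode)), 8) != mode    # a plain int prints '420'; read as octal that is 272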
- def __str__(self) -> str: - return oct(int(self)) - -class UserUmaskOption(UserIntegerOption, UserOption[T.Union[str, OctalInt]]): - def __init__(self, name: str, description: str, value: T.Any, yielding: bool = DEFAULT_YIELDING, - deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False): - super().__init__(name, description, (0, 0o777, value), yielding, deprecated) - self.choices = ['preserve', '0000-0777'] - - def printable_value(self) -> str: - if self.value == 'preserve': - return self.value - return format(self.value, '04o') - - def validate_value(self, value: T.Any) -> T.Union[str, OctalInt]: - if value == 'preserve': - return 'preserve' - return OctalInt(super().validate_value(value)) - - def toint(self, valuestring: T.Union[str, OctalInt]) -> int: - try: - return int(valuestring, 8) - except ValueError as e: - raise MesonException(f'Invalid mode for option "{self.name}" {e}') - -class UserComboOption(UserOption[str]): - def __init__(self, name: str, description: str, choices: T.List[str], value: T.Any, - yielding: bool = DEFAULT_YIELDING, - deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False): - super().__init__(name, description, choices, yielding, deprecated) - if not isinstance(self.choices, list): - raise MesonException(f'Combo choices for option "{self.name}" must be an array.') - for i in self.choices: - if not isinstance(i, str): - raise MesonException(f'Combo choice elements for option "{self.name}" must be strings.') - self.set_value(value) - - def validate_value(self, value: T.Any) -> str: - if value not in self.choices: - if isinstance(value, bool): - _type = 'boolean' - elif isinstance(value, (int, float)): - _type = 'number' - else: - _type = 'string' - optionsstring = ', '.join([f'"{item}"' for item in self.choices]) - raise MesonException('Value "{}" (of type "{}") for option "{}" is not one of the choices.' - ' Possible choices are (as string): {}.'.format( - value, _type, self.name, optionsstring)) - return value - -class UserArrayOption(UserOption[T.List[str]]): - def __init__(self, name: str, description: str, value: T.Union[str, T.List[str]], - split_args: bool = False, - allow_dups: bool = False, yielding: bool = DEFAULT_YIELDING, - choices: T.Optional[T.List[str]] = None, - deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False): - super().__init__(name, description, choices if choices is not None else [], yielding, deprecated) - self.split_args = split_args - self.allow_dups = allow_dups - self.set_value(value) - - def listify(self, value: T.Any) -> T.List[T.Any]: - try: - return listify_array_value(value, self.split_args) - except MesonException as e: - raise MesonException(f'error in option "{self.name}": {e!s}') - - def validate_value(self, value: T.Union[str, T.List[str]]) -> T.List[str]: - newvalue = self.listify(value) - - if not self.allow_dups and len(set(newvalue)) != len(newvalue): - msg = 'Duplicated values in array option is deprecated. ' \ - 'This will become a hard error in the future.' 
- mlog.deprecation(msg) - for i in newvalue: - if not isinstance(i, str): - raise MesonException(f'String array element "{newvalue!s}" for option "{self.name}" is not a string.') - if self.choices: - bad = [x for x in newvalue if x not in self.choices] - if bad: - raise MesonException('Value{} "{}" for option "{}" {} not in allowed choices: "{}"'.format( - '' if len(bad) == 1 else 's', - ', '.join(bad), - self.name, - 'is' if len(bad) == 1 else 'are', - ', '.join(self.choices)) - ) - return newvalue - - def extend_value(self, value: T.Union[str, T.List[str]]) -> None: - """Extend the value with an additional value.""" - new = self.validate_value(value) - self.set_value(self.value + new) - - -class UserFeatureOption(UserComboOption): - static_choices = ['enabled', 'disabled', 'auto'] - - def __init__(self, name: str, description: str, value: T.Any, yielding: bool = DEFAULT_YIELDING, - deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False): - super().__init__(name, description, self.static_choices, value, yielding, deprecated) - self.name: T.Optional[str] = None # TODO: Refactor options to all store their name - - def is_enabled(self) -> bool: - return self.value == 'enabled' - - def is_disabled(self) -> bool: - return self.value == 'disabled' - - def is_auto(self) -> bool: - return self.value == 'auto' - -class UserStdOption(UserComboOption): - ''' - UserOption specific to c_std and cpp_std options. User can set a list of - STDs in preference order and it selects the first one supported by current - compiler. - - For historical reasons, some compilers (msvc) allowed setting a GNU std and - silently fell back to C std. This is now deprecated. Projects that support - both GNU and MSVC compilers should set e.g. c_std=gnu11,c11. - - This is not using self.deprecated mechanism we already have for project - options because we want to print a warning if ALL values are deprecated, not - if SOME values are deprecated. - ''' - def __init__(self, lang: str, all_stds: T.List[str]) -> None: - self.lang = lang.lower() - self.all_stds = ['none'] + all_stds - # Map a deprecated std to its replacement. e.g. gnu11 -> c11. - self.deprecated_stds: T.Dict[str, str] = {} - opt_name = 'cpp_std' if lang == 'c++' else f'{lang}_std' - super().__init__(opt_name, f'{lang} language standard to use', ['none'], 'none') - - def set_versions(self, versions: T.List[str], gnu: bool = False, gnu_deprecated: bool = False) -> None: - assert all(std in self.all_stds for std in versions) - self.choices += versions - if gnu: - gnu_stds_map = {f'gnu{std[1:]}': std for std in versions} - if gnu_deprecated: - self.deprecated_stds.update(gnu_stds_map) - else: - self.choices += gnu_stds_map.keys() - - def validate_value(self, value: T.Union[str, T.List[str]]) -> str: - try: - candidates = listify_array_value(value) - except MesonException as e: - raise MesonException(f'error in option "{self.name}": {e!s}') - unknown = ','.join(std for std in candidates if std not in self.all_stds) - if unknown: - raise MesonException(f'Unknown option "{self.name}" value {unknown}. 
Possible values are {self.all_stds}.') - # Check first if any of the candidates are not deprecated - for std in candidates: - if std in self.choices: - return std - # Fallback to a deprecated std if any - for std in candidates: - newstd = self.deprecated_stds.get(std) - if newstd is not None: - mlog.deprecation( - f'None of the values {candidates} are supported by the {self.lang} compiler.\n' + - f'However, the deprecated {std} std currently falls back to {newstd}.\n' + - 'This will be an error in the future.\n' + - 'If the project supports both GNU and MSVC compilers, a value such as\n' + - '"c_std=gnu11,c11" specifies that GNU is preferred but it can safely fallback to plain c11.') - return newstd - raise MesonException(f'None of values {candidates} are supported by the {self.lang.upper()} compiler. ' + - f'Possible values for option "{self.name}" are {self.choices}') - -@dataclass -class OptionsView(abc.Mapping): - '''A view on an options dictionary for a given subproject and with overrides. - ''' - - # TODO: the typing here could be made more explicit using a TypeDict from - # python 3.8 or typing_extensions - options: KeyedOptionDictType - subproject: T.Optional[str] = None - overrides: T.Optional[T.Mapping[OptionKey, T.Union[str, int, bool, T.List[str]]]] = None - - def __getitem__(self, key: OptionKey) -> UserOption: - # FIXME: This is fundamentally the same algorithm than interpreter.get_option_internal(). - # We should try to share the code somehow. - key = key.evolve(subproject=self.subproject) - if not key.is_project(): - opt = self.options.get(key) - if opt is None or opt.yielding: - opt = self.options[key.as_root()] - else: - opt = self.options[key] - if opt.yielding: - opt = self.options.get(key.as_root(), opt) - if self.overrides: - override_value = self.overrides.get(key.as_root()) - if override_value is not None: - opt = copy.copy(opt) - opt.set_value(override_value) - return opt - - def __iter__(self) -> T.Iterator[OptionKey]: - return iter(self.options) - - def __len__(self) -> int: - return len(self.options) - class DependencyCacheType(enum.Enum): OTHER = 0 @@ -466,8 +152,8 @@ def __init__(self, builtins: 'KeyedOptionDictType', for_machine: MachineChoice): def __calculate_subkey(self, type_: DependencyCacheType) -> T.Tuple[str, ...]: data: T.Dict[DependencyCacheType, T.List[str]] = { - DependencyCacheType.PKG_CONFIG: stringlistify(self.__builtins[self.__pkg_conf_key].value), - DependencyCacheType.CMAKE: stringlistify(self.__builtins[self.__cmake_key].value), + DependencyCacheType.PKG_CONFIG: stringlistify(self.__builtins.get_value(self.__pkg_conf_key)), + DependencyCacheType.CMAKE: stringlistify(self.__builtins.get_value(self.__cmake_key)), DependencyCacheType.OTHER: [], } assert type_ in data, 'Someone forgot to update subkey calculations for a new type' @@ -557,7 +243,7 @@ def languages(self) -> T.Set[str]: class CoreData: - def __init__(self, options: SharedCMDOptions, scratch_dir: str, meson_command: T.List[str]): + def __init__(self, cmd_options: SharedCMDOptions, scratch_dir: str, meson_command: T.List[str]): self.lang_guids = { 'default': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942', 'c': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942', @@ -571,8 +257,8 @@ def __init__(self, options: SharedCMDOptions, scratch_dir: str, meson_command: T self.meson_command = meson_command self.target_guids = {} self.version = version - self.options: 'MutableKeyedOptionDictType' = {} - self.cross_files = self.__load_config_files(options, scratch_dir, 'cross') + self.optstore = 
options.OptionStore() + self.cross_files = self.__load_config_files(cmd_options, scratch_dir, 'cross') self.compilers: PerMachine[T.Dict[str, Compiler]] = PerMachine(OrderedDict(), OrderedDict()) # Stores the (name, hash) of the options file, The name will be either @@ -588,8 +274,8 @@ def __init__(self, options: SharedCMDOptions, scratch_dir: str, meson_command: T # For host == build configurations these caches should be the same. self.deps: PerMachine[DependencyCache] = PerMachineDefaultable.default( self.is_cross_build(), - DependencyCache(self.options, MachineChoice.BUILD), - DependencyCache(self.options, MachineChoice.HOST)) + DependencyCache(self.optstore, MachineChoice.BUILD), + DependencyCache(self.optstore, MachineChoice.HOST)) self.compiler_check_cache: T.Dict['CompilerCheckCacheKey', 'CompileResult'] = OrderedDict() self.run_check_cache: T.Dict['RunCheckCacheKey', 'RunResult'] = OrderedDict() @@ -598,18 +284,18 @@ def __init__(self, options: SharedCMDOptions, scratch_dir: str, meson_command: T self.cmake_cache: PerMachine[CMakeStateCache] = PerMachine(CMakeStateCache(), CMakeStateCache()) # Only to print a warning if it changes between Meson invocations. - self.config_files = self.__load_config_files(options, scratch_dir, 'native') + self.config_files = self.__load_config_files(cmd_options, scratch_dir, 'native') self.builtin_options_libdir_cross_fixup() self.init_builtins('') @staticmethod - def __load_config_files(options: SharedCMDOptions, scratch_dir: str, ftype: str) -> T.List[str]: + def __load_config_files(cmd_options: SharedCMDOptions, scratch_dir: str, ftype: str) -> T.List[str]: # Need to try and make the passed filenames absolute because when the # files are parsed later we'll have chdir()d. if ftype == 'cross': - filenames = options.cross_file + filenames = cmd_options.cross_file else: - filenames = options.native_file + filenames = cmd_options.native_file if not filenames: return [] @@ -665,7 +351,7 @@ def builtin_options_libdir_cross_fixup(self) -> None: # getting the "system default" is always wrong on multiarch # platforms as it gets a value like lib/x86_64-linux-gnu. if self.cross_files: - BUILTIN_OPTIONS[OptionKey('libdir')].default = 'lib' + options.BUILTIN_OPTIONS[OptionKey('libdir')].default = 'lib' def sanitize_prefix(self, prefix: str) -> str: prefix = os.path.expanduser(prefix) @@ -699,7 +385,7 @@ def sanitize_dir_option_value(self, prefix: str, option: OptionKey, value: T.Any except TypeError: return value if option.name.endswith('dir') and value.is_absolute() and \ - option not in BUILTIN_DIR_NOPREFIX_OPTIONS: + option not in options.BUILTIN_DIR_NOPREFIX_OPTIONS: try: # Try to relativize the path. 
value = value.relative_to(prefix) @@ -718,51 +404,47 @@ def sanitize_dir_option_value(self, prefix: str, option: OptionKey, value: T.Any def init_builtins(self, subproject: str) -> None: # Create builtin options with default values - for key, opt in BUILTIN_OPTIONS.items(): - self.add_builtin_option(self.options, key.evolve(subproject=subproject), opt) + for key, opt in options.BUILTIN_OPTIONS.items(): + self.add_builtin_option(self.optstore, key.evolve(subproject=subproject), opt) for for_machine in iter(MachineChoice): - for key, opt in BUILTIN_OPTIONS_PER_MACHINE.items(): - self.add_builtin_option(self.options, key.evolve(subproject=subproject, machine=for_machine), opt) + for key, opt in options.BUILTIN_OPTIONS_PER_MACHINE.items(): + self.add_builtin_option(self.optstore, key.evolve(subproject=subproject, machine=for_machine), opt) @staticmethod def add_builtin_option(opts_map: 'MutableKeyedOptionDictType', key: OptionKey, - opt: 'BuiltinOption') -> None: + opt: 'options.BuiltinOption') -> None: if key.subproject: if opt.yielding: # This option is global and not per-subproject return - value = opts_map[key.as_root()].value + value = opts_map.get_value(key.as_root()) else: value = None - opts_map[key] = opt.init_option(key, value, default_prefix()) + opts_map.add_system_option(key, opt.init_option(key, value, options.default_prefix())) def init_backend_options(self, backend_name: str) -> None: if backend_name == 'ninja': - self.options[OptionKey('backend_max_links')] = UserIntegerOption( + self.optstore.add_system_option('backend_max_links', options.UserIntegerOption( 'backend_max_links', 'Maximum number of linker processes to run or 0 for no ' 'limit', - (0, None, 0)) + (0, None, 0))) elif backend_name.startswith('vs'): - self.options[OptionKey('backend_startup_project')] = UserStringOption( + self.optstore.add_system_option('backend_startup_project', options.UserStringOption( 'backend_startup_project', 'Default project to execute in Visual Studio', - '') + '')) - def get_option(self, key: OptionKey) -> T.Union[T.List[str], str, int, bool, WrapMode]: + def get_option(self, key: OptionKey) -> T.Union[T.List[str], str, int, bool]: try: - v = self.options[key].value - if key.name == 'wrap_mode': - return WrapMode[v] + v = self.optstore.get_value(key) return v except KeyError: pass try: - v = self.options[key.as_root()] + v = self.optstore.get_value_object(key.as_root()) if v.yielding: - if key.name == 'wrap_mode': - return WrapMode[v.value] return v.value except KeyError: pass @@ -775,11 +457,11 @@ def set_option(self, key: OptionKey, value, first_invocation: bool = False) -> b if key.name == 'prefix': value = self.sanitize_prefix(value) else: - prefix = self.options[OptionKey('prefix')].value + prefix = self.optstore.get_value('prefix') value = self.sanitize_dir_option_value(prefix, key, value) try: - opt = self.options[key] + opt = self.optstore.get_value_object(key) except KeyError: raise MesonException(f'Tried to set unknown builtin option {str(key)}') @@ -830,7 +512,7 @@ def clear_cache(self) -> None: def get_nondefault_buildtype_args(self) -> T.List[T.Union[T.Tuple[str, str, str], T.Tuple[str, bool, bool]]]: result: T.List[T.Union[T.Tuple[str, str, str], T.Tuple[str, bool, bool]]] = [] - value = self.options[OptionKey('buildtype')].value + value = self.optstore.get_value('buildtype') if value == 'plain': opt = 'plain' debug = False @@ -849,8 +531,8 @@ def get_nondefault_buildtype_args(self) -> T.List[T.Union[T.Tuple[str, str, str] else: assert value == 'custom' return [] - 
actual_opt = self.options[OptionKey('optimization')].value - actual_debug = self.options[OptionKey('debug')].value + actual_opt = self.optstore.get_value('optimization') + actual_debug = self.optstore.get_value('debug') if actual_opt != opt: result.append(('optimization', actual_opt, opt)) if actual_debug != debug: @@ -879,43 +561,45 @@ def _set_others_from_buildtype(self, value: str) -> bool: assert value == 'custom' return False - dirty |= self.options[OptionKey('optimization')].set_value(opt) - dirty |= self.options[OptionKey('debug')].set_value(debug) + dirty |= self.optstore.set_value('optimization', opt) + dirty |= self.optstore.set_value('debug', debug) return dirty @staticmethod def is_per_machine_option(optname: OptionKey) -> bool: - if optname.as_host() in BUILTIN_OPTIONS_PER_MACHINE: + if optname.as_host() in options.BUILTIN_OPTIONS_PER_MACHINE: return True return optname.lang is not None def get_external_args(self, for_machine: MachineChoice, lang: str) -> T.List[str]: # mypy cannot analyze type of OptionKey - return T.cast('T.List[str]', self.options[OptionKey('args', machine=for_machine, lang=lang)].value) + key = OptionKey('args', machine=for_machine, lang=lang) + return T.cast('T.List[str]', self.optstore.get_value(key)) def get_external_link_args(self, for_machine: MachineChoice, lang: str) -> T.List[str]: # mypy cannot analyze type of OptionKey - return T.cast('T.List[str]', self.options[OptionKey('link_args', machine=for_machine, lang=lang)].value) + key = OptionKey('link_args', machine=for_machine, lang=lang) + return T.cast('T.List[str]', self.optstore.get_value(key)) - def update_project_options(self, options: 'MutableKeyedOptionDictType', subproject: SubProject) -> None: - for key, value in options.items(): + def update_project_options(self, project_options: 'MutableKeyedOptionDictType', subproject: SubProject) -> None: + for key, value in project_options.items(): if not key.is_project(): continue - if key not in self.options: - self.options[key] = value + if key not in self.optstore: + self.optstore.add_project_option(key, value) continue if key.subproject != subproject: raise MesonBugException(f'Tried to set an option for subproject {key.subproject} from {subproject}!') - oldval = self.options[key] + oldval = self.optstore.get_value_object(key) if type(oldval) is not type(value): - self.options[key] = value + self.optstore.set_value(key, value.value) elif oldval.choices != value.choices: # If the choices have changed, use the new value, but attempt # to keep the old options. If they are not valid keep the new # defaults but warn. 
- self.options[key] = value + self.optstore.set_value_object(key, value) try: value.set_value(oldval.value) except MesonException: @@ -923,9 +607,9 @@ def update_project_options(self, options: 'MutableKeyedOptionDictType', subproje fatal=False) # Find any extranious keys for this project and remove them - for key in self.options.keys() - options.keys(): + for key in self.optstore.keys() - project_options.keys(): if key.is_project() and key.subproject == subproject: - del self.options[key] + self.optstore.remove(key) def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) -> bool: if when_building_for == MachineChoice.BUILD: @@ -935,38 +619,38 @@ def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) def copy_build_options_from_regular_ones(self) -> bool: dirty = False assert not self.is_cross_build() - for k in BUILTIN_OPTIONS_PER_MACHINE: - o = self.options[k] - dirty |= self.options[k.as_build()].set_value(o.value) - for bk, bv in self.options.items(): + for k in options.BUILTIN_OPTIONS_PER_MACHINE: + o = self.optstore.get_value_object(k) + dirty |= self.optstore.set_value(k.as_build(), o.value) + for bk, bv in self.optstore.items(): if bk.machine is MachineChoice.BUILD: hk = bk.as_host() try: - hv = self.options[hk] + hv = self.optstore.get_value_object(hk) dirty |= bv.set_value(hv.value) except KeyError: continue return dirty - def set_options(self, options: T.Dict[OptionKey, T.Any], subproject: str = '', first_invocation: bool = False) -> bool: + def set_options(self, opts_to_set: T.Dict[OptionKey, T.Any], subproject: str = '', first_invocation: bool = False) -> bool: dirty = False if not self.is_cross_build(): - options = {k: v for k, v in options.items() if k.machine is not MachineChoice.BUILD} + opts_to_set = {k: v for k, v in opts_to_set.items() if k.machine is not MachineChoice.BUILD} # Set prefix first because it's needed to sanitize other options pfk = OptionKey('prefix') - if pfk in options: - prefix = self.sanitize_prefix(options[pfk]) - dirty |= self.options[OptionKey('prefix')].set_value(prefix) - for key in BUILTIN_DIR_NOPREFIX_OPTIONS: - if key not in options: - dirty |= self.options[key].set_value(BUILTIN_OPTIONS[key].prefixed_default(key, prefix)) + if pfk in opts_to_set: + prefix = self.sanitize_prefix(opts_to_set[pfk]) + dirty |= self.optstore.set_value('prefix', prefix) + for key in options.BUILTIN_DIR_NOPREFIX_OPTIONS: + if key not in opts_to_set: + dirty |= self.optstore.set_value(key, options.BUILTIN_OPTIONS[key].prefixed_default(key, prefix)) unknown_options: T.List[OptionKey] = [] - for k, v in options.items(): + for k, v in opts_to_set.items(): if k == pfk: continue - elif k in self.options: + elif k in self.optstore: dirty |= self.set_option(k, v, first_invocation) elif k.machine != MachineChoice.BUILD and k.type != OptionType.COMPILER: unknown_options.append(k) @@ -1010,7 +694,7 @@ def set_default_options(self, default_options: T.MutableMapping[OptionKey, str], # Always test this using the HOST machine, as many builtin options # are not valid for the BUILD machine, but the yielding value does # not differ between them even when they are valid for both. - if subproject and k.is_builtin() and self.options[k.evolve(subproject='', machine=MachineChoice.HOST)].yielding: + if subproject and k.is_builtin() and self.optstore.get_value_object(k.evolve(subproject='', machine=MachineChoice.HOST)).yielding: continue # Skip base, compiler, and backend options, they are handled when # adding languages and setting backend. 
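Several lookups in these hunks depend on the yielding rule: a subproject option marked as yielding defers to the value of the same option at the top level, which is why the check above is made against the empty-subproject, host-machine key. A compact sketch of that fallback, with plain tuples standing in for Meson's OptionKey machinery:

import typing as T

class Opt:
    def __init__(self, value: T.Any, yielding: bool = False) -> None:
        self.value = value
        self.yielding = yielding

# Keys are (subproject, name); the empty string is the top-level project.
opts: T.Dict[T.Tuple[str, str], Opt] = {
    ('', 'warning_level'): Opt('3'),
    ('sub', 'warning_level'): Opt('1', yielding=True),
}

def get_option(subproject: str, name: str) -> T.Any:
    opt = opts[(subproject, name)]
    if opt.yielding and ('', name) in opts:
        # A yielding subproject option defers to the top-level ("root") value.
        return opts[('', name)].value
    return opt.value

assert get_option('sub', 'warning_level') == '3'
assert get_option('', 'warning_level') == '3'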
@@ -1023,23 +707,23 @@ def set_default_options(self, default_options: T.MutableMapping[OptionKey, str], self.set_options(options, subproject=subproject, first_invocation=env.first_invocation) - def add_compiler_options(self, options: MutableKeyedOptionDictType, lang: str, for_machine: MachineChoice, + def add_compiler_options(self, c_options: MutableKeyedOptionDictType, lang: str, for_machine: MachineChoice, env: Environment, subproject: str) -> None: - for k, o in options.items(): + for k, o in c_options.items(): value = env.options.get(k) if value is not None: o.set_value(value) if not subproject: - self.options[k] = o # override compiler option on reconfigure - self.options.setdefault(k, o) + self.optstore.set_value_object(k, o) # override compiler option on reconfigure + self.optstore.setdefault(k, o) if subproject: sk = k.evolve(subproject=subproject) value = env.options.get(sk) or value if value is not None: o.set_value(value) - self.options[sk] = o # override compiler option on reconfigure - self.options.setdefault(sk, o) + self.optstore.set_value_object(sk, o) # override compiler option on reconfigure + self.optstore.setdefault(sk, o) def add_lang_args(self, lang: str, comp: T.Type['Compiler'], for_machine: MachineChoice, env: 'Environment') -> None: @@ -1047,8 +731,8 @@ def add_lang_args(self, lang: str, comp: T.Type['Compiler'], from .compilers import compilers # These options are all new at this point, because the compiler is # responsible for adding its own options, thus calling - # `self.options.update()`` is perfectly safe. - self.options.update(compilers.get_global_options(lang, comp, for_machine, env)) + # `self.optstore.update()`` is perfectly safe. + self.optstore.update(compilers.get_global_options(lang, comp, for_machine, env)) def process_compiler_options(self, lang: str, comp: Compiler, env: Environment, subproject: str) -> None: from . import compilers @@ -1061,20 +745,20 @@ def process_compiler_options(self, lang: str, comp: Compiler, env: Environment, skey = key.evolve(subproject=subproject) else: skey = key - if skey not in self.options: - self.options[skey] = copy.deepcopy(compilers.base_options[key]) + if skey not in self.optstore: + self.optstore.add_system_option(skey, copy.deepcopy(compilers.base_options[key])) if skey in env.options: - self.options[skey].set_value(env.options[skey]) + self.optstore.set_value(skey, env.options[skey]) enabled_opts.append(skey) elif subproject and key in env.options: - self.options[skey].set_value(env.options[key]) + self.optstore.set_value(skey, env.options[key]) enabled_opts.append(skey) - if subproject and key not in self.options: - self.options[key] = copy.deepcopy(self.options[skey]) + if subproject and key not in self.optstore: + self.optstore.add_system_option(key, copy.deepcopy(self.optstore.get_value_object(skey))) elif skey in env.options: - self.options[skey].set_value(env.options[skey]) + self.optstore.set_value(skey, env.options[skey]) elif subproject and key in env.options: - self.options[skey].set_value(env.options[key]) + self.optstore.set_value(skey, env.options[key]) self.emit_base_options_warnings(enabled_opts) def emit_base_options_warnings(self, enabled_opts: T.List[OptionKey]) -> None: @@ -1082,99 +766,6 @@ def emit_base_options_warnings(self, enabled_opts: T.List[OptionKey]) -> None: mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. 
Incompatible options such as \'b_asneeded\' have been disabled.', fatal=False) mlog.warning('Please see https://mesonbuild.com/Builtin-options.html#Notes_about_Apple_Bitcode_support for more details.', fatal=False) -class CmdLineFileParser(configparser.ConfigParser): - def __init__(self) -> None: - # We don't want ':' as key delimiter, otherwise it would break when - # storing subproject options like "subproject:option=value" - super().__init__(delimiters=['='], interpolation=None) - - def read(self, filenames: T.Union['StrOrBytesPath', T.Iterable['StrOrBytesPath']], encoding: T.Optional[str] = 'utf-8') -> T.List[str]: - return super().read(filenames, encoding) - - def optionxform(self, optionstr: str) -> str: - # Don't call str.lower() on keys - return optionstr - -class MachineFileParser(): - def __init__(self, filenames: T.List[str], sourcedir: str) -> None: - self.parser = CmdLineFileParser() - self.constants: T.Dict[str, T.Union[str, bool, int, T.List[str]]] = {'True': True, 'False': False} - self.sections: T.Dict[str, T.Dict[str, T.Union[str, bool, int, T.List[str]]]] = {} - - for fname in filenames: - try: - with open(fname, encoding='utf-8') as f: - content = f.read() - except UnicodeDecodeError as e: - raise EnvironmentException(f'Malformed machine file {fname!r} failed to parse as unicode: {e}') - - content = content.replace('@GLOBAL_SOURCE_ROOT@', sourcedir) - content = content.replace('@DIRNAME@', os.path.dirname(fname)) - try: - self.parser.read_string(content, fname) - except configparser.Error as e: - raise EnvironmentException(f'Malformed machine file: {e}') - - # Parse [constants] first so they can be used in other sections - if self.parser.has_section('constants'): - self.constants.update(self._parse_section('constants')) - - for s in self.parser.sections(): - if s == 'constants': - continue - self.sections[s] = self._parse_section(s) - - def _parse_section(self, s: str) -> T.Dict[str, T.Union[str, bool, int, T.List[str]]]: - self.scope = self.constants.copy() - section: T.Dict[str, T.Union[str, bool, int, T.List[str]]] = {} - for entry, value in self.parser.items(s): - if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry: - raise EnvironmentException(f'Malformed variable name {entry!r} in machine file.') - # Windows paths... 
- value = value.replace('\\', '\\\\') - try: - ast = mparser.Parser(value, 'machinefile').parse() - if not ast.lines: - raise EnvironmentException('value cannot be empty') - res = self._evaluate_statement(ast.lines[0]) - except MesonException as e: - raise EnvironmentException(f'Malformed value in machine file variable {entry!r}: {str(e)}.') - except KeyError as e: - raise EnvironmentException(f'Undefined constant {e.args[0]!r} in machine file variable {entry!r}.') - section[entry] = res - self.scope[entry] = res - return section - - def _evaluate_statement(self, node: mparser.BaseNode) -> T.Union[str, bool, int, T.List[str]]: - if isinstance(node, (mparser.StringNode)): - return node.value - elif isinstance(node, mparser.BooleanNode): - return node.value - elif isinstance(node, mparser.NumberNode): - return node.value - elif isinstance(node, mparser.ParenthesizedNode): - return self._evaluate_statement(node.inner) - elif isinstance(node, mparser.ArrayNode): - # TODO: This is where recursive types would come in handy - return [self._evaluate_statement(arg) for arg in node.args.arguments] - elif isinstance(node, mparser.IdNode): - return self.scope[node.value] - elif isinstance(node, mparser.ArithmeticNode): - l = self._evaluate_statement(node.left) - r = self._evaluate_statement(node.right) - if node.operation == 'add': - if (isinstance(l, str) and isinstance(r, str)) or \ - (isinstance(l, list) and isinstance(r, list)): - return l + r - elif node.operation == 'div': - if isinstance(l, str) and isinstance(r, str): - return os.path.join(l, r) - raise EnvironmentException('Unsupported node type') - -def parse_machine_files(filenames: T.List[str], sourcedir: str): - parser = MachineFileParser(filenames, sourcedir) - return parser.sections - def get_cmd_line_file(build_dir: str) -> str: return os.path.join(build_dir, 'meson-private', 'cmd_line.txt') @@ -1260,9 +851,9 @@ def save(obj: CoreData, build_dir: str) -> str: def register_builtin_arguments(parser: argparse.ArgumentParser) -> None: - for n, b in BUILTIN_OPTIONS.items(): + for n, b in options.BUILTIN_OPTIONS.items(): b.add_to_argparse(str(n), parser, '') - for n, b in BUILTIN_OPTIONS_PER_MACHINE.items(): + for n, b in options.BUILTIN_OPTIONS_PER_MACHINE.items(): b.add_to_argparse(str(n), parser, ' (just for host machine)') b.add_to_argparse(str(n.as_build()), parser, ' (just for build machine)') parser.add_argument('-D', action='append', dest='projectoptions', default=[], metavar="option", @@ -1286,185 +877,73 @@ def parse_cmd_line_options(args: SharedCMDOptions) -> None: # Merge builtin options set with --option into the dict. for key in chain( - BUILTIN_OPTIONS.keys(), - (k.as_build() for k in BUILTIN_OPTIONS_PER_MACHINE.keys()), - BUILTIN_OPTIONS_PER_MACHINE.keys(), + options.BUILTIN_OPTIONS.keys(), + (k.as_build() for k in options.BUILTIN_OPTIONS_PER_MACHINE.keys()), + options.BUILTIN_OPTIONS_PER_MACHINE.keys(), ): name = str(key) value = getattr(args, name, None) if value is not None: if key in args.cmd_line_options: - cmdline_name = BuiltinOption.argparse_name_to_arg(name) + cmdline_name = options.BuiltinOption.argparse_name_to_arg(name) raise MesonException( f'Got argument {name} as both -D{name} and {cmdline_name}. Pick one.') args.cmd_line_options[key] = value delattr(args, name) +@dataclasses.dataclass +class OptionsView(abc.Mapping): + '''A view on an options dictionary for a given subproject and with overrides. 
+ ''' -_U = T.TypeVar('_U', bound=UserOption[_T]) - -class BuiltinOption(T.Generic[_T, _U]): - - """Class for a builtin option type. - - There are some cases that are not fully supported yet. - """ - - def __init__(self, opt_type: T.Type[_U], description: str, default: T.Any, yielding: bool = True, *, - choices: T.Any = None, readonly: bool = False): - self.opt_type = opt_type - self.description = description - self.default = default - self.choices = choices - self.yielding = yielding - self.readonly = readonly - - def init_option(self, name: 'OptionKey', value: T.Optional[T.Any], prefix: str) -> _U: - """Create an instance of opt_type and return it.""" - if value is None: - value = self.prefixed_default(name, prefix) - keywords = {'yielding': self.yielding, 'value': value} - if self.choices: - keywords['choices'] = self.choices - o = self.opt_type(name.name, self.description, **keywords) - o.readonly = self.readonly - return o - - def _argparse_action(self) -> T.Optional[str]: - # If the type is a boolean, the presence of the argument in --foo form - # is to enable it. Disabling happens by using -Dfoo=false, which is - # parsed under `args.projectoptions` and does not hit this codepath. - if isinstance(self.default, bool): - return 'store_true' - return None - - def _argparse_choices(self) -> T.Any: - if self.opt_type is UserBooleanOption: - return [True, False] - elif self.opt_type is UserFeatureOption: - return UserFeatureOption.static_choices - return self.choices + # TODO: the typing here could be made more explicit using a TypeDict from + # python 3.8 or typing_extensions + original_options: T.Union[KeyedOptionDictType, 'dict[OptionKey, UserOption[Any]]'] + subproject: T.Optional[str] = None + overrides: T.Optional[T.Mapping[OptionKey, T.Union[str, int, bool, T.List[str]]]] = dataclasses.field(default_factory=dict) - @staticmethod - def argparse_name_to_arg(name: str) -> str: - if name == 'warning_level': - return '--warnlevel' + def __getitem__(self, key: OptionKey) -> options.UserOption: + # FIXME: This is fundamentally the same algorithm than interpreter.get_option_internal(). + # We should try to share the code somehow. + key = key.evolve(subproject=self.subproject) + if not key.is_project(): + opt = self.original_options.get(key) + if opt is None or opt.yielding: + key2 = key.as_root() + # This hack goes away once wi start using OptionStore + # to hold overrides. 
+ if isinstance(self.original_options, options.OptionStore): + if key2 not in self.original_options: + raise KeyError + opt = self.original_options.get_value_object(key2) + else: + opt = self.original_options[key2] else: - return '--' + name.replace('_', '-') + opt = self.original_options[key] + if opt.yielding: + opt = self.original_options.get(key.as_root(), opt) + if self.overrides: + override_value = self.overrides.get(key.as_root()) + if override_value is not None: + opt = copy.copy(opt) + opt.set_value(override_value) + return opt - def prefixed_default(self, name: 'OptionKey', prefix: str = '') -> T.Any: - if self.opt_type in [UserComboOption, UserIntegerOption]: - return self.default - try: - return BUILTIN_DIR_NOPREFIX_OPTIONS[name][prefix] - except KeyError: - pass - return self.default + def get_value(self, key: T.Union[str, OptionKey]): + if isinstance(key, str): + key = OptionKey(key) + return self[key].value - def add_to_argparse(self, name: str, parser: argparse.ArgumentParser, help_suffix: str) -> None: - kwargs = OrderedDict() + def set_value(self, key: T.Union[str, OptionKey], value: T.Union[str, int, bool, T.List[str]]): + if isinstance(key, str): + key = OptionKey(key) + self.overrides[key] = value - c = self._argparse_choices() - b = self._argparse_action() - h = self.description - if not b: - h = '{} (default: {}).'.format(h.rstrip('.'), self.prefixed_default(name)) - else: - kwargs['action'] = b - if c and not b: - kwargs['choices'] = c - kwargs['default'] = argparse.SUPPRESS - kwargs['dest'] = name - - cmdline_name = self.argparse_name_to_arg(name) - parser.add_argument(cmdline_name, help=h + help_suffix, **kwargs) - - -# Update `docs/markdown/Builtin-options.md` after changing the options below -# Also update mesonlib._BUILTIN_NAMES. See the comment there for why this is required. 
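The rewritten OptionsView keeps per-view overrides separate from the options it wraps: set_value() records the override locally, and lookups hand back a copy of the stored option with the override applied, so the shared store is never mutated. A self-contained sketch of that copy-on-read behaviour, with simplified stand-in classes rather than Meson's own:

import copy
import typing as T

class Opt:
    def __init__(self, value: T.Any) -> None:
        self.value = value

class View:
    def __init__(self, original: T.Dict[str, Opt]) -> None:
        self.original = original
        self.overrides: T.Dict[str, T.Any] = {}

    def set_value(self, name: str, value: T.Any) -> None:
        # Overrides are recorded in the view only, never in the shared store.
        self.overrides[name] = value

    def get_value(self, name: str) -> T.Any:
        opt = self.original[name]
        if name in self.overrides:
            opt = copy.copy(opt)           # copy on read; the shared Opt is untouched
            opt.value = self.overrides[name]
        return opt.value

store = {'optimization': Opt('0')}
view = View(store)
view.set_value('optimization', '2')
assert view.get_value('optimization') == '2'
assert store['optimization'].value == '0'  # the underlying store is unchanged

Copying on read keeps the override local to whoever holds the view.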
-# Please also update completion scripts in $MESONSRC/data/shell-completions/ -BUILTIN_DIR_OPTIONS: T.Dict['OptionKey', 'BuiltinOption'] = OrderedDict([ - (OptionKey('prefix'), BuiltinOption(UserStringOption, 'Installation prefix', default_prefix())), - (OptionKey('bindir'), BuiltinOption(UserStringOption, 'Executable directory', 'bin')), - (OptionKey('datadir'), BuiltinOption(UserStringOption, 'Data file directory', default_datadir())), - (OptionKey('includedir'), BuiltinOption(UserStringOption, 'Header file directory', default_includedir())), - (OptionKey('infodir'), BuiltinOption(UserStringOption, 'Info page directory', default_infodir())), - (OptionKey('libdir'), BuiltinOption(UserStringOption, 'Library directory', default_libdir())), - (OptionKey('licensedir'), BuiltinOption(UserStringOption, 'Licenses directory', '')), - (OptionKey('libexecdir'), BuiltinOption(UserStringOption, 'Library executable directory', default_libexecdir())), - (OptionKey('localedir'), BuiltinOption(UserStringOption, 'Locale data directory', default_localedir())), - (OptionKey('localstatedir'), BuiltinOption(UserStringOption, 'Localstate data directory', 'var')), - (OptionKey('mandir'), BuiltinOption(UserStringOption, 'Manual page directory', default_mandir())), - (OptionKey('sbindir'), BuiltinOption(UserStringOption, 'System executable directory', default_sbindir())), - (OptionKey('sharedstatedir'), BuiltinOption(UserStringOption, 'Architecture-independent data directory', 'com')), - (OptionKey('sysconfdir'), BuiltinOption(UserStringOption, 'Sysconf data directory', default_sysconfdir())), -]) - -BUILTIN_CORE_OPTIONS: T.Dict['OptionKey', 'BuiltinOption'] = OrderedDict([ - (OptionKey('auto_features'), BuiltinOption(UserFeatureOption, "Override value of all 'auto' features", 'auto')), - (OptionKey('backend'), BuiltinOption(UserComboOption, 'Backend to use', 'ninja', choices=backendlist, - readonly=True)), - (OptionKey('genvslite'), - BuiltinOption( - UserComboOption, - 'Setup multiple buildtype-suffixed ninja-backend build directories, ' - 'and a [builddir]_vs containing a Visual Studio meta-backend with multiple configurations that calls into them', - 'vs2022', - choices=genvslitelist) - ), - (OptionKey('buildtype'), BuiltinOption(UserComboOption, 'Build type to use', 'debug', - choices=buildtypelist)), - (OptionKey('debug'), BuiltinOption(UserBooleanOption, 'Enable debug symbols and other information', True)), - (OptionKey('default_library'), BuiltinOption(UserComboOption, 'Default library type', 'shared', choices=['shared', 'static', 'both'], - yielding=False)), - (OptionKey('errorlogs'), BuiltinOption(UserBooleanOption, "Whether to print the logs from failing tests", True)), - (OptionKey('install_umask'), BuiltinOption(UserUmaskOption, 'Default umask to apply on permissions of installed files', '022')), - (OptionKey('layout'), BuiltinOption(UserComboOption, 'Build directory layout', 'mirror', choices=['mirror', 'flat'])), - (OptionKey('optimization'), BuiltinOption(UserComboOption, 'Optimization level', '0', choices=['plain', '0', 'g', '1', '2', '3', 's'])), - (OptionKey('prefer_static'), BuiltinOption(UserBooleanOption, 'Whether to try static linking before shared linking', False)), - (OptionKey('stdsplit'), BuiltinOption(UserBooleanOption, 'Split stdout and stderr in test logs', True)), - (OptionKey('strip'), BuiltinOption(UserBooleanOption, 'Strip targets on install', False)), - (OptionKey('unity'), BuiltinOption(UserComboOption, 'Unity build', 'off', choices=['on', 'off', 'subprojects'])), - 
(OptionKey('unity_size'), BuiltinOption(UserIntegerOption, 'Unity block size', (2, None, 4))), - (OptionKey('warning_level'), BuiltinOption(UserComboOption, 'Compiler warning level to use', '1', choices=['0', '1', '2', '3', 'everything'], yielding=False)), - (OptionKey('werror'), BuiltinOption(UserBooleanOption, 'Treat warnings as errors', False, yielding=False)), - (OptionKey('wrap_mode'), BuiltinOption(UserComboOption, 'Wrap mode', 'default', choices=['default', 'nofallback', 'nodownload', 'forcefallback', 'nopromote'])), - (OptionKey('force_fallback_for'), BuiltinOption(UserArrayOption, 'Force fallback for those subprojects', [])), - (OptionKey('vsenv'), BuiltinOption(UserBooleanOption, 'Activate Visual Studio environment', False, readonly=True)), - - # Pkgconfig module - (OptionKey('relocatable', module='pkgconfig'), - BuiltinOption(UserBooleanOption, 'Generate pkgconfig files as relocatable', False)), - - # Python module - (OptionKey('bytecompile', module='python'), - BuiltinOption(UserIntegerOption, 'Whether to compile bytecode', (-1, 2, 0))), - (OptionKey('install_env', module='python'), - BuiltinOption(UserComboOption, 'Which python environment to install to', 'prefix', choices=['auto', 'prefix', 'system', 'venv'])), - (OptionKey('platlibdir', module='python'), - BuiltinOption(UserStringOption, 'Directory for site-specific, platform-specific files.', '')), - (OptionKey('purelibdir', module='python'), - BuiltinOption(UserStringOption, 'Directory for site-specific, non-platform-specific files.', '')), - (OptionKey('allow_limited_api', module='python'), - BuiltinOption(UserBooleanOption, 'Whether to allow use of the Python Limited API', True)), -]) - -BUILTIN_OPTIONS = OrderedDict(chain(BUILTIN_DIR_OPTIONS.items(), BUILTIN_CORE_OPTIONS.items())) - -BUILTIN_OPTIONS_PER_MACHINE: T.Dict['OptionKey', 'BuiltinOption'] = OrderedDict([ - (OptionKey('pkg_config_path'), BuiltinOption(UserArrayOption, 'List of additional paths for pkg-config to search', [])), - (OptionKey('cmake_prefix_path'), BuiltinOption(UserArrayOption, 'List of additional prefixes for cmake to search', [])), -]) - -# Special prefix-dependent defaults for installation directories that reside in -# a path outside of the prefix in FHS and common usage. -BUILTIN_DIR_NOPREFIX_OPTIONS: T.Dict[OptionKey, T.Dict[str, str]] = { - OptionKey('sysconfdir'): {'/usr': '/etc'}, - OptionKey('localstatedir'): {'/usr': '/var', '/usr/local': '/var/local'}, - OptionKey('sharedstatedir'): {'/usr': '/var/lib', '/usr/local': '/var/local/lib'}, - OptionKey('platlibdir', module='python'): {}, - OptionKey('purelibdir', module='python'): {}, -} + def __iter__(self) -> T.Iterator[OptionKey]: + return iter(self.original_options) + + def __len__(self) -> int: + return len(self.original_options) FORBIDDEN_TARGET_NAMES = frozenset({ 'clean', diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index 930dbe7f369d..9b218c6432b8 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -1,6 +1,5 @@ # SPDX-License-Identifier: Apache-2.0 # Copyright 2013-2018 The Meson development team -# Copyright © 2024 Intel Corporation # This file contains the detection logic for external dependencies. # Custom logic for several other packages are in separate files. 
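The BUILTIN_DIR_NOPREFIX_OPTIONS table removed above (it now lives in options.py) encodes directory defaults that depend on the chosen prefix, e.g. sysconfdir becomes /etc rather than /usr/etc when the prefix is /usr. A small sketch of how such a prefix-dependent default lookup behaves:

import typing as T

# Prefix-dependent directory defaults, mirroring the table being moved above.
NOPREFIX_DEFAULTS: T.Dict[str, T.Dict[str, str]] = {
    'sysconfdir': {'/usr': '/etc'},
    'localstatedir': {'/usr': '/var', '/usr/local': '/var/local'},
    'sharedstatedir': {'/usr': '/var/lib', '/usr/local': '/var/local/lib'},
}

def prefixed_default(name: str, prefix: str, fallback: str) -> str:
    # Use the special-case default for this prefix if one exists, else the normal default.
    return NOPREFIX_DEFAULTS.get(name, {}).get(prefix, fallback)

assert prefixed_default('sysconfdir', '/usr', 'etc') == '/etc'
assert prefixed_default('sysconfdir', '/opt/myapp', 'etc') == 'etc'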
@@ -107,9 +106,6 @@ def _process_include_type_kw(cls, kwargs: T.Dict[str, T.Any]) -> str: return kwargs['include_type'] def __init__(self, type_name: DependencyTypeName, kwargs: T.Dict[str, T.Any]) -> None: - # This allows two Dependencies to be compared even after being copied. - # The purpose is to allow the name to be changed, but still have a proper comparison - self.__id = id(self) self.name = f'dep{id(self)}' self.version: T.Optional[str] = None self.language: T.Optional[str] = None # None means C-like @@ -128,14 +124,6 @@ def __init__(self, type_name: DependencyTypeName, kwargs: T.Dict[str, T.Any]) -> self.featurechecks: T.List['FeatureCheckBase'] = [] self.feature_since: T.Optional[T.Tuple[str, str]] = None - def __eq__(self, other: object) -> bool: - if not isinstance(other, Dependency): - return NotImplemented - return self.__id == other.__id - - def __hash__(self) -> int: - return self.__id - def __repr__(self) -> str: return f'<{self.__class__.__name__} {self.name}: {self.is_found}>' diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py index cccc0c3bde47..7a461637c4a9 100644 --- a/mesonbuild/dependencies/boost.py +++ b/mesonbuild/dependencies/boost.py @@ -580,8 +580,8 @@ def filter_libraries(self, libs: T.List[BoostLibraryFile], lib_vers: str) -> T.L # MSVC is very picky with the library tags vscrt = '' try: - crt_val = self.env.coredata.options[mesonlib.OptionKey('b_vscrt')].value - buildtype = self.env.coredata.options[mesonlib.OptionKey('buildtype')].value + crt_val = self.env.coredata.optstore.get_value('b_vscrt') + buildtype = self.env.coredata.optstore.get_value('buildtype') vscrt = self.clib_compiler.get_crt_compile_args(crt_val, buildtype)[0] except (KeyError, IndexError, AttributeError): pass @@ -653,9 +653,19 @@ def detect_roots(self) -> None: try: boost_pc = PkgConfigDependency('boost', self.env, {'required': False}) if boost_pc.found(): - boost_root = boost_pc.get_variable(pkgconfig='prefix') - if boost_root: - roots += [Path(boost_root)] + boost_lib_dir = boost_pc.get_variable(pkgconfig='libdir') + boost_inc_dir = boost_pc.get_variable(pkgconfig='includedir') + if boost_lib_dir and boost_inc_dir: + mlog.debug('Trying to find boost with:') + mlog.debug(f' - boost_includedir = {Path(boost_inc_dir)}') + mlog.debug(f' - boost_librarydir = {Path(boost_lib_dir)}') + + self.detect_split_root(Path(boost_inc_dir), Path(boost_lib_dir)) + return + else: + boost_root = boost_pc.get_variable(pkgconfig='prefix') + if boost_root: + roots += [Path(boost_root)] except DependencyException: pass diff --git a/mesonbuild/dependencies/cuda.py b/mesonbuild/dependencies/cuda.py index a38e325b8741..82bf5ad82b94 100644 --- a/mesonbuild/dependencies/cuda.py +++ b/mesonbuild/dependencies/cuda.py @@ -41,11 +41,6 @@ def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> No req_modules = ['cudart'] if kwargs.get('static', True): req_modules = ['cudart_static'] - machine = self.env.machines[self.for_machine] - if machine.is_linux(): - # extracted by running - # nvcc -v foo.o - req_modules += ['rt', 'pthread', 'dl'] self.requested_modules = req_modules + self.requested_modules (self.cuda_path, self.version, self.is_found) = self._detect_cuda_path_and_version() @@ -61,12 +56,9 @@ def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> No self.incdir = os.path.join(self.cuda_path, 'include') self.compile_args += [f'-I{self.incdir}'] - if self.language != 'cuda': - arch_libdir = self._detect_arch_libdir() - self.libdir = 
os.path.join(self.cuda_path, arch_libdir) - mlog.debug('CUDA library directory is', mlog.bold(self.libdir)) - else: - self.libdir = None + arch_libdir = self._detect_arch_libdir() + self.libdir = os.path.join(self.cuda_path, arch_libdir) + mlog.debug('CUDA library directory is', mlog.bold(self.libdir)) self.is_found = self._find_requested_libraries() @@ -244,7 +236,14 @@ def _find_requested_libraries(self) -> bool: all_found = True for module in self.requested_modules: - args = self.clib_compiler.find_library(module, self.env, [self.libdir] if self.libdir else []) + args = self.clib_compiler.find_library(module, self.env, [self.libdir]) + if module == 'cudart_static' and self.language != 'cuda': + machine = self.env.machines[self.for_machine] + if machine.is_linux(): + # extracted by running + # nvcc -v foo.o + args += ['-lrt', '-lpthread', '-ldl'] + if args is None: self._report_dependency_error(f'Couldn\'t find requested CUDA module \'{module}\'') all_found = False @@ -286,10 +285,24 @@ def get_requested(self, kwargs: T.Dict[str, T.Any]) -> T.List[str]: def get_link_args(self, language: T.Optional[str] = None, raw: bool = False) -> T.List[str]: args: T.List[str] = [] - if self.libdir: - args += self.clib_compiler.get_linker_search_args(self.libdir) for lib in self.requested_modules: - args += self.lib_modules[lib] + link_args = self.lib_modules[lib] + # Turn canonical arguments like + # /opt/cuda/lib64/libcublas.so + # back into + # -lcublas + # since this is how CUDA modules were passed to nvcc since time immemorial + if language == 'cuda': + if lib in frozenset(['cudart', 'cudart_static']): + # nvcc always links these unconditionally + mlog.debug(f'Not adding \'{lib}\' to dependency, since nvcc will link it implicitly') + link_args = [] + elif link_args and link_args[0].startswith(self.libdir): + # module included with CUDA, nvcc knows how to find these itself + mlog.debug(f'CUDA module \'{lib}\' found in CUDA libdir') + link_args = ['-l' + lib] + args += link_args + return args packages['cuda'] = CudaDependency diff --git a/mesonbuild/dependencies/misc.py b/mesonbuild/dependencies/misc.py index fd5965233dc6..4011c60fb189 100644 --- a/mesonbuild/dependencies/misc.py +++ b/mesonbuild/dependencies/misc.py @@ -95,50 +95,47 @@ def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> No # No macro defined for OpenMP, but OpenMP 3.1 is supported. self.version = '3.1' self.is_found = True - self.compile_args = self.link_args = self.clib_compiler.openmp_flags() + self.compile_args = self.link_args = self.clib_compiler.openmp_flags(environment) return if self.clib_compiler.get_id() == 'pgi': # through at least PGI 19.4, there is no macro defined for OpenMP, but OpenMP 3.1 is supported. 
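The cuda.py change above reverses the usual canonicalisation when the link is driven by nvcc: fully resolved module paths such as /opt/cuda/lib64/libcublas.so are turned back into -lcublas, and cudart/cudart_static are dropped because nvcc links them implicitly. A stripped-down sketch of that rewrite (hypothetical libdir and module table, not the actual CudaDependency code):

import typing as T

LIBDIR = '/opt/cuda/lib64'                          # assumed CUDA libdir for this example
IMPLICIT = frozenset(['cudart', 'cudart_static'])   # nvcc links these on its own

def cuda_link_args(modules: T.Dict[str, T.List[str]], language: T.Optional[str]) -> T.List[str]:
    args: T.List[str] = []
    for lib, link_args in modules.items():
        if language == 'cuda':
            if lib in IMPLICIT:
                continue                             # nvcc adds these implicitly, skip them
            if link_args and link_args[0].startswith(LIBDIR):
                link_args = ['-l' + lib]             # bundled module: let nvcc locate it itself
        args += link_args
    return args

mods = {'cublas': ['/opt/cuda/lib64/libcublas.so'],
        'cudart_static': ['/opt/cuda/lib64/libcudart_static.a']}
assert cuda_link_args(mods, 'cuda') == ['-lcublas']
assert cuda_link_args(mods, None) == ['/opt/cuda/lib64/libcublas.so',
                                      '/opt/cuda/lib64/libcudart_static.a']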
self.version = '3.1' self.is_found = True - self.compile_args = self.link_args = self.clib_compiler.openmp_flags() + self.compile_args = self.link_args = self.clib_compiler.openmp_flags(environment) + return + + # Set these now so they're available for the following compiler checks + try: + self.compile_args.extend(self.clib_compiler.openmp_flags(environment)) + self.link_args.extend(self.clib_compiler.openmp_link_flags(environment)) + except mesonlib.MesonException as e: + mlog.warning('OpenMP support not available because:', str(e), fatal=False) return try: openmp_date = self.clib_compiler.get_define( - '_OPENMP', '', self.env, self.clib_compiler.openmp_flags(), [self], disable_cache=True)[0] + '_OPENMP', '', self.env, [], [self], disable_cache=True)[0] except mesonlib.EnvironmentException as e: mlog.debug('OpenMP support not available in the compiler') mlog.debug(e) - openmp_date = None - - if openmp_date: - try: - self.version = self.VERSIONS[openmp_date] - except KeyError: - mlog.debug(f'Could not find an OpenMP version matching {openmp_date}') - if openmp_date == '_OPENMP': - mlog.debug('This can be caused by flags such as gcc\'s `-fdirectives-only`, which affect preprocessor behavior.') - return + return - if self.clib_compiler.get_id() == 'clang-cl': - # this is necessary for clang-cl, see https://github.com/mesonbuild/meson/issues/5298 - clangcl_openmp_link_args = self.clib_compiler.find_library("libomp", self.env, []) - if not clangcl_openmp_link_args: - mlog.log(mlog.yellow('WARNING:'), 'OpenMP found but libomp for clang-cl missing.') - return - self.link_args.extend(clangcl_openmp_link_args) - - # Flang has omp_lib.h - header_names = ('omp.h', 'omp_lib.h') - for name in header_names: - if self.clib_compiler.has_header(name, '', self.env, dependencies=[self], disable_cache=True)[0]: - self.is_found = True - self.compile_args.extend(self.clib_compiler.openmp_flags()) - self.link_args.extend(self.clib_compiler.openmp_link_flags()) - break - if not self.is_found: - mlog.log(mlog.yellow('WARNING:'), 'OpenMP found but omp.h missing.') + try: + self.version = self.VERSIONS[openmp_date] + except KeyError: + mlog.debug(f'Could not find an OpenMP version matching {openmp_date}') + if openmp_date == '_OPENMP': + mlog.debug('This can be caused by flags such as gcc\'s `-fdirectives-only`, which affect preprocessor behavior.') + return + + # Flang has omp_lib.h + header_names = ('omp.h', 'omp_lib.h') + for name in header_names: + if self.clib_compiler.has_header(name, '', self.env, dependencies=[self], disable_cache=True)[0]: + self.is_found = True + break + else: + mlog.warning('OpenMP found but omp.h missing.', fatal=False) packages['openmp'] = OpenMPDependency @@ -422,7 +419,6 @@ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]): if self.static: if not self._add_sub_dependency(iconv_factory(env, self.for_machine, {'static': True})): self.is_found = False - return class OpensslSystemDependency(SystemDependency): diff --git a/mesonbuild/dependencies/pkgconfig.py b/mesonbuild/dependencies/pkgconfig.py index 30e3d2896d46..a87f413ad9df 100644 --- a/mesonbuild/dependencies/pkgconfig.py +++ b/mesonbuild/dependencies/pkgconfig.py @@ -238,7 +238,7 @@ def _check_pkgconfig(self, pkgbin: ExternalProgram) -> T.Optional[str]: def _get_env(self, uninstalled: bool = False) -> EnvironmentVariables: env = EnvironmentVariables() key = OptionKey('pkg_config_path', machine=self.for_machine) - extra_paths: T.List[str] = self.env.coredata.options[key].value[:] + extra_paths: 
T.List[str] = self.env.coredata.optstore.get_value(key)[:] if uninstalled: uninstalled_path = Path(self.env.get_build_dir(), 'meson-uninstalled').as_posix() if uninstalled_path not in extra_paths: @@ -397,7 +397,7 @@ def _search_libs(self, libs_in: ImmutableListProtocol[str], raw_libs_in: Immutab # # Only prefix_libpaths are reordered here because there should not be # too many system_libpaths to cause library version issues. - pkg_config_path: T.List[str] = self.env.coredata.options[OptionKey('pkg_config_path', machine=self.for_machine)].value + pkg_config_path: T.List[str] = self.env.coredata.optstore.get_value(OptionKey('pkg_config_path', machine=self.for_machine)) pkg_config_path = self._convert_mingw_paths(pkg_config_path) prefix_libpaths = OrderedSet(sort_libpaths(list(prefix_libpaths), pkg_config_path)) system_libpaths: OrderedSet[str] = OrderedSet() diff --git a/mesonbuild/dependencies/python.py b/mesonbuild/dependencies/python.py index f00d0e6ae325..46d12f309855 100644 --- a/mesonbuild/dependencies/python.py +++ b/mesonbuild/dependencies/python.py @@ -30,6 +30,7 @@ class PythonIntrospectionDict(TypedDict): install_paths: T.Dict[str, str] is_pypy: bool is_venv: bool + is_freethreaded: bool link_libpython: bool sysconfig_paths: T.Dict[str, str] paths: T.Dict[str, str] @@ -83,6 +84,7 @@ def __init__(self, name: str, command: T.Optional[T.List[str]] = None, self.command = ext_prog.command self.path = ext_prog.path self.cached_version = None + self.version_arg = '--version' # We want strong key values, so we always populate this with bogus data. # Otherwise to make the type checkers happy we'd have to do .get() for @@ -92,6 +94,7 @@ def __init__(self, name: str, command: T.Optional[T.List[str]] = None, 'install_paths': {}, 'is_pypy': False, 'is_venv': False, + 'is_freethreaded': False, 'link_libpython': False, 'sysconfig_paths': {}, 'paths': {}, @@ -152,6 +155,7 @@ def __init__(self, python_holder: 'BasicPythonExternalProgram', embed: bool): self.variables = python_holder.info['variables'] self.paths = python_holder.info['paths'] self.is_pypy = python_holder.info['is_pypy'] + self.is_freethreaded = python_holder.info['is_freethreaded'] # The "-embed" version of python.pc / python-config was introduced in 3.8, # and distutils extension linking was changed to be considered a non embed # usage. 
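# Illustrative sketch (not part of this patch): the _OPENMP define queried by
# OpenMPDependency in the misc.py hunk above expands to a yyyymm date of the
# supported OpenMP specification, which is then looked up in a VERSIONS table.
# The entries below are a small representative subset; the real table lives in
# mesonbuild/dependencies/misc.py.
VERSIONS = {
    '201107': '3.1',
    '201307': '4.0',
    '201511': '4.5',
    '201811': '5.0',
}

openmp_date = '201511'  # what get_define('_OPENMP', ...) might report
assert VERSIONS.get(openmp_date) == '4.5'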
Before then, this dependency always uses the embed=True handling @@ -167,68 +171,11 @@ def __init__(self, python_holder: 'BasicPythonExternalProgram', embed: bool): else: self.major_version = 2 - -class PythonPkgConfigDependency(PkgConfigDependency, _PythonDependencyBase): - - def __init__(self, name: str, environment: 'Environment', - kwargs: T.Dict[str, T.Any], installation: 'BasicPythonExternalProgram', - libpc: bool = False): - if libpc: - mlog.debug(f'Searching for {name!r} via pkgconfig lookup in LIBPC') - else: - mlog.debug(f'Searching for {name!r} via fallback pkgconfig lookup in default paths') - - PkgConfigDependency.__init__(self, name, environment, kwargs) - _PythonDependencyBase.__init__(self, installation, kwargs.get('embed', False)) - - if libpc and not self.is_found: - mlog.debug(f'"python-{self.version}" could not be found in LIBPC, this is likely due to a relocated python installation') - - # pkg-config files are usually accurate starting with python 3.8 - if not self.link_libpython and mesonlib.version_compare(self.version, '< 3.8'): - self.link_args = [] - - -class PythonFrameworkDependency(ExtraFrameworkDependency, _PythonDependencyBase): - - def __init__(self, name: str, environment: 'Environment', - kwargs: T.Dict[str, T.Any], installation: 'BasicPythonExternalProgram'): - ExtraFrameworkDependency.__init__(self, name, environment, kwargs) - _PythonDependencyBase.__init__(self, installation, kwargs.get('embed', False)) - - -class PythonSystemDependency(SystemDependency, _PythonDependencyBase): - - def __init__(self, name: str, environment: 'Environment', - kwargs: T.Dict[str, T.Any], installation: 'BasicPythonExternalProgram'): - SystemDependency.__init__(self, name, environment, kwargs) - _PythonDependencyBase.__init__(self, installation, kwargs.get('embed', False)) - - # match pkg-config behavior - if self.link_libpython: - # link args - if mesonlib.is_windows(): - self.find_libpy_windows(environment, limited_api=False) - else: - self.find_libpy(environment) - else: - self.is_found = True - - # compile args - inc_paths = mesonlib.OrderedSet([ - self.variables.get('INCLUDEPY'), - self.paths.get('include'), - self.paths.get('platinclude')]) - - self.compile_args += ['-I' + path for path in inc_paths if path] - - # https://sourceforge.net/p/mingw-w64/mailman/message/30504611/ - # https://github.com/python/cpython/pull/100137 - if mesonlib.is_windows() and self.get_windows_python_arch().endswith('64') and mesonlib.version_compare(self.version, '<3.12'): - self.compile_args += ['-DMS_WIN64='] - - if not self.clib_compiler.has_header('Python.h', '', environment, extra_args=self.compile_args)[0]: - self.is_found = False + # pyconfig.h is shared between regular and free-threaded builds in the + # Windows installer from python.org, and hence does not define + # Py_GIL_DISABLED correctly. 
So do it here: + if mesonlib.is_windows() and self.is_freethreaded: + self.compile_args += ['-DPy_GIL_DISABLED'] def find_libpy(self, environment: 'Environment') -> None: if self.is_pypy: @@ -277,6 +224,8 @@ def get_windows_link_args(self, limited_api: bool) -> T.Optional[T.List[str]]: if self.static: libpath = Path('libs') / f'libpython{vernum}.a' else: + if limited_api: + vernum = vernum[0] comp = self.get_compiler() if comp.id == "gcc": if imp_lower == 'pypy' and verdot == '3.8': @@ -287,9 +236,10 @@ def get_windows_link_args(self, limited_api: bool) -> T.Optional[T.List[str]]: else: libpath = Path(f'python{vernum}.dll') else: - if limited_api: - vernum = vernum[0] - libpath = Path('libs') / f'python{vernum}.lib' + if self.is_freethreaded: + libpath = Path('libs') / f'python{vernum}t.lib' + else: + libpath = Path('libs') / f'python{vernum}.lib' # For a debug build, pyconfig.h may force linking with # pythonX_d.lib (see meson#10776). This cannot be avoided # and won't work unless we also have a debug build of @@ -302,8 +252,8 @@ def get_windows_link_args(self, limited_api: bool) -> T.Optional[T.List[str]]: # `debugoptimized` buildtype may not set debug=True currently, see gh-11645 is_debug_build = debug or buildtype == 'debug' vscrt_debug = False - if mesonlib.OptionKey('b_vscrt') in self.env.coredata.options: - vscrt = self.env.coredata.options[mesonlib.OptionKey('b_vscrt')].value + if mesonlib.OptionKey('b_vscrt') in self.env.coredata.optstore: + vscrt = self.env.coredata.optstore.get_value('b_vscrt') if vscrt in {'mdd', 'mtd', 'from_buildtype', 'static_from_buildtype'}: vscrt_debug = True if is_debug_build and vscrt_debug and not self.variables.get('Py_DEBUG'): @@ -317,9 +267,15 @@ def get_windows_link_args(self, limited_api: bool) -> T.Optional[T.List[str]]: lib = Path(self.variables.get('base_prefix')) / libpath elif self.platform.startswith('mingw'): if self.static: - libname = self.variables.get('LIBRARY') + if limited_api: + libname = self.variables.get('ABI3DLLLIBRARY') + else: + libname = self.variables.get('LIBRARY') else: - libname = self.variables.get('LDLIBRARY') + if limited_api: + libname = self.variables.get('ABI3LDLIBRARY') + else: + libname = self.variables.get('LDLIBRARY') lib = Path(self.variables.get('LIBDIR')) / libname else: raise mesonlib.MesonBugException( @@ -353,6 +309,68 @@ def find_libpy_windows(self, env: 'Environment', limited_api: bool = False) -> N self.link_args = largs self.is_found = True +class PythonPkgConfigDependency(PkgConfigDependency, _PythonDependencyBase): + + def __init__(self, name: str, environment: 'Environment', + kwargs: T.Dict[str, T.Any], installation: 'BasicPythonExternalProgram', + libpc: bool = False): + if libpc: + mlog.debug(f'Searching for {name!r} via pkgconfig lookup in LIBPC') + else: + mlog.debug(f'Searching for {name!r} via fallback pkgconfig lookup in default paths') + + PkgConfigDependency.__init__(self, name, environment, kwargs) + _PythonDependencyBase.__init__(self, installation, kwargs.get('embed', False)) + + if libpc and not self.is_found: + mlog.debug(f'"python-{self.version}" could not be found in LIBPC, this is likely due to a relocated python installation') + + # pkg-config files are usually accurate starting with python 3.8 + if not self.link_libpython and mesonlib.version_compare(self.version, '< 3.8'): + self.link_args = [] + + +class PythonFrameworkDependency(ExtraFrameworkDependency, _PythonDependencyBase): + + def __init__(self, name: str, environment: 'Environment', + kwargs: T.Dict[str, T.Any], 
installation: 'BasicPythonExternalProgram'): + ExtraFrameworkDependency.__init__(self, name, environment, kwargs) + _PythonDependencyBase.__init__(self, installation, kwargs.get('embed', False)) + + +class PythonSystemDependency(SystemDependency, _PythonDependencyBase): + + def __init__(self, name: str, environment: 'Environment', + kwargs: T.Dict[str, T.Any], installation: 'BasicPythonExternalProgram'): + SystemDependency.__init__(self, name, environment, kwargs) + _PythonDependencyBase.__init__(self, installation, kwargs.get('embed', False)) + + # match pkg-config behavior + if self.link_libpython: + # link args + if mesonlib.is_windows(): + self.find_libpy_windows(environment, limited_api=False) + else: + self.find_libpy(environment) + else: + self.is_found = True + + # compile args + inc_paths = mesonlib.OrderedSet([ + self.variables.get('INCLUDEPY'), + self.paths.get('include'), + self.paths.get('platinclude')]) + + self.compile_args += ['-I' + path for path in inc_paths if path] + + # https://sourceforge.net/p/mingw-w64/mailman/message/30504611/ + # https://github.com/python/cpython/pull/100137 + if mesonlib.is_windows() and self.get_windows_python_arch().endswith('64') and mesonlib.version_compare(self.version, '<3.12'): + self.compile_args += ['-DMS_WIN64='] + + if not self.clib_compiler.has_header('Python.h', '', environment, extra_args=self.compile_args)[0]: + self.is_found = False + @staticmethod def log_tried() -> str: return 'sysconfig' diff --git a/mesonbuild/dependencies/qt.py b/mesonbuild/dependencies/qt.py index 65b6a5810961..86e32140e924 100644 --- a/mesonbuild/dependencies/qt.py +++ b/mesonbuild/dependencies/qt.py @@ -297,8 +297,8 @@ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]): # Use the buildtype by default, but look at the b_vscrt option if the # compiler supports it. is_debug = self.env.coredata.get_option(mesonlib.OptionKey('buildtype')) == 'debug' - if mesonlib.OptionKey('b_vscrt') in self.env.coredata.options: - if self.env.coredata.options[mesonlib.OptionKey('b_vscrt')].value in {'mdd', 'mtd'}: + if mesonlib.OptionKey('b_vscrt') in self.env.coredata.optstore: + if self.env.coredata.optstore.get_value('b_vscrt') in {'mdd', 'mtd'}: is_debug = True modules_lib_suffix = _get_modules_lib_suffix(self.version, self.env.machines[self.for_machine], is_debug) diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index 19b9e81b53b5..484ef45d478d 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -11,6 +11,10 @@ from . import coredata from . import mesonlib +from . import machinefile + +CmdLineFileParser = machinefile.CmdLineFileParser + from .mesonlib import ( MesonException, MachineChoice, Popen_safe, PerMachine, PerMachineDefaultable, PerThreeMachineDefaultable, split_args, quote_arg, OptionKey, @@ -144,6 +148,9 @@ def find_coverage_tools(coredata: coredata.CoreData) -> T.Tuple[T.Optional[str], gcovr_exe, gcovr_version = detect_gcovr() llvm_cov_exe = detect_llvm_cov(compute_llvm_suffix(coredata)) + # Some platforms may provide versioned clang but only non-versioned llvm utils + if llvm_cov_exe is None: + llvm_cov_exe = detect_llvm_cov('') lcov_exe, lcov_version, genhtml_exe = detect_lcov_genhtml() @@ -188,6 +195,8 @@ def get_llvm_tool_names(tool: str) -> T.List[str]: # unless it becomes a stable release. 
suffixes = [ '', # base (no suffix) + '-19.1', '19.1', + '-19', '19', '-18.1', '18.1', '-18', '18', '-17', '17', @@ -209,7 +218,7 @@ def get_llvm_tool_names(tool: str) -> T.List[str]: '-3.7', '37', '-3.6', '36', '-3.5', '35', - '-19', # Debian development snapshot + '-20', # Debian development snapshot '-devel', # FreeBSD development snapshot ] names: T.List[str] = [] @@ -505,18 +514,18 @@ def machine_info_can_run(machine_info: MachineInfo): if machine_info.system != detect_system(): return False true_build_cpu_family = detect_cpu_family({}) + assert machine_info.cpu_family is not None, 'called on incomplete machine_info' return \ (machine_info.cpu_family == true_build_cpu_family) or \ ((true_build_cpu_family == 'x86_64') and (machine_info.cpu_family == 'x86')) or \ - ((true_build_cpu_family == 'mips64') and (machine_info.cpu_family == 'mips')) or \ - ((true_build_cpu_family == 'aarch64') and (machine_info.cpu_family == 'arm')) + ((true_build_cpu_family == 'mips64') and (machine_info.cpu_family == 'mips')) class Environment: private_dir = 'meson-private' log_dir = 'meson-logs' info_dir = 'meson-info' - def __init__(self, source_dir: str, build_dir: str, options: coredata.SharedCMDOptions) -> None: + def __init__(self, source_dir: str, build_dir: str, cmd_options: coredata.SharedCMDOptions) -> None: self.source_dir = source_dir self.build_dir = build_dir # Do not try to create build directories when build_dir is none. @@ -532,26 +541,26 @@ def __init__(self, source_dir: str, build_dir: str, options: coredata.SharedCMDO self.coredata: coredata.CoreData = coredata.load(self.get_build_dir(), suggest_reconfigure=False) self.first_invocation = False except FileNotFoundError: - self.create_new_coredata(options) + self.create_new_coredata(cmd_options) except coredata.MesonVersionMismatchException as e: # This is routine, but tell the user the update happened mlog.log('Regenerating configuration from scratch:', str(e)) - coredata.read_cmd_line_file(self.build_dir, options) - self.create_new_coredata(options) + coredata.read_cmd_line_file(self.build_dir, cmd_options) + self.create_new_coredata(cmd_options) except MesonException as e: # If we stored previous command line options, we can recover from # a broken/outdated coredata. 
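# Illustrative sketch (not part of this patch): how get_windows_link_args() in
# the python.py hunk above chooses the MSVC import library name. vernum is
# e.g. '313'; free-threaded builds gain a 't' suffix and the limited API
# collapses the version to the major number. msvc_python_libname is a
# hypothetical helper that only mirrors the branch shown in the diff.
def msvc_python_libname(vernum: str, limited_api: bool, freethreaded: bool) -> str:
    if limited_api:
        vernum = vernum[0]
    if freethreaded:
        return f'libs/python{vernum}t.lib'
    return f'libs/python{vernum}.lib'

assert msvc_python_libname('313', False, True) == 'libs/python313t.lib'
assert msvc_python_libname('313', True, False) == 'libs/python3.lib'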
if os.path.isfile(coredata.get_cmd_line_file(self.build_dir)): mlog.warning('Regenerating configuration from scratch.', fatal=False) mlog.log('Reason:', mlog.red(str(e))) - coredata.read_cmd_line_file(self.build_dir, options) - self.create_new_coredata(options) + coredata.read_cmd_line_file(self.build_dir, cmd_options) + self.create_new_coredata(cmd_options) else: raise MesonException(f'{str(e)} Try regenerating using "meson setup --wipe".') else: # Just create a fresh coredata in this case self.scratch_dir = '' - self.create_new_coredata(options) + self.create_new_coredata(cmd_options) ## locally bind some unfrozen configuration @@ -589,7 +598,7 @@ def __init__(self, source_dir: str, build_dir: str, options: coredata.SharedCMDO ## Read in native file(s) to override build machine configuration if self.coredata.config_files is not None: - config = coredata.parse_machine_files(self.coredata.config_files, self.source_dir) + config = machinefile.parse_machine_files(self.coredata.config_files, self.source_dir) binaries.build = BinaryTable(config.get('binaries', {})) properties.build = Properties(config.get('properties', {})) cmakevars.build = CMakeVariables(config.get('cmake', {})) @@ -600,7 +609,7 @@ def __init__(self, source_dir: str, build_dir: str, options: coredata.SharedCMDO ## Read in cross file(s) to override host machine configuration if self.coredata.cross_files: - config = coredata.parse_machine_files(self.coredata.cross_files, self.source_dir) + config = machinefile.parse_machine_files(self.coredata.cross_files, self.source_dir) properties.host = Properties(config.get('properties', {})) binaries.host = BinaryTable(config.get('binaries', {})) cmakevars.host = CMakeVariables(config.get('cmake', {})) @@ -623,7 +632,7 @@ def __init__(self, source_dir: str, build_dir: str, options: coredata.SharedCMDO self.cmakevars = cmakevars.default_missing() # Command line options override those from cross/native files - self.options.update(options.cmd_line_options) + self.options.update(cmd_options.cmd_line_options) # Take default value from env if not set in cross/native files or command line. self._set_default_options_from_env() diff --git a/mesonbuild/interpreter/compiler.py b/mesonbuild/interpreter/compiler.py index 50a850af4482..1bdb321e47e8 100644 --- a/mesonbuild/interpreter/compiler.py +++ b/mesonbuild/interpreter/compiler.py @@ -13,6 +13,7 @@ from .. import build from .. import coredata from .. import dependencies +from .. import options from .. import mesonlib from .. 
import mlog from ..compilers import SUFFIX_TO_LANG, RunResult @@ -89,7 +90,7 @@ class FindLibraryKW(ExtractRequired, ExtractSearchDirs): header_include_directories: T.List[build.IncludeDirs] header_no_builtin_args: bool header_prefix: str - header_required: T.Union[bool, coredata.UserFeatureOption] + header_required: T.Union[bool, options.UserFeatureOption] class PreprocessKW(TypedDict): output: str @@ -269,7 +270,7 @@ def _determine_args(self, kwargs: BaseCompileKW, for idir in i.to_string_list(self.environment.get_source_dir(), self.environment.get_build_dir()): args.extend(self.compiler.get_include_args(idir, False)) if not kwargs['no_builtin_args']: - opts = coredata.OptionsView(self.environment.coredata.options, self.subproject) + opts = coredata.OptionsView(self.environment.coredata.optstore, self.subproject) args += self.compiler.get_option_compile_args(opts) if mode is CompileCheckMode.LINK: args.extend(self.compiler.get_option_link_args(opts)) @@ -685,7 +686,7 @@ def notfound_library(self, libname: str) -> 'dependencies.ExternalLibrary': @typed_pos_args('compiler.find_library', str) @typed_kwargs( 'compiler.find_library', - KwargInfo('required', (bool, coredata.UserFeatureOption), default=True), + KwargInfo('required', (bool, options.UserFeatureOption), default=True), KwargInfo('has_headers', ContainerTypeInfo(list, str), listify=True, default=[], since='0.50.0'), KwargInfo('static', (bool, NoneType), since='0.51.0'), KwargInfo('disabler', bool, default=False, since='0.49.0'), diff --git a/mesonbuild/interpreter/dependencyfallbacks.py b/mesonbuild/interpreter/dependencyfallbacks.py index eca6a2c71796..d5e0740e0974 100644 --- a/mesonbuild/interpreter/dependencyfallbacks.py +++ b/mesonbuild/interpreter/dependencyfallbacks.py @@ -315,8 +315,7 @@ def lookup(self, kwargs: TYPE_nkwargs, force_fallback: bool = False) -> Dependen return self._notfound_dependency() # Check if usage of the subproject fallback is forced - wrap_mode = self.coredata.get_option(OptionKey('wrap_mode')) - assert isinstance(wrap_mode, WrapMode), 'for mypy' + wrap_mode = WrapMode.from_string(self.coredata.get_option(OptionKey('wrap_mode'))) force_fallback_for = self.coredata.get_option(OptionKey('force_fallback_for')) assert isinstance(force_fallback_for, list), 'for mypy' self.nofallback = wrap_mode == WrapMode.nofallback diff --git a/mesonbuild/interpreter/interpreter.py b/mesonbuild/interpreter/interpreter.py index afa68e88bc79..0711bbf75283 100644 --- a/mesonbuild/interpreter/interpreter.py +++ b/mesonbuild/interpreter/interpreter.py @@ -11,6 +11,7 @@ from .. import coredata from .. import dependencies from .. import mlog +from .. import options from .. import build from .. import optinterpreter from .. 
import compilers @@ -163,7 +164,7 @@ def add_section(self, section: str, values: T.Dict[str, T.Any], bool_yn: bool, elif isinstance(i, Disabler): FeatureNew.single_use('disabler in summary', '0.64.0', subproject) formatted_values.append(mlog.red('NO')) - elif isinstance(i, coredata.UserOption): + elif isinstance(i, options.UserOption): FeatureNew.single_use('feature option in summary', '0.58.0', subproject) formatted_values.append(i.printable_value()) else: @@ -450,7 +451,7 @@ def build_holder_map(self) -> None: build.StructuredSources: OBJ.StructuredSourcesHolder, compilers.RunResult: compilerOBJ.TryRunResultHolder, dependencies.ExternalLibrary: OBJ.ExternalLibraryHolder, - coredata.UserFeatureOption: OBJ.FeatureOptionHolder, + options.UserFeatureOption: OBJ.FeatureOptionHolder, envconfig.MachineInfo: OBJ.MachineHolder, build.ConfigurationData: OBJ.ConfigurationDataHolder, }) @@ -1012,7 +1013,7 @@ def _do_subproject_cmake(self, subp_name: str, subdir: str, kwargs: kwtypes.DoSubproject) -> SubprojectHolder: from ..cmake import CMakeInterpreter with mlog.nested(subp_name): - prefix = self.coredata.options[OptionKey('prefix')].value + prefix = self.coredata.optstore.get_value('prefix') from ..modules.cmake import CMakeSubprojectOptions options = kwargs.get('options') or CMakeSubprojectOptions() @@ -1047,22 +1048,22 @@ def _do_subproject_cargo(self, subp_name: str, subdir: str, # FIXME: Are there other files used by cargo interpreter? [os.path.join(subdir, 'Cargo.toml')]) - def get_option_internal(self, optname: str) -> coredata.UserOption: + def get_option_internal(self, optname: str) -> options.UserOption: key = OptionKey.from_string(optname).evolve(subproject=self.subproject) if not key.is_project(): - for opts in [self.coredata.options, compilers.base_options]: + for opts in [self.coredata.optstore, compilers.base_options]: v = opts.get(key) if v is None or v.yielding: v = opts.get(key.as_root()) if v is not None: - assert isinstance(v, coredata.UserOption), 'for mypy' + assert isinstance(v, options.UserOption), 'for mypy' return v try: - opt = self.coredata.options[key] - if opt.yielding and key.subproject and key.as_root() in self.coredata.options: - popt = self.coredata.options[key.as_root()] + opt = self.coredata.optstore.get_value_object(key) + if opt.yielding and key.subproject and key.as_root() in self.coredata.optstore: + popt = self.coredata.optstore.get_value_object(key.as_root()) if type(opt) is type(popt): opt = popt else: @@ -1085,7 +1086,7 @@ def get_option_internal(self, optname: str) -> coredata.UserOption: @typed_pos_args('get_option', str) @noKwargs def func_get_option(self, nodes: mparser.BaseNode, args: T.Tuple[str], - kwargs: 'TYPE_kwargs') -> T.Union[coredata.UserOption, 'TYPE_var']: + kwargs: 'TYPE_kwargs') -> T.Union[options.UserOption, 'TYPE_var']: optname = args[0] if ':' in optname: raise InterpreterException('Having a colon in option name is forbidden, ' @@ -1096,10 +1097,10 @@ def func_get_option(self, nodes: mparser.BaseNode, args: T.Tuple[str], raise InterpreterException(f'Invalid option name {optname!r}') opt = self.get_option_internal(optname) - if isinstance(opt, coredata.UserFeatureOption): + if isinstance(opt, options.UserFeatureOption): opt.name = optname return opt - elif isinstance(opt, coredata.UserOption): + elif isinstance(opt, options.UserOption): if isinstance(opt.value, str): return P_OBJ.OptionString(opt.value, f'{{{optname}}}') return opt.value @@ -1284,7 +1285,7 @@ def func_project(self, node: mparser.FunctionNode, args: T.Tuple[str, 
T.List[str self.build.subproject_dir = self.subproject_dir # Load wrap files from this (sub)project. - wrap_mode = self.coredata.get_option(OptionKey('wrap_mode')) + wrap_mode = WrapMode.from_string(self.coredata.get_option(OptionKey('wrap_mode'))) if not self.is_subproject() or wrap_mode != WrapMode.nopromote: subdir = os.path.join(self.subdir, spdirname) r = wrap.Resolver(self.environment.get_source_dir(), subdir, self.subproject, wrap_mode) @@ -1530,6 +1531,10 @@ def add_languages_for(self, args: T.List[str], required: bool, for_machine: Mach continue else: raise + if lang == 'cuda' and hasattr(self.backend, 'allow_thin_archives'): + # see NinjaBackend.__init__() why we need to disable thin archives for cuda + mlog.debug('added cuda as language, disabling thin archives for {}, since nvcc/nvlink cannot handle thin archives natively'.format(for_machine)) + self.backend.allow_thin_archives[for_machine] = False else: # update new values from commandline, if it applies self.coredata.process_compiler_options(lang, comp, self.environment, self.subproject) @@ -1538,7 +1543,7 @@ def add_languages_for(self, args: T.List[str], required: bool, for_machine: Mach if self.subproject: options = {} for k in comp.get_options(): - v = copy.copy(self.coredata.options[k]) + v = copy.copy(self.coredata.optstore.get_value_object(k)) k = k.evolve(subproject=self.subproject) options[k] = v self.coredata.add_compiler_options(options, lang, for_machine, self.environment, self.subproject) @@ -1637,12 +1642,13 @@ def find_program_impl(self, args: T.List[mesonlib.FileOrString], required: bool = True, silent: bool = True, wanted: T.Union[str, T.List[str]] = '', search_dirs: T.Optional[T.List[str]] = None, + version_arg: T.Optional[str] = '', version_func: T.Optional[ProgramVersionFunc] = None ) -> T.Union['ExternalProgram', 'build.Executable', 'OverrideProgram']: args = mesonlib.listify(args) extra_info: T.List[mlog.TV_Loggable] = [] - progobj = self.program_lookup(args, for_machine, default_options, required, search_dirs, wanted, version_func, extra_info) + progobj = self.program_lookup(args, for_machine, default_options, required, search_dirs, wanted, version_arg, version_func, extra_info) if progobj is None or not self.check_program_version(progobj, wanted, version_func, extra_info): progobj = self.notfound_program(args) @@ -1667,6 +1673,7 @@ def program_lookup(self, args: T.List[mesonlib.FileOrString], for_machine: Machi required: bool, search_dirs: T.List[str], wanted: T.Union[str, T.List[str]], + version_arg: T.Optional[str], version_func: T.Optional[ProgramVersionFunc], extra_info: T.List[mlog.TV_Loggable] ) -> T.Optional[T.Union[ExternalProgram, build.Executable, OverrideProgram]]: @@ -1679,7 +1686,7 @@ def program_lookup(self, args: T.List[mesonlib.FileOrString], for_machine: Machi return ExternalProgram('meson', self.environment.get_build_command(), silent=True) fallback = None - wrap_mode = self.coredata.get_option(OptionKey('wrap_mode')) + wrap_mode = WrapMode.from_string(self.coredata.get_option(OptionKey('wrap_mode'))) if wrap_mode != WrapMode.nofallback and self.environment.wrap_resolver: fallback = self.environment.wrap_resolver.find_program_provider(args) if fallback and wrap_mode == WrapMode.forcefallback: @@ -1692,6 +1699,8 @@ def program_lookup(self, args: T.List[mesonlib.FileOrString], for_machine: Machi prog = ExternalProgram('python3', mesonlib.python_command, silent=True) progobj = prog if prog.found() else None + if isinstance(progobj, ExternalProgram) and version_arg: + progobj.version_arg = 
version_arg if progobj and not self.check_program_version(progobj, wanted, version_func, extra_info): progobj = None @@ -1751,6 +1760,7 @@ def find_program_fallback(self, fallback: str, args: T.List[mesonlib.FileOrStrin REQUIRED_KW, KwargInfo('dirs', ContainerTypeInfo(list, str), default=[], listify=True, since='0.53.0'), KwargInfo('version', ContainerTypeInfo(list, str), default=[], listify=True, since='0.52.0'), + KwargInfo('version_argument', str, default='', since='1.5.0'), DEFAULT_OPTIONS.evolve(since='1.3.0') ) @disablerIfNotFound @@ -1765,7 +1775,7 @@ def func_find_program(self, node: mparser.BaseNode, args: T.Tuple[T.List[mesonli search_dirs = extract_search_dirs(kwargs) default_options = kwargs['default_options'] return self.find_program_impl(args[0], kwargs['native'], default_options=default_options, required=required, - silent=False, wanted=kwargs['version'], + silent=False, wanted=kwargs['version'], version_arg=kwargs['version_argument'], search_dirs=search_dirs) # When adding kwargs, please check if they make sense in dependencies.get_dep_identifier() @@ -2584,7 +2594,6 @@ def func_install_subdir(self, node: mparser.BaseNode, args: T.Tuple[str], ), KwargInfo( 'copy', bool, default=False, since='0.47.0', - deprecated='0.64.0', deprecated_message='Use fs.copyfile instead', ), KwargInfo('encoding', str, default='utf-8', since='0.47.0'), KwargInfo('format', str, default='meson', since='0.46.0', @@ -3037,13 +3046,13 @@ def print_extra_warnings(self) -> None: break def check_clang_asan_lundef(self) -> None: - if OptionKey('b_lundef') not in self.coredata.options: + if OptionKey('b_lundef') not in self.coredata.optstore: return - if OptionKey('b_sanitize') not in self.coredata.options: + if OptionKey('b_sanitize') not in self.coredata.optstore: return - if (self.coredata.options[OptionKey('b_lundef')].value and - self.coredata.options[OptionKey('b_sanitize')].value != 'none'): - value = self.coredata.options[OptionKey('b_sanitize')].value + if (self.coredata.optstore.get_value('b_lundef') and + self.coredata.optstore.get_value('b_sanitize') != 'none'): + value = self.coredata.optstore.get_value('b_sanitize') mlog.warning(textwrap.dedent(f'''\ Trying to use {value} sanitizer on Clang with b_lundef. This will probably not work. @@ -3417,7 +3426,7 @@ def build_target(self, node: mparser.BaseNode, args: T.Tuple[str, SourcesVarargs if kwargs['implib']: if kwargs['export_dynamic'] is False: - FeatureDeprecated.single_use('implib overrides explict export_dynamic off', '1.3.0', self.subprojct, + FeatureDeprecated.single_use('implib overrides explict export_dynamic off', '1.3.0', self.subproject, 'Do not set ths if want export_dynamic disabled if implib is enabled', location=node) kwargs['export_dynamic'] = True diff --git a/mesonbuild/interpreter/interpreterobjects.py b/mesonbuild/interpreter/interpreterobjects.py index 8cd9a2be6286..79e205d31d0f 100644 --- a/mesonbuild/interpreter/interpreterobjects.py +++ b/mesonbuild/interpreter/interpreterobjects.py @@ -8,11 +8,11 @@ from pathlib import Path, PurePath from .. import mesonlib -from .. import coredata +from .. import options from .. import build from .. 
import mlog -from ..modules import ModuleReturnValue, ModuleObject, ModuleState, ExtensionModule +from ..modules import ModuleReturnValue, ModuleObject, ModuleState, ExtensionModule, NewExtensionModule from ..backend.backends import TestProtocol from ..interpreterbase import ( ContainerTypeInfo, KwargInfo, MesonOperator, @@ -24,7 +24,7 @@ from ..interpreter.type_checking import NoneType, ENV_KW, ENV_SEPARATOR_KW, PKGCONFIG_DEFINE_KW from ..dependencies import Dependency, ExternalLibrary, InternalDependency from ..programs import ExternalProgram -from ..mesonlib import HoldableObject, OptionKey, listify, Popen_safe +from ..mesonlib import HoldableObject, listify, Popen_safe import typing as T @@ -52,7 +52,7 @@ def extract_required_kwarg(kwargs: 'kwargs.ExtractRequired', disabled = False required = False feature: T.Optional[str] = None - if isinstance(val, coredata.UserFeatureOption): + if isinstance(val, options.UserFeatureOption): if not feature_check: feature_check = FeatureNew('User option "feature"', '0.47.0') feature_check.use(subproject) @@ -85,12 +85,12 @@ def extract_search_dirs(kwargs: 'kwargs.ExtractSearchDirs') -> T.List[str]: raise InvalidCode(f'Search directory {d} is not an absolute path.') return [str(s) for s in search_dirs] -class FeatureOptionHolder(ObjectHolder[coredata.UserFeatureOption]): - def __init__(self, option: coredata.UserFeatureOption, interpreter: 'Interpreter'): +class FeatureOptionHolder(ObjectHolder[options.UserFeatureOption]): + def __init__(self, option: options.UserFeatureOption, interpreter: 'Interpreter'): super().__init__(option, interpreter) if option and option.is_auto(): # TODO: we need to cast here because options is not a TypedDict - auto = T.cast('coredata.UserFeatureOption', self.env.coredata.options[OptionKey('auto_features')]) + auto = T.cast('options.UserFeatureOption', self.env.coredata.optstore.get_value_object('auto_features')) self.held_object = copy.copy(auto) self.held_object.name = option.name self.methods.update({'enabled': self.enabled_method, @@ -108,12 +108,12 @@ def __init__(self, option: coredata.UserFeatureOption, interpreter: 'Interpreter def value(self) -> str: return 'disabled' if not self.held_object else self.held_object.value - def as_disabled(self) -> coredata.UserFeatureOption: + def as_disabled(self) -> options.UserFeatureOption: disabled = copy.deepcopy(self.held_object) disabled.value = 'disabled' return disabled - def as_enabled(self) -> coredata.UserFeatureOption: + def as_enabled(self) -> options.UserFeatureOption: enabled = copy.deepcopy(self.held_object) enabled.value = 'enabled' return enabled @@ -139,7 +139,7 @@ def allowed_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool: def auto_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool: return self.value == 'auto' - def _disable_if(self, condition: bool, message: T.Optional[str]) -> coredata.UserFeatureOption: + def _disable_if(self, condition: bool, message: T.Optional[str]) -> options.UserFeatureOption: if not condition: return copy.deepcopy(self.held_object) @@ -156,7 +156,7 @@ def _disable_if(self, condition: bool, message: T.Optional[str]) -> coredata.Use 'feature_option.require', _ERROR_MSG_KW, ) - def require_method(self, args: T.Tuple[bool], kwargs: 'kwargs.FeatureOptionRequire') -> coredata.UserFeatureOption: + def require_method(self, args: T.Tuple[bool], kwargs: 'kwargs.FeatureOptionRequire') -> options.UserFeatureOption: return self._disable_if(not args[0], kwargs['error_message']) 
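# Illustrative sketch (not part of this patch): the behaviour implemented by
# _disable_if()/require_method() above, in miniature. A copy of the feature
# option is returned; it flips to 'disabled' only when the condition fails
# (and, in the real implementation, an explicitly enabled option raises an
# error instead). FakeFeature is a hypothetical stand-in for
# options.UserFeatureOption.
import copy
from dataclasses import dataclass

@dataclass
class FakeFeature:
    value: str  # 'enabled', 'disabled' or 'auto'

def require(opt: FakeFeature, condition: bool) -> FakeFeature:
    if condition:
        return copy.deepcopy(opt)
    if opt.value == 'enabled':
        raise RuntimeError('feature was enabled but the requirement is not met')
    disabled = copy.deepcopy(opt)
    disabled.value = 'disabled'
    return disabled

assert require(FakeFeature('auto'), False).value == 'disabled'
assert require(FakeFeature('enabled'), True).value == 'enabled'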
@FeatureNew('feature_option.disable_if()', '1.1.0') @@ -165,7 +165,7 @@ def require_method(self, args: T.Tuple[bool], kwargs: 'kwargs.FeatureOptionRequi 'feature_option.disable_if', _ERROR_MSG_KW, ) - def disable_if_method(self, args: T.Tuple[bool], kwargs: 'kwargs.FeatureOptionRequire') -> coredata.UserFeatureOption: + def disable_if_method(self, args: T.Tuple[bool], kwargs: 'kwargs.FeatureOptionRequire') -> options.UserFeatureOption: return self._disable_if(args[0], kwargs['error_message']) @FeatureNew('feature_option.enable_if()', '1.1.0') @@ -174,7 +174,7 @@ def disable_if_method(self, args: T.Tuple[bool], kwargs: 'kwargs.FeatureOptionRe 'feature_option.enable_if', _ERROR_MSG_KW, ) - def enable_if_method(self, args: T.Tuple[bool], kwargs: 'kwargs.FeatureOptionRequire') -> coredata.UserFeatureOption: + def enable_if_method(self, args: T.Tuple[bool], kwargs: 'kwargs.FeatureOptionRequire') -> options.UserFeatureOption: if not args[0]: return copy.deepcopy(self.held_object) @@ -188,13 +188,13 @@ def enable_if_method(self, args: T.Tuple[bool], kwargs: 'kwargs.FeatureOptionReq @FeatureNew('feature_option.disable_auto_if()', '0.59.0') @noKwargs @typed_pos_args('feature_option.disable_auto_if', bool) - def disable_auto_if_method(self, args: T.Tuple[bool], kwargs: TYPE_kwargs) -> coredata.UserFeatureOption: + def disable_auto_if_method(self, args: T.Tuple[bool], kwargs: TYPE_kwargs) -> options.UserFeatureOption: return copy.deepcopy(self.held_object) if self.value != 'auto' or not args[0] else self.as_disabled() @FeatureNew('feature_option.enable_auto_if()', '1.1.0') @noKwargs @typed_pos_args('feature_option.enable_auto_if', bool) - def enable_auto_if_method(self, args: T.Tuple[bool], kwargs: TYPE_kwargs) -> coredata.UserFeatureOption: + def enable_auto_if_method(self, args: T.Tuple[bool], kwargs: TYPE_kwargs) -> options.UserFeatureOption: return self.as_enabled() if self.value == 'auto' and args[0] else copy.deepcopy(self.held_object) @@ -284,7 +284,6 @@ class EnvironmentVariablesHolder(ObjectHolder[mesonlib.EnvironmentVariables], Mu def __init__(self, obj: mesonlib.EnvironmentVariables, interpreter: 'Interpreter'): super().__init__(obj, interpreter) - MutableInterpreterObject.__init__(self) self.methods.update({'set': self.set_method, 'unset': self.unset_method, 'append': self.append_method, @@ -309,14 +308,12 @@ def warn_if_has_name(self, name: str) -> None: @typed_kwargs('environment.set', ENV_SEPARATOR_KW) def set_method(self, args: T.Tuple[str, T.List[str]], kwargs: 'EnvironmentSeparatorKW') -> None: name, values = args - self.check_used(self.interpreter, fatal=False) self.held_object.set(name, values, kwargs['separator']) @FeatureNew('environment.unset', '1.4.0') @typed_pos_args('environment.unset', str) @noKwargs def unset_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> None: - self.check_used(self.interpreter, fatal=False) self.held_object.unset(args[0]) @typed_pos_args('environment.append', str, varargs=str, min_varargs=1) @@ -324,7 +321,6 @@ def unset_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> None: def append_method(self, args: T.Tuple[str, T.List[str]], kwargs: 'EnvironmentSeparatorKW') -> None: name, values = args self.warn_if_has_name(name) - self.check_used(self.interpreter, fatal=False) self.held_object.append(name, values, kwargs['separator']) @typed_pos_args('environment.prepend', str, varargs=str, min_varargs=1) @@ -332,7 +328,6 @@ def append_method(self, args: T.Tuple[str, T.List[str]], kwargs: 'EnvironmentSep def prepend_method(self, args: 
T.Tuple[str, T.List[str]], kwargs: 'EnvironmentSeparatorKW') -> None: name, values = args self.warn_if_has_name(name) - self.check_used(self.interpreter, fatal=False) self.held_object.prepend(name, values, kwargs['separator']) @@ -343,7 +338,6 @@ class ConfigurationDataHolder(ObjectHolder[build.ConfigurationData], MutableInte def __init__(self, obj: build.ConfigurationData, interpreter: 'Interpreter'): super().__init__(obj, interpreter) - MutableInterpreterObject.__init__(self) self.methods.update({'set': self.set_method, 'set10': self.set10_method, 'set_quoted': self.set_quoted_method, @@ -355,31 +349,32 @@ def __init__(self, obj: build.ConfigurationData, interpreter: 'Interpreter'): }) def __deepcopy__(self, memo: T.Dict) -> 'ConfigurationDataHolder': - obj = ConfigurationDataHolder(copy.deepcopy(self.held_object), self.interpreter) + return ConfigurationDataHolder(copy.deepcopy(self.held_object), self.interpreter) + + def is_used(self) -> bool: + return self.held_object.used + + def __check_used(self) -> None: if self.is_used(): - # Copies of used ConfigurationData used to be immutable. It is now - # allowed but we need this flag to print a FeatureNew warning if - # that happens. - obj.mutable_feature_new = True - return obj + raise InterpreterException("Can not set values on configuration object that has been used.") @typed_pos_args('configuration_data.set', str, (str, int, bool)) @typed_kwargs('configuration_data.set', _CONF_DATA_SET_KWS) def set_method(self, args: T.Tuple[str, T.Union[str, int, bool]], kwargs: 'kwargs.ConfigurationDataSet') -> None: - self.check_used(self.interpreter) + self.__check_used() self.held_object.values[args[0]] = (args[1], kwargs['description']) @typed_pos_args('configuration_data.set_quoted', str, str) @typed_kwargs('configuration_data.set_quoted', _CONF_DATA_SET_KWS) def set_quoted_method(self, args: T.Tuple[str, str], kwargs: 'kwargs.ConfigurationDataSet') -> None: - self.check_used(self.interpreter) + self.__check_used() escaped_val = '\\"'.join(args[1].split('"')) self.held_object.values[args[0]] = (f'"{escaped_val}"', kwargs['description']) @typed_pos_args('configuration_data.set10', str, (int, bool)) @typed_kwargs('configuration_data.set10', _CONF_DATA_SET_KWS) def set10_method(self, args: T.Tuple[str, T.Union[int, bool]], kwargs: 'kwargs.ConfigurationDataSet') -> None: - self.check_used(self.interpreter) + self.__check_used() # bool is a subclass of int, so we need to check for bool explicitly. # We already have typed_pos_args checking that this is either a bool or # an int. 
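# Illustrative sketch (not part of this patch): the __check_used() guard that
# replaces check_used() in ConfigurationDataHolder above. Once a
# configuration_data object has been consumed (e.g. by configure_file()),
# further set() calls raise. FakeConfData is a hypothetical stand-in for the
# holder/ConfigurationData pair.
import typing as T

class FakeConfData:
    def __init__(self) -> None:
        self.used = False
        self.values: T.Dict[str, T.Union[str, int, bool]] = {}

    def set(self, name: str, value: T.Union[str, int, bool]) -> None:
        if self.used:
            raise RuntimeError('Can not set values on configuration object that has been used.')
        self.values[name] = value

conf = FakeConfData()
conf.set('HAVE_FOO', 1)
conf.used = True  # as if configure_file() had consumed it
try:
    conf.set('HAVE_BAR', 1)
except RuntimeError:
    pass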
@@ -442,7 +437,6 @@ def keys(self) -> T.List[str]: @noKwargs def merge_from_method(self, args: T.Tuple[build.ConfigurationData], kwargs: TYPE_kwargs) -> None: from_object = args[0] - self.check_used(self.interpreter) self.held_object.values.update(from_object.values) @@ -850,6 +844,10 @@ def method_call(self, method_name: str, args: T.List[TYPE_var], kwargs: TYPE_kwa args = flatten(args) if not getattr(method, 'no-second-level-holder-flattening', False): args, kwargs = resolve_second_level_holders(args, kwargs) + if not self.interpreter.active_projectname: + assert isinstance(modobj, (ExtensionModule, NewExtensionModule)), 'for mypy' + full_method_name = f'{modobj.INFO.name}.{method_name}' + raise mesonlib.MesonException(f'Module methods ({full_method_name}) cannot be invoked during project declaration.') state = ModuleState(self.interpreter) # Many modules do for example self.interpreter.find_program_impl(), # so we have to ensure they use the current interpreter and not the one diff --git a/mesonbuild/interpreter/kwargs.py b/mesonbuild/interpreter/kwargs.py index 17f7876a04d0..85779bc00b08 100644 --- a/mesonbuild/interpreter/kwargs.py +++ b/mesonbuild/interpreter/kwargs.py @@ -10,7 +10,7 @@ from typing_extensions import TypedDict, Literal, Protocol, NotRequired from .. import build -from .. import coredata +from .. import options from ..compilers import Compiler from ..dependencies.base import Dependency from ..mesonlib import EnvironmentVariables, MachineChoice, File, FileMode, FileOrString, OptionKey @@ -73,7 +73,7 @@ class ExtractRequired(TypedDict): a boolean or a feature option should inherit it's arguments from this class. """ - required: T.Union[bool, coredata.UserFeatureOption] + required: T.Union[bool, options.UserFeatureOption] class ExtractSearchDirs(TypedDict): diff --git a/mesonbuild/interpreter/mesonmain.py b/mesonbuild/interpreter/mesonmain.py index fb18fa8b6806..4d1f427da210 100644 --- a/mesonbuild/interpreter/mesonmain.py +++ b/mesonbuild/interpreter/mesonmain.py @@ -1,9 +1,8 @@ # SPDX-License-Identifier: Apache-2.0 # Copyright 2012-2021 The Meson development team -# Copyright © 2021-2024 Intel Corporation +# Copyright © 2021 Intel Corporation from __future__ import annotations -import copy import os import typing as T @@ -348,16 +347,6 @@ def override_dependency_method(self, args: T.Tuple[str, dependencies.Dependency] if not name: raise InterpreterException('First argument must be a string and cannot be empty') - # Make a copy since we're going to mutate. 
- # - # dep = declare_dependency() - # meson.override_dependency('foo', dep) - # meson.override_dependency('foo-1.0', dep) - # dep = dependency('foo') - # dep.name() # == 'foo-1.0' - dep = copy.copy(dep) - dep.name = name - optkey = OptionKey('default_library', subproject=self.interpreter.subproject) default_library = self.interpreter.coredata.get_option(optkey) assert isinstance(default_library, str), 'for mypy' diff --git a/mesonbuild/interpreter/type_checking.py b/mesonbuild/interpreter/type_checking.py index 9b7e35c637ef..2856136361f8 100644 --- a/mesonbuild/interpreter/type_checking.py +++ b/mesonbuild/interpreter/type_checking.py @@ -11,7 +11,7 @@ from ..build import (CustomTarget, BuildTarget, CustomTargetIndex, ExtractedObjects, GeneratedList, IncludeDirs, BothLibraries, SharedLibrary, StaticLibrary, Jar, Executable, StructuredSources) -from ..coredata import UserFeatureOption +from ..options import UserFeatureOption from ..dependencies import Dependency, InternalDependency from ..interpreterbase.decorators import KwargInfo, ContainerTypeInfo from ..mesonlib import (File, FileMode, MachineChoice, listify, has_path_sep, diff --git a/mesonbuild/interpreterbase/_unholder.py b/mesonbuild/interpreterbase/_unholder.py index e32b77fc7d4b..c62aafe8e97b 100644 --- a/mesonbuild/interpreterbase/_unholder.py +++ b/mesonbuild/interpreterbase/_unholder.py @@ -5,7 +5,7 @@ import typing as T -from .baseobjects import InterpreterObject, MesonInterpreterObject, ObjectHolder, HoldableTypes, MutableInterpreterObject +from .baseobjects import InterpreterObject, MesonInterpreterObject, ObjectHolder, HoldableTypes from .exceptions import InvalidArguments from ..mesonlib import HoldableObject, MesonBugException @@ -13,8 +13,6 @@ from .baseobjects import TYPE_var def _unholder(obj: InterpreterObject) -> TYPE_var: - if isinstance(obj, MutableInterpreterObject): - obj.mark_used() if isinstance(obj, ObjectHolder): assert isinstance(obj.held_object, HoldableTypes) return obj.held_object diff --git a/mesonbuild/interpreterbase/baseobjects.py b/mesonbuild/interpreterbase/baseobjects.py index c6864a4788c5..9a119a98a75d 100644 --- a/mesonbuild/interpreterbase/baseobjects.py +++ b/mesonbuild/interpreterbase/baseobjects.py @@ -120,27 +120,6 @@ class MesonInterpreterObject(InterpreterObject): class MutableInterpreterObject: ''' Dummy class to mark the object type as mutable ''' - def __init__(self) -> None: - self.used = False - self.mutable_feature_new = False - - def mark_used(self) -> None: - self.used = True - - def is_used(self) -> bool: - return self.used - - def check_used(self, interpreter: Interpreter, fatal: bool = True) -> None: - from .decorators import FeatureDeprecated, FeatureNew - if self.is_used(): - if fatal: - raise InvalidArguments('Can not modify object after it has been used.') - FeatureDeprecated.single_use('Modify object after it has been used', '1.5.0', - interpreter.subproject, location=interpreter.current_node) - elif self.mutable_feature_new: - FeatureNew.single_use('Modify a copy of an immutable object', '1.5.0', - interpreter.subproject, location=interpreter.current_node) - self.mutable_feature_new = False HoldableTypes = (HoldableObject, int, bool, str, list, dict) TYPE_HoldableTypes = T.Union[TYPE_elementary, HoldableObject] diff --git a/mesonbuild/interpreterbase/helpers.py b/mesonbuild/interpreterbase/helpers.py index 3942f2c9f4c4..0b0436209dbe 100644 --- a/mesonbuild/interpreterbase/helpers.py +++ b/mesonbuild/interpreterbase/helpers.py @@ -5,7 +5,7 @@ from .. 
import mesonlib, mparser from .exceptions import InterpreterException, InvalidArguments -from ..coredata import UserOption +from ..options import UserOption import collections.abc diff --git a/mesonbuild/linkers/detect.py b/mesonbuild/linkers/detect.py index 65d77f6424c8..cd3c7b2efd0f 100644 --- a/mesonbuild/linkers/detect.py +++ b/mesonbuild/linkers/detect.py @@ -45,7 +45,7 @@ def guess_win_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Ty check_args = ['/logo', '--version'] elif isinstance(comp_class.LINKER_PREFIX, str): check_args = [comp_class.LINKER_PREFIX + '/logo', comp_class.LINKER_PREFIX + '--version'] - elif isinstance(comp_class.LINKER_PREFIX, list): + else: # list check_args = comp_class.LINKER_PREFIX + ['/logo'] + comp_class.LINKER_PREFIX + ['--version'] check_args += env.coredata.get_external_link_args(for_machine, comp_class.language) @@ -64,7 +64,7 @@ def guess_win_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Ty p, o, _ = Popen_safe(compiler + check_args) if 'LLD' in o.split('\n', maxsplit=1)[0]: - if '(compatible with GNU linkers)' in o: + if 'compatible with GNU linkers' in o: return linkers.LLVMDynamicLinker( compiler, for_machine, comp_class.LINKER_PREFIX, override, version=search_version(o)) diff --git a/mesonbuild/linkers/linkers.py b/mesonbuild/linkers/linkers.py index de08e0f443e9..4eec82edd177 100644 --- a/mesonbuild/linkers/linkers.py +++ b/mesonbuild/linkers/linkers.py @@ -62,7 +62,7 @@ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, def thread_link_flags(self, env: 'Environment') -> T.List[str]: return [] - def openmp_flags(self) -> T.List[str]: + def openmp_flags(self, env: Environment) -> T.List[str]: return [] def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: @@ -819,21 +819,28 @@ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, if not rpath_paths and not install_rpath and not build_rpath: return ([], set()) args: T.List[str] = [] + rpath_dirs_to_remove: T.Set[bytes] = set() # @loader_path is the equivalent of $ORIGIN on macOS # https://stackoverflow.com/q/26280738 origin_placeholder = '@loader_path' processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir) all_paths = mesonlib.OrderedSet([os.path.join(origin_placeholder, p) for p in processed_rpaths]) if build_rpath != '': - all_paths.add(build_rpath) + all_paths.update(build_rpath.split(':')) for rp in all_paths: + rpath_dirs_to_remove.add(rp.encode('utf8')) args.extend(self._apply_prefix('-rpath,' + rp)) - return (args, set()) + return (args, rpath_dirs_to_remove) def get_thinlto_cache_args(self, path: str) -> T.List[str]: return ["-Wl,-cache_path_lto," + path] + def export_dynamic_args(self, env: 'Environment') -> T.List[str]: + if mesonlib.version_compare(self.version, '>=224.1'): + return self._apply_prefix('-export_dynamic') + return [] + class LLVMLD64DynamicLinker(AppleDynamicLinker): @@ -1319,6 +1326,9 @@ def import_library_args(self, implibname: str) -> T.List[str]: def rsp_file_syntax(self) -> RSPFileSyntax: return RSPFileSyntax.MSVC + def get_pie_args(self) -> T.List[str]: + return [] + class MSVCDynamicLinker(VisualStudioLikeLinkerMixin, DynamicLinker): diff --git a/mesonbuild/machinefile.py b/mesonbuild/machinefile.py new file mode 100644 index 000000000000..afeb4d05637c --- /dev/null +++ b/mesonbuild/machinefile.py @@ -0,0 +1,117 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2013-2024 Contributors to the The Meson project + +import typing as T +import 
configparser +import os + +from . import mparser + +from .mesonlib import MesonException + +if T.TYPE_CHECKING: + from .compilers import Compiler + from .coredata import StrOrBytesPath + + CompilersDict = T.Dict[str, Compiler] + + +class CmdLineFileParser(configparser.ConfigParser): + def __init__(self) -> None: + # We don't want ':' as key delimiter, otherwise it would break when + # storing subproject options like "subproject:option=value" + super().__init__(delimiters=['='], interpolation=None) + + def read(self, filenames: T.Union['StrOrBytesPath', T.Iterable['StrOrBytesPath']], encoding: T.Optional[str] = 'utf-8') -> T.List[str]: + return super().read(filenames, encoding) + + def optionxform(self, optionstr: str) -> str: + # Don't call str.lower() on keys + return optionstr + + +class MachineFileParser(): + def __init__(self, filenames: T.List[str], sourcedir: str) -> None: + self.parser = CmdLineFileParser() + self.constants: T.Dict[str, T.Union[str, bool, int, T.List[str]]] = {'True': True, 'False': False} + self.sections: T.Dict[str, T.Dict[str, T.Union[str, bool, int, T.List[str]]]] = {} + + for fname in filenames: + try: + with open(fname, encoding='utf-8') as f: + content = f.read() + except UnicodeDecodeError as e: + raise MesonException(f'Malformed machine file {fname!r} failed to parse as unicode: {e}') + + content = content.replace('@GLOBAL_SOURCE_ROOT@', sourcedir) + content = content.replace('@DIRNAME@', os.path.dirname(fname)) + try: + self.parser.read_string(content, fname) + except configparser.Error as e: + raise MesonException(f'Malformed machine file: {e}') + + # Parse [constants] first so they can be used in other sections + if self.parser.has_section('constants'): + self.constants.update(self._parse_section('constants')) + + for s in self.parser.sections(): + if s == 'constants': + continue + self.sections[s] = self._parse_section(s) + + def _parse_section(self, s: str) -> T.Dict[str, T.Union[str, bool, int, T.List[str]]]: + self.scope = self.constants.copy() + section: T.Dict[str, T.Union[str, bool, int, T.List[str]]] = {} + for entry, value in self.parser.items(s): + if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry: + raise MesonException(f'Malformed variable name {entry!r} in machine file.') + # Windows paths... 
+ value = value.replace('\\', '\\\\') + try: + ast = mparser.Parser(value, 'machinefile').parse() + if not ast.lines: + raise MesonException('value cannot be empty') + res = self._evaluate_statement(ast.lines[0]) + except MesonException as e: + raise MesonException(f'Malformed value in machine file variable {entry!r}: {str(e)}.') + except KeyError as e: + raise MesonException(f'Undefined constant {e.args[0]!r} in machine file variable {entry!r}.') + section[entry] = res + self.scope[entry] = res + return section + + def _evaluate_statement(self, node: mparser.BaseNode) -> T.Union[str, bool, int, T.List[str]]: + if isinstance(node, (mparser.StringNode)): + return node.value + elif isinstance(node, mparser.BooleanNode): + return node.value + elif isinstance(node, mparser.NumberNode): + return node.value + elif isinstance(node, mparser.ParenthesizedNode): + return self._evaluate_statement(node.inner) + elif isinstance(node, mparser.ArrayNode): + # TODO: This is where recursive types would come in handy + return [self._evaluate_statement(arg) for arg in node.args.arguments] + elif isinstance(node, mparser.IdNode): + return self.scope[node.value] + elif isinstance(node, mparser.ArithmeticNode): + l = self._evaluate_statement(node.left) + r = self._evaluate_statement(node.right) + if node.operation == 'add': + if (isinstance(l, str) and isinstance(r, str)) or \ + (isinstance(l, list) and isinstance(r, list)): + return l + r + elif node.operation == 'div': + if isinstance(l, str) and isinstance(r, str): + return os.path.join(l, r) + raise MesonException('Unsupported node type') + +def parse_machine_files(filenames: T.List[str], sourcedir: str): + parser = MachineFileParser(filenames, sourcedir) + return parser.sections + + +class MachineFileStore: + def __init__(self, native_files, cross_files, source_dir): + self.native = MachineFileParser(native_files if native_files is not None else [], source_dir).sections + self.cross = MachineFileParser(cross_files if cross_files is not None else [], source_dir).sections diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py index 2cef24fd77e1..da96ac41ff3b 100644 --- a/mesonbuild/mconf.py +++ b/mesonbuild/mconf.py @@ -14,6 +14,7 @@ from . import build from . import coredata +from . import options from . import environment from . import mesonlib from . import mintro @@ -24,6 +25,8 @@ if T.TYPE_CHECKING: from typing_extensions import Protocol + from typing import Any + from .options import UserOption import argparse class CMDOptions(coredata.SharedCMDOptions, Protocol): @@ -83,12 +86,12 @@ def __init__(self, build_dir: str): # if the option file has been updated, reload it # This cannot handle options for a new subproject that has not yet # been configured. 
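# Illustrative sketch (not part of this patch): the kind of input the new
# mesonbuild/machinefile.py parser above accepts. Entries in [constants] are
# evaluated first and can be combined with '+' (string/list concatenation) and
# '/' (path joining), exactly as _evaluate_statement() handles ArithmeticNode.
# The snippet only shows the format; parsing it requires MachineFileParser.
EXAMPLE_MACHINE_FILE = '''
[constants]
toolchain = '/opt/cross'
common_flags = ['--sysroot=' + toolchain / 'sysroot']

[binaries]
c = toolchain / 'bin' / 'gcc'
ar = toolchain / 'bin' / 'ar'
'''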
- for sub, options in self.coredata.options_files.items(): - if options is not None and os.path.exists(options[0]): - opfile = options[0] + for sub, conf_options in self.coredata.options_files.items(): + if conf_options is not None and os.path.exists(conf_options[0]): + opfile = conf_options[0] with open(opfile, 'rb') as f: ophash = hashlib.sha1(f.read()).hexdigest() - if ophash != options[1]: + if ophash != conf_options[1]: oi = OptionInterpreter(sub) oi.process(opfile) self.coredata.update_project_options(oi.options, sub) @@ -185,7 +188,7 @@ def wrap_text(text: LOGLINE, width: int) -> mlog.TV_LoggableList: items = [l[i] if l[i] else ' ' * four_column[i] for i in range(4)] mlog.log(*items) - def split_options_per_subproject(self, options: 'coredata.KeyedOptionDictType') -> T.Dict[str, 'coredata.MutableKeyedOptionDictType']: + def split_options_per_subproject(self, options: 'T.Union[dict[OptionKey, UserOption[Any]], coredata.KeyedOptionDictType]') -> T.Dict[str, 'coredata.MutableKeyedOptionDictType']: result: T.Dict[str, 'coredata.MutableKeyedOptionDictType'] = {} for k, o in options.items(): if k.subproject: @@ -223,18 +226,18 @@ def add_section(self, section: str) -> None: self._add_line(mlog.normal_yellow(section + ':'), '', '', '') self.print_margin = 2 - def print_options(self, title: str, options: 'coredata.KeyedOptionDictType') -> None: - if not options: + def print_options(self, title: str, opts: 'T.Union[dict[OptionKey, UserOption[Any]], coredata.KeyedOptionDictType]') -> None: + if not opts: return if title: self.add_title(title) - auto = T.cast('coredata.UserFeatureOption', self.coredata.options[OptionKey('auto_features')]) - for k, o in sorted(options.items()): + auto = T.cast('options.UserFeatureOption', self.coredata.optstore.get_value_object('auto_features')) + for k, o in sorted(opts.items()): printable_value = o.printable_value() root = k.as_root() - if o.yielding and k.subproject and root in self.coredata.options: + if o.yielding and k.subproject and root in self.coredata.optstore: printable_value = '' - if isinstance(o, coredata.UserFeatureOption) and o.is_auto(): + if isinstance(o, options.UserFeatureOption) and o.is_auto(): printable_value = auto.printable_value() self.add_option(str(root), o.description, printable_value, o.choices) @@ -255,7 +258,7 @@ def print_default_values_warning() -> None: if not self.default_values_only: mlog.log(' Build dir ', self.build_dir) - dir_option_names = set(coredata.BUILTIN_DIR_OPTIONS) + dir_option_names = set(options.BUILTIN_DIR_OPTIONS) test_option_names = {OptionKey('errorlogs'), OptionKey('stdsplit')} @@ -263,7 +266,7 @@ def print_default_values_warning() -> None: test_options: 'coredata.MutableKeyedOptionDictType' = {} core_options: 'coredata.MutableKeyedOptionDictType' = {} module_options: T.Dict[str, 'coredata.MutableKeyedOptionDictType'] = collections.defaultdict(dict) - for k, v in self.coredata.options.items(): + for k, v in self.coredata.optstore.items(): if k in dir_option_names: dir_options[k] = v elif k in test_option_names: @@ -279,17 +282,17 @@ def print_default_values_warning() -> None: host_core_options = self.split_options_per_subproject({k: v for k, v in core_options.items() if k.machine is MachineChoice.HOST}) build_core_options = self.split_options_per_subproject({k: v for k, v in core_options.items() if k.machine is MachineChoice.BUILD}) - host_compiler_options = self.split_options_per_subproject({k: v for k, v in self.coredata.options.items() if k.is_compiler() and k.machine is MachineChoice.HOST}) - 
build_compiler_options = self.split_options_per_subproject({k: v for k, v in self.coredata.options.items() if k.is_compiler() and k.machine is MachineChoice.BUILD}) - project_options = self.split_options_per_subproject({k: v for k, v in self.coredata.options.items() if k.is_project()}) + host_compiler_options = self.split_options_per_subproject({k: v for k, v in self.coredata.optstore.items() if k.is_compiler() and k.machine is MachineChoice.HOST}) + build_compiler_options = self.split_options_per_subproject({k: v for k, v in self.coredata.optstore.items() if k.is_compiler() and k.machine is MachineChoice.BUILD}) + project_options = self.split_options_per_subproject({k: v for k, v in self.coredata.optstore.items() if k.is_project()}) show_build_options = self.default_values_only or self.build.environment.is_cross_build() self.add_section('Main project options') self.print_options('Core options', host_core_options['']) if show_build_options: self.print_options('', build_core_options['']) - self.print_options('Backend options', {k: v for k, v in self.coredata.options.items() if k.is_backend()}) - self.print_options('Base options', {k: v for k, v in self.coredata.options.items() if k.is_base()}) + self.print_options('Backend options', {k: v for k, v in self.coredata.optstore.items() if k.is_backend()}) + self.print_options('Base options', {k: v for k, v in self.coredata.optstore.items() if k.is_base()}) self.print_options('Compiler options', host_compiler_options.get('', {})) if show_build_options: self.print_options('', build_compiler_options.get('', {})) diff --git a/mesonbuild/mdist.py b/mesonbuild/mdist.py index e4606306b6d2..d569f6015739 100644 --- a/mesonbuild/mdist.py +++ b/mesonbuild/mdist.py @@ -22,7 +22,7 @@ from glob import glob from pathlib import Path from mesonbuild.environment import Environment, detect_ninja -from mesonbuild.mesonlib import (MesonException, RealPathAction, get_meson_command, quiet_git, +from mesonbuild.mesonlib import (GIT, MesonException, RealPathAction, get_meson_command, quiet_git, windows_proof_rmtree, setup_vsenv, OptionKey) from mesonbuild.msetup import add_arguments as msetup_argparse from mesonbuild.wrap import wrap @@ -79,7 +79,36 @@ def is_git(src_root: str) -> bool: Checks if meson.build file at the root source directory is tracked by git. It could be a subproject part of the parent project git repository. ''' - return quiet_git(['ls-files', '--error-unmatch', 'meson.build'], src_root)[0] + if quiet_git(['ls-files', '--error-unmatch', 'meson.build'], src_root)[0]: + return True + + if os.path.exists(os.path.join(src_root, '.git')): + msg = 'Source tree looks like it may be a git repo, ' + if not GIT: + msg += 'but git is not installed!' + if 'GITLAB_CI' in os.environ: + msg += ' This is a gitlab bug.' + else: + msg += 'but git returned a failure. ' + p, oe = quiet_git(['status'], src_root) + if 'dubious ownership' in oe: + # For a few years now, git has absolved itself of the responsibility to implement + # robust, safe software. Instead of detecting the signs of a problematic scenario, + # they have chosen to consider many legitimate and reasonable use cases as "dangerous", + # and implemented the number one threat to security worldwide: alert fatigue. Having + # done so, they then washed their hands of the matter and permanently tabled the + # notion of adding fine-grained detection. This is not just useless, it is *worse* + # than useless. 
+ # + # In our case, the error is triply meaningless since we are already executing build + # system commands from the same directory. Either way, reject the notion that git is + # well designed or that its error messaging is a valid approach to the problem space. + msg += 'This is a bug in git itself, please set `git config --global safe.directory "*"`' + else: + msg += 'meson.build may not have been committed to git?' + mlog.warning(msg) + return False + def is_hg(src_root: str) -> bool: return os.path.isdir(os.path.join(src_root, '.hg')) @@ -140,7 +169,9 @@ def git_root(self, dir_: str) -> Path: def have_dirty_index(self) -> bool: '''Check whether there are uncommitted changes in git''' - subprocess.check_call(['git', '-C', self.src_root, 'update-index', '-q', '--refresh']) + # Optimistically call update-index, and disregard its return value. It could be read-only, + # and only the output of diff-index matters. + subprocess.call(['git', '-C', self.src_root, 'update-index', '-q', '--refresh']) ret = subprocess.call(['git', '-C', self.src_root, 'diff-index', '--quiet', 'HEAD']) return ret == 1 @@ -220,7 +251,12 @@ def create_dist(self, archives: T.List[str]) -> T.List[str]: class HgDist(Dist): def have_dirty_index(self) -> bool: '''Check whether there are uncommitted changes in hg''' - out = subprocess.check_output(['hg', '-R', self.src_root, 'summary']) + env = os.environ.copy() + env['LC_ALL'] = 'C' + # cpython's gettext has a bug and uses LANGUAGE to override LC_ALL, + # contrary to the gettext spec + env.pop('LANGUAGE', None) + out = subprocess.check_output(['hg', '-R', self.src_root, 'summary'], env=env) return b'commit: (clean)' not in out def create_dist(self, archives: T.List[str]) -> T.List[str]: diff --git a/mesonbuild/mesonmain.py b/mesonbuild/mesonmain.py index 62ed8918cd2a..faa0f426d82a 100644 --- a/mesonbuild/mesonmain.py +++ b/mesonbuild/mesonmain.py @@ -27,6 +27,9 @@ def errorhandler(e: Exception, command: str) -> int: logfile = mlog.shutdown() if logfile is not None: mlog.log("\nA full log can be found at", mlog.bold(logfile)) + contents = mlog.ci_fold_file(logfile, f'CI platform detected, click here for {os.path.basename(logfile)} contents.') + if contents: + print(contents) if os.environ.get('MESON_FORCE_BACKTRACE'): raise e return 1 @@ -257,7 +260,7 @@ def run(original_args: T.List[str], mainfile: str) -> int: # https://github.com/mesonbuild/meson/issues/3653 if sys.platform == 'cygwin' and os.environ.get('MSYSTEM', '') not in ['MSYS', '']: mlog.error('This python3 seems to be msys/python on MSYS2 Windows, but you are in a MinGW environment') - mlog.error('Please install and use mingw-w64-x86_64-python3 and/or mingw-w64-x86_64-meson with Pacman') + mlog.error('Please install it via https://packages.msys2.org/base/mingw-w64-python') return 2 args = original_args[:] diff --git a/mesonbuild/mformat.py b/mesonbuild/mformat.py index 49ece4f034a4..7f3dbf01400e 100644 --- a/mesonbuild/mformat.py +++ b/mesonbuild/mformat.py @@ -6,7 +6,7 @@ import argparse import re import typing as T -from configparser import ConfigParser, MissingSectionHeaderError +from configparser import ConfigParser, MissingSectionHeaderError, ParsingError from copy import deepcopy from dataclasses import dataclass, field, fields, asdict from pathlib import Path @@ -276,6 +276,7 @@ def exit_node(self, node: mparser.BaseNode) -> None: def move_whitespaces(self, from_node: mparser.BaseNode, to_node: mparser.BaseNode) -> None: to_node.whitespaces.value = from_node.whitespaces.value + to_node.whitespaces.value 
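+        # Carry the new is_continuation flag along with the whitespace text that was just merged.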
+ to_node.whitespaces.is_continuation = from_node.whitespaces.is_continuation from_node.whitespaces = None to_node.whitespaces.accept(self) @@ -317,7 +318,10 @@ def visit_WhitespaceNode(self, node: mparser.WhitespaceNode) -> None: for i, line in enumerate(lines): has_nl = line.endswith('\n') line = line.strip() - if line.startswith('#'): + if line.startswith('\\'): + node.value += ' ' # add space before \ + node.is_continuation = True + elif line.startswith('#'): if not in_block_comments: node.value += self.config.indent_before_comments else: @@ -328,6 +332,8 @@ def visit_WhitespaceNode(self, node: mparser.WhitespaceNode) -> None: in_block_comments = True if node.value.endswith('\n'): node.value += self.indent_comments + if node.is_continuation: + node.value += self.config.indent_by def visit_SymbolNode(self, node: mparser.SymbolNode) -> None: super().visit_SymbolNode(node) @@ -338,7 +344,7 @@ def visit_StringNode(self, node: mparser.StringNode) -> None: self.enter_node(node) if self.config.simplify_string_literals: - if node.is_multiline and '\n' not in node.value: + if node.is_multiline and not any(x in node.value for x in ['\n', "'"]): node.is_multiline = False node.value = node.escape() @@ -372,6 +378,8 @@ def visit_ArrayNode(self, node: mparser.ArrayNode) -> None: if node.args.arguments and not node.args.is_multiline and self.config.space_array: self.add_space_after(node.lbracket) self.add_space_after(node.args) + if not node.args.arguments: + self.move_whitespaces(node.lbracket, node.args) def visit_DictNode(self, node: mparser.DictNode) -> None: super().visit_DictNode(node) @@ -388,6 +396,7 @@ def visit_CodeBlockNode(self, node: mparser.CodeBlockNode) -> None: self.in_block_comments = False else: node.pre_whitespaces = mparser.WhitespaceNode(mparser.Token('whitespace', node.filename, 0, 0, 0, (0, 0), '')) + node.pre_whitespaces.block_indent = True for i in node.lines: i.accept(self) @@ -396,7 +405,11 @@ def visit_CodeBlockNode(self, node: mparser.CodeBlockNode) -> None: if node.lines: self.move_whitespaces(node.lines[-1], node) else: + node.whitespaces.value = node.pre_whitespaces.value + node.whitespaces.value + node.pre_whitespaces.value = '' + self.in_block_comments = True node.whitespaces.accept(self) + self.in_block_comments = False if node.condition_level == 0 and self.config.insert_final_newline: self.add_nl_after(node, force=True) @@ -451,6 +464,7 @@ def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None: self.add_space_after(node.colon) node.block.whitespaces.value += node.condition_level * self.config.indent_by + node.block.whitespaces.block_indent = True self.move_whitespaces(node.endforeach, node) @@ -458,19 +472,27 @@ def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None: super().visit_IfClauseNode(node) self.move_whitespaces(node.endif, node) + for if_node in node.ifs: + if_node.whitespaces.value += node.condition_level * self.config.indent_by if isinstance(node.elseblock, mparser.ElseNode): node.elseblock.whitespaces.value += node.condition_level * self.config.indent_by - else: - node.ifs[-1].whitespaces.value += node.condition_level * self.config.indent_by def visit_IfNode(self, node: mparser.IfNode) -> None: super().visit_IfNode(node) self.add_space_after(node.if_) + self.in_block_comments = True self.move_whitespaces(node.block, node) + self.in_block_comments = False + node.whitespaces.condition_level = node.condition_level + 1 + node.whitespaces.block_indent = True def visit_ElseNode(self, node: mparser.ElseNode) -> None: 
super().visit_ElseNode(node) + self.in_block_comments = True self.move_whitespaces(node.block, node) + self.in_block_comments = False + node.whitespaces.condition_level = node.condition_level + 1 + node.whitespaces.block_indent = True def visit_TernaryNode(self, node: mparser.TernaryNode) -> None: super().visit_TernaryNode(node) @@ -552,27 +574,36 @@ def visit_ArrayNode(self, node: mparser.ArrayNode) -> None: self.enter_node(node) if node.args.is_multiline: self.level += 1 - self.add_nl_after(node.lbracket, indent=self.level) + if node.args.arguments: + self.add_nl_after(node.lbracket, indent=self.level) + node.lbracket.accept(self) self.is_function_arguments = False node.args.accept(self) if node.args.is_multiline: self.level -= 1 + node.rbracket.accept(self) self.exit_node(node) def visit_DictNode(self, node: mparser.DictNode) -> None: self.enter_node(node) if node.args.is_multiline: self.level += 1 - self.add_nl_after(node.lcurl, indent=self.level) + if node.args.kwargs: + self.add_nl_after(node.lcurl, indent=self.level) + node.lcurl.accept(self) self.is_function_arguments = False node.args.accept(self) if node.args.is_multiline: self.level -= 1 + node.rcurl.accept(self) self.exit_node(node) def visit_MethodNode(self, node: mparser.MethodNode) -> None: self.enter_node(node) node.source_object.accept(self) + is_cont = node.source_object.whitespaces and node.source_object.whitespaces.is_continuation + if is_cont: + self.level += 1 if node.args.is_multiline: self.level += 1 self.add_nl_after(node.lpar, indent=self.level) @@ -580,6 +611,8 @@ def visit_MethodNode(self, node: mparser.MethodNode) -> None: node.args.accept(self) if node.args.is_multiline: self.level -= 1 + if is_cont: + self.level -= 1 self.exit_node(node) def visit_FunctionNode(self, node: mparser.FunctionNode) -> None: @@ -597,8 +630,8 @@ def visit_WhitespaceNode(self, node: mparser.WhitespaceNode) -> None: lines = node.value.splitlines(keepends=True) if lines: indent = (node.condition_level + self.level) * self.config.indent_by - node.value = lines[0] - for line in lines[1:]: + node.value = '' if node.block_indent else lines.pop(0) + for line in lines: if '#' in line and not line.startswith(indent): node.value += indent node.value += line @@ -626,6 +659,7 @@ def visit_ArgumentNode(self, node: mparser.ArgumentNode) -> None: if need_comma and not has_trailing_comma: comma = mparser.SymbolNode(mparser.Token('comma', node.filename, 0, 0, 0, (0, 0), ',')) comma.condition_level = node.condition_level + comma.whitespaces = mparser.WhitespaceNode(mparser.Token('whitespace', node.filename, 0, 0, 0, (0, 0), '')) node.commas.append(comma) elif has_trailing_comma and not need_comma: node.commas.pop(-1) @@ -647,7 +681,8 @@ def visit_ArgumentNode(self, node: mparser.ArgumentNode) -> None: for comma in node.commas[arg_index:-1]: self.add_nl_after(comma, self.level) - self.add_nl_after(node, self.level - 1) + if node.arguments or node.kwargs: + self.add_nl_after(node, self.level - 1) else: if has_trailing_comma and not (node.commas[-1].whitespaces and node.commas[-1].whitespaces.value): @@ -743,11 +778,10 @@ def visit_BreakNode(self, node: mparser.BreakNode) -> None: self.exit_node(node) def split_if_needed(self, node: mparser.ArgumentNode) -> None: - if not node.is_multiline and self.length > self.config.max_line_length: + if len(node) and not node.is_multiline and self.length > self.config.max_line_length: arg = self.argument_stack[self.level] if len(self.argument_stack) > self.level else node - if not arg.is_multiline: - arg.is_multiline 
= True - self.need_regenerate = True + arg.is_multiline = True + self.need_regenerate = True def visit_ArgumentNode(self, node: mparser.ArgumentNode) -> None: self.argument_stack.append(node) @@ -816,11 +850,15 @@ def load_editor_config(self, source_file: Path) -> EditorConfig: getter = f.metadata['getter'] for section in sections: - value = getter(cp, section, f.name, fallback=None) + try: + value = getter(cp, section, f.name, fallback=None) + except ValueError as e: + raise MesonException(f'Invalid type for key "{f.name}" in "{editorconfig_file}" file:\n{e}') from e if value is not None: setattr(config, f.name, value) - if cp.getboolean(cp.default_section, 'root'): + # Root is not required except in the top level .editorconfig. + if cp.getboolean(cp.default_section, 'root', fallback=False): break return config @@ -829,11 +867,22 @@ def load_configuration(self, configuration_file: T.Optional[Path]) -> FormatterC config = FormatterConfig() if configuration_file: cp = DefaultConfigParser() - cp.read_default(configuration_file) + try: + cp.read_default(configuration_file) + except ParsingError as e: + raise MesonException(f'Unable to parse configuration file "{configuration_file}":\n{e}') from e + + extra_keys = sorted(set(cp.defaults()).difference(f.name for f in fields(config))) + if extra_keys: + raise MesonException(f'Unknown config keys: "{", ".join(extra_keys)}" in configuration file "{configuration_file}"') for f in fields(config): getter = f.metadata['getter'] - value = getter(cp, cp.default_section, f.name, fallback=None) + try: + value = getter(cp, cp.default_section, f.name, fallback=None) + except ValueError as e: + raise MesonException( + f'Error parsing "{str(configuration_file)}", option "{f.name}", error: "{e!s}"') if value is not None: setattr(config, f.name, value) @@ -961,9 +1010,8 @@ def run(options: argparse.Namespace) -> int: # TODO: remove empty newlines when more than N (2...) # TODO: magic comment to prevent formatting -# TODO: handle meson.options ? # TODO: split long lines on binary operators -# TODO: align series of assignements +# TODO: align series of assignments # TODO: align comments # TODO: move comments on long lines diff --git a/mesonbuild/minstall.py b/mesonbuild/minstall.py index a82be5e0ec8f..36284f0affb5 100644 --- a/mesonbuild/minstall.py +++ b/mesonbuild/minstall.py @@ -148,23 +148,29 @@ def set_chown(path: str, user: T.Union[str, int, None] = None, # be actually passed properly. # Not nice, but better than actually rewriting shutil.chown until # this python bug is fixed: https://bugs.python.org/issue18108 - real_os_chown = os.chown - def chown(path: T.Union[int, str, 'os.PathLike[str]', bytes, 'os.PathLike[bytes]'], - uid: int, gid: int, *, dir_fd: T.Optional[int] = dir_fd, - follow_symlinks: bool = follow_symlinks) -> None: - """Override the default behavior of os.chown + if sys.version_info >= (3, 13): + # pylint: disable=unexpected-keyword-arg + # cannot handle sys.version_info, https://github.com/pylint-dev/pylint/issues/9138 + shutil.chown(path, user, group, dir_fd=dir_fd, follow_symlinks=follow_symlinks) + else: + real_os_chown = os.chown - Use a real function rather than a lambda to help mypy out. Also real - functions are faster. 
- """ - real_os_chown(path, uid, gid, dir_fd=dir_fd, follow_symlinks=follow_symlinks) + def chown(path: T.Union[int, str, 'os.PathLike[str]', bytes, 'os.PathLike[bytes]'], + uid: int, gid: int, *, dir_fd: T.Optional[int] = dir_fd, + follow_symlinks: bool = follow_symlinks) -> None: + """Override the default behavior of os.chown - try: - os.chown = chown - shutil.chown(path, user, group) - finally: - os.chown = real_os_chown + Use a real function rather than a lambda to help mypy out. Also real + functions are faster. + """ + real_os_chown(path, uid, gid, dir_fd=dir_fd, follow_symlinks=follow_symlinks) + + try: + os.chown = chown + shutil.chown(path, user, group) + finally: + os.chown = real_os_chown def set_chmod(path: str, mode: int, dir_fd: T.Optional[int] = None, diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py index bdbb59e3a18d..dea67d82e1cd 100644 --- a/mesonbuild/mintro.py +++ b/mesonbuild/mintro.py @@ -19,7 +19,7 @@ import sys import typing as T -from . import build, mesonlib, coredata as cdata +from . import build, mesonlib, options, coredata as cdata from .ast import IntrospectionInterpreter, BUILD_TARGET_FUNCTIONS, AstConditionLevel, AstIDGenerator, AstIndentationGenerator, AstJSONPrinter from .backend import backends from .dependencies import Dependency @@ -30,6 +30,8 @@ if T.TYPE_CHECKING: import argparse + from typing import Any + from .options import UserOption from .interpreter import Interpreter from .mparser import BaseNode @@ -88,7 +90,7 @@ def add_arguments(parser: argparse.ArgumentParser) -> None: flag = '--' + key.replace('_', '-') parser.add_argument(flag, action='store_true', dest=key, default=False, help=val.desc) - parser.add_argument('--backend', choices=sorted(cdata.backendlist), dest='backend', default='ninja', + parser.add_argument('--backend', choices=sorted(options.backendlist), dest='backend', default='ninja', help='The backend to use for the --buildoptions introspection.') parser.add_argument('-a', '--all', action='store_true', dest='all', default=False, help='Print all available information.') @@ -284,14 +286,14 @@ def list_buildoptions(coredata: cdata.CoreData, subprojects: T.Optional[T.List[s optlist: T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]] = [] subprojects = subprojects or [] - dir_option_names = set(cdata.BUILTIN_DIR_OPTIONS) + dir_option_names = set(options.BUILTIN_DIR_OPTIONS) test_option_names = {OptionKey('errorlogs'), OptionKey('stdsplit')} dir_options: 'cdata.MutableKeyedOptionDictType' = {} test_options: 'cdata.MutableKeyedOptionDictType' = {} core_options: 'cdata.MutableKeyedOptionDictType' = {} - for k, v in coredata.options.items(): + for k, v in coredata.optstore.items(): if k in dir_option_names: dir_options[k] = v elif k in test_option_names: @@ -302,20 +304,20 @@ def list_buildoptions(coredata: cdata.CoreData, subprojects: T.Optional[T.List[s for s in subprojects: core_options[k.evolve(subproject=s)] = v - def add_keys(options: 'cdata.KeyedOptionDictType', section: str) -> None: - for key, opt in sorted(options.items()): + def add_keys(opts: 'T.Union[dict[OptionKey, UserOption[Any]], cdata.KeyedOptionDictType]', section: str) -> None: + for key, opt in sorted(opts.items()): optdict = {'name': str(key), 'value': opt.value, 'section': section, 'machine': key.machine.get_lower_case_name() if coredata.is_per_machine_option(key) else 'any'} - if isinstance(opt, cdata.UserStringOption): + if isinstance(opt, options.UserStringOption): typestr = 'string' - elif isinstance(opt, cdata.UserBooleanOption): + elif 
isinstance(opt, options.UserBooleanOption): typestr = 'boolean' - elif isinstance(opt, cdata.UserComboOption): + elif isinstance(opt, options.UserComboOption): optdict['choices'] = opt.choices typestr = 'combo' - elif isinstance(opt, cdata.UserIntegerOption): + elif isinstance(opt, options.UserIntegerOption): typestr = 'integer' - elif isinstance(opt, cdata.UserArrayOption): + elif isinstance(opt, options.UserArrayOption): typestr = 'array' if opt.choices: optdict['choices'] = opt.choices @@ -326,14 +328,14 @@ def add_keys(options: 'cdata.KeyedOptionDictType', section: str) -> None: optlist.append(optdict) add_keys(core_options, 'core') - add_keys({k: v for k, v in coredata.options.items() if k.is_backend()}, 'backend') - add_keys({k: v for k, v in coredata.options.items() if k.is_base()}, 'base') + add_keys({k: v for k, v in coredata.optstore.items() if k.is_backend()}, 'backend') + add_keys({k: v for k, v in coredata.optstore.items() if k.is_base()}, 'base') add_keys( - {k: v for k, v in sorted(coredata.options.items(), key=lambda i: i[0].machine) if k.is_compiler()}, + {k: v for k, v in sorted(coredata.optstore.items(), key=lambda i: i[0].machine) if k.is_compiler()}, 'compiler', ) add_keys(dir_options, 'directory') - add_keys({k: v for k, v in coredata.options.items() if k.is_project()}, 'user') + add_keys({k: v for k, v in coredata.optstore.items() if k.is_project()}, 'user') add_keys(test_options, 'test') return optlist @@ -592,7 +594,7 @@ def write_intro_info(intro_info: T.Sequence[T.Tuple[str, T.Union[dict, T.List[T. out_file = os.path.join(info_dir, f'intro-{kind}.json') tmp_file = os.path.join(info_dir, 'tmp_dump.json') with open(tmp_file, 'w', encoding='utf-8') as fp: - json.dump(data, fp) + json.dump(data, fp, indent=2) fp.flush() # Not sure if this is needed os.replace(tmp_file, out_file) updated_introspection_files.append(kind) diff --git a/mesonbuild/mlog.py b/mesonbuild/mlog.py index a8b0185371d1..bc8faeba7d06 100644 --- a/mesonbuild/mlog.py +++ b/mesonbuild/mlog.py @@ -22,8 +22,9 @@ from pathlib import Path if T.TYPE_CHECKING: - from ._typing import StringProtocol, SizedStringProtocol + from typing_extensions import Literal + from ._typing import StringProtocol, SizedStringProtocol from .mparser import BaseNode TV_Loggable = T.Union[str, 'AnsiDecorator', StringProtocol] @@ -75,6 +76,7 @@ def setup_console() -> None: pass _in_ci = 'CI' in os.environ +_ci_is_github = 'GITHUB_ACTIONS' in os.environ class _Severity(enum.Enum): @@ -540,3 +542,30 @@ def code_line(text: str, line: str, colno: int) -> str: :return: A formatted string of the text, line, and a caret """ return f'{text}\n{line}\n{" " * colno}^' + +@T.overload +def ci_fold_file(fname: T.Union[str, os.PathLike], banner: str, force: Literal[True] = True) -> str: ... + +@T.overload +def ci_fold_file(fname: T.Union[str, os.PathLike], banner: str, force: Literal[False] = False) -> T.Optional[str]: ... 
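+# Note: with force=True the file contents are always returned; otherwise the
+# result may be None (for example when not running in CI).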
+ +def ci_fold_file(fname: T.Union[str, os.PathLike], banner: str, force: bool = False) -> T.Optional[str]: + if not _in_ci and not force: + return None + + if _ci_is_github: + header = f'::group::==== {banner} ====' + footer = '::endgroup::' + elif force: + header = banner + footer = '' + elif 'MESON_FORCE_SHOW_LOGS' in os.environ: + header = f'==== Forcing display of logs for {os.path.basename(fname)} ====' + footer = '' + else: + # only github is implemented + return None + + with open(fname, 'r', encoding='utf-8') as f: + data = f.read() + return f'{header}\n{data}\n{footer}\n' diff --git a/mesonbuild/modules/__init__.py b/mesonbuild/modules/__init__.py index 8d6b20f5a3b5..046c530a8404 100644 --- a/mesonbuild/modules/__init__.py +++ b/mesonbuild/modules/__init__.py @@ -18,7 +18,6 @@ from ..interpreter.interpreter import ProgramVersionFunc from ..interpreterbase import TYPE_var, TYPE_kwargs from ..programs import OverrideProgram - from ..wrap import WrapMode from ..dependencies import Dependency class ModuleState: @@ -134,7 +133,7 @@ def test(self, args: T.Tuple[str, T.Union[build.Executable, build.Jar, 'External def get_option(self, name: str, subproject: str = '', machine: MachineChoice = MachineChoice.HOST, lang: T.Optional[str] = None, - module: T.Optional[str] = None) -> T.Union[T.List[str], str, int, bool, 'WrapMode']: + module: T.Optional[str] = None) -> T.Union[T.List[str], str, int, bool]: return self.environment.coredata.get_option(mesonlib.OptionKey(name, subproject, machine, lang, module)) def is_user_defined_option(self, name: str, subproject: str = '', diff --git a/mesonbuild/modules/_qt.py b/mesonbuild/modules/_qt.py index 7effa1f58401..ebb8a3994097 100644 --- a/mesonbuild/modules/_qt.py +++ b/mesonbuild/modules/_qt.py @@ -11,7 +11,7 @@ from . import ModuleReturnValue, ExtensionModule from .. import build -from .. import coredata +from .. import options from .. 
import mlog from ..dependencies import find_external_dependency, Dependency, ExternalLibrary, InternalDependency from ..mesonlib import MesonException, File, version_compare, Popen_safe @@ -256,7 +256,7 @@ def _parse_qrc_deps(self, state: 'ModuleState', @noPosargs @typed_kwargs( 'qt.has_tools', - KwargInfo('required', (bool, coredata.UserFeatureOption), default=False), + KwargInfo('required', (bool, options.UserFeatureOption), default=False), KwargInfo('method', str, default='auto'), ) def has_tools(self, state: 'ModuleState', args: T.Tuple, kwargs: 'HasToolKwArgs') -> bool: diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py index 410bf7411a36..9d872353ae02 100644 --- a/mesonbuild/modules/gnome.py +++ b/mesonbuild/modules/gnome.py @@ -440,6 +440,8 @@ def compile_resources(self, state: 'ModuleState', args: T.Tuple[str, 'FileOrStri depend_files, depends, subdirs = self._get_gresource_dependencies( state, ifile, source_dirs, dependencies) + else: + depend_files = [] # Make source dirs relative to build dir now source_dirs = [os.path.join(state.build_to_src, state.subdir, d) for d in source_dirs] @@ -476,8 +478,11 @@ def compile_resources(self, state: 'ModuleState', args: T.Tuple[str, 'FileOrStri else: raise MesonException('Compiling GResources into code is only supported in C and C++ projects') - if kwargs['install'] and not gresource: - raise MesonException('The install kwarg only applies to gresource bundles, see install_header') + if kwargs['install']: + if not gresource: + raise MesonException('The install kwarg only applies to gresource bundles, see install_header') + elif not kwargs['install_dir']: + raise MesonException('gnome.compile_resources: "install_dir" keyword argument must be set when "install" is true.') install_header = kwargs['install_header'] if install_header and gresource: @@ -492,7 +497,6 @@ def compile_resources(self, state: 'ModuleState', args: T.Tuple[str, 'FileOrStri target_cmd = cmd else: depfile = f'{output}.d' - depend_files = [] target_cmd = copy.copy(cmd) + ['--dependency-file', '@DEPFILE@'] target_c = GResourceTarget( name, @@ -905,7 +909,7 @@ def _get_langs_compilers_flags(state: 'ModuleState', langs_compilers: T.List[T.T if state.project_args.get(lang): cflags += state.project_args[lang] if mesonlib.OptionKey('b_sanitize') in compiler.base_options: - sanitize = state.environment.coredata.options[mesonlib.OptionKey('b_sanitize')].value + sanitize = state.environment.coredata.optstore.get_value('b_sanitize') cflags += compiler.sanitizer_compile_args(sanitize) sanitize = sanitize.split(',') # These must be first in ldflags diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py index ebe0d92d5cb1..1a730707986f 100644 --- a/mesonbuild/modules/pkgconfig.py +++ b/mesonbuild/modules/pkgconfig.py @@ -14,7 +14,7 @@ from .. import dependencies from .. import mesonlib from .. import mlog -from ..coredata import BUILTIN_DIR_OPTIONS +from ..options import BUILTIN_DIR_OPTIONS from ..dependencies.pkgconfig import PkgConfigDependency, PkgConfigInterface from ..interpreter.type_checking import D_MODULE_VERSIONS_KW, INSTALL_DIR_KW, VARIABLES_KW, NoneType from ..interpreterbase import FeatureNew, FeatureDeprecated diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py index 69ce8dadc067..30fc50bc2318 100644 --- a/mesonbuild/modules/python.py +++ b/mesonbuild/modules/python.py @@ -9,7 +9,7 @@ from . import ExtensionModule, ModuleInfo from .. import mesonlib from .. 
import mlog -from ..coredata import UserFeatureOption +from ..options import UserFeatureOption from ..build import known_shmod_kwargs, CustomTarget, CustomTargetIndex, BuildTarget, GeneratedList, StructuredSources, ExtractedObjects, SharedModule from ..dependencies import NotFoundDependency from ..dependencies.detect import get_dep_identifier, find_external_dependency @@ -184,13 +184,9 @@ def extension_module_method(self, args: T.Tuple[str, T.List[BuildTargetSource]], new_cpp_args.append(limited_api_definition) kwargs['cpp_args'] = new_cpp_args - # When compiled under MSVC, Python's PC/pyconfig.h forcibly inserts pythonMAJOR.MINOR.lib - # into the linker path when not running in debug mode via a series #pragma comment(lib, "") - # directives. We manually override these here as this interferes with the intended - # use of the 'limited_api' kwarg + # On Windows, the limited API DLL is python3.dll, not python3X.dll. for_machine = kwargs['native'] - compilers = self.interpreter.environment.coredata.compilers[for_machine] - if any(compiler.get_id() == 'msvc' for compiler in compilers.values()): + if self.interpreter.environment.machines[for_machine].is_windows(): pydep_copy = copy.copy(pydep) pydep_copy.find_libpy_windows(self.env, limited_api=True) if not pydep_copy.found(): @@ -199,13 +195,19 @@ def extension_module_method(self, args: T.Tuple[str, T.List[BuildTargetSource]], new_deps.remove(pydep) new_deps.append(pydep_copy) + # When compiled under MSVC, Python's PC/pyconfig.h forcibly inserts pythonMAJOR.MINOR.lib + # into the linker path when not running in debug mode via a series #pragma comment(lib, "") + # directives. We manually override these here as this interferes with the intended + # use of the 'limited_api' kwarg + compilers = self.interpreter.environment.coredata.compilers[for_machine] + if any(compiler.get_id() == 'msvc' for compiler in compilers.values()): pyver = pydep.version.replace('.', '') python_windows_debug_link_exception = f'/NODEFAULTLIB:python{pyver}_d.lib' python_windows_release_link_exception = f'/NODEFAULTLIB:python{pyver}.lib' new_link_args = mesonlib.extract_as_list(kwargs, 'link_args') - is_debug = self.interpreter.environment.coredata.options[OptionKey('debug')].value + is_debug = self.interpreter.environment.coredata.optstore.get_value('debug') if is_debug: new_link_args.append(python_windows_debug_link_exception) else: diff --git a/mesonbuild/modules/rust.py b/mesonbuild/modules/rust.py index 8534dfa31a1e..a8e22541c164 100644 --- a/mesonbuild/modules/rust.py +++ b/mesonbuild/modules/rust.py @@ -231,7 +231,7 @@ def bindgen(self, state: ModuleState, args: T.List, kwargs: FuncBindgen) -> Modu # bindgen always uses clang, so it's safe to hardcode -I here clang_args.extend([f'-I{x}' for x in i.to_string_list( state.environment.get_source_dir(), state.environment.get_build_dir())]) - if are_asserts_disabled(state.environment.coredata.options): + if are_asserts_disabled(state.environment.coredata.optstore): clang_args.append('-DNDEBUG') for de in kwargs['dependencies']: diff --git a/mesonbuild/mparser.py b/mesonbuild/mparser.py index ec08ccfb2583..4e359b3b9f20 100644 --- a/mesonbuild/mparser.py +++ b/mesonbuild/mparser.py @@ -266,6 +266,8 @@ def __init__(self, token: Token[str]): super().__init__(token.lineno, token.colno, token.filename) self.value = '' self.append(token) + self.block_indent = False + self.is_continuation = False def append(self, token: Token[str]) -> None: self.value += token.value @@ -377,7 +379,7 @@ def incorrect_order(self) -> bool: return 
self.order_error def __len__(self) -> int: - return self.num_args() # Fixme + return self.num_args() + self.num_kwargs() @dataclass(unsafe_hash=True) class ArrayNode(BaseNode): diff --git a/mesonbuild/msetup.py b/mesonbuild/msetup.py index c1d71e2e55e2..47b40af331c3 100644 --- a/mesonbuild/msetup.py +++ b/mesonbuild/msetup.py @@ -242,10 +242,11 @@ def _generate(self, env: environment.Environment, capture: bool, vslite_ctx: T.O self.finalize_postconf_hooks(b, intr) if self.options.profile: + localvars = locals() fname = f'profile-{intr.backend.name}-backend.log' fname = os.path.join(self.build_dir, 'meson-logs', fname) - profile.runctx('gen_result = intr.backend.generate(capture, vslite_ctx)', globals(), locals(), filename=fname) - captured_compile_args = locals()['gen_result'] + profile.runctx('gen_result = intr.backend.generate(capture, vslite_ctx)', globals(), localvars, filename=fname) + captured_compile_args = localvars['gen_result'] assert captured_compile_args is None or isinstance(captured_compile_args, dict) else: captured_compile_args = intr.backend.generate(capture, vslite_ctx) @@ -273,9 +274,9 @@ def _generate(self, env: environment.Environment, capture: bool, vslite_ctx: T.O # collect warnings about unsupported build configurations; must be done after full arg processing # by Interpreter() init, but this is most visible at the end - if env.coredata.options[mesonlib.OptionKey('backend')].value == 'xcode': + if env.coredata.optstore.get_value('backend') == 'xcode': mlog.warning('xcode backend is currently unmaintained, patches welcome') - if env.coredata.options[mesonlib.OptionKey('layout')].value == 'flat': + if env.coredata.optstore.get_value('layout') == 'flat': mlog.warning('-Dlayout=flat is unsupported and probably broken. It was a failed experiment at ' 'making Windows build artifacts runnable while uninstalled, due to PATH considerations, ' 'but was untested by CI and anyways breaks reasonable use of conflicting targets in different subdirs. 
' diff --git a/mesonbuild/msubprojects.py b/mesonbuild/msubprojects.py index 15db3f928a61..c15415485217 100755 --- a/mesonbuild/msubprojects.py +++ b/mesonbuild/msubprojects.py @@ -176,7 +176,9 @@ def update_wrapdb(self) -> bool: latest_version = info['versions'][0] new_branch, new_revision = latest_version.rsplit('-', 1) if new_branch != branch or new_revision != revision: - filename = self.wrap.filename if self.wrap.has_wrap else f'{self.wrap.filename}.wrap' + filename = self.wrap.original_filename + if not filename: + filename = os.path.join(self.wrap.subprojects_dir, f'{self.wrap.name}.wrap') update_wrap_file(filename, self.wrap.name, new_branch, new_revision, options.allow_insecure) @@ -521,16 +523,10 @@ def purge(self) -> bool: return True if self.wrap.redirected: - redirect_file = Path(self.wrap.original_filename).resolve() + wrapfile = Path(self.wrap.original_filename).resolve() if options.confirm: - redirect_file.unlink() - mlog.log(f'Deleting {redirect_file}') - - if self.wrap.type == 'redirect': - redirect_file = Path(self.wrap.filename).resolve() - if options.confirm: - redirect_file.unlink() - self.log(f'Deleting {redirect_file}') + wrapfile.unlink() + mlog.log(f'Deleting {wrapfile}') if options.include_cache: packagecache = Path(self.wrap_resolver.cachedir).resolve() diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py index 03d2eb254df0..c0ddb30bacf7 100644 --- a/mesonbuild/mtest.py +++ b/mesonbuild/mtest.py @@ -161,6 +161,8 @@ def add_arguments(parser: argparse.ArgumentParser) -> None: help='Which test setup to use.') parser.add_argument('--test-args', default=[], type=split_args, help='Arguments to pass to the specified test(s) or all tests') + parser.add_argument('--max-lines', default=100, dest='max_lines', type=int, + help='Maximum number of lines to show from a long test log. Since 1.5.0.') parser.add_argument('args', nargs='*', help='Optional list of test names to run. "testname" to run all tests with that name, ' '"subprojname:testname" to specifically run "testname" from "subprojname", ' @@ -510,7 +512,8 @@ class ConsoleLogger(TestLogger): HLINE = "\u2015" RTRI = "\u25B6 " - def __init__(self) -> None: + def __init__(self, max_lines: int) -> None: + self.max_lines = max_lines self.running_tests: OrderedSet['TestRun'] = OrderedSet() self.progress_test: T.Optional['TestRun'] = None self.progress_task: T.Optional[asyncio.Future] = None @@ -652,10 +655,10 @@ def shorten_log(self, harness: 'TestHarness', result: 'TestRun') -> str: return log lines = log.splitlines() - if len(lines) < 100: + if len(lines) < self.max_lines: return log else: - return str(mlog.bold('Listing only the last 100 lines from a long log.\n')) + '\n'.join(lines[-100:]) + return str(mlog.bold(f'Listing only the last {self.max_lines} lines from a long log.\n')) + '\n'.join(lines[-self.max_lines:]) def print_log(self, harness: 'TestHarness', result: 'TestRun') -> None: if not result.verbose: @@ -1591,7 +1594,7 @@ def __init__(self, options: argparse.Namespace): self.name_max_len = 0 self.is_run = False self.loggers: T.List[TestLogger] = [] - self.console_logger = ConsoleLogger() + self.console_logger = ConsoleLogger(options.max_lines) self.loggers.append(self.console_logger) self.need_console = False self.ninja: T.List[str] = None diff --git a/mesonbuild/optinterpreter.py b/mesonbuild/optinterpreter.py index 599da65d3273..ffa46cda650e 100644 --- a/mesonbuild/optinterpreter.py +++ b/mesonbuild/optinterpreter.py @@ -7,6 +7,7 @@ import typing as T from . import coredata +from . import options from . 
import mesonlib from . import mparser from . import mlog @@ -66,7 +67,7 @@ class OptionInterpreter: def __init__(self, subproject: 'SubProject') -> None: self.options: 'coredata.MutableKeyedOptionDictType' = {} self.subproject = subproject - self.option_types: T.Dict[str, T.Callable[..., coredata.UserOption]] = { + self.option_types: T.Dict[str, T.Callable[..., options.UserOption]] = { 'string': self.string_parser, 'boolean': self.boolean_parser, 'combo': self.combo_parser, @@ -179,7 +180,7 @@ def evaluate_statement(self, node: mparser.BaseNode) -> None: since='0.60.0', since_values={str: '0.63.0'}, ), - KwargInfo('yield', bool, default=coredata.DEFAULT_YIELDING, since='0.45.0'), + KwargInfo('yield', bool, default=options.DEFAULT_YIELDING, since='0.45.0'), allow_unknown=True, ) @typed_pos_args('option', str) @@ -208,8 +209,8 @@ def func_option(self, args: T.Tuple[str], kwargs: 'FuncOptionArgs') -> None: 'string option', KwargInfo('value', str, default=''), ) - def string_parser(self, name: str, description: str, args: T.Tuple[bool, _DEPRECATED_ARGS], kwargs: StringArgs) -> coredata.UserOption: - return coredata.UserStringOption(name, description, kwargs['value'], *args) + def string_parser(self, name: str, description: str, args: T.Tuple[bool, _DEPRECATED_ARGS], kwargs: StringArgs) -> options.UserOption: + return options.UserStringOption(name, description, kwargs['value'], *args) @typed_kwargs( 'boolean option', @@ -221,20 +222,20 @@ def string_parser(self, name: str, description: str, args: T.Tuple[bool, _DEPREC deprecated_values={str: ('1.1.0', 'use a boolean, not a string')}, ), ) - def boolean_parser(self, name: str, description: str, args: T.Tuple[bool, _DEPRECATED_ARGS], kwargs: BooleanArgs) -> coredata.UserOption: - return coredata.UserBooleanOption(name, description, kwargs['value'], *args) + def boolean_parser(self, name: str, description: str, args: T.Tuple[bool, _DEPRECATED_ARGS], kwargs: BooleanArgs) -> options.UserOption: + return options.UserBooleanOption(name, description, kwargs['value'], *args) @typed_kwargs( 'combo option', KwargInfo('value', (str, NoneType)), KwargInfo('choices', ContainerTypeInfo(list, str, allow_empty=False), required=True), ) - def combo_parser(self, name: str, description: str, args: T.Tuple[bool, _DEPRECATED_ARGS], kwargs: ComboArgs) -> coredata.UserOption: + def combo_parser(self, name: str, description: str, args: T.Tuple[bool, _DEPRECATED_ARGS], kwargs: ComboArgs) -> options.UserOption: choices = kwargs['choices'] value = kwargs['value'] if value is None: value = kwargs['choices'][0] - return coredata.UserComboOption(name, description, choices, value, *args) + return options.UserComboOption(name, description, choices, value, *args) @typed_kwargs( 'integer option', @@ -248,17 +249,17 @@ def combo_parser(self, name: str, description: str, args: T.Tuple[bool, _DEPRECA KwargInfo('min', (int, NoneType)), KwargInfo('max', (int, NoneType)), ) - def integer_parser(self, name: str, description: str, args: T.Tuple[bool, _DEPRECATED_ARGS], kwargs: IntegerArgs) -> coredata.UserOption: + def integer_parser(self, name: str, description: str, args: T.Tuple[bool, _DEPRECATED_ARGS], kwargs: IntegerArgs) -> options.UserOption: value = kwargs['value'] inttuple = (kwargs['min'], kwargs['max'], value) - return coredata.UserIntegerOption(name, description, inttuple, *args) + return options.UserIntegerOption(name, description, inttuple, *args) @typed_kwargs( 'string array option', KwargInfo('value', (ContainerTypeInfo(list, str), str, NoneType)), KwargInfo('choices', 
ContainerTypeInfo(list, str), default=[]),
     )
-    def string_array_parser(self, name: str, description: str, args: T.Tuple[bool, _DEPRECATED_ARGS], kwargs: StringArrayArgs) -> coredata.UserOption:
+    def string_array_parser(self, name: str, description: str, args: T.Tuple[bool, _DEPRECATED_ARGS], kwargs: StringArrayArgs) -> options.UserOption:
         choices = kwargs['choices']
         value = kwargs['value'] if kwargs['value'] is not None else choices
         if isinstance(value, str):
@@ -266,14 +267,14 @@ def string_array_parser(self, name: str, description: str, args: T.Tuple[bool, _
             FeatureDeprecated('String value for array option', '1.3.0').use(self.subproject)
         else:
             raise mesonlib.MesonException('Value does not define an array: ' + value)
-        return coredata.UserArrayOption(name, description, value,
-                                        choices=choices,
-                                        yielding=args[0],
-                                        deprecated=args[1])
+        return options.UserArrayOption(name, description, value,
+                                       choices=choices,
+                                       yielding=args[0],
+                                       deprecated=args[1])
 
     @typed_kwargs(
         'feature option',
         KwargInfo('value', str, default='auto', validator=in_set_validator({'auto', 'enabled', 'disabled'})),
     )
-    def feature_parser(self, name: str, description: str, args: T.Tuple[bool, _DEPRECATED_ARGS], kwargs: FeatureArgs) -> coredata.UserOption:
-        return coredata.UserFeatureOption(name, description, kwargs['value'], *args)
+    def feature_parser(self, name: str, description: str, args: T.Tuple[bool, _DEPRECATED_ARGS], kwargs: FeatureArgs) -> options.UserOption:
+        return options.UserFeatureOption(name, description, kwargs['value'], *args)
diff --git a/mesonbuild/options.py b/mesonbuild/options.py
new file mode 100644
index 000000000000..d83a312886d5
--- /dev/null
+++ b/mesonbuild/options.py
@@ -0,0 +1,538 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2013-2024 Contributors to The Meson project
+
+from collections import OrderedDict
+from itertools import chain
+import argparse
+
+from .mesonlib import (
+    HoldableObject,
+    OptionKey,
+    default_prefix,
+    default_datadir,
+    default_includedir,
+    default_infodir,
+    default_libdir,
+    default_libexecdir,
+    default_localedir,
+    default_mandir,
+    default_sbindir,
+    default_sysconfdir,
+    MesonException,
+    listify_array_value,
+)
+
+from . import mlog
+
+import typing as T
+from typing import ItemsView
+
+DEFAULT_YIELDING = False
+
+# Can't bind this near the class method it seems, sadly.
+_T = T.TypeVar('_T')
+
+backendlist = ['ninja', 'vs', 'vs2010', 'vs2012', 'vs2013', 'vs2015', 'vs2017', 'vs2019', 'vs2022', 'xcode', 'none']
+genvslitelist = ['vs2022']
+buildtypelist = ['plain', 'debug', 'debugoptimized', 'release', 'minsize', 'custom']
+
+
+class UserOption(T.Generic[_T], HoldableObject):
+    def __init__(self, name: str, description: str, choices: T.Optional[T.Union[str, T.List[_T]]],
+                 yielding: bool,
+                 deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False):
+        super().__init__()
+        self.name = name
+        self.choices = choices
+        self.description = description
+        if not isinstance(yielding, bool):
+            raise MesonException('Value of "yielding" must be a boolean.')
+        self.yielding = yielding
+        self.deprecated = deprecated
+        self.readonly = False
+
+    def listify(self, value: T.Any) -> T.List[T.Any]:
+        return [value]
+
+    def printable_value(self) -> T.Union[str, int, bool, T.List[T.Union[str, int, bool]]]:
+        assert isinstance(self.value, (str, int, bool, list))
+        return self.value
+
+    # Check that the input is a valid value and return the
+    # "cleaned" or "native" version.
For example the Boolean + # option could take the string "true" and return True. + def validate_value(self, value: T.Any) -> _T: + raise RuntimeError('Derived option class did not override validate_value.') + + def set_value(self, newvalue: T.Any) -> bool: + oldvalue = getattr(self, 'value', None) + self.value = self.validate_value(newvalue) + return self.value != oldvalue + +_U = T.TypeVar('_U', bound=UserOption[_T]) + + +class UserStringOption(UserOption[str]): + def __init__(self, name: str, description: str, value: T.Any, yielding: bool = DEFAULT_YIELDING, + deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False): + super().__init__(name, description, None, yielding, deprecated) + self.set_value(value) + + def validate_value(self, value: T.Any) -> str: + if not isinstance(value, str): + raise MesonException(f'The value of option "{self.name}" is "{value}", which is not a string.') + return value + +class UserBooleanOption(UserOption[bool]): + def __init__(self, name: str, description: str, value: bool, yielding: bool = DEFAULT_YIELDING, + deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False): + super().__init__(name, description, [True, False], yielding, deprecated) + self.set_value(value) + + def __bool__(self) -> bool: + return self.value + + def validate_value(self, value: T.Any) -> bool: + if isinstance(value, bool): + return value + if not isinstance(value, str): + raise MesonException(f'Option "{self.name}" value {value} cannot be converted to a boolean') + if value.lower() == 'true': + return True + if value.lower() == 'false': + return False + raise MesonException(f'Option "{self.name}" value {value} is not boolean (true or false).') + +class UserIntegerOption(UserOption[int]): + def __init__(self, name: str, description: str, value: T.Any, yielding: bool = DEFAULT_YIELDING, + deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False): + min_value, max_value, default_value = value + self.min_value = min_value + self.max_value = max_value + c: T.List[str] = [] + if min_value is not None: + c.append('>=' + str(min_value)) + if max_value is not None: + c.append('<=' + str(max_value)) + choices = ', '.join(c) + super().__init__(name, description, choices, yielding, deprecated) + self.set_value(default_value) + + def validate_value(self, value: T.Any) -> int: + if isinstance(value, str): + value = self.toint(value) + if not isinstance(value, int): + raise MesonException(f'Value {value!r} for option "{self.name}" is not an integer.') + if self.min_value is not None and value < self.min_value: + raise MesonException(f'Value {value} for option "{self.name}" is less than minimum value {self.min_value}.') + if self.max_value is not None and value > self.max_value: + raise MesonException(f'Value {value} for option "{self.name}" is more than maximum value {self.max_value}.') + return value + + def toint(self, valuestring: str) -> int: + try: + return int(valuestring) + except ValueError: + raise MesonException(f'Value string "{valuestring}" for option "{self.name}" is not convertible to an integer.') + +class OctalInt(int): + # NinjaBackend.get_user_option_args uses str() to converts it to a command line option + # UserUmaskOption.toint() uses int(str, 8) to convert it to an integer + # So we need to use oct instead of dec here if we do not want values to be misinterpreted. 
+ def __str__(self) -> str: + return oct(int(self)) + +class UserUmaskOption(UserIntegerOption, UserOption[T.Union[str, OctalInt]]): + def __init__(self, name: str, description: str, value: T.Any, yielding: bool = DEFAULT_YIELDING, + deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False): + super().__init__(name, description, (0, 0o777, value), yielding, deprecated) + self.choices = ['preserve', '0000-0777'] + + def printable_value(self) -> str: + if self.value == 'preserve': + return self.value + return format(self.value, '04o') + + def validate_value(self, value: T.Any) -> T.Union[str, OctalInt]: + if value == 'preserve': + return 'preserve' + return OctalInt(super().validate_value(value)) + + def toint(self, valuestring: T.Union[str, OctalInt]) -> int: + try: + return int(valuestring, 8) + except ValueError as e: + raise MesonException(f'Invalid mode for option "{self.name}" {e}') + +class UserComboOption(UserOption[str]): + def __init__(self, name: str, description: str, choices: T.List[str], value: T.Any, + yielding: bool = DEFAULT_YIELDING, + deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False): + super().__init__(name, description, choices, yielding, deprecated) + if not isinstance(self.choices, list): + raise MesonException(f'Combo choices for option "{self.name}" must be an array.') + for i in self.choices: + if not isinstance(i, str): + raise MesonException(f'Combo choice elements for option "{self.name}" must be strings.') + self.set_value(value) + + def validate_value(self, value: T.Any) -> str: + if value not in self.choices: + if isinstance(value, bool): + _type = 'boolean' + elif isinstance(value, (int, float)): + _type = 'number' + else: + _type = 'string' + optionsstring = ', '.join([f'"{item}"' for item in self.choices]) + raise MesonException('Value "{}" (of type "{}") for option "{}" is not one of the choices.' + ' Possible choices are (as string): {}.'.format( + value, _type, self.name, optionsstring)) + return value + +class UserArrayOption(UserOption[T.List[str]]): + def __init__(self, name: str, description: str, value: T.Union[str, T.List[str]], + split_args: bool = False, + allow_dups: bool = False, yielding: bool = DEFAULT_YIELDING, + choices: T.Optional[T.List[str]] = None, + deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False): + super().__init__(name, description, choices if choices is not None else [], yielding, deprecated) + self.split_args = split_args + self.allow_dups = allow_dups + self.set_value(value) + + def listify(self, value: T.Any) -> T.List[T.Any]: + try: + return listify_array_value(value, self.split_args) + except MesonException as e: + raise MesonException(f'error in option "{self.name}": {e!s}') + + def validate_value(self, value: T.Union[str, T.List[str]]) -> T.List[str]: + newvalue = self.listify(value) + + if not self.allow_dups and len(set(newvalue)) != len(newvalue): + msg = 'Duplicated values in array option is deprecated. ' \ + 'This will become a hard error in the future.' 
+ mlog.deprecation(msg) + for i in newvalue: + if not isinstance(i, str): + raise MesonException(f'String array element "{newvalue!s}" for option "{self.name}" is not a string.') + if self.choices: + bad = [x for x in newvalue if x not in self.choices] + if bad: + raise MesonException('Value{} "{}" for option "{}" {} not in allowed choices: "{}"'.format( + '' if len(bad) == 1 else 's', + ', '.join(bad), + self.name, + 'is' if len(bad) == 1 else 'are', + ', '.join(self.choices)) + ) + return newvalue + + def extend_value(self, value: T.Union[str, T.List[str]]) -> None: + """Extend the value with an additional value.""" + new = self.validate_value(value) + self.set_value(self.value + new) + + +class UserFeatureOption(UserComboOption): + static_choices = ['enabled', 'disabled', 'auto'] + + def __init__(self, name: str, description: str, value: T.Any, yielding: bool = DEFAULT_YIELDING, + deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False): + super().__init__(name, description, self.static_choices, value, yielding, deprecated) + self.name: T.Optional[str] = None # TODO: Refactor options to all store their name + + def is_enabled(self) -> bool: + return self.value == 'enabled' + + def is_disabled(self) -> bool: + return self.value == 'disabled' + + def is_auto(self) -> bool: + return self.value == 'auto' + +class UserStdOption(UserComboOption): + ''' + UserOption specific to c_std and cpp_std options. User can set a list of + STDs in preference order and it selects the first one supported by current + compiler. + + For historical reasons, some compilers (msvc) allowed setting a GNU std and + silently fell back to C std. This is now deprecated. Projects that support + both GNU and MSVC compilers should set e.g. c_std=gnu11,c11. + + This is not using self.deprecated mechanism we already have for project + options because we want to print a warning if ALL values are deprecated, not + if SOME values are deprecated. + ''' + def __init__(self, lang: str, all_stds: T.List[str]) -> None: + self.lang = lang.lower() + self.all_stds = ['none'] + all_stds + # Map a deprecated std to its replacement. e.g. gnu11 -> c11. + self.deprecated_stds: T.Dict[str, str] = {} + opt_name = 'cpp_std' if lang == 'c++' else f'{lang}_std' + super().__init__(opt_name, f'{lang} language standard to use', ['none'], 'none') + + def set_versions(self, versions: T.List[str], gnu: bool = False, gnu_deprecated: bool = False) -> None: + assert all(std in self.all_stds for std in versions) + self.choices += versions + if gnu: + gnu_stds_map = {f'gnu{std[1:]}': std for std in versions} + if gnu_deprecated: + self.deprecated_stds.update(gnu_stds_map) + else: + self.choices += gnu_stds_map.keys() + + def validate_value(self, value: T.Union[str, T.List[str]]) -> str: + try: + candidates = listify_array_value(value) + except MesonException as e: + raise MesonException(f'error in option "{self.name}": {e!s}') + unknown = ','.join(std for std in candidates if std not in self.all_stds) + if unknown: + raise MesonException(f'Unknown option "{self.name}" value {unknown}. 
Possible values are {self.all_stds}.') + # Check first if any of the candidates are not deprecated + for std in candidates: + if std in self.choices: + return std + # Fallback to a deprecated std if any + for std in candidates: + newstd = self.deprecated_stds.get(std) + if newstd is not None: + mlog.deprecation( + f'None of the values {candidates} are supported by the {self.lang} compiler.\n' + + f'However, the deprecated {std} std currently falls back to {newstd}.\n' + + 'This will be an error in the future.\n' + + 'If the project supports both GNU and MSVC compilers, a value such as\n' + + '"c_std=gnu11,c11" specifies that GNU is preferred but it can safely fallback to plain c11.') + return newstd + raise MesonException(f'None of values {candidates} are supported by the {self.lang.upper()} compiler. ' + + f'Possible values for option "{self.name}" are {self.choices}') + + +class BuiltinOption(T.Generic[_T, _U]): + + """Class for a builtin option type. + + There are some cases that are not fully supported yet. + """ + + def __init__(self, opt_type: T.Type[_U], description: str, default: T.Any, yielding: bool = True, *, + choices: T.Any = None, readonly: bool = False): + self.opt_type = opt_type + self.description = description + self.default = default + self.choices = choices + self.yielding = yielding + self.readonly = readonly + + def init_option(self, name: 'OptionKey', value: T.Optional[T.Any], prefix: str) -> _U: + """Create an instance of opt_type and return it.""" + if value is None: + value = self.prefixed_default(name, prefix) + keywords = {'yielding': self.yielding, 'value': value} + if self.choices: + keywords['choices'] = self.choices + o = self.opt_type(name.name, self.description, **keywords) + o.readonly = self.readonly + return o + + def _argparse_action(self) -> T.Optional[str]: + # If the type is a boolean, the presence of the argument in --foo form + # is to enable it. Disabling happens by using -Dfoo=false, which is + # parsed under `args.projectoptions` and does not hit this codepath. + if isinstance(self.default, bool): + return 'store_true' + return None + + def _argparse_choices(self) -> T.Any: + if self.opt_type is UserBooleanOption: + return [True, False] + elif self.opt_type is UserFeatureOption: + return UserFeatureOption.static_choices + return self.choices + + @staticmethod + def argparse_name_to_arg(name: str) -> str: + if name == 'warning_level': + return '--warnlevel' + else: + return '--' + name.replace('_', '-') + + def prefixed_default(self, name: 'OptionKey', prefix: str = '') -> T.Any: + if self.opt_type in [UserComboOption, UserIntegerOption]: + return self.default + try: + return BUILTIN_DIR_NOPREFIX_OPTIONS[name][prefix] + except KeyError: + pass + return self.default + + def add_to_argparse(self, name: str, parser: argparse.ArgumentParser, help_suffix: str) -> None: + kwargs = OrderedDict() + + c = self._argparse_choices() + b = self._argparse_action() + h = self.description + if not b: + h = '{} (default: {}).'.format(h.rstrip('.'), self.prefixed_default(name)) + else: + kwargs['action'] = b + if c and not b: + kwargs['choices'] = c + kwargs['default'] = argparse.SUPPRESS + kwargs['dest'] = name + + cmdline_name = self.argparse_name_to_arg(name) + parser.add_argument(cmdline_name, help=h + help_suffix, **kwargs) + + +# Update `docs/markdown/Builtin-options.md` after changing the options below +# Also update mesonlib._BUILTIN_NAMES. See the comment there for why this is required. 
+# Please also update completion scripts in $MESONSRC/data/shell-completions/ +BUILTIN_DIR_OPTIONS: T.Dict['OptionKey', 'BuiltinOption'] = OrderedDict([ + (OptionKey('prefix'), BuiltinOption(UserStringOption, 'Installation prefix', default_prefix())), + (OptionKey('bindir'), BuiltinOption(UserStringOption, 'Executable directory', 'bin')), + (OptionKey('datadir'), BuiltinOption(UserStringOption, 'Data file directory', default_datadir())), + (OptionKey('includedir'), BuiltinOption(UserStringOption, 'Header file directory', default_includedir())), + (OptionKey('infodir'), BuiltinOption(UserStringOption, 'Info page directory', default_infodir())), + (OptionKey('libdir'), BuiltinOption(UserStringOption, 'Library directory', default_libdir())), + (OptionKey('licensedir'), BuiltinOption(UserStringOption, 'Licenses directory', '')), + (OptionKey('libexecdir'), BuiltinOption(UserStringOption, 'Library executable directory', default_libexecdir())), + (OptionKey('localedir'), BuiltinOption(UserStringOption, 'Locale data directory', default_localedir())), + (OptionKey('localstatedir'), BuiltinOption(UserStringOption, 'Localstate data directory', 'var')), + (OptionKey('mandir'), BuiltinOption(UserStringOption, 'Manual page directory', default_mandir())), + (OptionKey('sbindir'), BuiltinOption(UserStringOption, 'System executable directory', default_sbindir())), + (OptionKey('sharedstatedir'), BuiltinOption(UserStringOption, 'Architecture-independent data directory', 'com')), + (OptionKey('sysconfdir'), BuiltinOption(UserStringOption, 'Sysconf data directory', default_sysconfdir())), +]) + +BUILTIN_CORE_OPTIONS: T.Dict['OptionKey', 'BuiltinOption'] = OrderedDict([ + (OptionKey('auto_features'), BuiltinOption(UserFeatureOption, "Override value of all 'auto' features", 'auto')), + (OptionKey('backend'), BuiltinOption(UserComboOption, 'Backend to use', 'ninja', choices=backendlist, + readonly=True)), + (OptionKey('genvslite'), + BuiltinOption( + UserComboOption, + 'Setup multiple buildtype-suffixed ninja-backend build directories, ' + 'and a [builddir]_vs containing a Visual Studio meta-backend with multiple configurations that calls into them', + 'vs2022', + choices=genvslitelist) + ), + (OptionKey('buildtype'), BuiltinOption(UserComboOption, 'Build type to use', 'debug', + choices=buildtypelist)), + (OptionKey('debug'), BuiltinOption(UserBooleanOption, 'Enable debug symbols and other information', True)), + (OptionKey('default_library'), BuiltinOption(UserComboOption, 'Default library type', 'shared', choices=['shared', 'static', 'both'], + yielding=False)), + (OptionKey('errorlogs'), BuiltinOption(UserBooleanOption, "Whether to print the logs from failing tests", True)), + (OptionKey('install_umask'), BuiltinOption(UserUmaskOption, 'Default umask to apply on permissions of installed files', '022')), + (OptionKey('layout'), BuiltinOption(UserComboOption, 'Build directory layout', 'mirror', choices=['mirror', 'flat'])), + (OptionKey('optimization'), BuiltinOption(UserComboOption, 'Optimization level', '0', choices=['plain', '0', 'g', '1', '2', '3', 's'])), + (OptionKey('prefer_static'), BuiltinOption(UserBooleanOption, 'Whether to try static linking before shared linking', False)), + (OptionKey('stdsplit'), BuiltinOption(UserBooleanOption, 'Split stdout and stderr in test logs', True)), + (OptionKey('strip'), BuiltinOption(UserBooleanOption, 'Strip targets on install', False)), + (OptionKey('unity'), BuiltinOption(UserComboOption, 'Unity build', 'off', choices=['on', 'off', 'subprojects'])), + 
(OptionKey('unity_size'), BuiltinOption(UserIntegerOption, 'Unity block size', (2, None, 4))), + (OptionKey('warning_level'), BuiltinOption(UserComboOption, 'Compiler warning level to use', '1', choices=['0', '1', '2', '3', 'everything'], yielding=False)), + (OptionKey('werror'), BuiltinOption(UserBooleanOption, 'Treat warnings as errors', False, yielding=False)), + (OptionKey('wrap_mode'), BuiltinOption(UserComboOption, 'Wrap mode', 'default', choices=['default', 'nofallback', 'nodownload', 'forcefallback', 'nopromote'])), + (OptionKey('force_fallback_for'), BuiltinOption(UserArrayOption, 'Force fallback for those subprojects', [])), + (OptionKey('vsenv'), BuiltinOption(UserBooleanOption, 'Activate Visual Studio environment', False, readonly=True)), + + # Pkgconfig module + (OptionKey('relocatable', module='pkgconfig'), + BuiltinOption(UserBooleanOption, 'Generate pkgconfig files as relocatable', False)), + + # Python module + (OptionKey('bytecompile', module='python'), + BuiltinOption(UserIntegerOption, 'Whether to compile bytecode', (-1, 2, 0))), + (OptionKey('install_env', module='python'), + BuiltinOption(UserComboOption, 'Which python environment to install to', 'prefix', choices=['auto', 'prefix', 'system', 'venv'])), + (OptionKey('platlibdir', module='python'), + BuiltinOption(UserStringOption, 'Directory for site-specific, platform-specific files.', '')), + (OptionKey('purelibdir', module='python'), + BuiltinOption(UserStringOption, 'Directory for site-specific, non-platform-specific files.', '')), + (OptionKey('allow_limited_api', module='python'), + BuiltinOption(UserBooleanOption, 'Whether to allow use of the Python Limited API', True)), +]) + +BUILTIN_OPTIONS = OrderedDict(chain(BUILTIN_DIR_OPTIONS.items(), BUILTIN_CORE_OPTIONS.items())) + +BUILTIN_OPTIONS_PER_MACHINE: T.Dict['OptionKey', 'BuiltinOption'] = OrderedDict([ + (OptionKey('pkg_config_path'), BuiltinOption(UserArrayOption, 'List of additional paths for pkg-config to search', [])), + (OptionKey('cmake_prefix_path'), BuiltinOption(UserArrayOption, 'List of additional prefixes for cmake to search', [])), +]) + +# Special prefix-dependent defaults for installation directories that reside in +# a path outside of the prefix in FHS and common usage. 
+BUILTIN_DIR_NOPREFIX_OPTIONS: T.Dict[OptionKey, T.Dict[str, str]] = { + OptionKey('sysconfdir'): {'/usr': '/etc'}, + OptionKey('localstatedir'): {'/usr': '/var', '/usr/local': '/var/local'}, + OptionKey('sharedstatedir'): {'/usr': '/var/lib', '/usr/local': '/var/local/lib'}, + OptionKey('platlibdir', module='python'): {}, + OptionKey('purelibdir', module='python'): {}, +} + +class OptionStore: + def __init__(self): + self.d: T.Dict['OptionKey', 'UserOption[T.Any]'] = {} + + def __len__(self): + return len(self.d) + + def ensure_key(self, key: T.Union[OptionKey, str]) -> OptionKey: + if isinstance(key, str): + return OptionKey(key) + return key + + def get_value_object(self, key: T.Union[OptionKey, str]) -> 'UserOption[T.Any]': + return self.d[self.ensure_key(key)] + + def get_value(self, key: T.Union[OptionKey, str]) -> 'T.Any': + return self.get_value_object(key).value + + def add_system_option(self, key: T.Union[OptionKey, str], valobj: 'UserOption[T.Any]'): + key = self.ensure_key(key) + self.d[key] = valobj + + def add_project_option(self, key: T.Union[OptionKey, str], valobj: 'UserOption[T.Any]'): + key = self.ensure_key(key) + self.d[key] = valobj + + def set_value(self, key: T.Union[OptionKey, str], new_value: 'T.Any') -> bool: + key = self.ensure_key(key) + return self.d[key].set_value(new_value) + + # FIXME, this should be removed.or renamed to "change_type_of_existing_object" or something like that + def set_value_object(self, key: T.Union[OptionKey, str], new_object: 'UserOption[T.Any]') -> bool: + key = self.ensure_key(key) + self.d[key] = new_object + + def remove(self, key): + del self.d[key] + + def __contains__(self, key): + key = self.ensure_key(key) + return key in self.d + + def __repr__(self): + return repr(self.d) + + def keys(self): + return self.d.keys() + + def values(self): + return self.d.values() + + def items(self) -> ItemsView['OptionKey', 'UserOption[T.Any]']: + return self.d.items() + + def update(self, *args, **kwargs): + return self.d.update(*args, **kwargs) + + def setdefault(self, k, o): + return self.d.setdefault(k, o) + + def get(self, *args, **kwargs) -> UserOption: + return self.d.get(*args, **kwargs) diff --git a/mesonbuild/programs.py b/mesonbuild/programs.py index b73f9e4025df..fbe241d99607 100644 --- a/mesonbuild/programs.py +++ b/mesonbuild/programs.py @@ -36,6 +36,7 @@ def __init__(self, name: str, command: T.Optional[T.List[str]] = None, self.name = name self.path: T.Optional[str] = None self.cached_version: T.Optional[str] = None + self.version_arg = '--version' if command is not None: self.command = mesonlib.listify(command) if mesonlib.is_windows(): @@ -93,9 +94,9 @@ def description(self) -> str: def get_version(self, interpreter: T.Optional['Interpreter'] = None) -> str: if not self.cached_version: - raw_cmd = self.get_command() + ['--version'] + raw_cmd = self.get_command() + [self.version_arg] if interpreter: - res = interpreter.run_command_impl((self, ['--version']), + res = interpreter.run_command_impl((self, [self.version_arg]), {'capture': True, 'check': True, 'env': mesonlib.EnvironmentVariables()}, diff --git a/mesonbuild/rewriter.py b/mesonbuild/rewriter.py index 0a40a711c4ea..78517bf05f8b 100644 --- a/mesonbuild/rewriter.py +++ b/mesonbuild/rewriter.py @@ -470,11 +470,11 @@ def process_default_options(self, cmd): cdata = self.interpreter.coredata options = { - **{str(k): v for k, v in cdata.options.items()}, - **{str(k): v for k, v in cdata.options.items()}, - **{str(k): v for k, v in cdata.options.items()}, - **{str(k): v for k, 
v in cdata.options.items()}, - **{str(k): v for k, v in cdata.options.items()}, + **{str(k): v for k, v in cdata.optstore.items()}, + **{str(k): v for k, v in cdata.optstore.items()}, + **{str(k): v for k, v in cdata.optstore.items()}, + **{str(k): v for k, v in cdata.optstore.items()}, + **{str(k): v for k, v in cdata.optstore.items()}, } for key, val in sorted(cmd['options'].items()): diff --git a/mesonbuild/scripts/clangtidy.py b/mesonbuild/scripts/clangtidy.py index 353cdc19c288..1e0c4a5a396a 100644 --- a/mesonbuild/scripts/clangtidy.py +++ b/mesonbuild/scripts/clangtidy.py @@ -11,7 +11,7 @@ import typing as T def run_clang_tidy(fname: Path, builddir: Path) -> subprocess.CompletedProcess: - return subprocess.run(['clang-tidy', '-p', str(builddir), str(fname)]) + return subprocess.run(['clang-tidy', '-quiet', '-p', str(builddir), str(fname)]) def run_clang_tidy_fix(fname: Path, builddir: Path) -> subprocess.CompletedProcess: return subprocess.run(['run-clang-tidy', '-fix', '-format', '-quiet', '-p', str(builddir), str(fname)]) diff --git a/mesonbuild/scripts/coverage.py b/mesonbuild/scripts/coverage.py index bded052d4a49..17a4a10ae55f 100644 --- a/mesonbuild/scripts/coverage.py +++ b/mesonbuild/scripts/coverage.py @@ -161,7 +161,7 @@ def coverage(outputs: T.List[str], source_root: str, subproject_root: str, build os.mkdir(htmloutdir) subprocess.check_call(gcovr_base_cmd + gcovr_config + ['--html', - '--html-details', + '--html-nested', '--print-summary', '-o', os.path.join(htmloutdir, 'index.html'), ] + gcov_exe_args) diff --git a/mesonbuild/scripts/depfixer.py b/mesonbuild/scripts/depfixer.py index 71599f784e73..db9c97d98c6a 100644 --- a/mesonbuild/scripts/depfixer.py +++ b/mesonbuild/scripts/depfixer.py @@ -379,11 +379,14 @@ def fix_elf(fname: str, rpath_dirs_to_remove: T.Set[bytes], new_rpath: T.Optiona # note: e.get_rpath() and e.get_runpath() may be useful e.fix_rpath(fname, rpath_dirs_to_remove, new_rpath) -def get_darwin_rpaths(fname: str) -> T.List[str]: +def get_darwin_rpaths(fname: str) -> OrderedSet[str]: p, out, _ = Popen_safe(['otool', '-l', fname], stderr=subprocess.DEVNULL) if p.returncode != 0: raise subprocess.CalledProcessError(p.returncode, p.args, out) - result = [] + # Need to deduplicate rpaths, as macOS's install_name_tool + # is *very* allergic to duplicate -delete_rpath arguments + # when calling depfixer on installation. 
+ result: OrderedSet[str] = OrderedSet() current_cmd = 'FOOBAR' for line in out.split('\n'): line = line.strip() @@ -394,7 +397,7 @@ def get_darwin_rpaths(fname: str) -> T.List[str]: current_cmd = value if key == 'path' and current_cmd == 'LC_RPATH': rp = value.split('(', 1)[0].strip() - result.append(rp) + result.add(rp) return result def fix_darwin(fname: str, rpath_dirs_to_remove: T.Set[bytes], new_rpath: str, final_path: str, install_name_mappings: T.Dict[str, str]) -> None: @@ -406,7 +409,7 @@ def fix_darwin(fname: str, rpath_dirs_to_remove: T.Set[bytes], new_rpath: str, f return new_rpaths: OrderedSet[str] = OrderedSet() if new_rpath: - new_rpaths.update(new_rpath) + new_rpaths.update(new_rpath.split(':')) # filter out build-only rpath entries, like in # fix_rpathtype_entry remove_rpaths = [x.decode('utf8') for x in rpath_dirs_to_remove] diff --git a/mesonbuild/scripts/python_info.py b/mesonbuild/scripts/python_info.py index c4508733848c..d3bf71b5e294 100755 --- a/mesonbuild/scripts/python_info.py +++ b/mesonbuild/scripts/python_info.py @@ -107,6 +107,9 @@ def links_against_libpython(): if is_pypy: limited_api_suffix = suffix +# Whether we're targeting a free-threaded CPython interpreter +is_freethreaded = bool(variables.get('Py_GIL_DISABLED', False)) + print(json.dumps({ 'variables': variables, 'paths': paths, @@ -119,4 +122,5 @@ def links_against_libpython(): 'link_libpython': links_against_libpython(), 'suffix': suffix, 'limited_api_suffix': limited_api_suffix, + 'is_freethreaded': is_freethreaded, })) diff --git a/mesonbuild/scripts/scanbuild.py b/mesonbuild/scripts/scanbuild.py index d7fbcf4fee33..b738aeee1b6e 100644 --- a/mesonbuild/scripts/scanbuild.py +++ b/mesonbuild/scripts/scanbuild.py @@ -7,7 +7,8 @@ import shutil import tempfile from ..environment import detect_ninja, detect_scanbuild -from ..coredata import get_cmd_line_file, CmdLineFileParser +from ..coredata import get_cmd_line_file +from ..machinefile import CmdLineFileParser from ..mesonlib import windows_proof_rmtree from pathlib import Path import typing as T diff --git a/mesonbuild/scripts/symbolextractor.py b/mesonbuild/scripts/symbolextractor.py index 52b9b80c51ea..5c45253d5702 100644 --- a/mesonbuild/scripts/symbolextractor.py +++ b/mesonbuild/scripts/symbolextractor.py @@ -134,9 +134,10 @@ def osx_syms(libfilename: str, outfilename: str) -> None: match = i break result = [arr[match + 2], arr[match + 5]] # Libreoffice stores all 5 lines but the others seem irrelevant. - # Get a list of all symbols exported - output = call_tool('nm', ['--extern-only', '--defined-only', - '--format=posix', libfilename]) + # Get a list of all symbols exported. `nm -g -U -P` is equivalent to, and more portable than, + # `nm --extern-only --defined-only --format=posix`; cctools-port only understands the one-character form, + # as does `nm` on very old macOS versions, (see meson#11131). `llvm-nm` understands both forms. + output = call_tool('nm', ['-g', '-U', '-P', libfilename]) if not output: dummy_syms(outfilename) return diff --git a/mesonbuild/utils/core.py b/mesonbuild/utils/core.py index 92f9d2c70d40..a87f77acc14f 100644 --- a/mesonbuild/utils/core.py +++ b/mesonbuild/utils/core.py @@ -64,6 +64,7 @@ def __init__(self, values: T.Optional[EnvInitValueType] = None, # The set of all env vars we have operations for. 
Only used for self.has_name() self.varnames: T.Set[str] = set() self.unset_vars: T.Set[str] = set() + self.can_use_env = True if values: init_func = getattr(self, init_method) @@ -95,7 +96,9 @@ def merge(self, other: EnvironmentVariables) -> None: self.envvars.append((method, name, values, separator)) if name in self.unset_vars: self.unset_vars.remove(name) - self.unset_vars.update(other.unset_vars) + if other.unset_vars: + self.can_use_env = False + self.unset_vars.update(other.unset_vars) def set(self, name: str, values: T.List[str], separator: str = os.pathsep) -> None: if name in self.unset_vars: @@ -104,17 +107,20 @@ def set(self, name: str, values: T.List[str], separator: str = os.pathsep) -> No self.envvars.append((self._set, name, values, separator)) def unset(self, name: str) -> None: + self.can_use_env = False if name in self.varnames: raise MesonException(f'You cannot unset the {name!r} variable because it is already set') self.unset_vars.add(name) def append(self, name: str, values: T.List[str], separator: str = os.pathsep) -> None: + self.can_use_env = False if name in self.unset_vars: raise MesonException(f'You cannot append to unset variable {name!r}') self.varnames.add(name) self.envvars.append((self._append, name, values, separator)) def prepend(self, name: str, values: T.List[str], separator: str = os.pathsep) -> None: + self.can_use_env = False if name in self.unset_vars: raise MesonException(f'You cannot prepend to unset variable {name!r}') self.varnames.add(name) diff --git a/mesonbuild/utils/universal.py b/mesonbuild/utils/universal.py index c831169c523d..6aee268ee21b 100644 --- a/mesonbuild/utils/universal.py +++ b/mesonbuild/utils/universal.py @@ -1182,24 +1182,21 @@ def do_replacement(regex: T.Pattern[str], line: str, variable_format: Literal['meson', 'cmake', 'cmake@'], confdata: T.Union[T.Dict[str, T.Tuple[str, T.Optional[str]]], 'ConfigurationData']) -> T.Tuple[str, T.Set[str]]: missing_variables: T.Set[str] = set() - if variable_format == 'cmake': - start_tag = '${' - backslash_tag = '\\${' - else: - start_tag = '@' - backslash_tag = '\\@' def variable_replace(match: T.Match[str]) -> str: - # Pairs of escape characters before '@' or '\@' + # Pairs of escape characters before '@', '\@', '${' or '\${' if match.group(0).endswith('\\'): num_escapes = match.end(0) - match.start(0) return '\\' * (num_escapes // 2) - # Single escape character and '@' - elif match.group(0) == backslash_tag: - return start_tag - # Template variable to be replaced + # Handle cmake escaped \${} tags + elif variable_format == 'cmake' and match.group(0) == '\\${': + return '${' + # \@escaped\@ variables + elif match.groupdict().get('escaped') is not None: + return match.group('escaped')[1:-2]+'@' else: - varname = match.group(1) + # Template variable to be replaced + varname = match.group('variable') var_str = '' if varname in confdata: var, _ = confdata.get(varname) @@ -1280,11 +1277,23 @@ def get_cmake_define(line: str, confdata: 'ConfigurationData') -> str: def get_variable_regex(variable_format: Literal['meson', 'cmake', 'cmake@'] = 'meson') -> T.Pattern[str]: # Only allow (a-z, A-Z, 0-9, _, -) as valid characters for a define - # Also allow escaping '@' with '\@' if variable_format in {'meson', 'cmake@'}: - regex = re.compile(r'(?:\\\\)+(?=\\?@)|\\@|@([-a-zA-Z0-9_]+)@') + # Also allow escaping pairs of '@' with '\@' + regex = re.compile(r''' + (?:\\\\)+(?=\\?@) # Matches multiple backslashes followed by an @ symbol + | # OR + (?<!\\)@(?P<variable>[-a-zA-Z0-9_]+)@ # Match a variable enclosed in @ symbols
and capture the variable name; no matches beginning with '\@' + | # OR + (?P<escaped>\\@[-a-zA-Z0-9_]+\\@) # Match an escaped variable enclosed in @ symbols + ''', re.VERBOSE) else: - regex = re.compile(r'(?:\\\\)+(?=\\?\$)|\\\${|\${([-a-zA-Z0-9_]+)}') + regex = re.compile(r''' + (?:\\\\)+(?=\\?\$) # Match multiple backslashes followed by a dollar sign + | # OR + \\\${ # Match a backslash followed by a dollar sign and an opening curly brace + | # OR + \${(?P<variable>[-a-zA-Z0-9_]+)} # Match a variable enclosed in curly braces and capture the variable name + ''', re.VERBOSE) return regex def do_conf_str(src: str, data: T.List[str], confdata: 'ConfigurationData', diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index a8efd163a5d6..3fe40ed9f322 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -51,7 +51,7 @@ REQ_TIMEOUT = 30.0 WHITELIST_SUBDOMAIN = 'wrapdb.mesonbuild.com' -ALL_TYPES = ['file', 'git', 'hg', 'svn'] +ALL_TYPES = ['file', 'git', 'hg', 'svn', 'redirect'] PATCH = shutil.which('patch') @@ -76,9 +76,9 @@ def open_wrapdburl(urlstring: str, allow_insecure: bool = False, have_opt: bool if has_ssl: try: return T.cast('http.client.HTTPResponse', urllib.request.urlopen(urllib.parse.urlunparse(url), timeout=REQ_TIMEOUT)) - except urllib.error.URLError as excp: + except OSError as excp: msg = f'WrapDB connection failed to {urlstring} with error {excp}.' - if isinstance(excp.reason, ssl.SSLCertVerificationError): + if isinstance(excp, urllib.error.URLError) and isinstance(excp.reason, ssl.SSLCertVerificationError): if allow_insecure: mlog.warning(f'{msg}\n\n Proceeding without authentication.') else: @@ -95,7 +95,7 @@ def open_wrapdburl(urlstring: str, allow_insecure: bool = False, have_opt: bool nossl_url = url._replace(scheme='http') try: return T.cast('http.client.HTTPResponse', urllib.request.urlopen(urllib.parse.urlunparse(nossl_url), timeout=REQ_TIMEOUT)) - except urllib.error.URLError as excp: + except OSError as excp: raise WrapException(f'WrapDB connection failed to {urlstring} with error {excp}') def get_releases_data(allow_insecure: bool) -> bytes: @@ -137,46 +137,60 @@ class WrapNotFoundException(WrapException): pass class PackageDefinition: - def __init__(self, fname: str, subproject: str = ''): - self.filename = fname - self.subproject = SubProject(subproject) - self.type: T.Optional[str] = None - self.values: T.Dict[str, str] = {} + def __init__(self, name: str, subprojects_dir: str, type_: T.Optional[str] = None, values: T.Optional[T.Dict[str, str]] = None): + self.name = name + self.subprojects_dir = subprojects_dir + self.type = type_ + self.values = values or {} self.provided_deps: T.Dict[str, T.Optional[str]] = {} self.provided_programs: T.List[str] = [] self.diff_files: T.List[Path] = [] - self.basename = os.path.basename(fname) - self.has_wrap = self.basename.endswith('.wrap') - self.name = self.basename[:-5] if self.has_wrap else self.basename - # must be lowercase for consistency with dep=variable assignment - self.provided_deps[self.name.lower()] = None - # What the original file name was before redirection - self.original_filename = fname - self.redirected = False - if self.has_wrap: - self.parse_wrap() - with open(fname, 'r', encoding='utf-8') as file: - self.wrapfile_hash = hashlib.sha256(file.read().encode('utf-8')).hexdigest() + self.wrapfile_hash: T.Optional[str] = None + self.original_filename: T.Optional[str] = None + self.redirected: bool = False + self.filesdir = os.path.join(self.subprojects_dir, 'packagefiles') self.directory =
self.values.get('directory', self.name) if os.path.dirname(self.directory): raise WrapException('Directory key must be a name and not a path') - if self.type and self.type not in ALL_TYPES: - raise WrapException(f'Unknown wrap type {self.type!r}') - self.filesdir = os.path.join(os.path.dirname(self.filename), 'packagefiles') + if 'diff_files' in self.values: + for s in self.values['diff_files'].split(','): + path = Path(s.strip()) + if path.is_absolute(): + raise WrapException('diff_files paths cannot be absolute') + if '..' in path.parts: + raise WrapException('diff_files paths cannot contain ".."') + self.diff_files.append(path) + # must be lowercase for consistency with dep=variable assignment + self.provided_deps[self.name.lower()] = None - def parse_wrap(self) -> None: - try: - config = configparser.ConfigParser(interpolation=None) - config.read(self.filename, encoding='utf-8') - except configparser.Error as e: - raise WrapException(f'Failed to parse {self.basename}: {e!s}') - self.parse_wrap_section(config) - if self.type == 'redirect': + @staticmethod + def from_values(name: str, subprojects_dir: str, type_: str, values: T.Dict[str, str]) -> PackageDefinition: + return PackageDefinition(name, subprojects_dir, type_, values) + + @staticmethod + def from_directory(filename: str) -> PackageDefinition: + name = os.path.basename(filename) + subprojects_dir = os.path.dirname(filename) + return PackageDefinition(name, subprojects_dir) + + @staticmethod + def from_wrap_file(filename: str, subproject: SubProject = SubProject('')) -> PackageDefinition: + config, type_, values = PackageDefinition._parse_wrap(filename) + if 'diff_files' in values: + FeatureNew('Wrap files with diff_files', '0.63.0').use(subproject) + if 'patch_directory' in values: + FeatureNew('Wrap files with patch_directory', '0.55.0').use(subproject) + for what in ['patch', 'source']: + if f'{what}_filename' in values and f'{what}_url' not in values: + FeatureNew(f'Local wrap patch files without {what}_url', '0.55.0').use(subproject) + + subprojects_dir = os.path.dirname(filename) + + if type_ == 'redirect': # [wrap-redirect] have a `filename` value pointing to the real wrap # file we should parse instead. It must be relative to the current # wrap file location and must be in the form foo/subprojects/bar.wrap. 
- dirname = Path(self.filename).parent - fname = Path(self.values['filename']) + fname = Path(values['filename']) for i, p in enumerate(fname.parts): if i % 2 == 0: if p == '..': @@ -186,37 +200,41 @@ def parse_wrap(self) -> None: raise WrapException('wrap-redirect filename must be in the form foo/subprojects/bar.wrap') if fname.suffix != '.wrap': raise WrapException('wrap-redirect filename must be a .wrap file') - fname = dirname / fname + fname = Path(subprojects_dir, fname) if not fname.is_file(): raise WrapException(f'wrap-redirect {fname} filename does not exist') - self.filename = str(fname) - self.parse_wrap() - self.redirected = True - else: - self.parse_provide_section(config) - if 'patch_directory' in self.values: - FeatureNew('Wrap files with patch_directory', '0.55.0').use(self.subproject) - for what in ['patch', 'source']: - if f'{what}_filename' in self.values and f'{what}_url' not in self.values: - FeatureNew(f'Local wrap patch files without {what}_url', '0.55.0').use(self.subproject) + wrap = PackageDefinition.from_wrap_file(str(fname), subproject) + wrap.original_filename = filename + wrap.redirected = True + return wrap + + name = os.path.basename(filename)[:-5] + wrap = PackageDefinition.from_values(name, subprojects_dir, type_, values) + wrap.original_filename = filename + wrap.parse_provide_section(config) + + with open(filename, 'r', encoding='utf-8') as file: + wrap.wrapfile_hash = hashlib.sha256(file.read().encode('utf-8')).hexdigest() - def parse_wrap_section(self, config: configparser.ConfigParser) -> None: + return wrap + + @staticmethod + def _parse_wrap(filename: str) -> T.Tuple[configparser.ConfigParser, str, T.Dict[str, str]]: + try: + config = configparser.ConfigParser(interpolation=None) + config.read(filename, encoding='utf-8') + except configparser.Error as e: + raise WrapException(f'Failed to parse {filename}: {e!s}') if len(config.sections()) < 1: - raise WrapException(f'Missing sections in {self.basename}') - self.wrap_section = config.sections()[0] - if not self.wrap_section.startswith('wrap-'): - raise WrapException(f'{self.wrap_section!r} is not a valid first section in {self.basename}') - self.type = self.wrap_section[5:] - self.values = dict(config[self.wrap_section]) - if 'diff_files' in self.values: - FeatureNew('Wrap files with diff_files', '0.63.0').use(self.subproject) - for s in self.values['diff_files'].split(','): - path = Path(s.strip()) - if path.is_absolute(): - raise WrapException('diff_files paths cannot be absolute') - if '..' in path.parts: - raise WrapException('diff_files paths cannot contain ".."') - self.diff_files.append(path) + raise WrapException(f'Missing sections in {filename}') + wrap_section = config.sections()[0] + if not wrap_section.startswith('wrap-'): + raise WrapException(f'{wrap_section!r} is not a valid first section in {filename}') + type_ = wrap_section[5:] + if type_ not in ALL_TYPES: + raise WrapException(f'Unknown wrap type {type_!r}') + values = dict(config[wrap_section]) + return config, type_, values def parse_provide_section(self, config: configparser.ConfigParser) -> None: if config.has_section('provides'): @@ -236,7 +254,7 @@ def parse_provide_section(self, config: configparser.ConfigParser) -> None: self.provided_programs += names_list continue if not v: - m = (f'Empty dependency variable name for {k!r} in {self.basename}. ' + m = (f'Empty dependency variable name for {k!r} in {self.name}.wrap. 
' 'If the subproject uses meson.override_dependency() ' 'it can be added in the "dependency_names" special key.') raise WrapException(m) @@ -246,20 +264,21 @@ def get(self, key: str) -> str: try: return self.values[key] except KeyError: - raise WrapException(f'Missing key {key!r} in {self.basename}') + raise WrapException(f'Missing key {key!r} in {self.name}.wrap') - def get_hashfile(self, subproject_directory: str) -> str: + @staticmethod + def get_hashfile(subproject_directory: str) -> str: return os.path.join(subproject_directory, '.meson-subproject-wrap-hash.txt') def update_hash_cache(self, subproject_directory: str) -> None: - if self.has_wrap: + if self.wrapfile_hash: with open(self.get_hashfile(subproject_directory), 'w', encoding='utf-8') as file: file.write(self.wrapfile_hash + '\n') def get_directory(subdir_root: str, packagename: str) -> str: fname = os.path.join(subdir_root, packagename + '.wrap') if os.path.isfile(fname): - wrap = PackageDefinition(fname) + wrap = PackageDefinition.from_wrap_file(fname) return wrap.directory return packagename @@ -276,7 +295,7 @@ def verbose_git(cmd: T.List[str], workingdir: str, check: bool = False) -> bool: class Resolver: source_dir: str subdir: str - subproject: str = '' + subproject: SubProject = SubProject('') wrap_mode: WrapMode = WrapMode.default wrap_frontend: bool = False allow_insecure: bool = False @@ -305,25 +324,32 @@ def load_netrc(self) -> None: mlog.warning(f'failed to process netrc file: {e}.', fatal=False) def load_wraps(self) -> None: - if not os.path.isdir(self.subdir_root): - return - root, dirs, files = next(os.walk(self.subdir_root)) - ignore_dirs = {'packagecache', 'packagefiles'} - for i in files: - if not i.endswith('.wrap'): - continue - fname = os.path.join(self.subdir_root, i) - wrap = PackageDefinition(fname, self.subproject) - self.wraps[wrap.name] = wrap - ignore_dirs |= {wrap.directory, wrap.name} - # Add dummy package definition for directories not associated with a wrap file. - for i in dirs: - if i in ignore_dirs: - continue - fname = os.path.join(self.subdir_root, i) - wrap = PackageDefinition(fname, self.subproject) - self.wraps[wrap.name] = wrap - + # Load Cargo.lock at the root of source tree + source_dir = os.path.dirname(self.subdir_root) + if os.path.exists(os.path.join(source_dir, 'Cargo.lock')): + from .. import cargo + for wrap in cargo.load_wraps(source_dir, self.subdir_root): + self.wraps[wrap.name] = wrap + # Load subprojects/*.wrap + if os.path.isdir(self.subdir_root): + root, dirs, files = next(os.walk(self.subdir_root)) + for i in files: + if not i.endswith('.wrap'): + continue + fname = os.path.join(self.subdir_root, i) + wrap = PackageDefinition.from_wrap_file(fname, self.subproject) + self.wraps[wrap.name] = wrap + # Add dummy package definition for directories not associated with a wrap file. 
+ ignore_dirs = {'packagecache', 'packagefiles'} + for wrap in self.wraps.values(): + ignore_dirs |= {wrap.directory, wrap.name} + for i in dirs: + if i in ignore_dirs: + continue + fname = os.path.join(self.subdir_root, i) + wrap = PackageDefinition.from_directory(fname) + self.wraps[wrap.name] = wrap + # Add provided deps and programs into our lookup tables for wrap in self.wraps.values(): self.add_wrap(wrap) @@ -331,13 +357,13 @@ def add_wrap(self, wrap: PackageDefinition) -> None: for k in wrap.provided_deps.keys(): if k in self.provided_deps: prev_wrap = self.provided_deps[k] - m = f'Multiple wrap files provide {k!r} dependency: {wrap.basename} and {prev_wrap.basename}' + m = f'Multiple wrap files provide {k!r} dependency: {wrap.name} and {prev_wrap.name}' raise WrapException(m) self.provided_deps[k] = wrap for k in wrap.provided_programs: if k in self.provided_programs: prev_wrap = self.provided_programs[k] - m = f'Multiple wrap files provide {k!r} program: {wrap.basename} and {prev_wrap.basename}' + m = f'Multiple wrap files provide {k!r} program: {wrap.name} and {prev_wrap.name}' raise WrapException(m) self.provided_programs[k] = wrap @@ -363,7 +389,7 @@ def get_from_wrapdb(self, subp_name: str) -> T.Optional[PackageDefinition]: with fname.open('wb') as f: f.write(url.read()) mlog.log(f'Installed {subp_name} version {version} revision {revision}') - wrap = PackageDefinition(str(fname)) + wrap = PackageDefinition.from_wrap_file(str(fname)) self.wraps[wrap.name] = wrap self.add_wrap(wrap) return wrap @@ -409,32 +435,26 @@ def resolve(self, packagename: str, force_method: T.Optional[Method] = None) -> raise WrapNotFoundException(f'Neither a subproject directory nor a {packagename}.wrap file was found.') self.wrap = wrap self.directory = self.wrap.directory + self.dirname = os.path.join(self.wrap.subprojects_dir, self.wrap.directory) + if not os.path.exists(self.dirname): + self.dirname = os.path.join(self.subdir_root, self.directory) + rel_path = os.path.relpath(self.dirname, self.source_dir) - if self.wrap.has_wrap: - # We have a .wrap file, use directory relative to the location of - # the wrap file if it exists, otherwise source code will be placed - # into main project's subproject_dir even if the wrap file comes - # from another subproject. - self.dirname = os.path.join(os.path.dirname(self.wrap.filename), self.wrap.directory) - if not os.path.exists(self.dirname): - self.dirname = os.path.join(self.subdir_root, self.directory) - # Check if the wrap comes from the main project. - main_fname = os.path.join(self.subdir_root, self.wrap.basename) - if self.wrap.filename != main_fname: - rel = os.path.relpath(self.wrap.filename, self.source_dir) + if self.wrap.original_filename: + # If the original wrap file is not in main project's subproject_dir, + # write a wrap-redirect. + basename = os.path.basename(self.wrap.original_filename) + main_fname = os.path.join(self.subdir_root, basename) + if self.wrap.original_filename != main_fname: + rel = os.path.relpath(self.wrap.original_filename, self.source_dir) mlog.log('Using', mlog.bold(rel)) # Write a dummy wrap file in main project that redirect to the # wrap we picked. 
with open(main_fname, 'w', encoding='utf-8') as f: f.write(textwrap.dedent(f'''\ [wrap-redirect] - filename = {PurePath(os.path.relpath(self.wrap.filename, self.subdir_root)).as_posix()} + filename = {PurePath(os.path.relpath(self.wrap.original_filename, self.subdir_root)).as_posix()} ''')) - else: - # No wrap file, it's a dummy package definition for an existing - # directory. Use the source code in place. - self.dirname = self.wrap.filename - rel_path = os.path.relpath(self.dirname, self.source_dir) # Map each supported method to a file that must exist at the root of source tree. methods_map: T.Dict[Method, str] = { @@ -606,7 +626,7 @@ def _get_git(self, packagename: str) -> None: def validate(self) -> None: # This check is only for subprojects with wraps. - if not self.wrap.has_wrap: + if not self.wrap.wrapfile_hash: return # Retrieve original hash, if it exists. @@ -618,10 +638,8 @@ def validate(self) -> None: # If stored hash doesn't exist then don't warn. return - actual_hash = self.wrap.wrapfile_hash - # Compare hashes and warn the user if they don't match. - if expected_hash != actual_hash: + if expected_hash != self.wrap.wrapfile_hash: mlog.warning(f'Subproject {self.wrap.name}\'s revision may be out of date; its wrap file has changed since it was first configured') def is_git_full_commit_id(self, revno: str) -> bool: @@ -654,7 +672,7 @@ def get_netrc_credentials(self, netloc: str) -> T.Optional[T.Tuple[str, str]]: return None login, account, password = self.netrc.authenticators(netloc) - if account is not None: + if account: login = account return login, password @@ -686,7 +704,7 @@ def get_data(self, urlstring: str) -> T.Tuple[str, str]: try: req = urllib.request.Request(urlstring, headers=headers) resp = urllib.request.urlopen(req, timeout=REQ_TIMEOUT) - except urllib.error.URLError as e: + except OSError as e: mlog.log(str(e)) raise WrapException(f'could not get {urlstring} is the internet available?') with contextlib.closing(resp) as resp, tmpfile as tmpfile: @@ -783,7 +801,7 @@ def _get_file_internal(self, what: str, packagename: str) -> str: def apply_patch(self, packagename: str) -> None: if 'patch_filename' in self.wrap.values and 'patch_directory' in self.wrap.values: - m = f'Wrap file {self.wrap.basename!r} must not have both "patch_filename" and "patch_directory"' + m = f'Wrap file {self.wrap.name!r} must not have both "patch_filename" and "patch_directory"' raise WrapException(m) if 'patch_filename' in self.wrap.values: path = self._get_file_internal('patch', packagename) diff --git a/run_project_tests.py b/run_project_tests.py index 23561d97357a..c11410486e23 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -41,7 +41,8 @@ from mesonbuild.build import ConfigurationData from mesonbuild.mesonlib import MachineChoice, Popen_safe, TemporaryDirectoryWinProof, setup_vsenv from mesonbuild.mlog import blue, bold, cyan, green, red, yellow, normal_green -from mesonbuild.coredata import backendlist, version as meson_version +from mesonbuild.coredata import version as meson_version +from mesonbuild.options import backendlist from mesonbuild.modules.python import PythonExternalProgram from run_tests import ( get_fake_options, run_configure, get_meson_script, get_backend_commands, @@ -304,7 +305,6 @@ def __lt__(self, other: object) -> bool: failing_logs: T.List[str] = [] print_debug = 'MESON_PRINT_TEST_OUTPUT' in os.environ under_ci = 'CI' in os.environ -ci_is_github = 'GITHUB_ACTIONS' in os.environ raw_ci_jobname = os.environ.get('MESON_CI_JOBNAME', None) ci_jobname 
= raw_ci_jobname if raw_ci_jobname != 'thirdparty' else None do_debug = under_ci or print_debug @@ -436,16 +436,12 @@ def log_text_file(logfile: T.TextIO, testdir: Path, result: TestResult) -> None: def _run_ci_include(args: T.List[str]) -> str: - header = f'Included file {args[0]}:' - footer = '' - if ci_is_github: - header = f'::group::==== {header} ====' - footer = '::endgroup::' if not args: return 'At least one parameter required' + + header = f'Included file {args[0]}:' try: - data = Path(args[0]).read_text(errors='ignore', encoding='utf-8') - return f'{header}\n{data}\n{footer}\n' + return mlog.ci_fold_file(args[0], header, force=True) except Exception: return 'Failed to open {}\n'.format(args[0]) diff --git a/run_tests.py b/run_tests.py index 6d33dd99e145..8ab53a1a5c80 100755 --- a/run_tests.py +++ b/run_tests.py @@ -33,7 +33,8 @@ from mesonbuild import mtest from mesonbuild import mlog from mesonbuild.environment import Environment, detect_ninja, detect_machine_info -from mesonbuild.coredata import backendlist, version as meson_version +from mesonbuild.coredata import version as meson_version +from mesonbuild.options import backendlist from mesonbuild.mesonlib import OptionKey, setup_vsenv if T.TYPE_CHECKING: @@ -150,7 +151,7 @@ def get_fake_env(sdir='', bdir=None, prefix='', opts=None): if opts is None: opts = get_fake_options(prefix) env = Environment(sdir, bdir, opts) - env.coredata.options[OptionKey('args', lang='c')] = FakeCompilerOptions() + env.coredata.optstore.set_value_object(OptionKey('args', lang='c'), FakeCompilerOptions()) env.machines.host.cpu_family = 'x86_64' # Used on macOS inside find_library # Invalidate cache when using a different Environment object. clear_meson_configure_class_caches() diff --git a/run_unittests.py b/run_unittests.py index 63b3119dba38..4a784dc07318 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -25,7 +25,7 @@ import mesonbuild.modules.pkgconfig from unittests.allplatformstests import AllPlatformTests -from unittests.cargotests import CargoVersionTest, CargoCfgTest +from unittests.cargotests import CargoVersionTest, CargoCfgTest, CargoLockTest from unittests.darwintests import DarwinTests from unittests.failuretests import FailureTests from unittests.linuxcrosstests import LinuxCrossArmTests, LinuxCrossMingwTests diff --git a/test cases/cmake/24 mixing languages/meson.build b/test cases/cmake/24 mixing languages/meson.build index 55d7a7d68bc9..a4662fe37c0c 100644 --- a/test cases/cmake/24 mixing languages/meson.build +++ b/test cases/cmake/24 mixing languages/meson.build @@ -7,6 +7,12 @@ if not add_languages('objc', required : false) error('MESON_SKIP_TEST: No ObjC compiler') endif +objc = meson.get_compiler('objc') +c = meson.get_compiler('c') +if c.get_argument_syntax() != objc.get_argument_syntax() + error('MESON_SKIP_TEST: cmake cannot mix compiler types on Windows') +endif + cm = import('cmake') sub_pro = cm.subproject('cmTest') diff --git a/test cases/cmake/24 mixing languages/subprojects/cmTest/CMakeLists.txt b/test cases/cmake/24 mixing languages/subprojects/cmTest/CMakeLists.txt index 80a256f0db1d..a1886115bf5f 100644 --- a/test cases/cmake/24 mixing languages/subprojects/cmTest/CMakeLists.txt +++ b/test cases/cmake/24 mixing languages/subprojects/cmTest/CMakeLists.txt @@ -1,6 +1,6 @@ cmake_minimum_required(VERSION 3.5) -project(cmTest) +project(cmTest LANGUAGES C OBJC) include_directories(${CMAKE_CURRENT_BINARY_DIR}) diff --git a/test cases/common/113 interpreter copy mutable var on assignment/check_env.py b/test 
cases/common/113 interpreter copy mutable var on assignment/check_env.py deleted file mode 100755 index 034d2924331b..000000000000 --- a/test cases/common/113 interpreter copy mutable var on assignment/check_env.py +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys - -if sys.argv[1] not in os.environ: - exit(42) diff --git a/test cases/common/113 interpreter copy mutable var on assignment/meson.build b/test cases/common/113 interpreter copy mutable var on assignment/meson.build index 3d4f3b039c76..d414bfc74e0c 100644 --- a/test cases/common/113 interpreter copy mutable var on assignment/meson.build +++ b/test cases/common/113 interpreter copy mutable var on assignment/meson.build @@ -1,4 +1,4 @@ -project('foo', meson_version: '>=1.5') +project('foo') a = configuration_data() a.set('HELLO', 1) @@ -10,15 +10,6 @@ assert(b.has('HELLO'), 'Original config data should be set on copy') configure_file(output : 'b.h', configuration : b) -testcase expect_error('Can not modify object after it has been used.') - b.set('WORLD', 1) -endtestcase - -# A copy of immutable object is mutable. This should print FeatureNew warning -# if meson_version is lower than 1.5. -c = b -c.set('WORLD', 1) - # This should still work, as we didn't use the original above but a copy! a.set('WORLD', 1) @@ -26,26 +17,3 @@ assert(a.has('WORLD'), 'New config data should have been set') assert(not b.has('WORLD'), 'New config data set should not affect var copied earlier') configure_file(output : 'a.h', configuration : a) - -env1 = environment() -env1.set('FOO', '1') -env2 = env1 -env1.set('BAR', '1') - -# FOO should be in env1 and env2 -run_command('check_env.py', 'FOO', env: env1, check: true) -run_command('check_env.py', 'FOO', env: env2, check: true) - -# BAR should be only in env1 -run_command('check_env.py', 'BAR', env: env1, check: true) -assert(run_command('check_env.py', 'BAR', env: env2, check: false).returncode() == 42) - -# This should print deprecation warning but still work -env1.set('PLOP', '1') -run_command('check_env.py', 'PLOP', env: env1, check: true) - -# A copy of used env should be mutable and not print warning -env3 = env1 -env3.set('BAZ', '1') -run_command('check_env.py', 'PLOP', env: env3, check: true) -run_command('check_env.py', 'BAZ', env: env3, check: true) diff --git a/test cases/common/14 configure file/config6.h.in b/test cases/common/14 configure file/config6.h.in index 9719f8715210..0a9154282979 100644 --- a/test cases/common/14 configure file/config6.h.in +++ b/test cases/common/14 configure file/config6.h.in @@ -1,19 +1,40 @@ /* No escape */ #define MESSAGE1 "@var1@" -/* Single escape means no replace */ -#define MESSAGE2 "\@var1@" +/* Escaped whole variable */ +#define MESSAGE2 "\\@var1\\@" /* Replace pairs of escapes before '@' or '\@' with escape characters * (note we have to double number of pairs due to C string escaping) */ #define MESSAGE3 "\\\\@var1@" -/* Pairs of escapes and then single escape to avoid replace */ -#define MESSAGE4 "\\\\\@var1@" +/* Pairs of escapes and then an escaped variable */ +#define MESSAGE4 "\\\\\@var1\@" -/* Check escaped variable does not overlap following variable */ -#define MESSAGE5 "\@var1@var2@" +/* We don't gobble \@ prefixing some text */ +#define MESSAGE5 "\\\\@var1" -/* Check escape character outside variables */ -#define MESSAGE6 "\\ @ \@ \\\\@ \\\\\@" +/* Check escape character outside variables + \ @ \@ */ +#define MESSAGE6 "\\ @ \\\\@" + +/* Catch any edge cases */ + +/* no substitution - not a variable */ +#define 
MESSAGE7 "@var1" + +/* Escaped variable followed by another variable */ +#define MESSAGE8 "\\\\@var1@var2@" + +/* Variable followed by another variable */ +#define MESSAGE9 "@var1@var2@" + +/* Variable followed by another variable and escaped */ +#define MESSAGE10 "@var1@var2\\\\@" + +/* Lots of substitutions in a row*/ +#define MESSAGE11 "@var1@@var2@@var3@@var4@" + +/* This should never happen in the real world, right? */ +#define MESSAGE12 "@var1@var2\\\\@var3@var4\\\\@" diff --git a/test cases/common/14 configure file/meson.build b/test cases/common/14 configure file/meson.build index 90a468f5e191..036a562b796c 100644 --- a/test cases/common/14 configure file/meson.build +++ b/test cases/common/14 configure file/meson.build @@ -143,6 +143,8 @@ test('test5', executable('prog5', 'prog5.c')) conf6 = configuration_data() conf6.set('var1', 'foo') conf6.set('var2', 'bar') +conf6.set('var3', 'baz') +conf6.set('var4', 'qux') configure_file( input : 'config6.h.in', output : '@BASENAME@', @@ -308,10 +310,16 @@ cdata = configuration_data({ 'A_UNDEFINED' : false, }) +# merging a confdata into another one should not mark the first as immutable +cdata_test = configuration_data() +cdata_test.merge_from(cdata) +cdata.set_quoted('A_PATH', '/random/path') + configure_file(output : 'config9a.h', configuration : cdata, ) + configure_file(output : 'config9b.h', configuration : { 'B_STRING' : '"foo"', diff --git a/test cases/common/14 configure file/prog6.c b/test cases/common/14 configure file/prog6.c index 57f55860515c..b39f9daa7d4b 100644 --- a/test cases/common/14 configure file/prog6.c +++ b/test cases/common/14 configure file/prog6.c @@ -4,8 +4,14 @@ int main(void) { return strcmp(MESSAGE1, "foo") || strcmp(MESSAGE2, "@var1@") - || strcmp(MESSAGE3, "\\foo") + || strcmp(MESSAGE3, "\\@var1@") || strcmp(MESSAGE4, "\\@var1@") - || strcmp(MESSAGE5, "@var1bar") - || strcmp(MESSAGE6, "\\ @ @ \\@ \\@"); + || strcmp(MESSAGE5, "\\@var1") + || strcmp(MESSAGE6, "\\ @ \\@") + || strcmp(MESSAGE7, "@var1") + || strcmp(MESSAGE8, "\\@var1bar") + || strcmp(MESSAGE9, "foovar2@") + || strcmp(MESSAGE10, "foovar2\\@") + || strcmp(MESSAGE11, "foobarbazqux") + || strcmp(MESSAGE12, "foovar2\\@var3@var4\\@"); } diff --git a/test cases/common/184 openmp/meson.build b/test cases/common/184 openmp/meson.build index 4cbe80616970..ab09b2c480d1 100644 --- a/test cases/common/184 openmp/meson.build +++ b/test cases/common/184 openmp/meson.build @@ -16,9 +16,6 @@ endif if cc.get_id() == 'clang' and host_machine.system() == 'windows' error('MESON_SKIP_TEST Windows clang does not support OpenMP.') endif -if host_machine.system() == 'darwin' - error('MESON_SKIP_TEST macOS does not support OpenMP.') -endif openmp = dependency('openmp') env = environment() diff --git a/test cases/common/26 find program/meson.build b/test cases/common/26 find program/meson.build index 3c4d15cda35e..a20f6b45a142 100644 --- a/test cases/common/26 find program/meson.build +++ b/test cases/common/26 find program/meson.build @@ -32,6 +32,9 @@ assert(prog.version() == '1.0', 'Program version should be detectable') prog = find_program('print-version-with-prefix.py', version : '>=1.0') assert(prog.found(), 'Program version should match') +prog = find_program('print-version-custom-argument.py', version : '>=1.0', version_argument : '-version') +assert(prog.found(), 'Program version should match') + prog = find_program('test_subdir.py', required : false) assert(not prog.found(), 'Program should not be found') diff --git a/test cases/common/26 find 
program/print-version-custom-argument.py b/test cases/common/26 find program/print-version-custom-argument.py new file mode 100644 index 000000000000..7d9ad58febb1 --- /dev/null +++ b/test cases/common/26 find program/print-version-custom-argument.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 + +import sys + +if len(sys.argv) != 2 or sys.argv[1] != '-version': + exit(1) + +print('1.0') diff --git a/test cases/common/277 custom target private dir/meson.build b/test cases/common/277 custom target private dir/meson.build new file mode 100644 index 000000000000..dc48d1affd31 --- /dev/null +++ b/test cases/common/277 custom target private dir/meson.build @@ -0,0 +1,16 @@ +project('277 custom target private dir') + +python = find_program('python3') + +custom_target( + 'check-private-dir', + command: [ + python, + '-c', + 'import os, sys; os.chdir(sys.argv[1]); open(sys.argv[2], "w")', + '@PRIVATE_DIR@', + '@OUTPUT@', + ], + output: 'check-private-dir', + build_by_default: true, +) diff --git a/test cases/common/41 test args/meson.build b/test cases/common/41 test args/meson.build index b21f1ad00b12..4894f3e163a8 100644 --- a/test cases/common/41 test args/meson.build +++ b/test cases/common/41 test args/meson.build @@ -33,3 +33,15 @@ testfilect = custom_target('testfile', build_by_default : true, command : [copy, '@INPUT@', '@OUTPUT@']) test('custom target arg', tester, args : testfilect, env : env_array) + +# https://github.com/mesonbuild/meson/issues/12327 +env = environment() +env.append('PATH', 'something') + +bash = find_program('bash') + +custompathtgt = custom_target('testpathappend', + output : 'nothing.txt', + build_always : true, + command : [bash, '-c', 'env'], + env : env) diff --git a/test cases/common/98 subproject subdir/meson.build b/test cases/common/98 subproject subdir/meson.build index d2bafedf5119..ef053d86c41c 100644 --- a/test cases/common/98 subproject subdir/meson.build +++ b/test cases/common/98 subproject subdir/meson.build @@ -1,7 +1,3 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2016-2023 The Meson Developers -# Copyright © 2024 Intel Corporation - project('proj', 'c') subproject('sub') libSub = dependency('sub', fallback: ['sub', 'libSub']) @@ -10,19 +6,7 @@ exe = executable('prog', 'prog.c', dependencies: libSub) test('subproject subdir', exe) # Verify the subproject has placed dependency override. -d = dependency('sub-1.0') - -# verify that the name is the overridden name -assert(d.name() == 'sub-1.0', 'name was not properly set, should have been "sub-1.0", but was @0@'.format(d.name())) - -# Verify that when a dependency object is used for two overrides, the correct -# name is used -meson.override_dependency('new-dep', d) -d2 = dependency('new-dep') -assert(d2.name() == 'new-dep', 'name was not properly set, should have been "new-dep", but was @0@'.format(d2.name())) - -# And that the old dependency wasn't changed -assert(d.name() == 'sub-1.0', 'original dependency was mutated.') +dependency('sub-1.0') # Verify we can now take 'sub' dependency without fallback, but only version 1.0. 
dependency('sub') diff --git a/test cases/cuda/1 simple/meson.build b/test cases/cuda/1 simple/meson.build index 4f111d1b9ee1..e3069df9093b 100644 --- a/test cases/cuda/1 simple/meson.build +++ b/test cases/cuda/1 simple/meson.build @@ -1,4 +1,8 @@ project('simple', 'cuda', version : '1.0.0') +# https://github.com/mesonbuild/meson/issues/13240 +d = meson.get_compiler('cuda').find_library('doesnotexist', required : false) +assert(not d.found()) + exe = executable('prog', 'prog.cu') test('cudatest', exe) diff --git a/test cases/cuda/17 separate compilation linking/meson.build b/test cases/cuda/17 separate compilation linking/meson.build index ee86123eb40d..7ba7e220870d 100644 --- a/test cases/cuda/17 separate compilation linking/meson.build +++ b/test cases/cuda/17 separate compilation linking/meson.build @@ -3,12 +3,15 @@ # code: # https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#examples -project('device linking', ['cpp', 'cuda'], version : '1.0.0') +project('device linking', ['cpp'], version : '1.0.0') + +# test that optional initialization of cuda works to disable thin archives +add_languages('cuda') nvcc = meson.get_compiler('cuda') cuda = import('unstable-cuda') -arch_flags = cuda.nvcc_arch_flags(nvcc.version(), 'Auto', detected : ['8.0']) +arch_flags = cuda.nvcc_arch_flags(nvcc.version(), 'Common') message('NVCC version: ' + nvcc.version()) message('NVCC flags: ' + ' '.join(arch_flags)) @@ -16,4 +19,8 @@ message('NVCC flags: ' + ' '.join(arch_flags)) # test device linking with -dc (which is equivalent to `--relocatable-device-code true`) lib = static_library('devicefuncs', ['b.cu'], cuda_args : ['-dc'] + arch_flags) exe = executable('app', 'main.cu', cuda_args : ['-dc'] + arch_flags, link_with : lib, link_args : arch_flags) -test('cudatest', exe) + +# if we don't have a CUDA-capable GPU available, avoid creating the test +if run_command('__nvcc_device_query', check : false).returncode() == 0 + test('cudatest', exe) +endif diff --git a/test cases/darwin/1 rpath removal on install/bar.c b/test cases/darwin/1 rpath removal on install/bar.c new file mode 100644 index 000000000000..b2fd0a664457 --- /dev/null +++ b/test cases/darwin/1 rpath removal on install/bar.c @@ -0,0 +1,5 @@ +#include "foo/foo.h" + +void bar() { + foo(); +} \ No newline at end of file diff --git a/test cases/darwin/1 rpath removal on install/foo/foo.c b/test cases/darwin/1 rpath removal on install/foo/foo.c new file mode 100644 index 000000000000..e355ed4592d2 --- /dev/null +++ b/test cases/darwin/1 rpath removal on install/foo/foo.c @@ -0,0 +1,3 @@ +int foo() { + return 1 + 2; +} \ No newline at end of file diff --git a/test cases/darwin/1 rpath removal on install/foo/foo.h b/test cases/darwin/1 rpath removal on install/foo/foo.h new file mode 100644 index 000000000000..176e7a3be8bd --- /dev/null +++ b/test cases/darwin/1 rpath removal on install/foo/foo.h @@ -0,0 +1 @@ +int foo(); \ No newline at end of file diff --git a/test cases/darwin/1 rpath removal on install/foo/meson.build b/test cases/darwin/1 rpath removal on install/foo/meson.build new file mode 100644 index 000000000000..cc6fbe4e3d78 --- /dev/null +++ b/test cases/darwin/1 rpath removal on install/foo/meson.build @@ -0,0 +1,3 @@ +foo = library('foo', 'foo.c', + install: true, +) \ No newline at end of file diff --git a/test cases/darwin/1 rpath removal on install/meson.build b/test cases/darwin/1 rpath removal on install/meson.build new file mode 100644 index 000000000000..093d7deba3a8 --- /dev/null +++ b/test cases/darwin/1 rpath 
removal on install/meson.build @@ -0,0 +1,8 @@ +project('proj', 'c') + +subdir('foo') + +bar = library('bar', 'bar.c', + link_with: foo, + install: true, +) \ No newline at end of file diff --git a/test cases/failing/132 module use inside project decl/meson.build b/test cases/failing/132 module use inside project decl/meson.build new file mode 100644 index 000000000000..8f82a5d3089b --- /dev/null +++ b/test cases/failing/132 module use inside project decl/meson.build @@ -0,0 +1,6 @@ +# GH issue 11393 +project('module use inside project decl', 'c', + version: run_command( + import('python').find_installation('python3') + ) +) diff --git a/test cases/failing/132 module use inside project decl/test.json b/test cases/failing/132 module use inside project decl/test.json new file mode 100644 index 000000000000..33e377b842f3 --- /dev/null +++ b/test cases/failing/132 module use inside project decl/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/132 module use inside project decl/meson.build:4:21: ERROR: Module methods (python.find_installation) cannot be invoked during project declaration." + } + ] + } diff --git a/test cases/failing/70 configuration immutable/input b/test cases/failing/70 configuration immutable/input new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test cases/failing/70 configuration immutable/meson.build b/test cases/failing/70 configuration immutable/meson.build new file mode 100644 index 000000000000..b6cac41265e0 --- /dev/null +++ b/test cases/failing/70 configuration immutable/meson.build @@ -0,0 +1,12 @@ +project('configuration_data is immutable') + +a = configuration_data() + +configure_file( + configuration : a, + input : 'input', + output : 'output', +) + +still_immutable = a +still_immutable.set('hello', 'world') diff --git a/test cases/failing/70 configuration immutable/test.json b/test cases/failing/70 configuration immutable/test.json new file mode 100644 index 000000000000..fc735fa38ece --- /dev/null +++ b/test cases/failing/70 configuration immutable/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/70 configuration immutable/meson.build:12:16: ERROR: Can not set values on configuration object that has been used." 
+ } + ] +} diff --git a/test cases/format/1 default/crazy_comments.meson b/test cases/format/1 default/crazy_comments.meson index f391ca28c263..f7ed67101bfd 100644 --- a/test cases/format/1 default/crazy_comments.meson +++ b/test cases/format/1 default/crazy_comments.meson @@ -1,5 +1,5 @@ # SPDX-FileCopyrightText: Stone Tickle -# SPDX-License-Identifier: GPL-3.0-only +# This file is used from muon sources, with permission project('a') diff --git a/test cases/format/1 default/gh13242.meson b/test cases/format/1 default/gh13242.meson new file mode 100644 index 000000000000..67f7dd1bb9e1 --- /dev/null +++ b/test cases/format/1 default/gh13242.meson @@ -0,0 +1,16 @@ +# Minimized meson.build +test( + args: [ + shared_library( + f'tstlib-@name@', + build_by_default: false, + override_options: opt, + ), + ], +) + +test( + should_fail: (settings.get('x', false) and not settings['y'] and dep.version().version_compare( + '>=1.2.3', + )), +) diff --git a/test cases/format/1 default/indentation.meson b/test cases/format/1 default/indentation.meson index 31a809abff51..fe78847f7667 100644 --- a/test cases/format/1 default/indentation.meson +++ b/test cases/format/1 default/indentation.meson @@ -11,7 +11,10 @@ project( ) a = [ + # comment + # comment 1, + # comment 2, 3, [ @@ -36,8 +39,13 @@ a = [ d = {} if meson.project_version().version_compare('>1.2') + # comment + # comment if meson.version().version_compare('>1.0') + # comment + # comment foreach i : a + # comment e = { 'a': 'a', 'b': 'b', @@ -69,5 +77,18 @@ if meson.project_version().version_compare('>1.2') ], } endforeach + + foreach j : a + # comment + # comment + # comment + endforeach + elif 42 in d + d += {'foo': 43} + else # ensure else is correctly indented (issue #13316) + # comment + k = 'k' + # comment + # comment endif endif diff --git a/test cases/format/1 default/meson.build b/test cases/format/1 default/meson.build index 5b5b1152af5e..d3bb153eeb92 100644 --- a/test cases/format/1 default/meson.build +++ b/test cases/format/1 default/meson.build @@ -7,8 +7,22 @@ meson_files = { 'self': files('meson.build'), 'comments': files('crazy_comments.meson'), 'indentation': files('indentation.meson'), + 'gh13242': files('gh13242.meson'), } +# Ensure empty function are formatted correctly on long lines +a = '@0@@1@@2@@3@@4@'.format('one', 'two', 'three', 'four', 'five').strip().strip() + +_opt = get_option('opt') \ + .disable_if( + true, + error_message: 'This is an error message because opt can\'t be enabled', + ) \ + .enable_if( + false, + error_message: 'This is an error message because opt can\'t be disabled', + ) + foreach name, f : meson_files test(name, meson_cmd, args: ['format', '--check-only', f]) endforeach diff --git a/test cases/format/1 default/meson.options b/test cases/format/1 default/meson.options new file mode 100644 index 000000000000..13f5e19c0e68 --- /dev/null +++ b/test cases/format/1 default/meson.options @@ -0,0 +1 @@ +option('opt', type : 'feature', value : 'auto') diff --git a/test cases/format/2 muon/crazy_comments.meson b/test cases/format/2 muon/crazy_comments.meson index 5ebda7d63655..fe5ae1402df5 100644 --- a/test cases/format/2 muon/crazy_comments.meson +++ b/test cases/format/2 muon/crazy_comments.meson @@ -1,5 +1,5 @@ # SPDX-FileCopyrightText: Stone Tickle -# SPDX-License-Identifier: GPL-3.0-only +# This file is used from muon sources, with permission project('a') diff --git a/test cases/format/3 editorconfig/crazy_comments.meson b/test cases/format/3 editorconfig/crazy_comments.meson index 
788ea1c88dff..f9fdc8848d54 100644 --- a/test cases/format/3 editorconfig/crazy_comments.meson +++ b/test cases/format/3 editorconfig/crazy_comments.meson @@ -1,5 +1,5 @@ # SPDX-FileCopyrightText: Stone Tickle -# SPDX-License-Identifier: GPL-3.0-only +# This file is used from muon sources, with permission project('a') diff --git a/test cases/format/3 editorconfig/meson.build b/test cases/format/3 editorconfig/meson.build index b32974cb9e8e..2468411493c3 100644 --- a/test cases/format/3 editorconfig/meson.build +++ b/test cases/format/3 editorconfig/meson.build @@ -7,6 +7,7 @@ meson_files = { 'self': files('meson.build'), 'comments': files('crazy_comments.meson'), 'indentation': files('indentation.meson'), + 'subdir editorconfig': files('subdir/sub.meson'), } foreach name, f : meson_files diff --git a/test cases/format/3 editorconfig/subdir/.editorconfig b/test cases/format/3 editorconfig/subdir/.editorconfig new file mode 100644 index 000000000000..fac7a92caaba --- /dev/null +++ b/test cases/format/3 editorconfig/subdir/.editorconfig @@ -0,0 +1,2 @@ +[*] +max_line_length = 120 diff --git a/test cases/format/3 editorconfig/subdir/sub.meson b/test cases/format/3 editorconfig/subdir/sub.meson new file mode 100644 index 000000000000..623ca2836a9c --- /dev/null +++ b/test cases/format/3 editorconfig/subdir/sub.meson @@ -0,0 +1,3 @@ +project('line') + +msg = 'this is a very long line, and it should be be wrapped because we have line length limit of 120, not 60' diff --git a/test cases/format/4 config/crazy_comments.meson b/test cases/format/4 config/crazy_comments.meson index 557d5d409f65..71ae8f97fdd0 100644 --- a/test cases/format/4 config/crazy_comments.meson +++ b/test cases/format/4 config/crazy_comments.meson @@ -1,5 +1,5 @@ # SPDX-FileCopyrightText: Stone Tickle -# SPDX-License-Identifier: GPL-3.0-only +# This file is used from muon sources, with permission project('a') diff --git a/test cases/format/4 config/indentation.meson b/test cases/format/4 config/indentation.meson index 816b5f3026a1..96a977e5fb56 100644 --- a/test cases/format/4 config/indentation.meson +++ b/test cases/format/4 config/indentation.meson @@ -71,3 +71,18 @@ if meson.project_version().version_compare('>1.2') endforeach endif endif + +subproject( + '@0@-@1@-@2@-@3@'.format( + meson.project_name(), + meson.project_version(), + meson.project_build_root(), + meson.project_source_root(), + ), + default_options : [ + 'aaaaaaaa=bbbbbbbbbb', + 'cccccccccccc=ddddddddddddd', + 'eeeeeeeeeeeeeee=fffffffffffff', + 'gggggggggggggggggggggg=hhhhhhhhhhhhhhhhhhhh', + ], +) diff --git a/test cases/format/5 transform/default.expected.meson b/test cases/format/5 transform/default.expected.meson index 4201053e1f18..18af2f8afa79 100644 --- a/test cases/format/5 transform/default.expected.meson +++ b/test cases/format/5 transform/default.expected.meson @@ -47,6 +47,7 @@ d = {'a': 1, 'b': 2, 'c': 3} # string conversion 'This is not a multiline' 'This is not a fstring' +'''This isn't convertible''' # group arg value arguments = [ diff --git a/test cases/format/5 transform/file_compare.py b/test cases/format/5 transform/file_compare.py index 7b0d1b85668f..fd3ce10560a6 100644 --- a/test cases/format/5 transform/file_compare.py +++ b/test cases/format/5 transform/file_compare.py @@ -1,7 +1,31 @@ #!/usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 +# Copyright © 2024 Intel Corporation +import argparse import sys +import difflib -with open(sys.argv[1], 'r', encoding='utf-8') as f, open(sys.argv[2], 'r', encoding='utf-8') as g: - if 
f.read() != g.read(): - sys.exit('contents are not equal') + + +def main() -> int: + parser = argparse.ArgumentParser() + parser.add_argument('actual', help='The transformed contents') + parser.add_argument('expected', help='the contents we expected') + args = parser.parse_args() + + with open(args.actual, 'r') as f: + actual = f.readlines() + with open(args.expected, 'r') as f: + expected = f.readlines() + + if actual == expected: + return 0 + + diff = difflib.ndiff(expected, actual) + for line in diff: + print(line, file=sys.stderr, end='') + return 1 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/test cases/format/5 transform/muon.expected.meson b/test cases/format/5 transform/muon.expected.meson index 871ce27d722d..0e6309c7f722 100644 --- a/test cases/format/5 transform/muon.expected.meson +++ b/test cases/format/5 transform/muon.expected.meson @@ -47,6 +47,7 @@ d = {'a': 1, 'b': 2, 'c': 3} # string conversion '''This is not a multiline''' f'This is not a fstring' +'''This isn't convertible''' # group arg value arguments = [ diff --git a/test cases/format/5 transform/options.expected.meson b/test cases/format/5 transform/options.expected.meson index f7f45658d5c3..9ed6ac2be5af 100644 --- a/test cases/format/5 transform/options.expected.meson +++ b/test cases/format/5 transform/options.expected.meson @@ -29,6 +29,7 @@ d = { # string conversion 'This is not a multiline' 'This is not a fstring' +'''This isn't convertible''' # group arg value arguments = [ diff --git a/test cases/format/5 transform/source.meson b/test cases/format/5 transform/source.meson index 7274d4802699..4482884052d6 100644 --- a/test cases/format/5 transform/source.meson +++ b/test cases/format/5 transform/source.meson @@ -29,6 +29,7 @@ d = {'a': 1, 'b': 2, 'c': 3} # string conversion '''This is not a multiline''' f'This is not a fstring' +'''This isn't convertible''' # group arg value arguments = ['a', '--opt_a', 'opt_a_value', 'b', 'c', '--opt_d', '--opt_e', 'opt_e_value', diff --git a/test cases/frameworks/24 libgcrypt/test.json b/test cases/frameworks/24 libgcrypt/test.json index de9b738f6c45..9c282daa86e7 100644 --- a/test cases/frameworks/24 libgcrypt/test.json +++ b/test cases/frameworks/24 libgcrypt/test.json @@ -1,3 +1,3 @@ { - "expect_skip_on_jobname": ["azure", "msys2"] + "expect_skip_on_jobname": ["arch", "azure", "cygwin", "msys2"] } diff --git a/test cases/linuxlike/1 pkg-config/prog-checkver.c b/test cases/linuxlike/1 pkg-config/prog-checkver.c index ea1ed32f4ffa..14d574642923 100644 --- a/test cases/linuxlike/1 pkg-config/prog-checkver.c +++ b/test cases/linuxlike/1 pkg-config/prog-checkver.c @@ -1,11 +1,41 @@ #include <zlib.h> +#include <stdbool.h> #include <stdio.h> #include <string.h> +static bool check_version(const char *zlib_ver, const char *found_zlib) { + if (zlib_ver == found_zlib) + return true; + + if (strcmp(zlib_ver, found_zlib) == 0) + return true; + +#ifdef ZLIBNG_VERSION + const char *ptr = strstr(zlib_ver, found_zlib); + + // If the needle isn't found or the needle isn't at the start, fail. + if (ptr == NULL || ptr != zlib_ver) + return false; + + /* In zlib-ng, ZLIB_VERSION is of the form X.Y.Z.zlib-ng. This will move the + * pointer to the start of the suffix, .zlib-ng. We know that at this point + * that FOUND_ZLIB is the start of ZLIB_VERSION, so compare the rest.
+ */ + ptr += strlen(found_zlib); + if (strcmp(ptr, ".zlib-ng") == 0) + return true; +#endif + + return false; +} + int main(void) { void * something = deflate; - if(strcmp(ZLIB_VERSION, FOUND_ZLIB) != 0) { + if (!check_version(ZLIB_VERSION, FOUND_ZLIB)) { printf("Meson found '%s' but zlib is '%s'\n", FOUND_ZLIB, ZLIB_VERSION); +#ifdef ZLIBNG_VERSION + puts("Note that in the case of zlib-ng, a version suffix of .zlib-ng is expected"); +#endif return 2; } if(something != 0) diff --git a/test cases/linuxlike/13 cmake dependency/prog-checkver.c b/test cases/linuxlike/13 cmake dependency/prog-checkver.c index ea1ed32f4ffa..14d574642923 100644 --- a/test cases/linuxlike/13 cmake dependency/prog-checkver.c +++ b/test cases/linuxlike/13 cmake dependency/prog-checkver.c @@ -1,11 +1,41 @@ #include <zlib.h> +#include <stdbool.h> #include <stdio.h> #include <string.h> +static bool check_version(const char *zlib_ver, const char *found_zlib) { + if (zlib_ver == found_zlib) + return true; + + if (strcmp(zlib_ver, found_zlib) == 0) + return true; + +#ifdef ZLIBNG_VERSION + const char *ptr = strstr(zlib_ver, found_zlib); + + // If the needle isn't found or the needle isn't at the start, fail. + if (ptr == NULL || ptr != zlib_ver) + return false; + + /* In zlib-ng, ZLIB_VERSION is of the form X.Y.Z.zlib-ng. This will move the + * pointer to the start of the suffix, .zlib-ng. We know that at this point + * that FOUND_ZLIB is the start of ZLIB_VERSION, so compare the rest. + */ + ptr += strlen(found_zlib); + if (strcmp(ptr, ".zlib-ng") == 0) + return true; +#endif + + return false; +} + int main(void) { void * something = deflate; - if(strcmp(ZLIB_VERSION, FOUND_ZLIB) != 0) { + if (!check_version(ZLIB_VERSION, FOUND_ZLIB)) { printf("Meson found '%s' but zlib is '%s'\n", FOUND_ZLIB, ZLIB_VERSION); +#ifdef ZLIBNG_VERSION + puts("Note that in the case of zlib-ng, a version suffix of .zlib-ng is expected"); +#endif return 2; } if(something != 0) diff --git a/test cases/linuxlike/14 static dynamic linkage/verify_static.py b/test cases/linuxlike/14 static dynamic linkage/verify_static.py index 8d16d48c989e..25e97f368bb1 100755 --- a/test cases/linuxlike/14 static dynamic linkage/verify_static.py +++ b/test cases/linuxlike/14 static dynamic linkage/verify_static.py @@ -5,8 +5,16 @@ def handle_common(path): """Handle the common case.""" - output = subprocess.check_output(['nm', path]).decode('utf-8') - if 'T zlibVersion' in output: + try: + output = subprocess.check_output(['nm', '--defined-only', '-P', '-A', path]).decode('utf-8') + except subprocess.CalledProcessError: + # some NMs only support -U. Older binutils only supports --defined-only. + output = subprocess.check_output(['nm', '-UPA', path]).decode('utf-8') + # POSIX format.
Prints all *defined* symbols, looks like this: + # builddir/main_static: zlibVersion T 1190 39 + # or + # builddir/main_static: zlibVersion D 1fde0 30 + if ': zlibVersion ' in output: return 0 return 1 diff --git a/test cases/nasm/1 configure file/hello.asm b/test cases/nasm/1 configure file/hello-x86.asm similarity index 100% rename from test cases/nasm/1 configure file/hello.asm rename to test cases/nasm/1 configure file/hello-x86.asm diff --git a/test cases/nasm/1 configure file/hello-x86_64.asm b/test cases/nasm/1 configure file/hello-x86_64.asm new file mode 100644 index 000000000000..b6cb776babbd --- /dev/null +++ b/test cases/nasm/1 configure file/hello-x86_64.asm @@ -0,0 +1,18 @@ +%include "config.asm" + + section .data +msg: db "Hello World", 10 +len: equ $ - msg + + section .text + global main +main: + mov eax, 1 ; sys_write + mov edi, 1 ; fd = STDOUT_FILENO + mov rsi, msg ; buf = msg + mov rdx, len ; count = len + syscall + + mov eax, 60 ; sys_exit + mov edi, HELLO ; exit code + syscall diff --git a/test cases/nasm/1 configure file/meson.build b/test cases/nasm/1 configure file/meson.build index fac46a687331..46a704ceb633 100644 --- a/test cases/nasm/1 configure file/meson.build +++ b/test cases/nasm/1 configure file/meson.build @@ -24,10 +24,16 @@ conf = configuration_data() conf.set('HELLO', 0) +debug_nasm = [] +if get_option('debug') + debug_nasm = ['-g'] +endif + asm_gen = generator(nasm, output : '@BASENAME@.o', arguments : [ '-f', asm_format, + debug_nasm, '-i', meson.current_source_dir() + '/', '-i', join_paths(meson.current_source_dir(), ''), '-P', join_paths(meson.current_build_dir(), 'config.asm'), @@ -44,7 +50,7 @@ config_file = configure_file( cc = meson.get_compiler('c') link_args = cc.get_supported_link_arguments(['-no-pie']) -exe = executable('hello', asm_gen.process('hello.asm'), +exe = executable('hello', asm_gen.process('hello-' + host_machine.cpu_family() + '.asm'), link_args: link_args, ) diff --git a/test cases/nasm/2 asm language/hello.asm b/test cases/nasm/2 asm language/hello-x86.asm similarity index 100% rename from test cases/nasm/2 asm language/hello.asm rename to test cases/nasm/2 asm language/hello-x86.asm diff --git a/test cases/nasm/2 asm language/hello-x86_64.asm b/test cases/nasm/2 asm language/hello-x86_64.asm new file mode 100644 index 000000000000..7e495f92ab5e --- /dev/null +++ b/test cases/nasm/2 asm language/hello-x86_64.asm @@ -0,0 +1,22 @@ +%include "config.asm" + +%ifdef FOO +%define RETVAL HELLO +%endif + + section .data +msg: db "Hello World", 10 +len: equ $ - msg + + section .text + global main +main: + mov eax, 1 ; sys_write + mov edi, 1 ; fd = STDOUT_FILENO + mov rsi, msg ; buf = msg + mov rdx, len ; count = len + syscall + + mov eax, 60 ; sys_exit + mov edi, RETVAL ; exit code + syscall diff --git a/test cases/nasm/2 asm language/meson.build b/test cases/nasm/2 asm language/meson.build index d5a2ba3eaeb2..594d12dc2496 100644 --- a/test cases/nasm/2 asm language/meson.build +++ b/test cases/nasm/2 asm language/meson.build @@ -46,7 +46,7 @@ config_file = configure_file( cc = meson.get_compiler('c') link_args = cc.get_supported_link_arguments(['-no-pie']) -exe = executable('hello', 'hello.asm', +exe = executable('hello', 'hello-' + host_machine.cpu_family() + '.asm', nasm_args: '-DFOO', link_args: link_args, ) @@ -55,7 +55,7 @@ test('hello', exe) #Test whether pthread dependency gets filtered out threads = dependency('threads') -exe2 = executable('hello_w_threads', 'hello.asm', +exe2 = executable('hello_w_threads', 'hello-' + 
host_machine.cpu_family() + '.asm', config_file, nasm_args: '-DFOO', link_args: link_args, diff --git a/test cases/objc/1 simple/meson.build b/test cases/objc/1 simple/meson.build index f9d5c145025e..bb45839f184f 100644 --- a/test cases/objc/1 simple/meson.build +++ b/test cases/objc/1 simple/meson.build @@ -1,4 +1,8 @@ project('objective c', 'objc', default_options: ['c_std=c99']) +if get_option('backend').startswith('vs') + error('MESON_SKIP_TEST: objc is not supported by vs backend') +endif + exe = executable('prog', 'prog.m') test('objctest', exe) diff --git a/test cases/objc/2 nsstring/meson.build b/test cases/objc/2 nsstring/meson.build index 2c483d50d687..b12710d73f71 100644 --- a/test cases/objc/2 nsstring/meson.build +++ b/test cases/objc/2 nsstring/meson.build @@ -1,5 +1,9 @@ project('nsstring', 'objc') +if get_option('backend').startswith('vs') + error('MESON_SKIP_TEST: objc is not supported by vs backend') +endif + if host_machine.system() == 'darwin' dep = dependency('appleframeworks', modules : 'Foundation') elif host_machine.system() == 'cygwin' diff --git a/test cases/objc/3 objc args/meson.build b/test cases/objc/3 objc args/meson.build index 8887d96bc7ac..dd6f50990b31 100644 --- a/test cases/objc/3 objc args/meson.build +++ b/test cases/objc/3 objc args/meson.build @@ -1,4 +1,8 @@ project('objective c args', 'objc') +if get_option('backend').startswith('vs') + error('MESON_SKIP_TEST: objc is not supported by vs backend') +endif + exe = executable('prog', 'prog.m', objc_args : ['-DMESON_TEST']) test('objective c args', exe) diff --git a/test cases/objc/4 c++ project objc subproject/meson.build b/test cases/objc/4 c++ project objc subproject/meson.build index 8a77dedce3c4..946af11f603b 100644 --- a/test cases/objc/4 c++ project objc subproject/meson.build +++ b/test cases/objc/4 c++ project objc subproject/meson.build @@ -1,5 +1,9 @@ project('master', ['cpp']) +if get_option('backend').startswith('vs') + error('MESON_SKIP_TEST: objc is not supported by vs backend') +endif + foo = subproject('foo') dep = foo.get_variable('foo_dep') diff --git a/test cases/objc/4 c++ project objc subproject/subprojects/foo/meson.build b/test cases/objc/4 c++ project objc subproject/subprojects/foo/meson.build index 2dbf8ab26764..5f4d9fd836ba 100644 --- a/test cases/objc/4 c++ project objc subproject/subprojects/foo/meson.build +++ b/test cases/objc/4 c++ project objc subproject/subprojects/foo/meson.build @@ -1,5 +1,9 @@ project('foo', ['objc']) +if get_option('backend').startswith('vs') + error('MESON_SKIP_TEST: objc is not supported by vs backend') +endif + l = static_library('foo', 'foo.m') foo_dep = declare_dependency(link_with : l) diff --git a/test cases/objc/5 objfw/meson.build b/test cases/objc/5 objfw/meson.build index 40ddb7968db4..761b49f67e9c 100644 --- a/test cases/objc/5 objfw/meson.build +++ b/test cases/objc/5 objfw/meson.build @@ -1,5 +1,9 @@ project('objfw build tests', 'objc') +if get_option('backend').startswith('vs') + error('MESON_SKIP_TEST: objc is not supported by vs backend') +endif + objfw_dep = dependency('objfw', required: false) objfwtest_dep = dependency('objfw', modules: ['ObjFWTest'], required: false) diff --git a/test cases/objcpp/1 simple/meson.build b/test cases/objcpp/1 simple/meson.build index c9a5c8498195..c3acc879ad1e 100644 --- a/test cases/objcpp/1 simple/meson.build +++ b/test cases/objcpp/1 simple/meson.build @@ -1,4 +1,8 @@ project('Objective C++', 'objcpp', default_options: 'cpp_std=c++14') +if get_option('backend').startswith('vs') + 
error('MESON_SKIP_TEST: objcpp is not supported by vs backend') +endif + exe = executable('objcppprog', 'prog.mm') test('objcpp', exe) diff --git a/test cases/objcpp/2 objc++ args/meson.build b/test cases/objcpp/2 objc++ args/meson.build index e0e34b0f1e17..8797b619d3e8 100644 --- a/test cases/objcpp/2 objc++ args/meson.build +++ b/test cases/objcpp/2 objc++ args/meson.build @@ -1,4 +1,8 @@ project('objective c++ args', 'objcpp') +if get_option('backend').startswith('vs') + error('MESON_SKIP_TEST: objcpp is not supported by vs backend') +endif + exe = executable('prog', 'prog.mm', objcpp_args : ['-DMESON_OBJCPP_TEST']) test('objective c++ args', exe) diff --git a/test cases/objcpp/3 objfw/meson.build b/test cases/objcpp/3 objfw/meson.build index da14681ebf1a..3fcc84dd554b 100644 --- a/test cases/objcpp/3 objfw/meson.build +++ b/test cases/objcpp/3 objfw/meson.build @@ -1,5 +1,9 @@ project('objfw build tests', 'objcpp') +if get_option('backend').startswith('vs') + error('MESON_SKIP_TEST: objcpp is not supported by vs backend') +endif + objfw_dep = dependency('objfw', required: false) objfwtest_dep = dependency('objfw', modules: ['ObjFWTest'], required: false) diff --git a/test cases/osx/9 framework recasting/meson.build b/test cases/osx/9 framework recasting/meson.build index f139485ac2b9..83fe19ed0a28 100644 --- a/test cases/osx/9 framework recasting/meson.build +++ b/test cases/osx/9 framework recasting/meson.build @@ -1,5 +1,5 @@ project('framework recasting', 'c', 'cpp') -x = dependency('openal') +x = dependency('OpenAL', method: 'cmake') y = executable('tt', files('main.cpp'), dependencies: x) diff --git a/test cases/python/9 extmodule limited api/limited.c b/test cases/python/9 extmodule limited api/limited.c index 0d1c718200ba..b977419ca0f5 100644 --- a/test cases/python/9 extmodule limited api/limited.c +++ b/test cases/python/9 extmodule limited api/limited.c @@ -6,12 +6,22 @@ #error Wrong value for Py_LIMITED_API #endif +static PyObject * +hello(PyObject * Py_UNUSED(self), PyObject * Py_UNUSED(args)) { + return PyUnicode_FromString("hello world"); +} + +static struct PyMethodDef methods[] = { + { "hello", hello, METH_NOARGS, NULL }, + { NULL, NULL, 0, NULL }, +}; + static struct PyModuleDef limited_module = { PyModuleDef_HEAD_INIT, - "limited_api_test", + "limited", NULL, -1, - NULL + methods }; PyMODINIT_FUNC PyInit_limited(void) { diff --git a/test cases/python/9 extmodule limited api/meson.build b/test cases/python/9 extmodule limited api/meson.build index 68afc96996cb..bdf1b7b904f1 100644 --- a/test cases/python/9 extmodule limited api/meson.build +++ b/test cases/python/9 extmodule limited api/meson.build @@ -14,3 +14,10 @@ ext_mod = py.extension_module('not_limited', 'not_limited.c', install: true, ) + +test('load-test', + py, + args: [files('test_limited.py')], + env: { 'PYTHONPATH': meson.current_build_dir() }, + workdir: meson.current_source_dir() +) diff --git a/test cases/python/9 extmodule limited api/test_limited.py b/test cases/python/9 extmodule limited api/test_limited.py new file mode 100644 index 000000000000..fcbf67b536e1 --- /dev/null +++ b/test cases/python/9 extmodule limited api/test_limited.py @@ -0,0 +1,6 @@ +from limited import hello + +def test_hello(): + assert hello() == "hello world" + +test_hello() diff --git a/test cases/rust/12 bindgen/meson.build b/test cases/rust/12 bindgen/meson.build index 09cb02a6d319..57e44a0635c6 100644 --- a/test cases/rust/12 bindgen/meson.build +++ b/test cases/rust/12 bindgen/meson.build @@ -15,7 +15,7 @@ endif cc_id = 
meson.get_compiler('c').get_id() compiler_specific_args = [] if cc_id == 'gcc' - compiler_specific_args = ['-mtls-dialect=gnu2'] + compiler_specific_args = ['-fipa-pta'] elif cc_id == 'msvc' compiler_specific_args = ['/fp:fast'] endif diff --git a/test cases/rust/21 transitive dependencies/meson.build b/test cases/rust/21 transitive dependencies/meson.build index 37687fd4db2f..3b1f3d63cd4d 100644 --- a/test cases/rust/21 transitive dependencies/meson.build +++ b/test cases/rust/21 transitive dependencies/meson.build @@ -21,6 +21,7 @@ foo = static_library('foo-rs', 'foo.rs', rust_abi: 'c', link_with: pm, ) +shared_library('shared', 'foo.c', link_with : foo) exe = executable('footest', 'foo.c', link_with: foo, ) diff --git a/test cases/rust/25 cargo lock/Cargo.lock b/test cases/rust/25 cargo lock/Cargo.lock new file mode 100644 index 000000000000..9bc98149bb3f --- /dev/null +++ b/test cases/rust/25 cargo lock/Cargo.lock @@ -0,0 +1,7 @@ +version = 3 + +[[package]] +name = "bar" +version = "0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc2f34e570dcd5f9fe32e6863ee16ee73a356d3b77bce0d8c78501b8bc81a860" diff --git a/test cases/rust/25 cargo lock/meson.build b/test cases/rust/25 cargo lock/meson.build new file mode 100644 index 000000000000..b359f7bb0aa7 --- /dev/null +++ b/test cases/rust/25 cargo lock/meson.build @@ -0,0 +1,3 @@ +project('cargo lock') + +dependency('bar-0.1-rs') diff --git a/test cases/rust/25 cargo lock/subprojects/packagecache/bar-0.1.tar.gz b/test cases/rust/25 cargo lock/subprojects/packagecache/bar-0.1.tar.gz new file mode 100644 index 000000000000..f4c2ec6725a6 Binary files /dev/null and b/test cases/rust/25 cargo lock/subprojects/packagecache/bar-0.1.tar.gz differ diff --git a/test cases/unit/116 empty project/expected_mods.json b/test cases/unit/116 empty project/expected_mods.json index 7463bcb1275a..fa5e0ec6c606 100644 --- a/test cases/unit/116 empty project/expected_mods.json +++ b/test cases/unit/116 empty project/expected_mods.json @@ -217,6 +217,7 @@ "mesonbuild.linkers", "mesonbuild.linkers.base", "mesonbuild.linkers.detect", + "mesonbuild.machinefile", "mesonbuild.mesonlib", "mesonbuild.mesonmain", "mesonbuild.mintro", @@ -225,6 +226,7 @@ "mesonbuild.mparser", "mesonbuild.msetup", "mesonbuild.optinterpreter", + "mesonbuild.options", "mesonbuild.programs", "mesonbuild.scripts", "mesonbuild.scripts.meson_exe", @@ -237,6 +239,6 @@ "mesonbuild.wrap", "mesonbuild.wrap.wrap" ], - "count": 68 + "count": 69 } } diff --git a/test cases/wasm/1 basic/meson.build b/test cases/wasm/1 basic/meson.build index 1092a9ba7e85..d27599271b1c 100644 --- a/test cases/wasm/1 basic/meson.build +++ b/test cases/wasm/1 basic/meson.build @@ -1,4 +1,9 @@ -project('emcctest', 'c', 'cpp') +project('emcctest', 'c', 'cpp', + default_options: [ + 'c_std=c17', + 'cpp_std=c++17', + ] +) executable('hello-c', 'hello.c') executable('hello', 'hello.cpp') diff --git a/unittests/allplatformstests.py b/unittests/allplatformstests.py index 9c9f61678609..f471f6a95f46 100644 --- a/unittests/allplatformstests.py +++ b/unittests/allplatformstests.py @@ -25,6 +25,7 @@ import mesonbuild.envconfig import mesonbuild.environment import mesonbuild.coredata +import mesonbuild.machinefile import mesonbuild.modules.gnome from mesonbuild.mesonlib import ( BuildDirLock, MachineChoice, is_windows, is_osx, is_cygwin, is_dragonflybsd, @@ -61,6 +62,8 @@ from .baseplatformtests import BasePlatformTests from .helpers import * +UNIT_MACHINEFILE_DIR = Path(__file__).parent / 'machinefiles' 
+ @contextmanager def temp_filename(): '''A context manager which provides a filename to an empty temporary file. @@ -1333,6 +1336,7 @@ def test_source_generator_program_cause_rebuild(self): self.utime(os.path.join(testdir, 'srcgen.py')) self.assertRebuiltTarget('basic') + @skipIf(is_ci() and is_cygwin(), 'A GCC update on 2024-07-21 has broken LTO and is being investigated') def test_static_library_lto(self): ''' Test that static libraries can be built with LTO and linked to @@ -1349,6 +1353,7 @@ def test_static_library_lto(self): self.build() self.run_tests() + @skipIf(is_ci() and is_cygwin(), 'A GCC update on 2024-07-21 has broken LTO and is being investigated') @skip_if_not_base_option('b_lto_threads') def test_lto_threads(self): testdir = os.path.join(self.common_test_dir, '6 linkshared') @@ -2618,35 +2623,35 @@ def test_command_line(self): out = self.init(testdir, extra_args=['--profile-self', '--fatal-meson-warnings']) self.assertNotIn('[default: true]', out) obj = mesonbuild.coredata.load(self.builddir) - self.assertEqual(obj.options[OptionKey('default_library')].value, 'static') - self.assertEqual(obj.options[OptionKey('warning_level')].value, '1') - self.assertEqual(obj.options[OptionKey('set_sub_opt')].value, True) - self.assertEqual(obj.options[OptionKey('subp_opt', 'subp')].value, 'default3') + self.assertEqual(obj.optstore.get_value('default_library'), 'static') + self.assertEqual(obj.optstore.get_value('warning_level'), '1') + self.assertEqual(obj.optstore.get_value('set_sub_opt'), True) + self.assertEqual(obj.optstore.get_value(OptionKey('subp_opt', 'subp')), 'default3') self.wipe() # warning_level is special, it's --warnlevel instead of --warning-level # for historical reasons self.init(testdir, extra_args=['--warnlevel=2', '--fatal-meson-warnings']) obj = mesonbuild.coredata.load(self.builddir) - self.assertEqual(obj.options[OptionKey('warning_level')].value, '2') + self.assertEqual(obj.optstore.get_value('warning_level'), '2') self.setconf('--warnlevel=3') obj = mesonbuild.coredata.load(self.builddir) - self.assertEqual(obj.options[OptionKey('warning_level')].value, '3') + self.assertEqual(obj.optstore.get_value('warning_level'), '3') self.setconf('--warnlevel=everything') obj = mesonbuild.coredata.load(self.builddir) - self.assertEqual(obj.options[OptionKey('warning_level')].value, 'everything') + self.assertEqual(obj.optstore.get_value('warning_level'), 'everything') self.wipe() # But when using -D syntax, it should be 'warning_level' self.init(testdir, extra_args=['-Dwarning_level=2', '--fatal-meson-warnings']) obj = mesonbuild.coredata.load(self.builddir) - self.assertEqual(obj.options[OptionKey('warning_level')].value, '2') + self.assertEqual(obj.optstore.get_value('warning_level'), '2') self.setconf('-Dwarning_level=3') obj = mesonbuild.coredata.load(self.builddir) - self.assertEqual(obj.options[OptionKey('warning_level')].value, '3') + self.assertEqual(obj.optstore.get_value('warning_level'), '3') self.setconf('-Dwarning_level=everything') obj = mesonbuild.coredata.load(self.builddir) - self.assertEqual(obj.options[OptionKey('warning_level')].value, 'everything') + self.assertEqual(obj.optstore.get_value('warning_level'), 'everything') self.wipe() # Mixing --option and -Doption is forbidden @@ -2670,15 +2675,15 @@ def test_command_line(self): # --default-library should override default value from project() self.init(testdir, extra_args=['--default-library=both', '--fatal-meson-warnings']) obj = mesonbuild.coredata.load(self.builddir) - 
self.assertEqual(obj.options[OptionKey('default_library')].value, 'both') + self.assertEqual(obj.optstore.get_value('default_library'), 'both') self.setconf('--default-library=shared') obj = mesonbuild.coredata.load(self.builddir) - self.assertEqual(obj.options[OptionKey('default_library')].value, 'shared') + self.assertEqual(obj.optstore.get_value('default_library'), 'shared') if self.backend is Backend.ninja: # reconfigure target works only with ninja backend self.build('reconfigure') obj = mesonbuild.coredata.load(self.builddir) - self.assertEqual(obj.options[OptionKey('default_library')].value, 'shared') + self.assertEqual(obj.optstore.get_value('default_library'), 'shared') self.wipe() # Should fail on unknown options @@ -2715,22 +2720,22 @@ def test_command_line(self): # Test we can set subproject option self.init(testdir, extra_args=['-Dsubp:subp_opt=foo', '--fatal-meson-warnings']) obj = mesonbuild.coredata.load(self.builddir) - self.assertEqual(obj.options[OptionKey('subp_opt', 'subp')].value, 'foo') + self.assertEqual(obj.optstore.get_value(OptionKey('subp_opt', 'subp')), 'foo') self.wipe() # c_args value should be parsed with split_args self.init(testdir, extra_args=['-Dc_args=-Dfoo -Dbar "-Dthird=one two"', '--fatal-meson-warnings']) obj = mesonbuild.coredata.load(self.builddir) - self.assertEqual(obj.options[OptionKey('args', lang='c')].value, ['-Dfoo', '-Dbar', '-Dthird=one two']) + self.assertEqual(obj.optstore.get_value(OptionKey('args', lang='c')), ['-Dfoo', '-Dbar', '-Dthird=one two']) self.setconf('-Dc_args="foo bar" one two') obj = mesonbuild.coredata.load(self.builddir) - self.assertEqual(obj.options[OptionKey('args', lang='c')].value, ['foo bar', 'one', 'two']) + self.assertEqual(obj.optstore.get_value(OptionKey('args', lang='c')), ['foo bar', 'one', 'two']) self.wipe() self.init(testdir, extra_args=['-Dset_percent_opt=myoption%', '--fatal-meson-warnings']) obj = mesonbuild.coredata.load(self.builddir) - self.assertEqual(obj.options[OptionKey('set_percent_opt')].value, 'myoption%') + self.assertEqual(obj.optstore.get_value('set_percent_opt'), 'myoption%') self.wipe() # Setting a 2nd time the same option should override the first value @@ -2741,19 +2746,19 @@ def test_command_line(self): '-Dc_args=-Dfoo', '-Dc_args=-Dbar', '-Db_lundef=false', '--fatal-meson-warnings']) obj = mesonbuild.coredata.load(self.builddir) - self.assertEqual(obj.options[OptionKey('bindir')].value, 'bar') - self.assertEqual(obj.options[OptionKey('buildtype')].value, 'release') - self.assertEqual(obj.options[OptionKey('b_sanitize')].value, 'thread') - self.assertEqual(obj.options[OptionKey('args', lang='c')].value, ['-Dbar']) + self.assertEqual(obj.optstore.get_value('bindir'), 'bar') + self.assertEqual(obj.optstore.get_value('buildtype'), 'release') + self.assertEqual(obj.optstore.get_value('b_sanitize'), 'thread') + self.assertEqual(obj.optstore.get_value(OptionKey('args', lang='c')), ['-Dbar']) self.setconf(['--bindir=bar', '--bindir=foo', '-Dbuildtype=release', '-Dbuildtype=plain', '-Db_sanitize=thread', '-Db_sanitize=address', '-Dc_args=-Dbar', '-Dc_args=-Dfoo']) obj = mesonbuild.coredata.load(self.builddir) - self.assertEqual(obj.options[OptionKey('bindir')].value, 'foo') - self.assertEqual(obj.options[OptionKey('buildtype')].value, 'plain') - self.assertEqual(obj.options[OptionKey('b_sanitize')].value, 'address') - self.assertEqual(obj.options[OptionKey('args', lang='c')].value, ['-Dfoo']) + self.assertEqual(obj.optstore.get_value('bindir'), 'foo') + 
self.assertEqual(obj.optstore.get_value('buildtype'), 'plain') + self.assertEqual(obj.optstore.get_value('b_sanitize'), 'address') + self.assertEqual(obj.optstore.get_value(OptionKey('args', lang='c')), ['-Dfoo']) self.wipe() except KeyError: # Ignore KeyError, it happens on CI for compilers that does not @@ -2767,25 +2772,25 @@ def test_warning_level_0(self): # Verify default values when passing no args self.init(testdir) obj = mesonbuild.coredata.load(self.builddir) - self.assertEqual(obj.options[OptionKey('warning_level')].value, '0') + self.assertEqual(obj.optstore.get_value('warning_level'), '0') self.wipe() # verify we can override w/ --warnlevel self.init(testdir, extra_args=['--warnlevel=1']) obj = mesonbuild.coredata.load(self.builddir) - self.assertEqual(obj.options[OptionKey('warning_level')].value, '1') + self.assertEqual(obj.optstore.get_value('warning_level'), '1') self.setconf('--warnlevel=0') obj = mesonbuild.coredata.load(self.builddir) - self.assertEqual(obj.options[OptionKey('warning_level')].value, '0') + self.assertEqual(obj.optstore.get_value('warning_level'), '0') self.wipe() # verify we can override w/ -Dwarning_level self.init(testdir, extra_args=['-Dwarning_level=1']) obj = mesonbuild.coredata.load(self.builddir) - self.assertEqual(obj.options[OptionKey('warning_level')].value, '1') + self.assertEqual(obj.optstore.get_value('warning_level'), '1') self.setconf('-Dwarning_level=0') obj = mesonbuild.coredata.load(self.builddir) - self.assertEqual(obj.options[OptionKey('warning_level')].value, '0') + self.assertEqual(obj.optstore.get_value('warning_level'), '0') self.wipe() def test_feature_check_usage_subprojects(self): @@ -4111,40 +4116,19 @@ def test_coverage_escaping(self): self._check_coverage_files() def test_cross_file_constants(self): - with temp_filename() as crossfile1, temp_filename() as crossfile2: - with open(crossfile1, 'w', encoding='utf-8') as f: - f.write(textwrap.dedent( - ''' - [constants] - compiler = 'gcc' - ''')) - with open(crossfile2, 'w', encoding='utf-8') as f: - f.write(textwrap.dedent( - ''' - [constants] - toolchain = '/toolchain/' - common_flags = ['--sysroot=' + toolchain / 'sysroot'] - - [properties] - c_args = common_flags + ['-DSOMETHING'] - cpp_args = c_args + ['-DSOMETHING_ELSE'] - rel_to_src = '@GLOBAL_SOURCE_ROOT@' / 'tool' - rel_to_file = '@DIRNAME@' / 'tool' - no_escaping = '@@DIRNAME@@' / 'tool' - - [binaries] - c = toolchain / compiler - ''')) - - values = mesonbuild.coredata.parse_machine_files([crossfile1, crossfile2], self.builddir) - self.assertEqual(values['binaries']['c'], '/toolchain/gcc') - self.assertEqual(values['properties']['c_args'], - ['--sysroot=/toolchain/sysroot', '-DSOMETHING']) - self.assertEqual(values['properties']['cpp_args'], - ['--sysroot=/toolchain/sysroot', '-DSOMETHING', '-DSOMETHING_ELSE']) - self.assertEqual(values['properties']['rel_to_src'], os.path.join(self.builddir, 'tool')) - self.assertEqual(values['properties']['rel_to_file'], os.path.join(os.path.dirname(crossfile2), 'tool')) - self.assertEqual(values['properties']['no_escaping'], os.path.join(f'@{os.path.dirname(crossfile2)}@', 'tool')) + crossfile1 = UNIT_MACHINEFILE_DIR / 'constant1.txt' + crossfile2 = UNIT_MACHINEFILE_DIR / 'constant2.txt' + values = mesonbuild.machinefile.parse_machine_files([crossfile1, + crossfile2], + self.builddir) + self.assertEqual(values['binaries']['c'], '/toolchain/gcc') + self.assertEqual(values['properties']['c_args'], + ['--sysroot=/toolchain/sysroot', '-DSOMETHING']) + 
self.assertEqual(values['properties']['cpp_args'], + ['--sysroot=/toolchain/sysroot', '-DSOMETHING', '-DSOMETHING_ELSE']) + self.assertEqual(values['properties']['rel_to_src'], os.path.join(self.builddir, 'tool')) + self.assertEqual(values['properties']['rel_to_file'], os.path.join(os.path.dirname(crossfile2), 'tool')) + self.assertEqual(values['properties']['no_escaping'], os.path.join(f'@{os.path.dirname(crossfile2)}@', 'tool')) @skipIf(is_windows(), 'Directory cleanup fails for some reason') def test_wrap_git(self): @@ -4235,7 +4219,7 @@ def test_wrap_redirect(self): filename = foo/subprojects/real.wrapper ''')) with self.assertRaisesRegex(WrapException, 'wrap-redirect filename must be a .wrap file'): - PackageDefinition(redirect_wrap) + PackageDefinition.from_wrap_file(redirect_wrap) # Invalid redirect, filename cannot be in parent directory with open(redirect_wrap, 'w', encoding='utf-8') as f: @@ -4244,7 +4228,7 @@ def test_wrap_redirect(self): filename = ../real.wrap ''')) with self.assertRaisesRegex(WrapException, 'wrap-redirect filename cannot contain ".."'): - PackageDefinition(redirect_wrap) + PackageDefinition.from_wrap_file(redirect_wrap) # Invalid redirect, filename must be in foo/subprojects/real.wrap with open(redirect_wrap, 'w', encoding='utf-8') as f: @@ -4253,7 +4237,7 @@ def test_wrap_redirect(self): filename = foo/real.wrap ''')) with self.assertRaisesRegex(WrapException, 'wrap-redirect filename must be in the form foo/subprojects/bar.wrap'): - PackageDefinition(redirect_wrap) + PackageDefinition.from_wrap_file(redirect_wrap) # Correct redirect with open(redirect_wrap, 'w', encoding='utf-8') as f: @@ -4266,7 +4250,7 @@ def test_wrap_redirect(self): [wrap-git] url = http://invalid ''')) - wrap = PackageDefinition(redirect_wrap) + wrap = PackageDefinition.from_wrap_file(redirect_wrap) self.assertEqual(wrap.get('url'), 'http://invalid') @skip_if_no_cmake diff --git a/unittests/baseplatformtests.py b/unittests/baseplatformtests.py index e94a2baac210..6e6a01d40fd3 100644 --- a/unittests/baseplatformtests.py +++ b/unittests/baseplatformtests.py @@ -78,6 +78,7 @@ def setUp(self): self.linuxlike_test_dir = os.path.join(src_root, 'test cases/linuxlike') self.objc_test_dir = os.path.join(src_root, 'test cases/objc') self.objcpp_test_dir = os.path.join(src_root, 'test cases/objcpp') + self.darwin_test_dir = os.path.join(src_root, 'test cases/darwin') # Misc stuff self.orig_env = os.environ.copy() diff --git a/unittests/cargotests.py b/unittests/cargotests.py index f0aedd0caec0..d1ac838ff59d 100644 --- a/unittests/cargotests.py +++ b/unittests/cargotests.py @@ -3,9 +3,12 @@ from __future__ import annotations import unittest +import os +import tempfile +import textwrap import typing as T -from mesonbuild.cargo import builder, cfg +from mesonbuild.cargo import builder, cfg, load_wraps from mesonbuild.cargo.cfg import TokenType from mesonbuild.cargo.version import convert @@ -185,3 +188,34 @@ def test_ir_to_meson(self) -> None: with self.subTest(): value = cfg.ir_to_meson(cfg.parse(iter(cfg.lexer(data))), build) self.assertEqual(value, expected) + +class CargoLockTest(unittest.TestCase): + def test_cargo_lock(self) -> None: + with tempfile.TemporaryDirectory() as tmpdir: + with open(os.path.join(tmpdir, 'Cargo.lock'), 'w', encoding='utf-8') as f: + f.write(textwrap.dedent('''\ + version = 3 + [[package]] + name = "foo" + version = "0.1" + source = "registry+https://github.com/rust-lang/crates.io-index" + checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" + 
[[package]] + name = "bar" + version = "0.1" + source = "git+https://github.com/gtk-rs/gtk-rs-core?branch=0.19#23c5599424cc75ec66618891c915d9f490f6e4c2" + ''')) + wraps = load_wraps(tmpdir, 'subprojects') + self.assertEqual(len(wraps), 2) + self.assertEqual(wraps[0].name, 'foo-0.1-rs') + self.assertEqual(wraps[0].directory, 'foo-0.1') + self.assertEqual(wraps[0].type, 'file') + self.assertEqual(wraps[0].get('method'), 'cargo') + self.assertEqual(wraps[0].get('source_url'), 'https://crates.io/api/v1/crates/foo/0.1/download') + self.assertEqual(wraps[0].get('source_hash'), '8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb') + self.assertEqual(wraps[1].name, 'bar-0.1-rs') + self.assertEqual(wraps[1].directory, 'bar') + self.assertEqual(wraps[1].type, 'git') + self.assertEqual(wraps[1].get('method'), 'cargo') + self.assertEqual(wraps[1].get('url'), 'https://github.com/gtk-rs/gtk-rs-core') + self.assertEqual(wraps[1].get('revision'), '23c5599424cc75ec66618891c915d9f490f6e4c2') diff --git a/unittests/darwintests.py b/unittests/darwintests.py index 5528bbc9fdc7..26dd996415af 100644 --- a/unittests/darwintests.py +++ b/unittests/darwintests.py @@ -4,10 +4,11 @@ import subprocess import re import os +import platform import unittest from mesonbuild.mesonlib import ( - MachineChoice, is_osx + MachineChoice, is_osx, version_compare ) from mesonbuild.compilers import ( detect_c_compiler @@ -81,6 +82,19 @@ def test_apple_bitcode_modules(self): self.build() self.run_tests() + @unittest.skipIf(version_compare(platform.mac_ver()[0], '<10.7'), '-export_dynamic was added in 10.7') + def test_apple_lto_export_dynamic(self): + ''' + Tests that -Wl,-export_dynamic is correctly added, when export_dynamic: true is set. + On macOS, this is relevant for LTO builds only. 
+ ''' + testdir = os.path.join(self.common_test_dir, '148 shared module resolving symbol in executable') + # Ensure that it builds even with LTO enabled + env = {'CFLAGS': '-flto'} + self.init(testdir, override_envvars=env) + self.build() + self.run_tests() + def _get_darwin_versions(self, fname): fname = os.path.join(self.builddir, fname) out = subprocess.check_output(['otool', '-L', fname], universal_newlines=True) @@ -88,6 +102,12 @@ def _get_darwin_versions(self, fname): self.assertIsNotNone(m, msg=out) return m.groups() + def _get_darwin_rpaths(self, fname: str) -> T.List[str]: + out = subprocess.check_output(['otool', '-l', fname], universal_newlines=True) + pattern = re.compile(r'path (.*) \(offset \d+\)') + rpaths = pattern.findall(out) + return rpaths + @skipIfNoPkgconfig def test_library_versioning(self): ''' @@ -142,3 +162,15 @@ def test_darwin_get_object_archs(self): from mesonbuild.mesonlib import darwin_get_object_archs archs = darwin_get_object_archs('/bin/cat') self.assertEqual(archs, ['x86_64', 'aarch64']) + + def test_darwin_meson_rpaths_removed_on_install(self): + testdir = os.path.join(self.darwin_test_dir, '1 rpath removal on install') + self.init(testdir) + self.build() + # Meson-created RPATHs are usually only valid in the build directory + rpaths = self._get_darwin_rpaths(os.path.join(self.builddir, 'libbar.dylib')) + self.assertListEqual(rpaths, ['@loader_path/foo']) + self.install() + # Those RPATHs are no longer valid and should not be present after installation + rpaths = self._get_darwin_rpaths(os.path.join(self.installdir, 'usr/lib/libbar.dylib')) + self.assertListEqual(rpaths, []) diff --git a/unittests/datatests.py b/unittests/datatests.py index b14bbac5a29a..b7b2d322a5ed 100644 --- a/unittests/datatests.py +++ b/unittests/datatests.py @@ -14,6 +14,7 @@ import mesonbuild.envconfig import mesonbuild.environment import mesonbuild.coredata +import mesonbuild.options import mesonbuild.modules.gnome from mesonbuild.interpreter import Interpreter from mesonbuild.ast import AstInterpreter @@ -139,8 +140,8 @@ def test_builtin_options_documented(self): found_entries |= options self.assertEqual(found_entries, { - *(str(k.evolve(module=None)) for k in mesonbuild.coredata.BUILTIN_OPTIONS), - *(str(k.evolve(module=None)) for k in mesonbuild.coredata.BUILTIN_OPTIONS_PER_MACHINE), + *(str(k.evolve(module=None)) for k in mesonbuild.options.BUILTIN_OPTIONS), + *(str(k.evolve(module=None)) for k in mesonbuild.options.BUILTIN_OPTIONS_PER_MACHINE), }) # Check that `buildtype` table inside `Core options` matches how @@ -162,9 +163,9 @@ def test_builtin_options_documented(self): else: raise RuntimeError(f'Invalid debug value {debug!r} in row:\n{m.group()}') env.coredata.set_option(OptionKey('buildtype'), buildtype) - self.assertEqual(env.coredata.options[OptionKey('buildtype')].value, buildtype) - self.assertEqual(env.coredata.options[OptionKey('optimization')].value, opt) - self.assertEqual(env.coredata.options[OptionKey('debug')].value, debug) + self.assertEqual(env.coredata.optstore.get_value('buildtype'), buildtype) + self.assertEqual(env.coredata.optstore.get_value('optimization'), opt) + self.assertEqual(env.coredata.optstore.get_value('debug'), debug) def test_cpu_families_documented(self): with open("docs/markdown/Reference-tables.md", encoding='utf-8') as f: diff --git a/unittests/internaltests.py b/unittests/internaltests.py index 411c97b36036..fe9f0d4f5cfa 100644 --- a/unittests/internaltests.py +++ b/unittests/internaltests.py @@ -625,7 +625,7 @@ def 
create_static_lib(name): env = get_fake_env() compiler = detect_c_compiler(env, MachineChoice.HOST) env.coredata.compilers.host = {'c': compiler} - env.coredata.options[OptionKey('link_args', lang='c')] = FakeCompilerOptions() + env.coredata.optstore.set_value_object(OptionKey('link_args', lang='c'), FakeCompilerOptions()) p1 = Path(tmpdir) / '1' p2 = Path(tmpdir) / '2' p1.mkdir() diff --git a/unittests/linuxliketests.py b/unittests/linuxliketests.py index bfe3586c1b84..ea86784892f3 100644 --- a/unittests/linuxliketests.py +++ b/unittests/linuxliketests.py @@ -1124,7 +1124,7 @@ def test_pkgconfig_duplicate_path_entries(self): # option, adding the meson-uninstalled directory to it. PkgConfigInterface.setup_env({}, env, MachineChoice.HOST, uninstalled=True) - pkg_config_path = env.coredata.options[OptionKey('pkg_config_path')].value + pkg_config_path = env.coredata.optstore.get_value('pkg_config_path') self.assertEqual(pkg_config_path, [pkg_dir]) @skipIfNoPkgconfig diff --git a/unittests/machinefiles/constant1.txt b/unittests/machinefiles/constant1.txt new file mode 100644 index 000000000000..eeba7cb10cda --- /dev/null +++ b/unittests/machinefiles/constant1.txt @@ -0,0 +1,2 @@ +[constants] +compiler = 'gcc' diff --git a/unittests/machinefiles/constant2.txt b/unittests/machinefiles/constant2.txt new file mode 100644 index 000000000000..226dcc8529c5 --- /dev/null +++ b/unittests/machinefiles/constant2.txt @@ -0,0 +1,13 @@ +[constants] +toolchain = '/toolchain/' +common_flags = ['--sysroot=' + toolchain / 'sysroot'] + +[properties] +c_args = common_flags + ['-DSOMETHING'] +cpp_args = c_args + ['-DSOMETHING_ELSE'] +rel_to_src = '@GLOBAL_SOURCE_ROOT@' / 'tool' +rel_to_file = '@DIRNAME@' / 'tool' +no_escaping = '@@DIRNAME@@' / 'tool' + +[binaries] +c = toolchain / compiler diff --git a/unittests/machinefiletests.py b/unittests/machinefiletests.py index 22341cb9a000..5ff862cdcfb1 100644 --- a/unittests/machinefiletests.py +++ b/unittests/machinefiletests.py @@ -12,7 +12,7 @@ import threading import sys from itertools import chain -from unittest import mock, skipIf, SkipTest +from unittest import mock, skipIf, SkipTest, TestCase from pathlib import Path import typing as T @@ -23,6 +23,9 @@ import mesonbuild.environment import mesonbuild.coredata import mesonbuild.modules.gnome + +from mesonbuild import machinefile + from mesonbuild.mesonlib import ( MachineChoice, is_windows, is_osx, is_cygwin, is_haiku, is_sunos ) @@ -50,6 +53,14 @@ def is_real_gnu_compiler(path): out = subprocess.check_output([path, '--version'], universal_newlines=True, stderr=subprocess.STDOUT) return 'Free Software Foundation' in out +cross_dir = Path(__file__).parent.parent / 'cross' + +class MachineFileStoreTests(TestCase): + + def test_loading(self): + store = machinefile.MachineFileStore([cross_dir / 'ubuntu-armhf.txt'], [], str(cross_dir)) + self.assertIsNotNone(store) + class NativeFileTests(BasePlatformTests): def setUp(self): diff --git a/unittests/platformagnostictests.py b/unittests/platformagnostictests.py index 572e0be1f55e..775d0dc2bc1e 100644 --- a/unittests/platformagnostictests.py +++ b/unittests/platformagnostictests.py @@ -15,8 +15,8 @@ from .baseplatformtests import BasePlatformTests from .helpers import is_ci -from mesonbuild.mesonlib import EnvironmentVariables, ExecutableSerialisation, MesonException, is_linux, python_command -from mesonbuild.mformat import match_path +from mesonbuild.mesonlib import EnvironmentVariables, ExecutableSerialisation, MesonException, is_linux, python_command, 
windows_proof_rmtree +from mesonbuild.mformat import Formatter, match_path from mesonbuild.optinterpreter import OptionInterpreter, OptionException from run_tests import Backend @@ -274,7 +274,7 @@ def test_setup_loaded_modules(self): expected = json.load(f)['meson']['modules'] self.assertEqual(data['modules'], expected) - self.assertEqual(data['count'], 68) + self.assertEqual(data['count'], 70) def test_meson_package_cache_dir(self): # Copy testdir into temporary directory to not pollute meson source tree. @@ -334,6 +334,60 @@ def test_editorconfig_match_path(self): for filename, pattern, expected in cases: self.assertTrue(match_path(filename, pattern) is expected, f'{filename} -> {pattern}') + def test_format_invalid_config_key(self) -> None: + fd, fname = tempfile.mkstemp(suffix='.ini', text=True) + self.addCleanup(os.unlink, fname) + + with os.fdopen(fd, 'w', encoding='utf-8') as handle: + handle.write('not_an_option = 42\n') + + with self.assertRaises(MesonException): + Formatter(Path(fname), use_editor_config=False, fetch_subdirs=False) + + def test_format_invalid_config_value(self) -> None: + fd, fname = tempfile.mkstemp(suffix='.ini', text=True) + self.addCleanup(os.unlink, fname) + + with os.fdopen(fd, 'w', encoding='utf-8') as handle: + handle.write('max_line_length = string\n') + + with self.assertRaises(MesonException): + Formatter(Path(fname), use_editor_config=False, fetch_subdirs=False) + + def test_format_invalid_editorconfig_value(self) -> None: + dirpath = tempfile.mkdtemp() + self.addCleanup(windows_proof_rmtree, dirpath) + + editorconfig = Path(dirpath, '.editorconfig') + with open(editorconfig, 'w', encoding='utf-8') as handle: + handle.write('[*]\n') + handle.write('indent_size = string\n') + + formatter = Formatter(None, use_editor_config=True, fetch_subdirs=False) + with self.assertRaises(MesonException): + formatter.load_editor_config(editorconfig) + + def test_format_empty_file(self) -> None: + formatter = Formatter(None, use_editor_config=False, fetch_subdirs=False) + for code in ('', '\n'): + formatted = formatter.format(code, Path()) + self.assertEqual('\n', formatted) + + def test_format_indent_comment_in_brackets(self) -> None: + """Ensure comments in arrays and dicts are correctly indented""" + formatter = Formatter(None, use_editor_config=False, fetch_subdirs=False) + code = 'a = [\n # comment\n]\n' + formatted = formatter.format(code, Path()) + self.assertEqual(code, formatted) + + code = 'a = [\n # comment\n 1,\n]\n' + formatted = formatter.format(code, Path()) + self.assertEqual(code, formatted) + + code = 'a = {\n # comment\n}\n' + formatted = formatter.format(code, Path()) + self.assertEqual(code, formatted) + def test_error_configuring_subdir(self): testdir = os.path.join(self.common_test_dir, '152 index customtarget') out = self.init(os.path.join(testdir, 'subdir'), allow_fail=True) diff --git a/unittests/pythontests.py b/unittests/pythontests.py index 6079bd587681..aaea906ea829 100644 --- a/unittests/pythontests.py +++ b/unittests/pythontests.py @@ -11,7 +11,7 @@ from .baseplatformtests import BasePlatformTests from .helpers import * -from mesonbuild.mesonlib import MachineChoice, TemporaryDirectoryWinProof +from mesonbuild.mesonlib import MachineChoice, TemporaryDirectoryWinProof, is_windows from mesonbuild.modules.python import PythonModule class PythonTests(BasePlatformTests): @@ -86,3 +86,32 @@ def test_bytecompile_single(self): if shutil.which('python2') or PythonModule._get_win_pythonpath('python2'): raise self.skipTest('python2 installed, 
already tested') self._test_bytecompile() + + def test_limited_api_linked_correct_lib(self): + if not is_windows(): + return self.skipTest('Test only run on Windows.') + + testdir = os.path.join(self.src_root, 'test cases', 'python', '9 extmodule limited api') + + self.init(testdir) + self.build() + + from importlib.machinery import EXTENSION_SUFFIXES + limited_suffix = EXTENSION_SUFFIXES[1] + + limited_library_path = os.path.join(self.builddir, f'limited{limited_suffix}') + self.assertPathExists(limited_library_path) + + limited_dep_name = 'python3.dll' + if shutil.which('dumpbin'): + # MSVC + output = subprocess.check_output(['dumpbin', '/DEPENDENTS', limited_library_path], + stderr=subprocess.STDOUT) + self.assertIn(limited_dep_name, output.decode()) + elif shutil.which('objdump'): + # mingw + output = subprocess.check_output(['objdump', '-p', limited_library_path], + stderr=subprocess.STDOUT) + self.assertIn(limited_dep_name, output.decode()) + else: + raise self.skipTest('Test needs either dumpbin(MSVC) or objdump(mingw).')