From 9c5a8ade311bcf573810bcb531da649464fb1253 Mon Sep 17 00:00:00 2001 From: Alyssa Coghlan Date: Mon, 21 Oct 2024 17:26:21 +1000 Subject: [PATCH 1/3] Initial import from Project Amphibian Push the Project Amphibian implementation code to the public `venvstacks` repository. --- .gitattributes | 23 + .github/workflows/test.yml | 131 + .github/workflows/update-expected-output.yml | 220 ++ .gitignore | 16 + ci-bootstrap-requirements.txt | 116 + ci-constraints.txt | 116 + lock_dev_venv.sh | 18 + misc/README.md | 6 + misc/clean_local_branches.ps1 | 6 + misc/clean_local_branches.sh | 7 + misc/export_test_artifacts.sh | 14 + misc/find_shared_libs.py | 46 + pdm.lock | 873 ++++++ pyproject.toml | 81 + src/venvstacks/__init__.py | 0 src/venvstacks/__main__.py | 7 + src/venvstacks/_util.py | 103 + src/venvstacks/cli.py | 546 ++++ src/venvstacks/pack_venv.py | 601 ++++ src/venvstacks/py.typed | 0 src/venvstacks/stacks.py | 2451 +++++++++++++++++ tests/README.md | 86 + tests/expected-output-config.toml | 37 + tests/hash_fodder/different_file.txt | 1 + tests/hash_fodder/file.txt | 1 + tests/hash_fodder/file_duplicate.txt | 1 + tests/hash_fodder/folder1/file.txt | 1 + tests/hash_fodder/folder1/subfolder/file.txt | 1 + tests/hash_fodder/folder2/file_duplicate.txt | 1 + tests/minimal_project/.gitignore | 2 + tests/minimal_project/empty.py | 0 tests/minimal_project/venvstacks.toml | 22 + .../env_metadata/app-scipy-client.json | 21 + .../env_metadata/app-scipy-import.json | 20 + .../env_metadata/app-sklearn-import.json | 20 + .../env_metadata/cpython@3.11.json | 15 + .../env_metadata/cpython@3.12.json | 15 + .../env_metadata/framework-http-client.json | 15 + .../env_metadata/framework-scipy.json | 15 + .../env_metadata/framework-sklearn.json | 15 + .../linux_x86_64/venvstacks.json | 146 + .../env_metadata/app-scipy-client.json | 21 + .../env_metadata/app-scipy-import.json | 20 + .../env_metadata/cpython@3.11.json | 15 + .../env_metadata/cpython@3.12.json | 15 + 
.../env_metadata/framework-http-client.json | 15 + .../env_metadata/framework-scipy.json | 15 + .../env_metadata/framework-sklearn.json | 15 + .../macosx_arm64/venvstacks.json | 126 + .../env_metadata/app-scipy-client.json | 21 + .../env_metadata/app-scipy-import.json | 20 + .../win_amd64/env_metadata/cpython@3.11.json | 15 + .../win_amd64/env_metadata/cpython@3.12.json | 15 + .../env_metadata/framework-http-client.json | 15 + .../env_metadata/framework-scipy.json | 15 + .../env_metadata/framework-sklearn.json | 15 + .../win_amd64/venvstacks.json | 126 + .../launch_modules/scipy_client/__main__.py | 6 + .../launch_modules/scipy_client/cli.py | 14 + .../launch_modules/scipy_import.py | 13 + .../launch_modules/sklearn_import.py | 13 + ...uirements-app-scipy-client-linux_x86_64.in | 4 + ...irements-app-scipy-client-linux_x86_64.txt | 111 + ...nts-app-scipy-client-linux_x86_64.txt.json | 4 + ...uirements-app-scipy-client-macosx_arm64.in | 4 + ...irements-app-scipy-client-macosx_arm64.txt | 111 + ...nts-app-scipy-client-macosx_arm64.txt.json | 4 + ...requirements-app-scipy-client-win_amd64.in | 4 + ...equirements-app-scipy-client-win_amd64.txt | 111 + ...ements-app-scipy-client-win_amd64.txt.json | 4 + ...uirements-app-scipy-import-linux_x86_64.in | 3 + ...irements-app-scipy-import-linux_x86_64.txt | 90 + ...nts-app-scipy-import-linux_x86_64.txt.json | 4 + ...uirements-app-scipy-import-macosx_arm64.in | 3 + ...irements-app-scipy-import-macosx_arm64.txt | 90 + ...nts-app-scipy-import-macosx_arm64.txt.json | 4 + ...requirements-app-scipy-import-win_amd64.in | 3 + ...equirements-app-scipy-import-win_amd64.txt | 90 + ...ements-app-scipy-import-win_amd64.txt.json | 4 + ...rements-app-sklearn-import-linux_x86_64.in | 3 + ...ements-app-sklearn-import-linux_x86_64.txt | 123 + ...s-app-sklearn-import-linux_x86_64.txt.json | 4 + .../requirements-cpython@3.11-linux_x86_64.in | 4 + ...requirements-cpython@3.11-linux_x86_64.txt | 56 + ...rements-cpython@3.11-linux_x86_64.txt.json | 
4 + .../requirements-cpython@3.11-macosx_arm64.in | 4 + ...requirements-cpython@3.11-macosx_arm64.txt | 56 + ...rements-cpython@3.11-macosx_arm64.txt.json | 4 + .../requirements-cpython@3.11-win_amd64.in | 4 + .../requirements-cpython@3.11-win_amd64.txt | 63 + ...quirements-cpython@3.11-win_amd64.txt.json | 4 + .../requirements-cpython@3.12-linux_x86_64.in | 3 + ...requirements-cpython@3.12-linux_x86_64.txt | 56 + ...rements-cpython@3.12-linux_x86_64.txt.json | 4 + .../requirements-cpython@3.12-macosx_arm64.in | 3 + ...requirements-cpython@3.12-macosx_arm64.txt | 56 + ...rements-cpython@3.12-macosx_arm64.txt.json | 4 + .../requirements-cpython@3.12-win_amd64.in | 3 + .../requirements-cpython@3.12-win_amd64.txt | 56 + ...quirements-cpython@3.12-win_amd64.txt.json | 4 + ...ents-framework-http-client-linux_x86_64.in | 3 + ...nts-framework-http-client-linux_x86_64.txt | 23 + ...ramework-http-client-linux_x86_64.txt.json | 4 + ...ents-framework-http-client-macosx_arm64.in | 3 + ...nts-framework-http-client-macosx_arm64.txt | 23 + ...ramework-http-client-macosx_arm64.txt.json | 4 + ...rements-framework-http-client-win_amd64.in | 3 + ...ements-framework-http-client-win_amd64.txt | 23 + ...s-framework-http-client-win_amd64.txt.json | 4 + ...quirements-framework-scipy-linux_x86_64.in | 3 + ...uirements-framework-scipy-linux_x86_64.txt | 90 + ...ents-framework-scipy-linux_x86_64.txt.json | 4 + ...quirements-framework-scipy-macosx_arm64.in | 3 + ...uirements-framework-scipy-macosx_arm64.txt | 90 + ...ents-framework-scipy-macosx_arm64.txt.json | 4 + .../requirements-framework-scipy-win_amd64.in | 3 + ...requirements-framework-scipy-win_amd64.txt | 90 + ...rements-framework-scipy-win_amd64.txt.json | 4 + ...irements-framework-sklearn-linux_x86_64.in | 3 + ...rements-framework-sklearn-linux_x86_64.txt | 123 + ...ts-framework-sklearn-linux_x86_64.txt.json | 4 + ...irements-framework-sklearn-macosx_arm64.in | 3 + ...rements-framework-sklearn-macosx_arm64.txt | 123 + 
...ts-framework-sklearn-macosx_arm64.txt.json | 4 + ...equirements-framework-sklearn-win_amd64.in | 3 + ...quirements-framework-sklearn-win_amd64.txt | 123 + ...ments-framework-sklearn-win_amd64.txt.json | 4 + tests/sample_project/venvstacks.toml | 126 + tests/support.py | 199 ++ tests/test_basics.py | 30 + tests/test_cli_invocation.py | 720 +++++ tests/test_hashing.py | 165 ++ tests/test_index_config.py | 105 + tests/test_minimal_project.py | 847 ++++++ tests/test_sample_project.py | 571 ++++ tests/update-expected-output.sh | 30 + tox.ini | 39 + 137 files changed, 11115 insertions(+) create mode 100644 .gitattributes create mode 100644 .github/workflows/test.yml create mode 100644 .github/workflows/update-expected-output.yml create mode 100644 .gitignore create mode 100644 ci-bootstrap-requirements.txt create mode 100644 ci-constraints.txt create mode 100755 lock_dev_venv.sh create mode 100644 misc/README.md create mode 100644 misc/clean_local_branches.ps1 create mode 100755 misc/clean_local_branches.sh create mode 100755 misc/export_test_artifacts.sh create mode 100755 misc/find_shared_libs.py create mode 100644 pdm.lock create mode 100644 pyproject.toml create mode 100644 src/venvstacks/__init__.py create mode 100644 src/venvstacks/__main__.py create mode 100644 src/venvstacks/_util.py create mode 100644 src/venvstacks/cli.py create mode 100755 src/venvstacks/pack_venv.py create mode 100644 src/venvstacks/py.typed create mode 100755 src/venvstacks/stacks.py create mode 100644 tests/README.md create mode 100644 tests/expected-output-config.toml create mode 100644 tests/hash_fodder/different_file.txt create mode 100644 tests/hash_fodder/file.txt create mode 100644 tests/hash_fodder/file_duplicate.txt create mode 100644 tests/hash_fodder/folder1/file.txt create mode 100644 tests/hash_fodder/folder1/subfolder/file.txt create mode 100644 tests/hash_fodder/folder2/file_duplicate.txt create mode 100644 tests/minimal_project/.gitignore create mode 100644 
tests/minimal_project/empty.py create mode 100644 tests/minimal_project/venvstacks.toml create mode 100644 tests/sample_project/expected_manifests/linux_x86_64/env_metadata/app-scipy-client.json create mode 100644 tests/sample_project/expected_manifests/linux_x86_64/env_metadata/app-scipy-import.json create mode 100644 tests/sample_project/expected_manifests/linux_x86_64/env_metadata/app-sklearn-import.json create mode 100644 tests/sample_project/expected_manifests/linux_x86_64/env_metadata/cpython@3.11.json create mode 100644 tests/sample_project/expected_manifests/linux_x86_64/env_metadata/cpython@3.12.json create mode 100644 tests/sample_project/expected_manifests/linux_x86_64/env_metadata/framework-http-client.json create mode 100644 tests/sample_project/expected_manifests/linux_x86_64/env_metadata/framework-scipy.json create mode 100644 tests/sample_project/expected_manifests/linux_x86_64/env_metadata/framework-sklearn.json create mode 100644 tests/sample_project/expected_manifests/linux_x86_64/venvstacks.json create mode 100644 tests/sample_project/expected_manifests/macosx_arm64/env_metadata/app-scipy-client.json create mode 100644 tests/sample_project/expected_manifests/macosx_arm64/env_metadata/app-scipy-import.json create mode 100644 tests/sample_project/expected_manifests/macosx_arm64/env_metadata/cpython@3.11.json create mode 100644 tests/sample_project/expected_manifests/macosx_arm64/env_metadata/cpython@3.12.json create mode 100644 tests/sample_project/expected_manifests/macosx_arm64/env_metadata/framework-http-client.json create mode 100644 tests/sample_project/expected_manifests/macosx_arm64/env_metadata/framework-scipy.json create mode 100644 tests/sample_project/expected_manifests/macosx_arm64/env_metadata/framework-sklearn.json create mode 100644 tests/sample_project/expected_manifests/macosx_arm64/venvstacks.json create mode 100644 tests/sample_project/expected_manifests/win_amd64/env_metadata/app-scipy-client.json create mode 100644 
tests/sample_project/expected_manifests/win_amd64/env_metadata/app-scipy-import.json create mode 100644 tests/sample_project/expected_manifests/win_amd64/env_metadata/cpython@3.11.json create mode 100644 tests/sample_project/expected_manifests/win_amd64/env_metadata/cpython@3.12.json create mode 100644 tests/sample_project/expected_manifests/win_amd64/env_metadata/framework-http-client.json create mode 100644 tests/sample_project/expected_manifests/win_amd64/env_metadata/framework-scipy.json create mode 100644 tests/sample_project/expected_manifests/win_amd64/env_metadata/framework-sklearn.json create mode 100644 tests/sample_project/expected_manifests/win_amd64/venvstacks.json create mode 100644 tests/sample_project/launch_modules/scipy_client/__main__.py create mode 100644 tests/sample_project/launch_modules/scipy_client/cli.py create mode 100644 tests/sample_project/launch_modules/scipy_import.py create mode 100644 tests/sample_project/launch_modules/sklearn_import.py create mode 100644 tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-linux_x86_64.in create mode 100644 tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-linux_x86_64.txt create mode 100644 tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-linux_x86_64.txt.json create mode 100644 tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-macosx_arm64.in create mode 100644 tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-macosx_arm64.txt create mode 100644 tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-macosx_arm64.txt.json create mode 100644 tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-win_amd64.in create mode 100644 tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-win_amd64.txt create mode 100644 
tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-win_amd64.txt.json create mode 100644 tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-linux_x86_64.in create mode 100644 tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-linux_x86_64.txt create mode 100644 tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-linux_x86_64.txt.json create mode 100644 tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-macosx_arm64.in create mode 100644 tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-macosx_arm64.txt create mode 100644 tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-macosx_arm64.txt.json create mode 100644 tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-win_amd64.in create mode 100644 tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-win_amd64.txt create mode 100644 tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-win_amd64.txt.json create mode 100644 tests/sample_project/requirements/app-sklearn-import/requirements-app-sklearn-import-linux_x86_64.in create mode 100644 tests/sample_project/requirements/app-sklearn-import/requirements-app-sklearn-import-linux_x86_64.txt create mode 100644 tests/sample_project/requirements/app-sklearn-import/requirements-app-sklearn-import-linux_x86_64.txt.json create mode 100644 tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-linux_x86_64.in create mode 100644 tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-linux_x86_64.txt create mode 100644 tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-linux_x86_64.txt.json create mode 100644 tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-macosx_arm64.in create mode 100644 
tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-macosx_arm64.txt create mode 100644 tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-macosx_arm64.txt.json create mode 100644 tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-win_amd64.in create mode 100644 tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-win_amd64.txt create mode 100644 tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-win_amd64.txt.json create mode 100644 tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-linux_x86_64.in create mode 100644 tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-linux_x86_64.txt create mode 100644 tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-linux_x86_64.txt.json create mode 100644 tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-macosx_arm64.in create mode 100644 tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-macosx_arm64.txt create mode 100644 tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-macosx_arm64.txt.json create mode 100644 tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-win_amd64.in create mode 100644 tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-win_amd64.txt create mode 100644 tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-win_amd64.txt.json create mode 100644 tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-linux_x86_64.in create mode 100644 tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-linux_x86_64.txt create mode 100644 tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-linux_x86_64.txt.json create mode 100644 
tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-macosx_arm64.in create mode 100644 tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-macosx_arm64.txt create mode 100644 tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-macosx_arm64.txt.json create mode 100644 tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-win_amd64.in create mode 100644 tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-win_amd64.txt create mode 100644 tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-win_amd64.txt.json create mode 100644 tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-linux_x86_64.in create mode 100644 tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-linux_x86_64.txt create mode 100644 tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-linux_x86_64.txt.json create mode 100644 tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-macosx_arm64.in create mode 100644 tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-macosx_arm64.txt create mode 100644 tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-macosx_arm64.txt.json create mode 100644 tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-win_amd64.in create mode 100644 tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-win_amd64.txt create mode 100644 tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-win_amd64.txt.json create mode 100644 tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-linux_x86_64.in create mode 100644 
tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-linux_x86_64.txt create mode 100644 tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-linux_x86_64.txt.json create mode 100644 tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-macosx_arm64.in create mode 100644 tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-macosx_arm64.txt create mode 100644 tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-macosx_arm64.txt.json create mode 100644 tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-win_amd64.in create mode 100644 tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-win_amd64.txt create mode 100644 tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-win_amd64.txt.json create mode 100644 tests/sample_project/venvstacks.toml create mode 100644 tests/support.py create mode 100644 tests/test_basics.py create mode 100644 tests/test_cli_invocation.py create mode 100644 tests/test_hashing.py create mode 100644 tests/test_index_config.py create mode 100644 tests/test_minimal_project.py create mode 100644 tests/test_sample_project.py create mode 100755 tests/update-expected-output.sh create mode 100644 tox.ini diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..e83aa5d --- /dev/null +++ b/.gitattributes @@ -0,0 +1,23 @@ +# Ensure script file consistency +*.ps1 text eol=lf +*.py text eol=lf +*.sh text eol=lf + +# Ensure config file consistency +*.ini text eol=lf +*.toml text eol=lf +*.yml text eol=lf + +# Ensure metadata consistency +*.json text eol=lf +*.lock text eol=lf +ci-*.txt text eol=lf +*.in text eol=lf + +# Ensure documentation consistency +*.md text eol=lf +*.rst text eol=lf + +# Ensure file content hash consistency +requirements*.txt text eol=lf +tests/hash_fodder/** text eol=lf 
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 0000000..d0f8696 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,131 @@ +name: Test + +on: + pull_request: + branches: + - "**" + paths: + # Run for changes to *this* workflow file, but not for other workflows + - ".github/workflows/test.yml" + # Trigger off all top level files by default + - "*" + # Trigger off source and test changes + - "src/**" + - "tests/**" + # Python scripts under misc still need linting & typechecks + - "misc/**.py" + # Skip running the source code checks when only documentation has been updated + - "!**.md" + - "!**.rst" + - "!**.txt" # Any requirements file changes will also involve changing other files + push: + branches: + - main + +defaults: + run: + # Use the Git for Windows bash shell, rather than supporting Powershell + # This also implies `set -eo pipefail` (rather than just `set -e`) + shell: bash + +permissions: + contents: read + +jobs: + tox: + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false # Always report results for all targets + max-parallel: 6 + matrix: + python-version: [3.11, 3.12] + # Note: while venvstacks nominally supports x86-64 macOS, the actual demand + # for that is unclear, so skip macos-12 testing until it is requested + os: [ubuntu-20.04, windows-2019, macos-14] + + # Check https://github.com/actions/action-versions/tree/main/config/actions + # for latest versions if the standard actions start emitting warnings + + steps: + - uses: actions/checkout@v4 + + - name: Capture timestamp for debugging artifacts + id: timestamp + run: | + echo "minutes=$(date '+%Y%m%d-%H%M')" >> $GITHUB_OUTPUT + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: Get pip cache dir + id: pip-cache + run: | + echo "dir=$(python -m pip cache dir)" >> $GITHUB_OUTPUT + + - name: Cache bootstrapping dependencies + uses: 
actions/cache@v4 + with: + path: ${{ steps.pip-cache.outputs.dir }} + key: + pip-${{ matrix.os }}-${{ matrix.python-version }}-v1-${{ hashFiles('pdm.lock') }} + restore-keys: | + pip-${{ matrix.os }}-${{ matrix.python-version }}-v1- + + - name: Install PDM + run: | + # Ensure `pdm` uses the same version as specified in `pdm.lock` + # while avoiding the error raised by https://github.com/pypa/pip/issues/12889 + python -m pip install --upgrade -r ci-bootstrap-requirements.txt + + - name: Create development virtual environment + run: | + python -m pdm sync --no-self --dev + # Handle Windows vs non-Windows differences in .venv layout + VIRTUAL_ENV_BIN_DIR="$PWD/.venv/bin" + test -e "$VIRTUAL_ENV_BIN_DIR" || VIRTUAL_ENV_BIN_DIR="$PWD/.venv/Scripts" + echo "VIRTUAL_ENV_BIN_DIR=$VIRTUAL_ENV_BIN_DIR" >> "$GITHUB_ENV" + + - name: Get uv cache dir + id: uv-cache + run: | + source "$VIRTUAL_ENV_BIN_DIR/activate" + echo "dir=$(python -m uv cache dir)" >> $GITHUB_OUTPUT + + - name: Cache test suite stack dependencies + uses: actions/cache@v4 + with: + path: ${{ steps.uv-cache.outputs.dir }} + key: + uv-${{ matrix.os }}-${{ matrix.python-version }}-v1-${{ hashFiles('tests/sample_project/requirements/**') }} + restore-keys: | + uv-${{ matrix.os }}-${{ matrix.python-version }}-v1- + + - name: Static checks + run: | + source "$VIRTUAL_ENV_BIN_DIR/activate" + python -m tox -v -m static + + - name: Fast tests + run: | + source "$VIRTUAL_ENV_BIN_DIR/activate" + python -m tox -v -- -m 'not slow' + + - name: Slow tests + id: slow_tests + run: | + export VENVSTACKS_EXPORT_TEST_ARTIFACTS="$GITHUB_WORKSPACE/export/tests" + mkdir -p "$VENVSTACKS_EXPORT_TEST_ARTIFACTS" + source "$VIRTUAL_ENV_BIN_DIR/activate" + python -m tox -v -- -m slow + + - name: Upload test failure debugging artifacts + if: failure() && steps.slow_tests.conclusion == 'failure' + uses: actions/upload-artifact@v4 + with: + # ensure test artifact upload names are unique + name: exported-test-artifacts-${{ 
steps.timestamp.outputs.minutes }}-${{ matrix.os }}-py${{ matrix.python-version }} + path: | + export/tests + retention-days: 3 # Just for debugging, don't need to keep these long term diff --git a/.github/workflows/update-expected-output.yml b/.github/workflows/update-expected-output.yml new file mode 100644 index 0000000..409e483 --- /dev/null +++ b/.github/workflows/update-expected-output.yml @@ -0,0 +1,220 @@ +name: Update expected output + +on: + pull_request: + # Don't update PRs on every push. PRs can be closed and + # reopened if the update action should be run again. + types: [opened, reopened] + branches: + - "**" + paths: + # Run for changes to *this* workflow file, but not for other workflows + - ".github/workflows/update-expected-output.yml" + # Check PRs that update the expected test suite output configuration + - "tests/expected-output-config.toml" + - "tests/sample_project/venvstacks.toml" + +defaults: + run: + # Use the Git for Windows bash shell, rather than supporting Powershell + # This also implies `set -eo pipefail` (rather than just `set -e`) + shell: bash + +permissions: + contents: read + +jobs: + timestamp: + runs-on: ubuntu-20.04 + outputs: + iso8601: ${{ steps.timestamp.outputs.iso8601 }} + rfc3339: ${{ steps.timestamp.outputs.rfc3339 }} + seconds: ${{ steps.timestamp.outputs.seconds }} + steps: + - name: Capture timestamp for branch name generation + id: timestamp + run: | + timestamp_iso8601="$(date --utc --iso-8601=seconds)" + echo "iso8601=$timestamp_iso8601"| tee -a "$GITHUB_OUTPUT" + timestamp_rfc3339="$(date --date="$timestamp_iso8601" --rfc-3339=seconds)" + echo "rfc3339=$timestamp_rfc3339"| tee -a "$GITHUB_OUTPUT" + timestamp_seconds="$(date --date="$timestamp_iso8601" '+%Y%m%d-%H%M%S')" + echo "seconds=$timestamp_seconds"| tee -a "$GITHUB_OUTPUT" + + test: + needs: timestamp + runs-on: ${{ matrix.os }} + outputs: + # Define multiple output variables to work around a matrix output + # limitation: 
https://github.com/orgs/community/discussions/17245 + want-pr-linux: ${{ steps.set-matrix-result.outputs.want-pr-ubuntu }} + want-pr-windows: ${{ steps.set-matrix-result.outputs.want-pr-windows }} + want-pr-macos: ${{ steps.set-matrix-result.outputs.want-pr-macos }} + strategy: + fail-fast: true # Don't bother updating if any test run fails + max-parallel: 3 + matrix: + # Expected test output is required to be Python version independent + # The version specified here should match the `test` label in `tox.ini` + python-version: [3.12] + os: [ubuntu-20.04, windows-2019, macos-14] + + # Check https://github.com/actions/action-versions/tree/main/config/actions + # for latest versions if the standard actions start emitting warnings + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: Get pip cache dir + id: pip-cache + run: | + echo "dir=$(python -m pip cache dir)" >> $GITHUB_OUTPUT + + - name: Cache bootstrapping dependencies + uses: actions/cache@v4 + with: + path: + ${{ steps.pip-cache.outputs.dir }} + key: + pip-${{ matrix.os }}-${{ matrix.python-version }}-v1-${{ hashFiles('pdm.lock') }} + restore-keys: | + pip-${{ matrix.os }}-${{ matrix.python-version }}-v1- + + - name: Install PDM + run: | + # Ensure `pdm` uses the same version as specified in `pdm.lock` + # while avoiding the error raised by https://github.com/pypa/pip/issues/12889 + python -m pip install --upgrade -r ci-bootstrap-requirements.txt + + - name: Create development virtual environment + run: | + python -m pdm sync --no-self --dev + # Handle Windows vs non-Windows differences in .venv layout + VIRTUAL_ENV_BIN_DIR="$PWD/.venv/bin" + test -e "$VIRTUAL_ENV_BIN_DIR" || VIRTUAL_ENV_BIN_DIR="$PWD/.venv/Scripts" + echo "VIRTUAL_ENV_BIN_DIR=$VIRTUAL_ENV_BIN_DIR" >> "$GITHUB_ENV" + + - name: Get uv cache dir + id: uv-cache + run: | + source 
"$VIRTUAL_ENV_BIN_DIR/activate" + echo "dir=$(python -m uv cache dir)" >> $GITHUB_OUTPUT + + - name: Cache test suite stack dependencies + uses: actions/cache@v4 + with: + path: ${{ steps.uv-cache.outputs.dir }} + key: + uv-${{ matrix.os }}-${{ matrix.python-version }}-v1-${{ hashFiles('tests/sample_project/requirements/**') }} + restore-keys: | + uv-${{ matrix.os }}-${{ matrix.python-version }}-v1- + + - name: Static checks + run: | + source "$VIRTUAL_ENV_BIN_DIR/activate" + python -m tox -v -m static + + - name: Ensure other fast tests pass + run: | + source "$VIRTUAL_ENV_BIN_DIR/activate" + python -m tox -m test -- -m "not slow and not expected_output" + + - name: Ensure other slow tests pass + run: | + source "$VIRTUAL_ENV_BIN_DIR/activate" + python -m tox -m test -- -m "slow and not expected_output" + + - name: Update outputs + id: update-test-outputs + run: | + export VENVSTACKS_EXPORT_DIR="$GITHUB_WORKSPACE/export/" + mkdir -p "$VENVSTACKS_EXPORT_DIR" + export VENVSTACKS_UPDATED_TEST_OUTPUTS="$VENVSTACKS_EXPORT_DIR/updated-test-outputs.txt" + source "$VIRTUAL_ENV_BIN_DIR/activate" + tests/update-expected-output.sh "$VENVSTACKS_UPDATED_TEST_OUTPUTS" + UPDATED_TEST_OUTPUTS="$(cat "$VENVSTACKS_UPDATED_TEST_OUTPUTS")" + if [ -z "$UPDATED_TEST_OUTPUTS" ]; then + echo 'updated='| tee -a "$GITHUB_OUTPUT" + else + echo 'updated< tuple["str", ...]: + suffix_parts = extension.split(".") + return tuple(f".{part}" for part in suffix_parts if part) + + +_PYLIB_SUFFIX = ".so" # .dylib is never importable as a Python module, even on macOS +_LIB_SUFFIXES = frozenset((_PYLIB_SUFFIX, ".dylib")) + +# Skip libraries with extensions that are explicitly for importable Python extension modules +_IGNORED_SUFFIXES = frozenset( + _ext_to_suffixes(ext) for ext in EXTENSION_SUFFIXES if ext != _PYLIB_SUFFIX +) + + +def main() -> None: + _dir_to_search = sys.argv[1] + _paths_to_link = [] + for this_dir, _, files in os.walk(_dir_to_search): + dir_path = Path(this_dir) + for fname in 
files: + file_path = dir_path / fname + if file_path.suffix not in _LIB_SUFFIXES: + continue + if tuple(file_path.suffixes) in _IGNORED_SUFFIXES: + continue + _paths_to_link.append(file_path) + + for file_path in _paths_to_link: + print(file_path) + + +if __name__ == "__main__": + main() diff --git a/pdm.lock b/pdm.lock new file mode 100644 index 0000000..8bfda6e --- /dev/null +++ b/pdm.lock @@ -0,0 +1,873 @@ +# This file is @generated by PDM. +# It is not intended for manual editing. + +[metadata] +groups = ["default", "bootstrap", "dev", "git"] +strategy = ["inherit_metadata"] +lock_version = "4.5.0" +content_hash = "sha256:33249953f549d76f22722b4a6287cfa47235e550ffdf44b4a9202cd38e9f9c37" + +[[metadata.targets]] +requires_python = ">=3.11" +platform = "manylinux_2_17_x86_64" + +[[metadata.targets]] +requires_python = ">=3.11" +platform = "musllinux_1_1_x86_64" + +[[metadata.targets]] +requires_python = ">=3.11" +platform = "windows_amd64" + +[[metadata.targets]] +requires_python = ">=3.11" +platform = "windows_arm64" + +[[metadata.targets]] +requires_python = ">=3.11" +platform = "macos_12_0_x86_64" + +[[metadata.targets]] +requires_python = ">=3.11" +platform = "macos_12_0_arm64" + +[[package]] +name = "anyio" +version = "4.6.2.post1" +requires_python = ">=3.9" +summary = "High level compatibility layer for multiple asynchronous event loop implementations" +groups = ["default", "bootstrap"] +dependencies = [ + "exceptiongroup>=1.0.2; python_version < \"3.11\"", + "idna>=2.8", + "sniffio>=1.1", + "typing-extensions>=4.1; python_version < \"3.11\"", +] +files = [ + {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, + {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, +] + +[[package]] +name = "attrs" +version = "24.2.0" +requires_python = ">=3.7" +summary = "Classes Without Boilerplate" +groups = ["dev"] +dependencies = 
[ + "importlib-metadata; python_version < \"3.8\"", +] +files = [ + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, +] + +[[package]] +name = "blinker" +version = "1.8.2" +requires_python = ">=3.8" +summary = "Fast, simple object-to-object and broadcast signaling" +groups = ["default", "bootstrap"] +files = [ + {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, + {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, +] + +[[package]] +name = "build" +version = "1.2.2.post1" +requires_python = ">=3.8" +summary = "A simple, correct Python build frontend" +groups = ["default"] +dependencies = [ + "colorama; os_name == \"nt\"", + "importlib-metadata>=4.6; python_full_version < \"3.10.2\"", + "packaging>=19.1", + "pyproject-hooks", + "tomli>=1.1.0; python_version < \"3.11\"", +] +files = [ + {file = "build-1.2.2.post1-py3-none-any.whl", hash = "sha256:1d61c0887fa860c01971625baae8bdd338e517b836a2f70dd1f7aa3a6b2fc5b5"}, + {file = "build-1.2.2.post1.tar.gz", hash = "sha256:b36993e92ca9375a219c99e606a122ff365a760a2d4bba0caa09bd5278b608b7"}, +] + +[[package]] +name = "cachetools" +version = "5.5.0" +requires_python = ">=3.7" +summary = "Extensible memoizing collections and decorators" +groups = ["dev"] +files = [ + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, +] + +[[package]] +name = "certifi" +version = "2024.8.30" +requires_python = ">=3.6" +summary = "Python package for providing Mozilla's CA Bundle." 
+groups = ["default", "bootstrap"] +files = [ + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, +] + +[[package]] +name = "chardet" +version = "5.2.0" +requires_python = ">=3.7" +summary = "Universal encoding detector for Python 3" +groups = ["dev"] +files = [ + {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, + {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, +] + +[[package]] +name = "click" +version = "8.1.7" +requires_python = ">=3.7" +summary = "Composable command line interface toolkit" +groups = ["default"] +dependencies = [ + "colorama; platform_system == \"Windows\"", + "importlib-metadata; python_version < \"3.8\"", +] +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +summary = "Cross-platform colored terminal text." 
+groups = ["default", "dev"] +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "dep-logic" +version = "0.4.9" +requires_python = ">=3.8" +summary = "Python dependency specifications supporting logical operations" +groups = ["default", "bootstrap"] +dependencies = [ + "packaging>=22", +] +files = [ + {file = "dep_logic-0.4.9-py3-none-any.whl", hash = "sha256:06faa33814e5ff881922f644284a608d7da7946462760f710217d829ae864a0e"}, + {file = "dep_logic-0.4.9.tar.gz", hash = "sha256:5d455ea2a3da4fea2be6186d886905c57eeeebe3ea7fa967f599cb8e0f01d5c9"}, +] + +[[package]] +name = "distlib" +version = "0.3.9" +summary = "Distribution utilities" +groups = ["default", "bootstrap", "dev"] +files = [ + {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, + {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, +] + +[[package]] +name = "dulwich" +version = "0.22.1" +requires_python = ">=3.7" +summary = "Python Git Library" +groups = ["git"] +dependencies = [ + "setuptools; python_version >= \"3.12\"", + "typing-extensions; python_version <= \"3.7\"", + "urllib3>=1.25", +] +files = [ + {file = "dulwich-0.22.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:82f26e592e9a36ab33bcdb419c7d53320e26c85dfc254cdb84f5f561a2fcaabf"}, + {file = "dulwich-0.22.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e90b8a2f24149c5803b733a24f1a016a2943b1f5a9ab2360db545e4638354c35"}, + {file = "dulwich-0.22.1-cp311-cp311-win_amd64.whl", hash = "sha256:a18d1392eabd02f337dcba23d723a4dcca87274ce8693cf88e6320f38bc3fdcd"}, + {file = "dulwich-0.22.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:12482e318895da9acabea7c0cc70b35d36833e7cb2def511ab3a63617f5c1af3"}, + {file = "dulwich-0.22.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dc42afedc8cda4f2fd15a06d2e9e41281074a02cdf31bb2e0dde4d80766a408"}, + {file = "dulwich-0.22.1-cp312-cp312-win_amd64.whl", hash = "sha256:9d19f04ecd4628a0e4587b4c4e98e040b87924c1362ae5aa27420435f05d5dd8"}, + {file = "dulwich-0.22.1.tar.gz", hash = "sha256:e36d85967cfbf25da1c7bc3d6921adc5baa976969d926aaf1582bd5fd7e94758"}, +] + +[[package]] +name = "filelock" +version = "3.16.1" +requires_python = ">=3.8" +summary = "A platform independent file lock." +groups = ["default", "bootstrap", "dev"] +files = [ + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, +] + +[[package]] +name = "findpython" +version = "0.6.1" +requires_python = ">=3.8" +summary = "A utility to find python versions on your system" +groups = ["default", "bootstrap"] +dependencies = [ + "packaging>=20", +] +files = [ + {file = "findpython-0.6.1-py3-none-any.whl", hash = "sha256:1fb4d709205de185b0561900267dfff64a841c910fe28d6038b2394ff925a81a"}, + {file = "findpython-0.6.1.tar.gz", hash = "sha256:56e52b409a92bcbd495cf981c85acf137f3b3e51cc769b46eba219bb1ab7533c"}, +] + +[[package]] +name = "h11" +version = "0.14.0" +requires_python = ">=3.7" +summary = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +groups = ["default", "bootstrap"] +dependencies = [ + "typing-extensions; python_version < \"3.8\"", +] +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "hishel" +version = "0.0.33" +requires_python = 
">=3.8" +summary = "Persistent cache implementation for httpx and httpcore" +groups = ["default", "bootstrap"] +dependencies = [ + "httpx>=0.22.0", + "typing-extensions>=4.8.0", +] +files = [ + {file = "hishel-0.0.33-py3-none-any.whl", hash = "sha256:6e6c6cdaf432ff4c4981e7792ef7d1fa4c8ede58b9dbbcefb9ab3fc9770f2a07"}, + {file = "hishel-0.0.33.tar.gz", hash = "sha256:ab5b2661d5e2252f305fd0fb20e8c76bfab3ea73458f20f2591c53c37b270089"}, +] + +[[package]] +name = "httpcore" +version = "1.0.6" +requires_python = ">=3.8" +summary = "A minimal low-level HTTP client." +groups = ["default", "bootstrap"] +dependencies = [ + "certifi", + "h11<0.15,>=0.13", +] +files = [ + {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, + {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, +] + +[[package]] +name = "httpx" +version = "0.27.2" +requires_python = ">=3.8" +summary = "The next generation HTTP client." +groups = ["default", "bootstrap"] +dependencies = [ + "anyio", + "certifi", + "httpcore==1.*", + "idna", + "sniffio", +] +files = [ + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, +] + +[[package]] +name = "httpx" +version = "0.27.2" +extras = ["socks"] +requires_python = ">=3.8" +summary = "The next generation HTTP client." 
+groups = ["default", "bootstrap"] +dependencies = [ + "httpx==0.27.2", + "socksio==1.*", +] +files = [ + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, +] + +[[package]] +name = "idna" +version = "3.10" +requires_python = ">=3.6" +summary = "Internationalized Domain Names in Applications (IDNA)" +groups = ["default", "bootstrap"] +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +requires_python = ">=3.7" +summary = "brain-dead simple config-ini parsing" +groups = ["dev"] +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "installer" +version = "0.7.0" +requires_python = ">=3.7" +summary = "A library for installing Python wheels." +groups = ["default", "bootstrap"] +files = [ + {file = "installer-0.7.0-py3-none-any.whl", hash = "sha256:05d1933f0a5ba7d8d6296bb6d5018e7c94fa473ceb10cf198a92ccea19c27b53"}, + {file = "installer-0.7.0.tar.gz", hash = "sha256:a26d3e3116289bb08216e0d0f7d925fcef0b0194eedfa0c944bcaaa106c4b631"}, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +requires_python = ">=3.8" +summary = "Python port of markdown-it. Markdown parsing, done right!" 
+groups = ["default", "bootstrap"] +dependencies = [ + "mdurl~=0.1", +] +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +requires_python = ">=3.7" +summary = "Markdown URL utilities" +groups = ["default", "bootstrap"] +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "msgpack" +version = "1.1.0" +requires_python = ">=3.8" +summary = "MessagePack serializer" +groups = ["default", "bootstrap"] +files = [ + {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d364a55082fb2a7416f6c63ae383fbd903adb5a6cf78c5b96cc6316dc1cedc7"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79ec007767b9b56860e0372085f8504db5d06bd6a327a335449508bbee9648fa"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6ad622bf7756d5a497d5b6836e7fc3752e2dd6f4c648e24b1803f6048596f701"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1da8f11a3dd397f0a32c76165cf0c4eb95b31013a94f6ecc0b280c05c91b59"}, + {file = "msgpack-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd2906780f25c8ed5d7b323379f6138524ba793428db5d0e9d226d3fa6aa1788"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d46cf9e3705ea9485687aa4001a76e44748b609d260af21c4ceea7f2212a501d"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5dbad74103df937e1325cc4bfeaf57713be0b4f15e1c2da43ccdd836393e2ea2"}, + {file = 
"msgpack-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58dfc47f8b102da61e8949708b3eafc3504509a5728f8b4ddef84bd9e16ad420"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17fb65dd0bec285907f68b15734a993ad3fc94332b5bb21b0435846228de1f39"}, + {file = "msgpack-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:115a7af8ee9e8cddc10f87636767857e7e3717b7a2e97379dc2054712693e90f"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:071603e2f0771c45ad9bc65719291c568d4edf120b44eb36324dcb02a13bfddf"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0f92a83b84e7c0749e3f12821949d79485971f087604178026085f60ce109330"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1964df7b81285d00a84da4e70cb1383f2e665e0f1f2a7027e683956d04b734"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0907e1a7119b337971a689153665764adc34e89175f9a34793307d9def08e6ca"}, + {file = "msgpack-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:bce7d9e614a04d0883af0b3d4d501171fbfca038f12c77fa838d9f198147a23f"}, + {file = "msgpack-1.1.0.tar.gz", hash = "sha256:dd432ccc2c72b914e4cb77afce64aab761c1137cc698be3984eee260bcb2896e"}, +] + +[[package]] +name = "mypy" +version = "1.12.0" +requires_python = ">=3.8" +summary = "Optional static typing for Python" +groups = ["dev"] +dependencies = [ + "mypy-extensions>=1.0.0", + "tomli>=1.1.0; python_version < \"3.11\"", + "typing-extensions>=4.6.0", +] +files = [ + {file = "mypy-1.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b86de37a0da945f6d48cf110d5206c5ed514b1ca2614d7ad652d4bf099c7de7"}, + {file = "mypy-1.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:20c7c5ce0c1be0b0aea628374e6cf68b420bcc772d85c3c974f675b88e3e6e57"}, + {file = "mypy-1.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:a64ee25f05fc2d3d8474985c58042b6759100a475f8237da1f4faf7fcd7e6309"}, + {file = "mypy-1.12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:faca7ab947c9f457a08dcb8d9a8664fd438080e002b0fa3e41b0535335edcf7f"}, + {file = "mypy-1.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:5bc81701d52cc8767005fdd2a08c19980de9ec61a25dbd2a937dfb1338a826f9"}, + {file = "mypy-1.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8462655b6694feb1c99e433ea905d46c478041a8b8f0c33f1dab00ae881b2164"}, + {file = "mypy-1.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:923ea66d282d8af9e0f9c21ffc6653643abb95b658c3a8a32dca1eff09c06475"}, + {file = "mypy-1.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1ebf9e796521f99d61864ed89d1fb2926d9ab6a5fab421e457cd9c7e4dd65aa9"}, + {file = "mypy-1.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e478601cc3e3fa9d6734d255a59c7a2e5c2934da4378f3dd1e3411ea8a248642"}, + {file = "mypy-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:c72861b7139a4f738344faa0e150834467521a3fba42dc98264e5aa9507dd601"}, + {file = "mypy-1.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52b9e1492e47e1790360a43755fa04101a7ac72287b1a53ce817f35899ba0521"}, + {file = "mypy-1.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:48d3e37dd7d9403e38fa86c46191de72705166d40b8c9f91a3de77350daa0893"}, + {file = "mypy-1.12.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2f106db5ccb60681b622ac768455743ee0e6a857724d648c9629a9bd2ac3f721"}, + {file = "mypy-1.12.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:233e11b3f73ee1f10efada2e6da0f555b2f3a5316e9d8a4a1224acc10e7181d3"}, + {file = "mypy-1.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:4ae8959c21abcf9d73aa6c74a313c45c0b5a188752bf37dace564e29f06e9c1b"}, + {file = "mypy-1.12.0-py3-none-any.whl", hash = "sha256:fd313226af375d52e1e36c383f39bf3836e1f192801116b31b090dfcd3ec5266"}, + {file = 
"mypy-1.12.0.tar.gz", hash = "sha256:65a22d87e757ccd95cbbf6f7e181e6caa87128255eb2b6be901bb71b26d8a99d"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +requires_python = ">=3.5" +summary = "Type system extensions for programs checked with the mypy type checker." +groups = ["dev"] +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "packaging" +version = "24.1" +requires_python = ">=3.8" +summary = "Core utilities for Python packages" +groups = ["default", "bootstrap", "dev"] +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "pbs-installer" +version = "2024.10.10" +requires_python = ">=3.8" +summary = "Installer for Python Build Standalone" +groups = ["default", "bootstrap", "dev"] +files = [ + {file = "pbs_installer-2024.10.10-py3-none-any.whl", hash = "sha256:b82fb5c96a4ca2a8c2ea2521268fa83fa18c1bfa32decfb3d77139a07c13f90c"}, + {file = "pbs_installer-2024.10.10.tar.gz", hash = "sha256:228bba8e78134c407ee6637da6a5a16479aaa702332bfb1b95d873fc00802305"}, +] + +[[package]] +name = "pdm" +version = "2.19.2" +requires_python = ">=3.8" +summary = "A modern Python package and dependency manager supporting the latest PEP standards" +groups = ["default", "bootstrap"] +dependencies = [ + "blinker", + "dep-logic>=0.4.4", + "filelock>=3.13", + "findpython<1.0.0a0,>=0.6.0", + "hishel<0.1.0,>=0.0.32", + "httpx[socks]<1,>0.20", + "importlib-metadata>=3.6; python_version < \"3.10\"", + "importlib-resources>=5; python_version < \"3.9\"", + "installer<0.8,>=0.7", + "msgpack>=1.0", + 
"packaging!=22.0,>=20.9", + "pbs-installer>=2024.4.18", + "platformdirs", + "pyproject-hooks", + "python-dotenv>=0.15", + "resolvelib>=1.0.1", + "rich>=12.3.0", + "shellingham>=1.3.2", + "tomli>=1.1.0; python_version < \"3.11\"", + "tomlkit<1,>=0.11.1", + "truststore>=0.9; python_version >= \"3.10\"", + "unearth>=0.17.0", + "virtualenv>=20", +] +files = [ + {file = "pdm-2.19.2-py3-none-any.whl", hash = "sha256:42af4e0897b139656e003767e99c4f77014bf36d9a7b759d3e09b49ee5979143"}, + {file = "pdm-2.19.2.tar.gz", hash = "sha256:efb39264569181d0375536ef81c556648f16b540d429a53715730490a2283567"}, +] + +[[package]] +name = "pip" +version = "24.2" +requires_python = ">=3.8" +summary = "The PyPA recommended tool for installing Python packages." +groups = ["default"] +files = [ + {file = "pip-24.2-py3-none-any.whl", hash = "sha256:2cd581cf58ab7fcfca4ce8efa6dcacd0de5bf8d0a3eb9ec927e07405f4d9e2a2"}, + {file = "pip-24.2.tar.gz", hash = "sha256:5b5e490b5e9cb275c879595064adce9ebd31b854e3e803740b72f9ccf34a45b8"}, +] + +[[package]] +name = "pip-tools" +version = "7.4.1" +requires_python = ">=3.8" +summary = "pip-tools keeps your pinned dependencies fresh." +groups = ["default"] +dependencies = [ + "build>=1.0.0", + "click>=8", + "pip>=22.2", + "pyproject-hooks", + "setuptools", + "tomli; python_version < \"3.11\"", + "wheel", +] +files = [ + {file = "pip-tools-7.4.1.tar.gz", hash = "sha256:864826f5073864450e24dbeeb85ce3920cdfb09848a3d69ebf537b521f14bcc9"}, + {file = "pip_tools-7.4.1-py3-none-any.whl", hash = "sha256:4c690e5fbae2f21e87843e89c26191f0d9454f362d8acdbd695716493ec8b3a9"}, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +requires_python = ">=3.8" +summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+groups = ["default", "bootstrap", "dev"] +files = [ + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +requires_python = ">=3.8" +summary = "plugin and hook calling mechanisms for python" +groups = ["dev"] +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[[package]] +name = "pygments" +version = "2.18.0" +requires_python = ">=3.8" +summary = "Pygments is a syntax highlighting package written in Python." +groups = ["default", "bootstrap"] +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[[package]] +name = "pyproject-api" +version = "1.8.0" +requires_python = ">=3.8" +summary = "API to interact with the python pyproject.toml based projects" +groups = ["dev"] +dependencies = [ + "packaging>=24.1", + "tomli>=2.0.1; python_version < \"3.11\"", +] +files = [ + {file = "pyproject_api-1.8.0-py3-none-any.whl", hash = "sha256:3d7d347a047afe796fd5d1885b1e391ba29be7169bd2f102fcd378f04273d228"}, + {file = "pyproject_api-1.8.0.tar.gz", hash = "sha256:77b8049f2feb5d33eefcc21b57f1e279636277a8ac8ad6b5871037b243778496"}, +] + +[[package]] +name = "pyproject-hooks" +version = "1.2.0" +requires_python = ">=3.7" +summary = "Wrappers to call pyproject.toml-based build backend hooks." 
+groups = ["default", "bootstrap"] +files = [ + {file = "pyproject_hooks-1.2.0-py3-none-any.whl", hash = "sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913"}, + {file = "pyproject_hooks-1.2.0.tar.gz", hash = "sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8"}, +] + +[[package]] +name = "pytest" +version = "8.3.3" +requires_python = ">=3.8" +summary = "pytest: simple powerful testing with Python" +groups = ["dev"] +dependencies = [ + "colorama; sys_platform == \"win32\"", + "exceptiongroup>=1.0.0rc8; python_version < \"3.11\"", + "iniconfig", + "packaging", + "pluggy<2,>=1.5", + "tomli>=1; python_version < \"3.11\"", +] +files = [ + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, +] + +[[package]] +name = "pytest-subtests" +version = "0.13.1" +requires_python = ">=3.7" +summary = "unittest subTest() support and subtests fixture" +groups = ["dev"] +dependencies = [ + "attrs>=19.2.0", + "pytest>=7.0", + "typing-extensions; python_version < \"3.8\"", +] +files = [ + {file = "pytest_subtests-0.13.1-py3-none-any.whl", hash = "sha256:ab616a22f64cd17c1aee65f18af94dbc30c444f8683de2b30895c3778265e3bd"}, + {file = "pytest_subtests-0.13.1.tar.gz", hash = "sha256:989e38f0f1c01bc7c6b2e04db7d9fd859db35d77c2c1a430c831a70cbf3fde2d"}, +] + +[[package]] +name = "python-dotenv" +version = "1.0.1" +requires_python = ">=3.8" +summary = "Read key-value pairs from a .env file and set them as environment variables" +groups = ["default", "bootstrap"] +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[[package]] +name = "resolvelib" 
+version = "1.0.1" +summary = "Resolve abstract dependencies into concrete ones" +groups = ["default", "bootstrap"] +files = [ + {file = "resolvelib-1.0.1-py2.py3-none-any.whl", hash = "sha256:d2da45d1a8dfee81bdd591647783e340ef3bcb104b54c383f70d422ef5cc7dbf"}, + {file = "resolvelib-1.0.1.tar.gz", hash = "sha256:04ce76cbd63fded2078ce224785da6ecd42b9564b1390793f64ddecbe997b309"}, +] + +[[package]] +name = "rich" +version = "13.9.2" +requires_python = ">=3.8.0" +summary = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +groups = ["default", "bootstrap"] +dependencies = [ + "markdown-it-py>=2.2.0", + "pygments<3.0.0,>=2.13.0", + "typing-extensions<5.0,>=4.0.0; python_version < \"3.11\"", +] +files = [ + {file = "rich-13.9.2-py3-none-any.whl", hash = "sha256:8c82a3d3f8dcfe9e734771313e606b39d8247bb6b826e196f4914b333b743cf1"}, + {file = "rich-13.9.2.tar.gz", hash = "sha256:51a2c62057461aaf7152b4d611168f93a9fc73068f8ded2790f29fe2b5366d0c"}, +] + +[[package]] +name = "ruff" +version = "0.6.9" +requires_python = ">=3.7" +summary = "An extremely fast Python linter and code formatter, written in Rust." 
+groups = ["dev"] +files = [ + {file = "ruff-0.6.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:140d4b5c9f5fc7a7b074908a78ab8d384dd7f6510402267bc76c37195c02a7ec"}, + {file = "ruff-0.6.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:53fd8ca5e82bdee8da7f506d7b03a261f24cd43d090ea9db9a1dc59d9313914c"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a67267654edc23c97335586774790cde402fb6bbdb3c2314f1fc087dee320bfa"}, + {file = "ruff-0.6.9-py3-none-win_amd64.whl", hash = "sha256:785d31851c1ae91f45b3d8fe23b8ae4b5170089021fbb42402d811135f0b7117"}, + {file = "ruff-0.6.9-py3-none-win_arm64.whl", hash = "sha256:a9641e31476d601f83cd602608739a0840e348bda93fec9f1ee816f8b6798b93"}, + {file = "ruff-0.6.9.tar.gz", hash = "sha256:b076ef717a8e5bc819514ee1d602bbdca5b4420ae13a9cf61a0c0a4f53a2baa2"}, +] + +[[package]] +name = "setuptools" +version = "75.1.0" +requires_python = ">=3.8" +summary = "Easily download, build, install, upgrade, and uninstall Python packages" +groups = ["default", "git"] +files = [ + {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, + {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +requires_python = ">=3.7" +summary = "Tool to Detect Surrounding Shell" +groups = ["default", "bootstrap"] +files = [ + {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, + {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +requires_python = ">=3.7" +summary = "Sniff out which async library your code is running under" +groups = ["default", "bootstrap"] +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = 
"sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "socksio" +version = "1.0.0" +requires_python = ">=3.6" +summary = "Sans-I/O implementation of SOCKS4, SOCKS4A, and SOCKS5." +groups = ["default", "bootstrap"] +files = [ + {file = "socksio-1.0.0-py3-none-any.whl", hash = "sha256:95dc1f15f9b34e8d7b16f06d74b8ccf48f609af32ab33c608d08761c5dcbb1f3"}, + {file = "socksio-1.0.0.tar.gz", hash = "sha256:f88beb3da5b5c38b9890469de67d0cb0f9d494b78b106ca1845f96c10b91c4ac"}, +] + +[[package]] +name = "tomlkit" +version = "0.13.2" +requires_python = ">=3.8" +summary = "Style preserving TOML library" +groups = ["default", "bootstrap"] +files = [ + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, +] + +[[package]] +name = "tox" +version = "4.21.2" +requires_python = ">=3.8" +summary = "tox is a generic virtualenv management and test command line tool" +groups = ["dev"] +dependencies = [ + "cachetools>=5.5", + "chardet>=5.2", + "colorama>=0.4.6", + "filelock>=3.16.1", + "packaging>=24.1", + "platformdirs>=4.3.6", + "pluggy>=1.5", + "pyproject-api>=1.8", + "tomli>=2.0.1; python_version < \"3.11\"", + "typing-extensions>=4.12.2; python_version < \"3.11\"", + "virtualenv>=20.26.6", +] +files = [ + {file = "tox-4.21.2-py3-none-any.whl", hash = "sha256:13d996adcd792e7c82994b0e116d85efd84f0c6d185254d83d156f73f86b2038"}, + {file = "tox-4.21.2.tar.gz", hash = "sha256:49381ff102296753e378fa5ff30e42a35e695f149b4dbf8a2c49d15fdb5797b2"}, +] + +[[package]] +name = "tox-gh" +version = "1.4.4" +requires_python = ">=3.9" +summary = "Seamless integration of tox into GitHub Actions." 
+groups = ["dev"] +dependencies = [ + "tox>=4.18.1", +] +files = [ + {file = "tox_gh-1.4.4-py3-none-any.whl", hash = "sha256:b962e0f8c4619e98d11c2a135939876691e148b843b7dac4cff7de1dc4f7c215"}, + {file = "tox_gh-1.4.4.tar.gz", hash = "sha256:4ea585f66585b90f5826b1677cfc9453747792a0f9ff83d468603bc17556e07b"}, +] + +[[package]] +name = "tox-pdm" +version = "0.7.2" +requires_python = ">=3.7" +summary = "A plugin for tox that utilizes PDM as the package manager and installer" +groups = ["dev"] +dependencies = [ + "tomli; python_version < \"3.11\"", + "tox>=4.0", +] +files = [ + {file = "tox_pdm-0.7.2-py3-none-any.whl", hash = "sha256:12f6215416b7acd00a80a9e7128f3dc3e3c89308d60707f5d0a24abdf83ac104"}, + {file = "tox_pdm-0.7.2.tar.gz", hash = "sha256:a841a7e1e942a71805624703b9a6d286663bd6af79bba6130ba756975c315308"}, +] + +[[package]] +name = "truststore" +version = "0.9.2" +requires_python = ">=3.10" +summary = "Verify certificates using native system trust stores" +groups = ["default", "bootstrap"] +files = [ + {file = "truststore-0.9.2-py3-none-any.whl", hash = "sha256:04559916f8810cc1a5ecc41f215eddc988746067b754fc0995da7a2ceaf54735"}, + {file = "truststore-0.9.2.tar.gz", hash = "sha256:a1dee0d0575ff22d2875476343783a5d64575419974e228f3248772613c3d993"}, +] + +[[package]] +name = "typer-slim" +version = "0.12.5" +requires_python = ">=3.7" +summary = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
+groups = ["default"] +dependencies = [ + "click>=8.0.0", + "typing-extensions>=3.7.4.3", +] +files = [ + {file = "typer_slim-0.12.5-py3-none-any.whl", hash = "sha256:9a994f721b828783dbf144e17461b1c720bb4598e0d5eff7c1b3f08ee58cb062"}, + {file = "typer_slim-0.12.5.tar.gz", hash = "sha256:c8e3fcf93cc7dd584036df8755d2e2363f85f8a4dd028c7911eed3f00cf0ebb1"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +requires_python = ">=3.8" +summary = "Backported and Experimental Type Hints for Python 3.8+" +groups = ["default", "bootstrap", "dev"] +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "unearth" +version = "0.17.2" +requires_python = ">=3.8" +summary = "A utility to fetch and download python packages" +groups = ["default", "bootstrap"] +dependencies = [ + "httpx<1,>=0.27.0", + "packaging>=20", +] +files = [ + {file = "unearth-0.17.2-py3-none-any.whl", hash = "sha256:4d21af1238a583835fca156322f7225382e718cdcc42d6278050a88e605c4ad5"}, + {file = "unearth-0.17.2.tar.gz", hash = "sha256:0b8a2afd3476f1ab6155fc579501ac47fffe43547d88a70e5a5b76a7fe6caa2c"}, +] + +[[package]] +name = "urllib3" +version = "2.2.3" +requires_python = ">=3.8" +summary = "HTTP library with thread-safe connection pooling, file post, and more." +groups = ["git"] +files = [ + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, +] + +[[package]] +name = "uv" +version = "0.4.21" +requires_python = ">=3.8" +summary = "An extremely fast Python package and project manager, written in Rust." 
+groups = ["default", "dev"] +files = [ + {file = "uv-0.4.21-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ba3e3b40cc1d5a980d36589775d6a7e4defa1b33e7e06423af0e395b8e4d9505"}, + {file = "uv-0.4.21-py3-none-macosx_11_0_arm64.whl", hash = "sha256:19607da8ee024e4ff060804efb8251e3b821cbd7f830b58612600ffe739fd33d"}, + {file = "uv-0.4.21-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c08b01f8571d2c64d45d569990aa7bffad5eb259cf64bc329d40d8c787fb9ba"}, + {file = "uv-0.4.21-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:a1a9a126ce48f0f0893891adb5a9749220425169092f3e4da1216168736ac16d"}, + {file = "uv-0.4.21-py3-none-win_amd64.whl", hash = "sha256:45df47a4f43db730bea72bd3150c206d00d1a4d854137ed63dc04bb73032f280"}, + {file = "uv-0.4.21.tar.gz", hash = "sha256:9dcddbb3b6e1662c6db41d63db539742450e2ce17d6c746329c016e3651bfb4a"}, +] + +[[package]] +name = "virtualenv" +version = "20.26.6" +requires_python = ">=3.7" +summary = "Virtual Python Environment builder" +groups = ["default", "bootstrap", "dev"] +dependencies = [ + "distlib<1,>=0.3.7", + "filelock<4,>=3.12.2", + "importlib-metadata>=6.6; python_version < \"3.8\"", + "platformdirs<5,>=3.9.1", +] +files = [ + {file = "virtualenv-20.26.6-py3-none-any.whl", hash = "sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2"}, + {file = "virtualenv-20.26.6.tar.gz", hash = "sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48"}, +] + +[[package]] +name = "wheel" +version = "0.44.0" +requires_python = ">=3.8" +summary = "A built-package format for Python" +groups = ["default"] +files = [ + {file = "wheel-0.44.0-py3-none-any.whl", hash = "sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f"}, + {file = "wheel-0.44.0.tar.gz", hash = "sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49"}, +] diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..7303c7e --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,81 
@@ +[project] +name = "venvstacks" +version = "0.1" +description = "Use layered Python virtual environment stacks to share large dependencies" +authors = [ + {name = "Alyssa Coghlan", email = "ncoghlan@gmail.com"}, +] +dependencies = [ + # Environment package installation is run externally (from the build tools environment) + "pip>=24.1.1", + # PDM is used to install the base runtime environments for deployment + "pdm>=2.17.3", + # pip-sync needs to be run externally otherwise it runs into problems + # when the potentially non-portable executables get cleaned up + "pip-tools>=7.4.1", + # `uv pip compile` is used rather than `pip-compile` as it is faster and + # doesn't have to be installed into the target Python runtime environment. + # Due to https://github.com/astral-sh/uv/issues/2500 `uv` can't be used to + # replace `pip install` or `pip-sync` yet. + # Due to https://github.com/astral-sh/uv/issues/2831 we're also not trying to + # replace `python -Im venv` with `uv venv` at this point since we want explicit + # control over whether files are symlinked or copied between the environments + "uv>=0.2.33", + # Typer is used for the CLI interface. Install 'rich-cli' extra for enhanced features. 
+ "typer-slim>=0.12.4", +] +requires-python = ">=3.11" +readme = "README.md" +license = {text = "MIT"} + +[project.optional-dependencies] +rich-cli = [ + # Enable typer's enhanced functionality for local interactive use + "typer>=0.12.4", +] + +[build-system] +requires = ["pdm-backend"] +build-backend = "pdm.backend" + +[tool.pdm] +distribution = true + +[tool.pdm.dev-dependencies] +dev = [ + "tox>=4.16.0", + "tox-gh>=1.3.2", + "tox-pdm>=0.7.2", + "pytest>=8.3.1", + "ruff>=0.5.4", + "mypy>=1.11.0", + "pytest-subtests>=0.13.1", + # Use exact pin for dev as runtime environments are sensitive to the exact pbs version + "pbs-installer==2024.10.10", + # Uses exact pin for dev as lock file regeneration is sensitive to the exact uv version + "uv==0.4.21", +] +git = [ + # Used by `misc/add_lock_metadata.py` to generate missing lock metadata files + "dulwich>=0.22.1", +] +bootstrap = [ + "pdm>=2.16.1", +] + +[tool.pytest.ini_options] +# Allow skipping slow tests for local testing +addopts = "--strict-markers" +markers = [ + "slow: marks tests as slow (deselect with '-m \"not slow\"')", + "expected_output: tests to run when regenerating expected output", +] +# Make long diffs visible in pytest 8.3.3 and later +verbosity_assertions = 2 +# Ensure test suite doesn't consume too much space in /tmp, while still allowing debugging +tmp_path_retention_policy = "failed" +tmp_path_retention_count = 1 + +[tool.ruff] +# Assume Python 3.11 +target-version = "py311" diff --git a/src/venvstacks/__init__.py b/src/venvstacks/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/venvstacks/__main__.py b/src/venvstacks/__main__.py new file mode 100644 index 0000000..0863d3a --- /dev/null +++ b/src/venvstacks/__main__.py @@ -0,0 +1,7 @@ +"""Allow execution of the package as a script""" + +from .cli import main + +# Handle multiprocessing potentially re-running this module with a name other than `__main__` +if __name__ == "__main__": + main() diff --git 
a/src/venvstacks/_util.py b/src/venvstacks/_util.py new file mode 100644 index 0000000..0f3ed15 --- /dev/null +++ b/src/venvstacks/_util.py @@ -0,0 +1,103 @@ +"""Common utilities for stack creation and venv publication""" + +import os +import os.path +import subprocess +import sys +import tarfile + +from contextlib import contextmanager +from pathlib import Path +from typing import Any, Generator + +WINDOWS_BUILD = hasattr(os, "add_dll_directory") + +StrPath = str | os.PathLike[str] + + +def as_normalized_path(path: StrPath, /) -> Path: + """Normalize given path and make it absolute, *without* resolving symlinks + + Expands user directory references, but *not* environment variable references. + """ + # Ensure user directory references are handled as absolute paths + expanded_path = os.path.expanduser(path) + return Path(os.path.abspath(expanded_path)) + + +@contextmanager +def default_tarfile_filter(filter: str) -> Generator[None, None, None]: + """Temporarily set a global tarfile filter (useful for 3rd party API warnings)""" + if sys.version_info < (3, 12): + # Python 3.11 or earlier, can't set a default extraction filter + yield + return + # Python 3.12 or later, set a scoped default tarfile filter + if not filter.endswith("_filter"): + # Allow users to omit the `_filter` suffix + filter = f"{filter}_filter" + default_filter = getattr(tarfile, filter) + old_filter = tarfile.TarFile.extraction_filter + try: + tarfile.TarFile.extraction_filter = staticmethod(default_filter) + yield + finally: + tarfile.TarFile.extraction_filter = old_filter + + +def get_env_python(env_path: Path) -> Path: + """Return the main Python binary in the given Python environment""" + if WINDOWS_BUILD: + env_python = env_path / "Scripts" / "python.exe" + if not env_python.exists(): + # python-build-standalone puts the Windows Python CLI + # at the base of the runtime folder + env_python = env_path / "python.exe" + else: + env_python = env_path / "bin" / "python" + if env_python.exists(): + 
return env_python + raise FileNotFoundError(f"No Python runtime found in {env_path}") + + +_SUBPROCESS_PYTHON_CONFIG = { + # Ensure any Python invocations don't pick up unwanted sys.path entries + "PYTHONNOUSERSITE": "1", + "PYTHONSAFEPATH": "1", + "PYTHONPATH": "", + "PYTHONSTARTUP": "", + # Ensure UTF-8 mode is used + "PYTHONUTF8": "1", + "PYTHONLEGACYWINDOWSFSENCODING": "", + "PYTHONLEGACYWINDOWSSTDIO": "", + # There are other dev settings that may cause problems, but are also unlikely to be set + # See https://docs.python.org/3/using/cmdline.html#environment-variables + # These settings are here specifically to avoid the `pip-sync` issues noted + # in https://github.com/jazzband/pip-tools/issues/2117 +} + + +def run_python_command_unchecked( + # Narrow list/dict type specs here due to the way `subprocess.run` params are typed + command: list[str], + *, + env: dict[str, str] | None = None, + **kwds: Any, +) -> subprocess.CompletedProcess[str]: + if env is None: + env = os.environ.copy() + env.update(_SUBPROCESS_PYTHON_CONFIG) + result: subprocess.CompletedProcess[str] = subprocess.run( + command, env=env, text=True, **kwds + ) + return result + + +def run_python_command( + # Narrow list/dict type specs here due to the way `subprocess.run` params are typed + command: list[str], + **kwds: Any, +) -> subprocess.CompletedProcess[str]: + result = run_python_command_unchecked(command, **kwds) + result.check_returncode() + return result diff --git a/src/venvstacks/cli.py b/src/venvstacks/cli.py new file mode 100644 index 0000000..5499621 --- /dev/null +++ b/src/venvstacks/cli.py @@ -0,0 +1,546 @@ +"""Command line interface implementation""" + +import os.path + +from typing import Annotated + +import typer + +from .stacks import StackSpec, BuildEnvironment, _format_json, IndexConfig + +_cli = typer.Typer( + add_completion=False, + pretty_exceptions_show_locals=False, + no_args_is_help=True, +) + + +@_cli.callback(name="python -m venvstacks") +def handle_app_options() -> 
None: + """Lock, build, and publish Python virtual environment stacks.""" + # TODO: Handle app logging config via main command level options + # Part of https://github.com/lmstudio-ai/venvstacks/issues/5 + + +# The shared CLI argument and option annotations have to be module level globals, +# otherwise pylance complains about runtime variable use in type annotations +# +# Defined prefixes: +# +# * _CLI_ARG: required argument (no option name, arbitrary type accepting a string arg) +# * _CLI_OPT_FLAG: boolean option, must be True or False +# * _CLI_OPT_TRISTATE: boolean option, but allows None to indicate "not set" +# * _CLI_OPT_STR: optional string (defaulting to empty string to indicate "not set") +# * _CLI_OPT_STRLIST: multi-value list of strings +# +# The unit tests ensure the internal consistency of the CLI command annotations + +# Format skips below keep arg annotations from being collapsed onto fewer lines + +# Required arguments: where to find the stack spec +_CLI_ARG_spec_path = Annotated[ + str, + typer.Argument(help="Path to TOML file specifying layers to build") +] # fmt: skip + +# Optional directory arguments: where to put build and output artifacts +_CLI_OPT_STR_build_dir = Annotated[ + str, + typer.Option(help="Directory (relative to spec) for intermediate build artifacts"), +] # fmt: skip +_CLI_OPT_STR_output_dir = Annotated[ + str, + typer.Option(help="Directory (relative to spec) for output artifacts and metadata"), +] # fmt: skip + +# Build pipeline steps: cleaning, locking, building, and publishing archives +_CLI_OPT_FLAG_clean = Annotated[ + bool, + typer.Option(help="Remove existing environments before building") +] # fmt: skip +_CLI_OPT_FLAG_lock = Annotated[ + bool, + typer.Option(help="Update layer lock files before building") +] # fmt: skip +_CLI_OPT_FLAG_build = Annotated[ + bool, + typer.Option(help="Build layer environments") +] # fmt: skip +_CLI_OPT_FLAG_publish = Annotated[ + bool, + typer.Option(help="Create archives from built 
environments") +] # fmt: skip + +# Configuring the input artifact retrieval process +_CLI_OPT_FLAG_index = Annotated[ + bool, + typer.Option( + help="Query the default package index (PyPI) for installation artifacts" + ), +] # fmt: skip +_CLI_OPT_FLAG_allow_source = Annotated[ + bool, + typer.Option( + help="Allow implicit source builds (may affect archive reproducibility)" + ), +] # fmt: skip +_CLI_OPT_STRLIST_local_wheels = Annotated[ + list[str] | None, + typer.Option( + help="Additional directory (relative to spec) for locally built wheel archives" + ), +] # fmt: skip + +# Adjust naming of published archives and metadata files +_CLI_OPT_FLAG_tag_outputs = Annotated[ + bool, + typer.Option(help="Include platform and other details in published names") +] # fmt: skip + +# Archive publication options for the publish subcommand +_CLI_OPT_FLAG_force = Annotated[ + bool, + typer.Option(help="Publish archive even if input metadata is unchanged") +] # fmt: skip +_CLI_OPT_FLAG_dry_run = Annotated[ + bool, + typer.Option(help="List archives that would be published") +] # fmt: skip + +# Selective processing of defined layers +_CLI_OPT_STRLIST_include = Annotated[ + list[str] | None, + typer.Option( + help="Include specified layer in requested operations.\n" + "Option may be supplied multiple times to match multiple layer names.\n" + "Also accepts Python 'fnmatch' syntax to match multiple layer names.\n" + "If this option is omitted, all defined layers are included." 
+ ), +] +_CLI_OPT_FLAG_allow_missing = Annotated[ + bool, + typer.Option(help="Allow '--include' entries that do not match any layers") +] # fmt: skip + +# Handling layers that included layers depend on +_CLI_OPT_FLAG_lock_dependencies = Annotated[ + bool, + typer.Option(help="Also lock dependencies of included layers") +] # fmt: skip +_CLI_OPT_FLAG_build_dependencies = Annotated[ + bool, + typer.Option(help="Also build dependencies of included layers") +] # fmt: skip +_CLI_OPT_FLAG_publish_dependencies = Annotated[ + bool, + typer.Option(help="Also publish dependencies of included layers") +] # fmt: skip +_CLI_OPT_TRISTATE_include_dependencies = Annotated[ + bool | None, + typer.Option(help="All operations include dependencies of included layers"), +] # fmt: skip + +# Handling layers that depend on included layers +# Note: when locking, layers that depend on included layers are *always* relocked +_CLI_OPT_FLAG_build_derived = Annotated[ + bool, + typer.Option(help="Also build environments that depend on included layers") +] # fmt: skip +_CLI_OPT_FLAG_publish_derived = Annotated[ + bool, + typer.Option(help="Also publish archives that depend on included layers") +] # fmt: skip + + +def _define_build_environment( + spec_path: str, + build_path: str, + *, + index: bool, + allow_source: bool, + local_wheels: list[str] | None, +) -> BuildEnvironment: + """Load given stack specification and define a build environment""" + stack_spec = StackSpec.load(spec_path) + index_config = IndexConfig( + query_default_index=index, + allow_source_builds=allow_source, + local_wheel_dirs=local_wheels, + ) + return stack_spec.define_build_environment(build_path, index_config) + + +def _handle_layer_include_options( + build_env: BuildEnvironment, + include: list[str], + allow_missing: bool, + lock: bool, + build: bool, + publish: bool, + lock_dependencies: bool, + build_dependencies: bool, + publish_dependencies: bool, + build_derived: bool, + publish_derived: bool, +) -> None: + 
unmatched_patterns = build_env.get_unmatched_patterns(include) + if unmatched_patterns: + err_details = f"No matching layers found for: {unmatched_patterns!r}" + if allow_missing: + print(f"WARNING: {err_details}") + else: + warning_hint = "Pass '--allow-missing' to convert to warning" + print(f"ERROR: {err_details}\n {warning_hint}") + raise typer.Exit(code=1) + build_env.select_layers( + include, + lock=lock, + build=build, + publish=publish, + lock_dependencies=lock_dependencies, + build_dependencies=build_dependencies, + publish_dependencies=publish_dependencies, + build_derived=build_derived, + publish_derived=publish_derived, + ) + + +def _publication_dry_run( + build_env: BuildEnvironment, + output_dir: str, + tag_outputs: bool, +) -> None: + base_output_path, dry_run_result = build_env.publish_artifacts( + output_dir, + dry_run=True, + tag_outputs=tag_outputs, + ) + print("Archive creation skipped, reporting publishing request details:") + print(_format_json(dry_run_result)) + print(f"Archives and metadata will be published to {base_output_path}") + print("Archive creation skipped (specify --publish to request archive build)") + + +def _publish_artifacts( + build_env: BuildEnvironment, + output_dir: str, + force: bool, + dry_run: bool, + tag_outputs: bool, +) -> None: + if dry_run: + _publication_dry_run(build_env, output_dir, tag_outputs=tag_outputs) + return + manifest_path, snippet_paths, archive_paths = build_env.publish_artifacts( + output_dir, + force=force, + tag_outputs=tag_outputs, + ) + base_output_path = os.path.commonpath( + [manifest_path, *snippet_paths, *archive_paths] + ) + print(manifest_path.read_text(encoding="utf-8")) + print( + f"Full stack manifest saved to: {manifest_path.relative_to(base_output_path)}" + ) + print("Individual layer manifests:") + for snippet_path in snippet_paths: + print(f" {snippet_path.relative_to(base_output_path)}") + print("Generated artifacts:") + for archive_path in archive_paths: + print(f" 
{archive_path.relative_to(base_output_path)}") + print(f"All paths reported relative to {base_output_path}") + + +def _export_dry_run( + build_env: BuildEnvironment, + output_dir: str, +) -> None: + base_output_path, dry_run_result = build_env.export_environments( + output_dir, + dry_run=True, + ) + print("Environment export skipped, reporting local export request details:") + print(_format_json(dry_run_result)) + print(f"Environments will be exported to {base_output_path}") + print("Environment export skipped (specify --publish to request local export)") + + +def _export_environments( + build_env: BuildEnvironment, + output_dir: str, + force: bool, + dry_run: bool, +) -> None: + if dry_run: + _export_dry_run(build_env, output_dir) + return + manifest_path, snippet_paths, env_paths = build_env.export_environments( + output_dir, + force=force, + ) + base_output_path = os.path.commonpath([manifest_path, *snippet_paths, *env_paths]) + print(manifest_path.read_text(encoding="utf-8")) + print( + f"Full export manifest saved to: {manifest_path.relative_to(base_output_path)}" + ) + print("Individual manifests:") + for snippet_path in snippet_paths: + print(f" {snippet_path.relative_to(base_output_path)}") + print("Exported environments:") + for env_path in env_paths: + print(f" {env_path.relative_to(base_output_path)}") + print(f"All paths reported relative to {base_output_path}") + + +@_cli.command() +def build( + # Required arguments: where to find the stack spec + spec_path: _CLI_ARG_spec_path, + # Optional directory arguments: where to put build and output artifacts + build_dir: _CLI_OPT_STR_build_dir = "_build", + output_dir: _CLI_OPT_STR_output_dir = "_artifacts", + # Pipeline steps: cleaning, locking, building, and publishing archives + clean: _CLI_OPT_FLAG_clean = False, + lock: _CLI_OPT_FLAG_lock = False, + build: _CLI_OPT_FLAG_build = True, + publish: _CLI_OPT_FLAG_publish = False, + # Package index access configuration + index: _CLI_OPT_FLAG_index = True, + 
allow_source: _CLI_OPT_FLAG_allow_source = False, + local_wheels: _CLI_OPT_STRLIST_local_wheels = None, + # Adjust naming of published archives and metadata files + tag_outputs: _CLI_OPT_FLAG_tag_outputs = False, + # Selective processing of defined layers + include: _CLI_OPT_STRLIST_include = None, + allow_missing: _CLI_OPT_FLAG_allow_missing = False, + # Handling layers that included layers depend on + lock_dependencies: _CLI_OPT_FLAG_lock_dependencies = False, + build_dependencies: _CLI_OPT_FLAG_build_dependencies = False, + publish_dependencies: _CLI_OPT_FLAG_publish_dependencies = False, + include_dependencies: _CLI_OPT_TRISTATE_include_dependencies = None, + # Handling layers that depend on included layers + # Note: when locking, layers that depend on included layers are *always* relocked + build_derived: _CLI_OPT_FLAG_build_derived = False, + publish_derived: _CLI_OPT_FLAG_publish_derived = False, +) -> None: + """Build (/lock/publish) Python virtual environment stacks.""" + if include_dependencies is not None: + # Override the per-operation settings + lock_dependencies = build_dependencies = publish_dependencies = ( + include_dependencies + ) + build_env = _define_build_environment( + spec_path, + build_dir, + index=index, + allow_source=allow_source, + local_wheels=local_wheels, + ) + # Update the various `want_*` flags on each environment + # Note: CLI `publish` controls the `dry_run` flag on the `publish_artifacts` method call + if include: + _handle_layer_include_options( + build_env, + include, + allow_missing=allow_missing, + lock=lock, + build=build, + publish=True, + lock_dependencies=lock_dependencies, + build_dependencies=build_dependencies, + publish_dependencies=publish_dependencies, + build_derived=build_derived, + publish_derived=publish_derived, + ) + else: + build_env.select_operations( + lock=lock, + build=build, + publish=True, + ) + build_env.create_environments(clean=clean, lock=lock) + _publish_artifacts( + build_env, output_dir, 
dry_run=not publish, force=clean, tag_outputs=tag_outputs + ) + + +@_cli.command() +def lock( + # Required arguments: where to find the stack spec, and where to emit any output files + spec_path: _CLI_ARG_spec_path, + # Optional directory arguments: where to put build artifacts (must create envs to lock them) + build_dir: _CLI_OPT_STR_build_dir = "_build", + # Pipeline steps: cleaning is the only optional step for this subcommand + clean: _CLI_OPT_FLAG_clean = False, + # Package index access configuration + index: _CLI_OPT_FLAG_index = True, + allow_source: _CLI_OPT_FLAG_allow_source = False, + local_wheels: _CLI_OPT_STRLIST_local_wheels = None, + # Selective processing of defined layers + include: _CLI_OPT_STRLIST_include = None, + allow_missing: _CLI_OPT_FLAG_allow_missing = False, + # Whether to lock the layers that the included layers depend on + lock_dependencies: _CLI_OPT_FLAG_lock_dependencies = False, + # When locking, layers that depend on included layers are *always* relocked +) -> None: + """Lock layer requirements for Python virtual environment stacks.""" + build_env = _define_build_environment( + spec_path, + build_dir, + index=index, + allow_source=allow_source, + local_wheels=local_wheels, + ) + # Update the various `want_*` flags on each environment + if include: + _handle_layer_include_options( + build_env, + include, + allow_missing=allow_missing, + lock=True, + build=False, + publish=False, + lock_dependencies=lock_dependencies, + build_dependencies=False, + publish_dependencies=False, + build_derived=False, + publish_derived=False, + ) + else: + build_env.select_operations( + lock=True, + build=False, + publish=False, + ) + lock_results = build_env.lock_environments(clean=clean) + if not lock_results: + print("No environments found to lock") + else: + base_output_path = os.path.commonpath( + [env.requirements_path for env in lock_results] + ) + print("Locked environments:") + for env_lock in lock_results: + relative_path = 
env_lock.requirements_path.relative_to(base_output_path) + print(f" {relative_path} (locked at: {env_lock.locked_at})") + print(f"All paths reported relative to {base_output_path}") + + +@_cli.command() +def publish( + # Required arguments: where to find the stack spec, and where to emit any output files + spec_path: _CLI_ARG_spec_path, + # Optional directory arguments: where to find build artifacts and put output artifacts + build_dir: _CLI_OPT_STR_build_dir = "_build", + output_dir: _CLI_OPT_STR_output_dir = "_artifacts", + # Optional behaviour + force: _CLI_OPT_FLAG_force = False, + dry_run: _CLI_OPT_FLAG_dry_run = False, + # Adjust naming of published archives and metadata files + tag_outputs: _CLI_OPT_FLAG_tag_outputs = False, + # Selective processing of defined layers + include: _CLI_OPT_STRLIST_include = None, + allow_missing: _CLI_OPT_FLAG_allow_missing = False, + # Handling layers that included layers depend on + publish_dependencies: _CLI_OPT_FLAG_publish_dependencies = False, + # Handling layers that depend on included layers + publish_derived: _CLI_OPT_FLAG_publish_derived = False, +) -> None: + """Publish layer archives for Python virtual environment stacks.""" + build_env = _define_build_environment( + spec_path, + build_dir, + # No locking or build steps will be invoked on the environment + index=False, + allow_source=False, + local_wheels=None, + ) + # Update the various `want_*` flags on each environment + # Note: CLI `publish` controls the `dry_run` flag on the `publish_artifacts` method call + if include: + _handle_layer_include_options( + build_env, + include, + allow_missing=allow_missing, + lock=False, + build=False, + publish=True, + lock_dependencies=False, + build_dependencies=False, + publish_dependencies=publish_dependencies, + build_derived=False, + publish_derived=publish_derived, + ) + else: + build_env.select_operations( + lock=False, + build=False, + publish=True, + ) + _publish_artifacts( + build_env, output_dir, force=force, 
dry_run=dry_run, tag_outputs=tag_outputs + ) + + +@_cli.command() +def local_export( + # Required arguments: where to find the stack spec, and where to emit any output files + spec_path: _CLI_ARG_spec_path, + # Optional directory arguments: where to find build artifacts and put output artifacts + build_dir: _CLI_OPT_STR_build_dir = "_build", + output_dir: _CLI_OPT_STR_output_dir = "_export", + # Optional behaviour + force: _CLI_OPT_FLAG_force = False, + dry_run: _CLI_OPT_FLAG_dry_run = False, + # Selective processing of defined layers + include: _CLI_OPT_STRLIST_include = None, + allow_missing: _CLI_OPT_FLAG_allow_missing = False, + # Handling layers that included layers depend on + publish_dependencies: _CLI_OPT_FLAG_publish_dependencies = False, + # Handling layers that depend on included layers + publish_derived: _CLI_OPT_FLAG_publish_derived = False, +) -> None: + """Export layer environments for Python virtual environment stacks.""" + build_env = _define_build_environment( + spec_path, + build_dir, + # No locking or build steps will be invoked on the environment + index=False, + allow_source=False, + local_wheels=None, + ) + # Update the various `want_*` flags on each environment + # Note: CLI `publish` controls the `dry_run` flag on the `publish_artifacts` method call + if include: + _handle_layer_include_options( + build_env, + include, + allow_missing=allow_missing, + lock=False, + build=False, + publish=True, + lock_dependencies=False, + build_dependencies=False, + publish_dependencies=publish_dependencies, + build_derived=False, + publish_derived=publish_derived, + ) + else: + build_env.select_operations( + lock=False, + build=False, + publish=True, + ) + _export_environments( + build_env, + output_dir, + force=force, + dry_run=dry_run, + ) + + +def main(args: list[str] | None = None) -> None: + # Indirectly calls the relevant click.Command variant's `main` method + # See https://click.palletsprojects.com/en/8.1.x/api/#click.BaseCommand.main + _cli(args) 
diff --git a/src/venvstacks/pack_venv.py b/src/venvstacks/pack_venv.py new file mode 100755 index 0000000..c09d28d --- /dev/null +++ b/src/venvstacks/pack_venv.py @@ -0,0 +1,601 @@ +#!/usr/bin/env python3 +"""Utility library to convert Python virtual environments to portable archives""" + +# This is conceptually inspired by conda-pack (but structured somewhat differently). +# venv-pack and venv-pack2 were considered, and may still be an option in the future, +# but for now, something narrowly focused on the needs of the venvstacks project +# is the preferred option. +# +# This is primarily about reducing the number of potential sources of bugs - while +# conda-pack appears to be reasonably well used, venv-pack/venv-pack2 are much less +# popular, as there's a competing approach for regular virtual environments in +# https://github.com/cloudify-cosmo/wagon (where an archive of pre-built wheels is +# shipped to target systems, and then `venv` and `pip` in `--no-index` mode are run +# directly on the target to create the deployed virtual environments). + +# Requirements: +# +# * must work on Linux, Windows, macOS +# * internal symlinks are permitted but not required (e.g. 
on Windows) +# * relative external symlinks to adjacent folders are similarly permitted +# * external symlinks beyond that boundary are converted to hard links +# * zip archives are used on Windows, tar.xz archives on other platforms + +# Allowances/limitations: +# +# * archives for a given target platform are built on the same platform +# * all entry point scripts are removed, as Python is explicitly invoked on target systems +# * environment activation scripts are dropped from the archives rather than fixed on target +# * all RECORD files are removed, as they may reference files with build dependent hashes +# (specifically, scripts that have their shebang lines rewritten at install time) +# * all __pycache__ folders are omitted (as their contents incorporate absolute paths) +# +# Note: stacks.py covers dropping the activation scripts and files with shebang lines +# + +import os +import shutil +import sys +import tempfile +import time + +from datetime import datetime, timedelta, timezone, tzinfo +from pathlib import Path +from typing import cast, Any, Callable, TextIO + +from ._util import as_normalized_path, StrPath, WINDOWS_BUILD as _WINDOWS_BUILD + +_PRECOMPILATION_COMMANDS = """\ +# Precompile Python library modules +from compileall import compile_dir +venv_pylib_path = venv_path / "lib" # "Lib" on Windows, but Windows is not case sensitive +compile_dir(venv_pylib_path, optimize=0, quiet=True) +""" + +_BASE_RUNTIME_POST_INSTALL_SCRIPT = ( + '''\ +"""Base runtime post-installation script + +* Precompiles all Python files in the library folder + +This post-installation script is automatically injected when packing environments that +do NOT include a `pyvenv.cfg` file (i.e. 
base runtime environments) +""" +from pathlib import Path +venv_path = Path(__file__).parent + +''' + + _PRECOMPILATION_COMMANDS +) + +_LAYERED_ENV_POST_INSTALL_SCRIPT = ( + '''\ +"""Layered environment post-installation script + +* Generates pyvenv.cfg based on the Python runtime executing this script +* Precompiles all Python files in the library folder + +This post-installation script is automatically injected when packing environments that +would otherwise include a `pyvenv.cfg` file (as `pyvenv.cfg` files are not relocatable) +""" +from pathlib import Path +venv_path = Path(__file__).parent + +# Generate `pyvenv.cfg` based on the deployed runtime location +import sys +venv_config_path = venv_path / "pyvenv.cfg" +runtime_executable_path = Path(sys.executable).resolve() +runtime_version = ".".join(map(str, sys.version_info[:3])) +venv_config = f"""\ +home = {runtime_executable_path.parent} +include-system-site-packages = false +version = {runtime_version} +executable = {runtime_executable_path} +""" +venv_config_path.write_text(venv_config, encoding="utf-8") + +''' + + _PRECOMPILATION_COMMANDS +) + +SymlinkInfo = tuple[Path, Path] + + +def convert_symlinks( + env_dir: StrPath, + containing_dir: StrPath | None = None, +) -> tuple[list[SymlinkInfo], list[SymlinkInfo]]: + """Make env portable by making internal symlinks relative and external links hard + + If set, containing path must be a parent directory of the environment path and is + used as the boundary for creating relative symlinks instead of hardlinks. If not set, + the environment path itself is used as the boundary for creating relative symlinks. + + Returns a 2-tuple containing lists of internal relative link conversions and + external hard link conversions. Each list contains source/target Path pairs. 
+ """ + env_path = as_normalized_path(env_dir) + if containing_dir is None: + containing_path = env_path + else: + containing_path = as_normalized_path(containing_dir) + if not env_path.is_relative_to(containing_path): + raise ValueError( + f"{str(env_path)!r} is not within {str(containing_path)!r}" + ) + + relative_links = [] + external_links = [] + # Ensure internal symlinks are relative, collect external links for hardlink conversion. + # The external links are *not* eagerly converted, so only the final link in any internal + # symlink chains gets converted to a hard link. + for file_path in env_path.rglob("*"): + if not file_path.is_symlink(): + continue + target_path = file_path.readlink() + absolute_target_path = file_path.parent / target_path + if not absolute_target_path.is_relative_to(containing_path): + # Link target is outside the environment being packed, + # so replace it with a hard link to the actual underlying file + resolved_target_path = file_path.resolve() + external_links.append((file_path, resolved_target_path)) + continue + # Ensure symlinks within the containing path are relative + expected_path = Path( + os.path.relpath(str(absolute_target_path), start=str(file_path.parent)) + ) + if target_path == expected_path: + # Symlink is already relative as desired + continue + # Convert absolute symlink to relative symlink + file_path.unlink() + file_path.symlink_to(expected_path) + relative_links.append((file_path, file_path.readlink())) + # Convert any external symlinks to a hard link instead + for file_path, resolved_target_path in external_links: + file_path.unlink() + file_path.hardlink_to(resolved_target_path) + return relative_links, external_links + + +def get_archive_path(archive_base_name: StrPath) -> Path: + """Report the name of the archive that will be created for the given base name""" + extension = ".zip" if _WINDOWS_BUILD else ".tar.xz" + return Path(os.fspath(archive_base_name) + extension) + + +def _inject_postinstall_script( + 
env_path: Path, script_name: str = "postinstall.py" +) -> Path: + venv_config_path = env_path / "pyvenv.cfg" + if venv_config_path.exists(): + # The venv config contains absolute paths referencing the base runtime environment + # Remove it here, let the post-install script recreate it + venv_config_path.unlink() + script_contents = _LAYERED_ENV_POST_INSTALL_SCRIPT + else: + script_contents = _BASE_RUNTIME_POST_INSTALL_SCRIPT + script_path = env_path / script_name + script_path.write_text(script_contents, encoding="utf-8") + return script_path + + +def _supports_symlinks(target_path: Path) -> bool: + with tempfile.TemporaryDirectory(dir=target_path) as link_check_dir: + link_check_path = Path(link_check_dir) + link_path = link_check_path / "dest" + try: + os.symlink("src", link_path) + except OSError: + # Failed to create symlink under the target path + return False + # Successfully created a symlink under the target path + return True + + +def export_venv( + source_dir: StrPath, + target_dir: StrPath, + run_postinstall: Callable[[Path, Path], None] | None = None, +) -> Path: + """Export the given build environment, skipping archive creation and unpacking + + * injects a suitable `postinstall.py` script for the environment being exported + * excludes __pycache__ folders and package metadata RECORD files + * replaces symlinks with copies on Windows or if the target doesn't support symlinks + + If supplied, *run_postinstall* is called with the path to the environment's Python + interpreter and its postinstall script, allowing execution of the post-install + script by the calling application. The post-install script is NOT implicitly + executed by the export process. + + Returns the path to the exported environment. 
+ """ + source_path = as_normalized_path(source_dir) + target_path = as_normalized_path(target_dir) + excluded = shutil.ignore_patterns("__pycache__", "RECORD") + # Avoid symlinks on Windows, as they need elevated privileges to create + # Also avoid them if the target folder doesn't support symlink creation + # (that way exports to FAT/FAT32/VFAT file systems should work, even if + # it means some files end up getting duplicated on the target) + # Otherwise, assume symlinks have already been converted with convert_symlinks + target_path.mkdir(parents=True, exist_ok=True) + publish_symlinks = not _WINDOWS_BUILD and _supports_symlinks(target_path) + shutil.copytree( + source_path, + target_path, + ignore=excluded, + symlinks=publish_symlinks, + dirs_exist_ok=True, + ) + postinstall_path = _inject_postinstall_script(target_path) + if run_postinstall is not None: + run_postinstall(target_path, postinstall_path) + return target_path + + +def create_archive( + source_dir: StrPath, + archive_base_name: StrPath, + *, + install_target: str | None = None, + clamp_mtime: datetime | None = None, + work_dir: StrPath | None = None, + show_progress: bool = True, +) -> Path: + """shutil.make_archive replacement, tailored for Python virtual environments + + * injects a suitable `postinstall.py` script for the environment being archived + * always creates zipfile archives on Windows and xztar archives elsewhere + * excludes __pycache__ folders and package metadata RECORD files + * replaces symlinks with copies on Windows and allows external symlinks elsewhere + * discards owner and group information for tar archives + * clamps mtime of archived files to the given clamp mtime at the latest + * shows progress reporting by default (archiving built ML/AI libs is *slooooow*) + + Set `work_dir` if /tmp is too small for archiving tasks + """ + archive_path = as_normalized_path(archive_base_name) + source_path = Path(source_dir) + if install_target is None: + install_target = 
source_path.name + with tempfile.TemporaryDirectory(dir=work_dir) as tmp_dir: + target_path = Path(tmp_dir) / install_target + env_path = export_venv(source_path, target_path) + if not show_progress: + + def report_progress(_: Any) -> None: + pass + else: + progress_bar = ProgressBar() + progress_bar.show(0.0) + num_archive_entries = 0 + total_entries_to_archive = sum(1 for __ in env_path.rglob("*")) + + def report_progress(_: Any) -> None: + nonlocal num_archive_entries + num_archive_entries += 1 + progress_bar.show(num_archive_entries / total_entries_to_archive) + + max_mtime: int | None = None + if clamp_mtime is not None: + # We force UTC here as all builds should be happening on a filesystem that uses + # UTC timestamps (i.e. no FAT/FAT32/VFAT allowed). + # That means NTFS on Windows and any vaguely modern POSIX filesystem elsewhere. + # To avoid filesystem time resolution quirks without relying on the resolution + # details of the various archive formats, truncate mtime to exact seconds + max_mtime = int(clamp_mtime.astimezone(timezone.utc).timestamp()) + archive_with_extension = _make_archive( + archive_path, env_path.parent, env_path.name, max_mtime, report_progress + ) + if show_progress: + # Ensure progress bar completion is reported, even if there's a discrepancy + # between the number of paths found by `rglob` and the number of archive entries + progress_bar.show(1.0) + # The name query and the archive creation should always report the same archive name + assert archive_with_extension == os.fspath(get_archive_path(archive_base_name)) + return Path(archive_with_extension) + + +# Would prefer to use shutil.make_archive, but the way it works doesn't quite fit this case +# _make_tar_archive below is adjusted to be similar to make_archive, but adapted from +# https://github.com/python/cpython/blob/99d945c0c006e3246ac00338e37c443c6e08fc5c/Lib/shutil.py#L930 +# to work around the limitations mentioned in https://github.com/python/cpython/issues/120036 +# Puts 
# Would prefer to use shutil.make_archive, but the way it works doesn't quite fit this case
# _make_tar_archive below is adjusted to be similar to make_archive, but adapted from
# https://github.com/python/cpython/blob/99d945c0c006e3246ac00338e37c443c6e08fc5c/Lib/shutil.py#L930
# to work around the limitations mentioned in https://github.com/python/cpython/issues/120036
# Puts this utility module under the Python License, but the runtime layers already include
# CPython, so also using it in the build utility doesn't introduce any new licensing concerns
ProgressCallback = Callable[[str], None]


def _make_tar_archive(
    base_name: StrPath,
    root_dir: StrPath,
    base_dir: StrPath,
    max_mtime: float | None = None,
    progress_callback: ProgressCallback | None = None,
    *,
    compress: str = "xz",
) -> str:
    """Create a (possibly compressed) tar file from all the files under
    'base_dir'.

    'compress' must be "gzip", "bzip2", "xz", or None.

    Owner and group info is always set to 0/"root" as per
    https://reproducible-builds.org/docs/archives/.

    The output tar file will be named 'base_name' + ".tar", possibly plus
    the appropriate compression extension (".gz", ".bz2", or ".xz").

    Returns the output filename.
    """
    import tarfile  # lazy import since ideally shutil would handle everything

    # pylance complains if the tar_mode string is built dynamically
    if compress is None or compress == "":
        tar_mode = "w"
        compress_ext = ""
    elif compress == "gzip":
        tar_mode = "w:gz"
        compress_ext = ".gz"
    elif compress == "bzip2":
        tar_mode = "w:bz2"
        # Fix: bzip2 archives use the ".bz2" extension (this previously said ".gz",
        # which would have produced a misnamed archive for bzip2 compression)
        compress_ext = ".bz2"
    elif compress == "xz":
        tar_mode = "w:xz"
        compress_ext = ".xz"
    else:
        raise ValueError(
            "bad value for 'compress', or compression format not "
            "supported : {0}".format(compress)
        )

    archive_name = os.fspath(base_name) + ".tar" + compress_ext
    archive_dir = os.path.dirname(archive_name)

    if archive_dir and not os.path.exists(archive_dir):
        os.makedirs(archive_dir)

    # Clamp mtime only if requested
    # Force to an int to keep mypy happy: https://github.com/python/typeshed/issues/12520
    # Once a mypy update is published with that issue fixed, this workaround can
    # be replaced by a minimum mypy version requirement in the dev dependencies.
    if max_mtime is None:
        _clamp_mtime = None
    else:
        truncated_max_mtime = int(max_mtime)

        def _clamp_mtime_impl(mtime: int | float) -> int:
            # pyright has a newer typeshed than mypy, so resort to a cast
            # on the input value until this entire workaround can be dropped
            return min(truncated_max_mtime, cast(int, mtime))

        # Work around for https://github.com/microsoft/pyright/issues/9114
        _clamp_mtime = _clamp_mtime_impl

    # Ensure archive entries are reproducible across repeated builds
    def _process_archive_entry(tarinfo: tarfile.TarInfo) -> tarfile.TarInfo:
        # Omit owner & group info from build system
        tarinfo.uid = tarinfo.gid = 0
        tarinfo.uname = tarinfo.gname = "root"
        if _clamp_mtime is not None:
            tarinfo.mtime = _clamp_mtime(tarinfo.mtime)
        # Report progress if requested
        if progress_callback is not None:
            progress_callback(tarinfo.name)
        return tarinfo

    # Creating the tarball (context manager guarantees the file handle is closed)
    arcname = base_dir
    if root_dir is not None:
        base_dir = os.path.join(root_dir, base_dir)
    with tarfile.open(archive_name, tar_mode) as tar:
        # In Python 3.7+, tar.add inherently adds entries in sorted order
        tar.add(base_dir, arcname, filter=_process_archive_entry)

    if root_dir is not None:
        archive_name = os.path.abspath(archive_name)
    return archive_name


# _make_zipfile below is adjusted to be similar to make_archive, but adapted from
# https://github.com/python/cpython/blob/99d945c0c006e3246ac00338e37c443c6e08fc5c/Lib/shutil.py#L1000

if _WINDOWS_BUILD:

    def set_mtime(fspath: str, mtime: int | float) -> None:
        """Set the filesystem mtime for the given path"""
        # There's no `follow_symlinks` option available on Windows
        os.utime(fspath, (mtime, mtime))
else:

    def set_mtime(fspath: str, mtime: int | float) -> None:
        """Set the filesystem mtime for the given path (without following symlinks)"""
        os.utime(fspath, (mtime, mtime), follow_symlinks=False)
+ """Create a zip file from all the files under 'base_dir'. + + The output zip file will be named 'base_name' + ".zip". Returns the + name of the output zip file. + """ + import zipfile # lazy import since ideally shutil would handle everything + + zip_filename = os.fspath(base_name) + ".zip" + archive_dir = os.path.dirname(base_name) + + if archive_dir and not os.path.exists(archive_dir): + os.makedirs(archive_dir) + + # Unlike _make_tar_archive, progress is reported from multiple places, + # so define a dummy callback if no actual callback is given + if progress_callback is None: + + def _default_progress_callback(_: Any) -> None: + pass + + # Work around for https://github.com/microsoft/pyright/issues/9114 + progress_callback = _default_progress_callback + + # zipfile stores local timestamps: https://github.com/python/cpython/issues/123059 + # We don't want that, but zipfile doesn't currently provide a nice API to adjust the + # timestamps when adding files to the archive, so we instead intentionally make the + # filesystem timestamps *wrong* such that calling `time.localtime` reports a UTC time + need_mtime_adjustment = time.localtime().tm_gmtoff != 0 + if not need_mtime_adjustment: + # Local time is UTC anyway, so no timezone adjustment is needed + def adjust_mtime(mtime: float) -> float: + return mtime + else: + # Adjust filesystem mtime so `zipfile` sets the desired value in the archive entry + # casts are needed due to https://github.com/python/mypy/issues/10067 + local_tz = cast(tzinfo, datetime.now().astimezone().tzinfo) + local_tz_offset = cast(timedelta, local_tz.utcoffset(None)) + + def adjust_mtime(mtime: float) -> float: + # mtime is given here in UTC time. 
To get `zipfile` to see that time value + # when calling `time.localtime`, we need to do a local -> UTC conversion on the + # UTC timestamp, so `zipfile`'s UTC -> local conversion gives back the UTC time + local_mtime = datetime.fromtimestamp(mtime).astimezone() + adjusted_mtime = local_mtime - local_tz_offset + return adjusted_mtime.timestamp() + + with zipfile.ZipFile(zip_filename, "w", compression=zipfile.ZIP_DEFLATED) as zf: + if max_mtime is None: + # Clamp mtime only if requested + max_mtime = float("inf") + + def _add_zip_entry(fspath: str, arcname: str) -> None: + fs_mtime = os.lstat(fspath).st_mtime + zip_entry_mtime = adjust_mtime(min(fs_mtime, max_mtime)) + if zip_entry_mtime != fs_mtime: + set_mtime(fspath, zip_entry_mtime) + zf.write(fspath, arcname) + + arcname = os.path.normpath(base_dir) + if root_dir is not None: + base_dir = os.path.join(root_dir, base_dir) + base_dir = os.path.normpath(base_dir) + if arcname != os.curdir: + _add_zip_entry(base_dir, arcname) + for dirpath, dirnames, filenames in os.walk(base_dir): + arcdirpath = dirpath + if root_dir is not None: + arcdirpath = os.path.relpath(arcdirpath, root_dir) + arcdirpath = os.path.normpath(arcdirpath) + dirnames.sort() # Ensure recursion occurs in a consistent order + for name in dirnames: + path = os.path.join(dirpath, name) + arcname = os.path.join(arcdirpath, name) + _add_zip_entry(path, arcname) + progress_callback(name) + for name in sorted(filenames): + path = os.path.join(dirpath, name) + path = os.path.normpath(path) + if os.path.isfile(path): + arcname = os.path.join(arcdirpath, name) + _add_zip_entry(path, arcname) + progress_callback(name) + + if root_dir is not None: + zip_filename = os.path.abspath(zip_filename) + return zip_filename + + +if _WINDOWS_BUILD: + # No tar unpacking by default on windows, so use zipfile instead + _make_archive = _make_zipfile +else: + # Everywhere else, create XZ compressed tar archives + _make_archive = _make_tar_archive + +# Basic progress bar 
support, taken from my SO answer at +# https://stackoverflow.com/questions/3160699/python-progress-bar/78590319#78590319 +# (since the code originated with me, it isn't subject to Stack Overflow's CC-BY-SA terms) +# +# I originally skipped this, but archiving pytorch (and similarly large AI/ML libraries) +# takes a long time, so you really need some assurance that progress is being made. +# +# If compression times are a significant problem, it would be worth moving in the same +# direction as conda-pack did, and implementing support for parallel compression (the +# compression libraries all drop the GIL when compressing data chunks, so this approach +# scales effectively up to the number of available CPUs) +ProgressSummary = tuple[int, str] +ProgressReport = tuple[str, ProgressSummary] + + +class ProgressBar: + """Display & update a progress bar""" + + TEXT_ABORTING = "Aborting..." + TEXT_COMPLETE = "Complete!" + TEXT_PROGRESS = "Archiving" + + bar_length: int + stream: TextIO + _last_displayed_text: str | None + _last_displayed_summary: ProgressSummary | None + + def __init__(self, bar_length: int = 25, stream: TextIO = sys.stdout) -> None: + self.bar_length = bar_length + self.stream = stream + self._last_displayed_text = None + self._last_displayed_summary = None + + def reset(self) -> None: + """Forget any previously displayed text (affects subsequent call to show())""" + self._last_displayed_text = None + self._last_displayed_summary = None + + def _format_progress(self, progress: float, aborting: bool) -> ProgressReport: + """Internal helper that also reports the number of completed increments""" + bar_length = self.bar_length + progress = float(progress) + if progress >= 1: + # Report task completion + completed_increments = bar_length + status = " " + self.TEXT_COMPLETE + progress = 1.0 + else: + # Truncate progress to ensure bar only fills when complete + completed_increments = int(progress * bar_length) + status = (" " + self.TEXT_ABORTING) if aborting 
else "" + remaining_increments = bar_length - completed_increments + bar_content = f"{'#'*completed_increments}{'-'*remaining_increments}" + percentage = f"{progress*100:.2f}" + progress_text = f"{self.TEXT_PROGRESS}: [{bar_content}] {percentage}%{status}" + return progress_text, (completed_increments, status) + + def format_progress(self, progress: float, *, aborting: bool = False) -> str: + """Format progress bar, percentage, and status for given fractional progress""" + return self._format_progress(progress, aborting)[0] + + def show(self, progress: float, *, aborting: bool = False) -> None: + """Display the current progress on the console""" + progress_text, progress_summary = self._format_progress(progress, aborting) + if progress_text == self._last_displayed_text: + # No change to display output, so skip writing anything + # (this reduces overhead on both interactive and non-interactive streams) + return + interactive = self.stream.isatty() + if not interactive and progress_summary == self._last_displayed_summary: + # For non-interactive streams, skip output if only the percentage has changed + # (this avoids flooding the output on non-interactive streams that ignore '\r') + return + if not interactive or aborting or progress >= 1: + # Final output or non-interactive output, so advance to next line + line_end = "\n" + else: + # Interactive progress output, so try to return to start of current line + line_end = "\r" + sys.stdout.write(progress_text + line_end) + sys.stdout.flush() + self._last_displayed_text = progress_text + self._last_displayed_summary = progress_summary diff --git a/src/venvstacks/py.typed b/src/venvstacks/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/src/venvstacks/stacks.py b/src/venvstacks/stacks.py new file mode 100755 index 0000000..97bae24 --- /dev/null +++ b/src/venvstacks/stacks.py @@ -0,0 +1,2451 @@ +#!/bin/python3 +"""Portable Python environment stacks + +Creates Python runtime, framework, and app environments 
class EnvStackError(ValueError):
    """Common base class for all errors specific to managing environment stacks"""


class LayerSpecError(EnvStackError):
    """Raised when an internal inconsistency is found in a layer specification"""


class BuildEnvError(EnvStackError):
    """Raised when a build environment doesn't comply with process restrictions"""


######################################################
# Filesystem and git helpers
######################################################

if hasattr(os, "sync"):
    fs_sync = os.sync  # type: ignore[attr-defined,unused-ignore]
else:
    # No os.sync on Windows
    def fs_sync() -> None:
        pass


def get_path_mtime(fspath: StrPath) -> datetime | None:
    """Local-timezone mtime of the given path, or None if it doesn't exist"""
    path = Path(fspath)
    if not path.exists():
        return None
    raw_mtime = path.lstat().st_mtime
    return datetime.fromtimestamp(raw_mtime).astimezone()


def format_as_utc(dt: datetime) -> str:
    """Render the given datetime as an ISO formatted UTC timestamp"""
    return dt.astimezone(timezone.utc).isoformat()


def _format_json(data: Any) -> str:
    # Deterministic, human-readable JSON rendering (sorted keys, 2-space indent)
    return json.dumps(data, indent=2, sort_keys=True)


def _write_json(target_path: Path, data: Any) -> None:
    # Write formatted JSON with Unix line endings and a trailing newline
    serialized = _format_json(data)
    with target_path.open("w", encoding="utf-8", newline="\n") as json_file:
        json_file.write(serialized + "\n")


def _resolve_lexical_path(path: StrPath, base_path: Path, /) -> Path:
    """Resolve the given path against *base_path* without following symlinks"""
    # Ensure `~/...` paths are treated as absolute
    candidate = Path(path).expanduser()
    if not candidate.is_absolute():
        # Resolve paths relative to the given base path
        candidate = base_path / candidate
    # Avoid resolving symlinks (so they're respected when calculating relative paths)
    # but remove any `/../` segments
    return as_normalized_path(candidate)
######################################################
# Specifying package index access settings
######################################################


@dataclass
class IndexConfig:
    """Python package index access configuration"""

    query_default_index: bool = field(default=True)
    allow_source_builds: bool = field(default=False)
    local_wheel_dirs: InitVar[Sequence[StrPath] | None] = None
    local_wheel_paths: list[Path] = field(init=False)

    def __post_init__(self, local_wheel_dirs: Sequence[StrPath] | None) -> None:
        # A bare string (or single path) is a likely caller mistake, so reject it
        if isinstance(local_wheel_dirs, (str, Path)):
            err_msg = f"local_wheel_dirs must be a sequence of paths (got {local_wheel_dirs!r})"
            raise TypeError(err_msg)
        self.local_wheel_paths = (
            [Path(wheel_dir) for wheel_dir in local_wheel_dirs]
            if local_wheel_dirs
            else []
        )

    @classmethod
    def disabled(cls) -> Self:
        """Create a configuration with all package index access turned off"""
        return cls(
            query_default_index=False,
            allow_source_builds=False,
            local_wheel_dirs=None,
        )

    def resolve_lexical_paths(self, base_path: StrPath) -> None:
        """Lexically resolve paths in config relative to the given base path"""
        anchor = Path(base_path)
        self.local_wheel_paths[:] = [
            _resolve_lexical_path(wheel_dir, anchor)
            for wheel_dir in self.local_wheel_paths
        ]

    @staticmethod
    def _get_require_binary_args() -> list[str]:
        # Refuse to build from sdists when this restriction applies
        return ["--only-binary", ":all:"]

    def _get_common_pip_args(self, require_binary: bool) -> list[str]:
        args: list[str] = []
        if not self.query_default_index:
            args.append("--no-index")
        if require_binary:
            args.extend(self._get_require_binary_args())
        for wheel_path in self.local_wheel_paths:
            args.extend(("--find-links", os.fspath(wheel_path)))
        return args

    def _get_uv_pip_compile_args(self) -> list[str]:
        return self._get_common_pip_args(not self.allow_source_builds)

    def _get_pip_install_args(self, require_binary: bool | None) -> list[str]:
        if require_binary is None:
            require_binary = not self.allow_source_builds
        return self._get_common_pip_args(require_binary)

    def _get_pip_sync_args(self) -> list[str]:
        # Local cache should always have been populated by the time sync runs
        # pip-sync wraps pip, so only some args are accepted at top level
        sync_args = self._get_common_pip_args(require_binary=False)
        pip_args = " ".join(self._get_require_binary_args())
        if pip_args:
            sync_args.extend(("--pip-args", pip_args))
        return sync_args


######################################################
# Specifying layered environments
######################################################

# Define dedicated nominal types to help reduce risk of confusing
# layer base names (no prefix), layer build environment names
# (potentially with a kind prefix), and deployed layer environment
# names (potentially with a version suffix)
LayerBaseName = NewType("LayerBaseName", str)
EnvNameBuild = NewType("EnvNameBuild", str)
EnvNameDeploy = NewType("EnvNameDeploy", str)


class EnvironmentLockMetadata(TypedDict):
    """Details of the most recent environment lock (as persisted to disk)"""

    # fmt: off
    locked_at: str          # ISO formatted date/time value
    requirements_hash: str  # Uses "algorithm:hexdigest" format
    lock_version: int       # Auto-incremented from previous lock metadata
    # fmt: on


_T = TypeVar("_T")
@dataclass
class EnvironmentLock:
    """Tracks the lock state of a layer's transitive requirements lockfile"""

    # Platform-specific fully pinned requirements file for the layer
    requirements_path: Path
    # Whether lock versions are tracked (adds an "@N" suffix to deployed names)
    versioned: bool
    # Derived state (populated in __post_init__, not supplied by callers)
    lock_metadata_path: Path = field(init=False, repr=False)
    _requirements_hash: str | None = field(init=False, repr=False)
    _last_locked: datetime | None = field(init=False, repr=False)
    _lock_version: int | None = field(init=False, repr=False)

    def __post_init__(self) -> None:
        # Lock metadata lives alongside the lockfile with ".json" appended
        self.lock_metadata_path = Path(f"{self.requirements_path}.json")
        self._requirements_hash = requirements_hash = self._hash_requirements()
        self._last_locked = self._get_last_locked_time(requirements_hash)
        self._lock_version = self._get_last_locked_version(requirements_hash)

    def get_deployed_name(self, env_name: EnvNameBuild) -> EnvNameDeploy:
        """Report the environment name used on deployment (may embed the lock version)"""
        if self.versioned:
            return EnvNameDeploy(f"{env_name}@{self.lock_version}")
        return EnvNameDeploy(env_name)

    @staticmethod
    def _raise_if_none(value: _T | None) -> _T:
        # Shared guard for state that only exists once the environment is locked
        if value is None:
            raise BuildEnvError("Environment has not been locked")
        return value

    @property
    def requirements_hash(self) -> str:
        """Hash of the locked requirements (raises BuildEnvError if not locked)"""
        return self._raise_if_none(self._requirements_hash)

    @property
    def last_locked(self) -> datetime:
        """Time of the last lock update (raises BuildEnvError if not locked)"""
        return self._raise_if_none(self._last_locked)

    @property
    def lock_version(self) -> int:
        """Lock version number (always 1 for unversioned layers)"""
        if not self.versioned:
            # Unversioned specs are always considered version 1
            return 1
        return self._raise_if_none(self._lock_version)

    @property
    def is_locked(self) -> bool:
        """Whether this environment has been locked"""
        return self._last_locked is not None

    @property
    def locked_at(self) -> str:
        """ISO formatted UTC timestamp of the last lock update"""
        return format_as_utc(self.last_locked)

    def _hash_requirements(self) -> str | None:
        # Returns None when the lockfile hasn't been generated yet
        requirements_path = self.requirements_path
        if not requirements_path.exists():
            return None
        # Delegates to the module-level _hash_file helper for the digest format
        return _hash_file(self.requirements_path)

    def _load_saved_metadata(self) -> EnvironmentLockMetadata | None:
        """Loads last locked metadata from disk (if it exists)"""
        lock_metadata_path = self.lock_metadata_path
        if not lock_metadata_path.exists():
            return None
        with lock_metadata_path.open("r", encoding="utf-8") as f:
            # mypy is right to complain that the JSON hasn't been validated to conform to
            # the EnvironmentLockMetadata interface, but we're OK with letting the runtime
            # errors happen in that scenario. Longer term, explicit JSON schemas should be
            # defined and used for validation when reading the metadata files.
            return cast(EnvironmentLockMetadata, json.load(f))

    def load_valid_metadata(
        self, requirements_hash: str
    ) -> EnvironmentLockMetadata | None:
        """Loads last locked metadata only if the requirements hash matches"""
        lock_metadata = self._load_saved_metadata()
        if lock_metadata and requirements_hash == lock_metadata["requirements_hash"]:
            return lock_metadata
        return None

    def _get_last_locked_metadata(self, requirements_hash: str) -> datetime | None:
        # Lock time from saved metadata (None when the metadata is missing or stale)
        lock_metadata = self.load_valid_metadata(requirements_hash)
        if lock_metadata is not None:
            return datetime.fromisoformat(lock_metadata["locked_at"])
        return None

    def _get_path_mtime(self) -> datetime | None:
        # mtime of the lockfile itself (None if the file doesn't exist)
        return get_path_mtime(self.requirements_path)

    def _get_last_locked_time(self, requirements_hash: str | None) -> datetime | None:
        # Prefer the lock timestamp from an adjacent (still valid) lock metadata file
        if requirements_hash is not None:
            last_locked = self._get_last_locked_metadata(requirements_hash)
            if last_locked is not None:
                return last_locked
        # Otherwise assume the local file mtime reflects when the environment was locked
        # This will return None if the lock file doesn't exist yet
        return self._get_path_mtime()

    def _get_last_locked_version(self, requirements_hash: str | None) -> int | None:
        if requirements_hash is not None:
            lock_metadata = self.load_valid_metadata(requirements_hash)
            if lock_metadata is not None:
                # Unversioned specs are always considered version 1
                return lock_metadata.get("lock_version", 1)
        return None

    def _purge_lock(self) -> bool:
        # Currently a test suite helper, but may become a public API if it proves
        # useful when implementing https://github.com/lmstudio-ai/venvstacks/issues/10
        files_removed = False
        for path_to_remove in (self.requirements_path, self.lock_metadata_path):
            if path_to_remove.exists():
                path_to_remove.unlink()
                files_removed = True
        return files_removed

    def _write_lock_metadata(self) -> None:
        # Persist the current lock state (bumping the version for versioned layers)
        requirements_hash = self._requirements_hash
        if requirements_hash is None:
            raise BuildEnvError(
                "Environment must be locked before writing lock metadata"
            )
        last_version = self._lock_version
        if last_version is None:
            lock_version = 1
        elif self.versioned:
            lock_version = last_version + 1
        else:
            lock_version = last_version
        lock_metadata = EnvironmentLockMetadata(
            locked_at=self.locked_at,
            requirements_hash=requirements_hash,
            lock_version=lock_version,
        )
        _write_json(self.lock_metadata_path, lock_metadata)

    def update_lock_metadata(self) -> bool:
        """Refresh the saved lock metadata, returning True if anything changed"""
        # Calculate current requirements hash
        requirements_hash = self._hash_requirements()
        if requirements_hash is None:
            raise BuildEnvError(
                "Environment must be locked before updating lock metadata"
            )
        self._requirements_hash = requirements_hash
        # Only update and save the last locked time if
        # the lockfile contents have changed or if
        # the lock metadata file doesn't exist yet
        lock_metadata = self.load_valid_metadata(requirements_hash)
        if lock_metadata is None:
            self._last_locked = last_locked = self._get_path_mtime()
            assert (
                last_locked is not None
            ), "Failed to read lock time for locked environment"
            self._write_lock_metadata()
            return True
        return False
+ + @classmethod + def get_all_target_platforms(cls) -> list[Self]: + return sorted(set(cls.__members__.values())) + + @classmethod + def ensure_platform_list(cls, metadata: MutableMapping[str, Any]) -> None: + platform_list = metadata.get("platforms") + if platform_list is not None: + platform_list = [cls(target) for target in platform_list] + else: + platform_list = cls.get_all_target_platforms() + metadata["platforms"] = platform_list + + +TargetPlatform = ( + TargetPlatforms # Use singular name when creating instances from values +) + + +class LayerVariants(StrEnum): + RUNTIME = "runtime" + FRAMEWORK = "framework" + APPLICATION = "application" + + +class LayerCategories(StrEnum): + RUNTIMES = "runtimes" + FRAMEWORKS = "frameworks" + APPLICATIONS = "applications" + + +def ensure_optional_env_spec_fields(env_metadata: MutableMapping[str, Any]) -> None: + """Populate missing environment spec fields that are optional in the TOML file""" + TargetPlatforms.ensure_platform_list(env_metadata) + env_metadata.setdefault("build_requirements", []) + env_metadata.setdefault("versioned", False) + + +@dataclass +class _PythonEnvironmentSpec(ABC): + "Common base class for Python environment specifications" + + # Optionally overridden in concrete subclasses + ENV_PREFIX = "" + + # Specified in concrete subclasses + kind: ClassVar[LayerVariants] + category: ClassVar[LayerCategories] + + # Specified on creation (typically based on TOML layer spec fields) + name: LayerBaseName + versioned: bool + requirements: list[str] = field(repr=False) + build_requirements: list[str] = field(repr=False) + platforms: list[TargetPlatforms] = field(repr=False) + + def __post_init__(self) -> None: + # When instantiating specs that don't have a prefix, + # they're not allowed to use prefixes that *are* defined + if not self.ENV_PREFIX: + spec_name = self.name + for spec_type in _PythonEnvironmentSpec.__subclasses__(): + reserved_prefix = spec_type.ENV_PREFIX + if not reserved_prefix: + continue + 
@dataclass
class _PythonEnvironmentSpec(ABC):
    "Common base class for Python environment specifications"

    # Optionally overridden in concrete subclasses
    ENV_PREFIX = ""

    # Specified in concrete subclasses
    kind: ClassVar[LayerVariants]
    category: ClassVar[LayerCategories]

    # Specified on creation (typically based on TOML layer spec fields)
    name: LayerBaseName
    versioned: bool
    requirements: list[str] = field(repr=False)
    build_requirements: list[str] = field(repr=False)
    platforms: list[TargetPlatforms] = field(repr=False)

    def __post_init__(self) -> None:
        # Specs without a prefix of their own may not use a prefix
        # that *is* reserved by another spec kind
        if self.ENV_PREFIX:
            return
        layer_name = self.name
        for spec_type in _PythonEnvironmentSpec.__subclasses__():
            reserved_prefix = spec_type.ENV_PREFIX
            if reserved_prefix and layer_name.startswith(reserved_prefix + "-"):
                err = f"{layer_name} starts with reserved prefix {reserved_prefix}"
                raise ValueError(err)

    @property
    def env_name(self) -> EnvNameBuild:
        """Layer name with this spec kind's prefix applied (if any)"""
        if not self.ENV_PREFIX:
            return EnvNameBuild(self.name)
        return EnvNameBuild(f"{self.ENV_PREFIX}-{self.name}")

    def get_requirements_fname(self, platform: str) -> str:
        """Filename of this layer's lockfile for the given platform"""
        return f"requirements-{self.env_name}-{platform}.txt"

    def get_requirements_path(self, platform: str, requirements_dir: StrPath) -> Path:
        """Full path of this layer's lockfile for the given platform"""
        lockfile_name = self.get_requirements_fname(platform)
        return Path(requirements_dir) / self.env_name / lockfile_name


@dataclass
class RuntimeSpec(_PythonEnvironmentSpec):
    kind = LayerVariants.RUNTIME
    category = LayerCategories.RUNTIMES
    fully_versioned_name: str = field(repr=False)

    @property
    def py_version(self) -> str:
        """X.Y.Z Python version extracted from the fully versioned runtime name"""
        # fully_versioned_name should be of the form "implementation@X.Y.Z"
        # (this may need adjusting if runtimes other than CPython are ever used...)
        _implementation, _sep, version = self.fully_versioned_name.partition("@")
        return version


@dataclass
class _VirtualEnvironmentSpec(_PythonEnvironmentSpec):
    # Intermediate class for covariant property typing (never instantiated)
    runtime: RuntimeSpec = field(repr=False)


@dataclass
class FrameworkSpec(_VirtualEnvironmentSpec):
    ENV_PREFIX = "framework"
    kind = LayerVariants.FRAMEWORK
    category = LayerCategories.FRAMEWORKS


@dataclass
class ApplicationSpec(_VirtualEnvironmentSpec):
    ENV_PREFIX = "app"
    kind = LayerVariants.APPLICATION
    category = LayerCategories.APPLICATIONS
    launch_module_path: Path = field(repr=False)
    frameworks: list[FrameworkSpec] = field(repr=False)


class LayerSpecMetadata(TypedDict):
    """Layer details published with every archived or exported environment"""

    # fmt: off
    # Common fields defined for all layers, whether archived or exported
    layer_name: EnvNameBuild      # Prefixed layer name without lock version info
    install_target: EnvNameDeploy # Target installation folder when unpacked
    requirements_hash: str        # Uses "algorithm:hexdigest" format
    lock_version: int             # Monotonically increasing version identifier
    locked_at: str                # ISO formatted date/time value

    # Extra fields only defined for framework and application environments
    # runtime_name is set to fully_versioned_name if maintenance updates trigger a rebuild,
    # otherwise set to runtime's layer spec name so only feature releases force a rebuild
    runtime_name: NotRequired[str]
    required_layers: NotRequired[Sequence[EnvNameDeploy]]

    # Extra fields only defined for application environments
    app_launch_module: NotRequired[str]
    app_launch_module_hash: NotRequired[str]

    # Note: hashes of layered environment dependencies are intentionally NOT incorporated
    # into the published metadata. This allows an "only if needed" approach to
    # rebuilding app and framework layers when the layers they depend on are
    # updated (app layers will usually only depend on some of the components in the
    # underlying environment, and such dependencies are picked up as version changes
    # when regenerating the transitive dependency specifications for each environment)
    # fmt: on
This allows an "only if needed" approach to + # rebuilding app and framework layers when the layers they depend on are + # updated (app layers will usually only depend on some of the components in the + # underlying environment, and such dependencies are picked up as version changes + # when regenerating the transitive dependency specifications for each environment) + # fmt: on + + +###################################################### +# Defining and describing published artifacts +###################################################### + + +class ArchiveHashes(TypedDict): + sha256: str + # Only SHA256 hashes for now. Mark both old and new hash fields with `typing.NotRequired` + # to migrate to a different hashing function in the future. + + +class ArchiveBuildMetadata(LayerSpecMetadata): + """Inputs to an archive build request for a single environment""" + + # fmt: off + archive_build: int # Auto-incremented from previous build metadata + archive_name: str # Adds archive file extension to layer name + target_platform: str # Target platform identifier + # fmt: on + + +class ArchiveMetadata(ArchiveBuildMetadata): + """Archive details for a single environment (includes build request details)""" + + archive_size: int + archive_hashes: ArchiveHashes + + +@dataclass +class ArchiveBuildRequest: + """Structured request to build a named output archive""" + + env_name: EnvNameBuild + env_lock: EnvironmentLock + archive_base_path: Path + build_metadata: ArchiveBuildMetadata = field(repr=False) + needs_build: bool = field(repr=False) + # TODO: Save full previous metadata for use when build is skipped + + @staticmethod + def needs_archive_build( + archive_path: Path, + metadata: ArchiveBuildMetadata, + previous_metadata: ArchiveMetadata | None, + ) -> bool: + if not previous_metadata or not archive_path.exists(): + return True + if archive_path.name != previous_metadata.get("archive_name"): + # Previous build produced a different archive name, force a rebuild + return True + 
# Check for any other changes to build input metadata + for key, value in metadata.items(): + if value != previous_metadata.get(key): + # Input metadata for the archive build has changed, force a rebuild + return True + # Only check the metadata so archive builds can be skipped just by downloading + # the metadata for previously built versions (rather than the entire archives) + return False + + @classmethod + def define_build( + cls, + env_name: EnvNameBuild, + env_lock: EnvironmentLock, + output_path: Path, + target_platform: str, + tag_output: bool = False, + previous_metadata: ArchiveMetadata | None = None, + force: bool = False, + ) -> Self: + # Bump or set the archive build version + lock_version = env_lock.lock_version + if previous_metadata is None: + last_build_iteration = 0 + else: + last_lock_version = previous_metadata.get("lock_version", 0) + if lock_version != last_lock_version: + # New lock version, reset the build iteration number + last_build_iteration = 0 + else: + # Rebuild with a change that isn't reflected in the lock version + last_build_iteration = previous_metadata.get("archive_build", 0) + # Work out the basic details of the build request (assuming no rebuild is needed) + deployed_name = env_lock.get_deployed_name(env_name) + build_iteration = last_build_iteration + + def update_archive_name() -> tuple[Path, Path]: + if tag_output: + base_name = f"{deployed_name}-{target_platform}-{build_iteration}" + else: + base_name = deployed_name + archive_base_path = output_path / base_name + built_archive_path = pack_venv.get_archive_path(archive_base_path) + return archive_base_path, built_archive_path + + archive_base_path, built_archive_path = update_archive_name() + build_metadata = ArchiveBuildMetadata( + archive_build=last_build_iteration, + archive_name=built_archive_path.name, + install_target=deployed_name, + layer_name=env_name, + lock_version=lock_version, + locked_at=env_lock.locked_at, + requirements_hash=env_lock.requirements_hash, + 
target_platform=str(target_platform), # Convert enums to plain strings + ) + needs_build = force or cls.needs_archive_build( + built_archive_path, build_metadata, previous_metadata + ) + if needs_build: + # Forced build or input hashes have changed, + # so this will be a new version of the archive + build_iteration += 1 + archive_base_path, built_archive_path = update_archive_name() + build_metadata["archive_build"] = build_iteration + build_metadata["archive_name"] = built_archive_path.name + return cls(env_name, env_lock, archive_base_path, build_metadata, needs_build) + + @staticmethod + def _hash_archive(archive_path: Path) -> ArchiveHashes: + hashes: dict[str, str] = {} + for algorithm in ArchiveHashes.__required_keys__: + hashes[algorithm] = _hash_file(archive_path, algorithm, omit_prefix=True) + # The required keys have been set, but mypy can't prove that, + # so use an explicit cast to allow it to make that assumption + return cast(ArchiveHashes, hashes) + + def create_archive( + self, + env_path: Path, + previous_metadata: ArchiveMetadata | None = None, + work_path: Path | None = None, + ) -> tuple[ArchiveMetadata, Path]: + if env_path.name != self.env_name: + err_msg = ( + f"Build mismatch (expected {self.env_name!r}, got {env_path.name!r})" + ) + raise BuildEnvError(err_msg) + build_metadata = self.build_metadata + archive_base_path = self.archive_base_path + built_archive_path = archive_base_path.parent / build_metadata["archive_name"] + if not self.needs_build: + # Already built archive looks OK, so just return the same metadata as last build + print(f"Using previously built archive at {str(built_archive_path)!r}") + assert previous_metadata is not None + return previous_metadata, built_archive_path + if built_archive_path.exists(): + print(f"Removing outdated archive at {str(built_archive_path)!r}") + built_archive_path.unlink() + print(f"Creating archive for {str(env_path)!r}") + last_locked = self.env_lock.last_locked + archive_path = Path( + 
pack_venv.create_archive( + env_path, + archive_base_path, + clamp_mtime=last_locked, + work_dir=work_path, + install_target=build_metadata["install_target"], + ) + ) + assert built_archive_path == archive_path # pack_venv ensures this is true + print(f"Created {str(archive_path)!r} from {str(env_path)!r}") + + metadata = ArchiveMetadata( + archive_size=archive_path.stat().st_size, + archive_hashes=self._hash_archive(archive_path), + **build_metadata, + ) + return metadata, archive_path + + +class StackPublishingRequest(TypedDict): + """Inputs to an archive build request for a full stack specification""" + + layers: Mapping[LayerCategories, Sequence[ArchiveBuildMetadata]] + + +LayeredArchiveMetadata = Mapping[LayerCategories, Sequence[ArchiveMetadata]] + + +class StackPublishingResult(TypedDict): + """Archive details for built stack specification (includes build request details)""" + + layers: LayeredArchiveMetadata + + +class PublishedArchivePaths(NamedTuple): + metadata_path: Path + snippet_paths: list[Path] + archive_paths: list[Path] + + +########################################################## +# Defining and describing locally exported environments +########################################################## + + +class ExportMetadata(LayerSpecMetadata): + """Metadata for a locally exported environment""" + + # Exports currently include only the common metadata + + +@dataclass +class EnvironmentExportRequest: + """Structured request to locally export an environment""" + + env_name: EnvNameBuild + env_lock: EnvironmentLock + export_path: Path + export_metadata: ExportMetadata = field(repr=False) + needs_export: bool = field(repr=False) + # TODO: Save full previous metadata for use when export is skipped + + @staticmethod + def needs_new_export( + export_path: Path, + metadata: ExportMetadata, + previous_metadata: ExportMetadata | None, + ) -> bool: + if not previous_metadata or not export_path.exists(): + return True + if export_path.name != 
previous_metadata.get("layer_name"): + # Previous export produced a different env name, force a new export + return True + # Check for any other changes to build input metadata + for key, value in metadata.items(): + if value != previous_metadata.get(key): + # Input metadata for the archive build has changed, force a rebuild + return True + # Previous export used the same build inputs, so probably doesn't need updating + return False + + @classmethod + def define_export( + cls, + env_name: EnvNameBuild, + env_lock: EnvironmentLock, + output_path: Path, + previous_metadata: ExportMetadata | None = None, + force: bool = False, + ) -> Self: + # Work out the details of the export request + deployed_name = env_lock.get_deployed_name(env_name) + export_path = output_path / deployed_name + export_metadata = ExportMetadata( + install_target=deployed_name, + layer_name=env_name, + lock_version=env_lock.lock_version, + locked_at=env_lock.locked_at, + requirements_hash=env_lock.requirements_hash, + ) + needs_export = force or cls.needs_new_export( + export_path, export_metadata, previous_metadata + ) + return cls(env_name, env_lock, export_path, export_metadata, needs_export) + + @staticmethod + def _run_postinstall( + src_path: Path, export_path: Path, postinstall_path: Path + ) -> None: + exported_env_python_path = get_env_python(export_path) + command = [str(exported_env_python_path), "-I", str(postinstall_path)] + result = run_python_command_unchecked(command) + if result.returncode == 0: + # All good, nothing else to check + return + # Running with the Python inside the exported environment didn't work + # This can happen on Windows when "pyvenv.cfg" doesn't exist yet + # If that is what has happened, the reported return code will be 106 + if result.returncode != 106: + result.check_returncode() + # Self-generating the venv config failed, retry with the build venv + # rather than finding and using the exported base runtime environment + src_env_python_path = 
get_env_python(src_path) + command[0] = str(src_env_python_path) + run_python_command(command) + + def export_environment( + self, + env_path: Path, + previous_metadata: ExportMetadata | None = None, + ) -> tuple[ExportMetadata, Path]: + if env_path.name != self.env_name: + err_msg = ( + f"Export mismatch (expected {self.env_name!r}, got {env_path.name!r})" + ) + raise BuildEnvError(err_msg) + export_metadata = self.export_metadata + export_path = self.export_path + if not self.needs_export: + # Previous export looks OK, so just return the same metadata as last time + print(f"Using previously exported environment at {str(export_path)!r}") + assert previous_metadata is not None + return previous_metadata, export_path + if export_path.exists(): + print(f"Removing outdated environment at {str(export_path)!r}") + export_path.unlink() + print(f"Exporting {str(env_path)!r} to {str(export_path)!r}") + + def _run_postinstall(export_path: Path, postinstall_path: Path) -> None: + self._run_postinstall(env_path, export_path, postinstall_path) + + exported_path = pack_venv.export_venv( + env_path, + export_path, + _run_postinstall, + ) + assert self.export_path == exported_path # pack_venv ensures this is true + print(f"Created {str(export_path)!r} from {str(env_path)!r}") + return export_metadata, export_path + + +LayeredExportMetadata = Mapping[LayerCategories, Sequence[ExportMetadata]] + + +class StackExportRequest(TypedDict): + layers: LayeredExportMetadata + + +class ExportedEnvironmentPaths(NamedTuple): + metadata_path: Path + snippet_paths: list[Path] + env_paths: list[Path] + + +###################################################### +# Building layered environments from specifications +###################################################### + + +def _pdm_python_install(target_path: Path, request: str) -> Path | None: + # from https://github.com/pdm-project/pdm/blob/ce60c223bbf8b5ab2bdb94bf8fa6409b9b16c409/src/pdm/cli/commands/python.py#L122 + # to work around 
    # https://github.com/Textualize/rich/issues/3437
    from pdm.core import Core
    from pdm.environments import BareEnvironment
    from pbs_installer import download, get_download_link, install_file
    from pbs_installer._install import THIS_ARCH

    # Parse requests like "cpython@3.11.8" (optionally with a "-<arch>" suffix)
    implementation, _, version = request.rpartition("@")
    implementation = implementation.lower() or "cpython"
    version, _, arch = version.partition("-")
    arch = "x86" if arch == "32" else (arch or THIS_ARCH)

    # Weird structure here is to work around https://github.com/python/mypy/issues/12535
    # and https://github.com/frostming/pbs-installer/issues/6
    checked_impl: Literal["cpython", "pypy"] = "cpython"
    if implementation == "pypy":
        checked_impl = "pypy"
    elif implementation != "cpython":
        raise ValueError(f"Unknown interpreter implementation: {implementation}")
    ver, python_file = get_download_link(
        version, implementation=checked_impl, arch=arch, build_dir=False
    )
    destination = target_path / str(ver)
    interpreter = (
        destination / "bin" / "python3"
        if not _WINDOWS_BUILD
        else destination / "python.exe"
    )
    project = Core().create_project()
    env = BareEnvironment(project)
    if not destination.exists() or not interpreter.exists():
        # Clear any partial install before downloading and unpacking the runtime
        shutil.rmtree(destination, ignore_errors=True)
        destination.mkdir(parents=True, exist_ok=True)
        with tempfile.NamedTemporaryFile() as tf:
            tf.close()
            original_filename = download(python_file, tf.name, env.session)
            # TODO: use "tar_filter" here instead of "fully_trusted"
            # Currently blocked on Python 3.11 producing different results
            # if Python 3.12+ enables a filter that actually makes any changes
            # https://github.com/lmstudio-ai/venvstacks/issues/23
            with default_tarfile_filter("fully_trusted"):
                install_file(tf.name, destination, original_filename)
    if interpreter.exists():
        # Installation successful, return the path to the installation folder
        return destination
    # Failed to install the interpreter
    return None


def _get_py_scheme_path(category: str, base_path: StrPath, py_version: str) -> Path:
    """Resolve *category* in the "venv" install scheme rooted at *base_path*."""
    py_version_short = py_version.rpartition(".")[0]
    scheme_vars = {
        "base": str(base_path),
        "py_version": py_version,
        "py_version_nodot": "".join(py_version_short.split(".")),
        "py_version_short": py_version_short,
    }
    return Path(sysconfig.get_path(category, "venv", vars=scheme_vars))


def _binary_with_extension(name: str) -> str:
    """Add this platform's executable suffix (e.g. ".exe" on Windows)."""
    binary_suffix = Path(sys.executable).suffix
    return f"{name}{binary_suffix}"


def _hash_file(
    path: Path, algorithm: str = "sha256", *, omit_prefix: bool = False
) -> str:
    """Hash the given file, returning "algorithm:hexdigest" (or just the digest)."""
    # Missing files hash to the empty string rather than raising
    if not path.exists():
        return ""
    with path.open("rb", buffering=0) as f:
        # MyPy incorrectly complains here: https://github.com/python/typeshed/issues/12414
        file_hash = hashlib.file_digest(f, algorithm).hexdigest()  # type: ignore[arg-type]
    if omit_prefix:
        return file_hash
    return f"{algorithm}:{file_hash}"


def _hash_directory(
    path: Path, algorithm: str = "sha256", *, omit_prefix: bool = False
) -> str:
    """Hash a directory tree, returning "algorithm/hexdigest" (or just the digest)."""
    incremental_hash = hashlib.new(algorithm)
    # Alas, there's no `Path.walk` yet in Python 3.11
    for this_dir, subdirs, files in os.walk(path):
        if not files:
            continue
        dir_path = Path(this_dir)
        incremental_hash.update(dir_path.name.encode())
        # Ensure directory tree iteration order is deterministic
        subdirs.sort()
        for file in sorted(files):
            file_path = dir_path / file
            incremental_hash.update(file_path.name.encode())
            file_hash = _hash_file(file_path, algorithm)
            incremental_hash.update(file_hash.encode())
    dir_hash = incremental_hash.hexdigest()
    if omit_prefix:
        return dir_hash
    # NOTE(review): uses "/" as the separator (vs ":" in _hash_file),
    # presumably to distinguish directory hashes — confirm this is intentional
    return f"{algorithm}/{dir_hash}"


def get_build_platform() -> TargetPlatform:
    """Identify the build platform of the currently running system."""
    # Currently no need for cross-build support, so always query the running system
    # Examples: win_amd64, linux_x86_64, macosx_10_12_x86_64, macosx_10_12_arm64
    platform_name = sysconfig.get_platform()
    if platform_name.startswith("macosx"):
        platform_os_name, *__, platform_arch = platform_name.split("-")
        if platform_arch.startswith("universal"):
            # Want to handle x86_64 and arm64 separately
            if sys.platform == "win32":
                assert False  # Ensure mypy knows `uname` won't be used on Windows
            platform_arch = os.uname().machine
        platform_name = f"{platform_os_name}_{platform_arch}"
    return TargetPlatform(platform_name.replace("-", "_"))  # Let ValueError escape


@dataclass
class _PythonEnvironment(ABC):
    """Common base class for the layered environment implementations."""

    # Specified in concrete subclasses
    kind: ClassVar[LayerVariants]
    category: ClassVar[LayerCategories]

    # Specified on creation
    _env_spec: _PythonEnvironmentSpec = field(repr=False)
    build_path: Path = field(repr=False)
    requirements_path: Path = field(repr=False)
    index_config: IndexConfig = field(repr=False)

    # Derived from target path and spec in __post_init__
    env_path: Path = field(init=False)
    pylib_path: Path = field(init=False, repr=False)
    dynlib_path: Path | None = field(init=False, repr=False)
    executables_path: Path = field(init=False, repr=False)
    python_path: Path = field(init=False, repr=False)
    env_lock: EnvironmentLock = field(init=False)

    # Set in subclass or externally after creation
    base_python_path: Path | None = field(init=False, repr=False)
    tools_python_path: Path | None = field(init=False, repr=False)
    py_version: str = field(init=False, repr=False)

    # Operation flags allow for requested commands to be applied only to selected layers
    # Notes:
    #   - the "build if needed" (want_build=None) option is fairly ineffective, since
    #     there are some operations that are always considered "needed" (at least for now)
    #   - there is no "if needed" (want_publish=None) option for archive publication
    want_lock: bool | None = field(
        default=None, init=False, repr=False
    )  # Default: if needed
    want_build: bool | None = field(
        default=True, init=False, repr=False
    )  # Default: build
    want_publish: bool = field(default=True, init=False, repr=False)  # Default: publish

    # State flags 
    # used to selectively execute some cleanup operations
    was_created: bool = field(default=False, init=False, repr=False)
    was_built: bool = field(default=False, init=False, repr=False)

    def _get_py_scheme_path(self, category: str) -> Path:
        # Resolve an install scheme path inside this layer's environment
        return _get_py_scheme_path(category, self.env_path, self.py_version)

    def _get_python_dir_path(self) -> Path:
        # Dedicated method so subclasses can adjust this if needed
        return self._get_py_scheme_path("scripts")

    @property
    def env_name(self) -> EnvNameBuild:
        """Build-time name of this environment layer."""
        return self.env_spec.env_name

    @property
    def install_target(self) -> EnvNameDeploy:
        """Deployment-time name of this environment layer."""
        return self.env_lock.get_deployed_name(self.env_spec.env_name)

    def __post_init__(self) -> None:
        self.env_path = self.build_path / self.env_name
        self.pylib_path = self._get_py_scheme_path("purelib")
        self.executables_path = self._get_py_scheme_path("scripts")
        self.python_path = self._get_python_dir_path() / _binary_with_extension(
            "python"
        )
        if _WINDOWS_BUILD:
            # Intel install some DLLs in a weird spot that needs extra config to handle
            self.dynlib_path = self.env_path / "Library" / "bin"
        else:
            # No extra library search paths on non-Windows systems
            # (but see https://github.com/lmstudio-ai/venvstacks/issues/1)
            self.dynlib_path = None
        self.env_lock = EnvironmentLock(
            self.requirements_path,
            self.env_spec.versioned,
        )

    @property
    def env_spec(self) -> _PythonEnvironmentSpec:
        # Define property to allow covariance of the declared type of `env_spec`
        return self._env_spec

    def select_operations(
        self,
        lock: bool | None = False,
        build: bool | None = True,
        publish: bool = True,
    ) -> None:
        """Enable the selected operations for this environment"""
        self.want_lock = lock
        self.want_build = build
        self.want_publish = publish
        # Also reset operation state tracking
        self.was_created = False
        self.was_built = False

    def _create_environment(
        self, *, clean: bool = False, build_only: bool = False
    ) -> None:
        # Create, replace, update, or reuse the environment as requested;
        # results are recorded in the `was_created`/`was_built` state flags
        env_path = self.env_path
        env_updated = False
        create_env = True
        if not env_path.exists():
            print(f"{str(env_path)!r} does not exist, creating...")
        elif clean:
            print(f"{str(env_path)!r} exists, replacing...")
            shutil.rmtree(self.env_path)
        else:
            if self.want_build or self.was_created:
                # Run the update if requested, or if env was created earlier in the build
                print(f"{str(env_path)!r} exists, updating...")
                self._update_existing_environment(build_only=build_only)
                env_updated = True
            else:
                print(f"{str(env_path)!r} exists, reusing without updating...")
                create_env = False
        if create_env:
            self._create_new_environment(build_only=build_only)
        self.was_created = create_env
        self.was_built = create_env or env_updated

    def create_environment(self, clean: bool = False) -> None:
        """Create or update the specified environment and strip non-portable files.

        Check the `was_created`/`was_built` state flags to see what happened
        (the earlier claim that this returned a boolean was inaccurate).
        """
        self._create_environment(clean=clean)
        self._ensure_portability()

    def report_python_site_details(self) -> subprocess.CompletedProcess[str]:
        """Run `python -Im site` in the environment and report the result."""
        print(f"Reporting environment details for {str(self.env_path)!r}")
        command = [
            str(self.python_path),
            "-X",
            "utf8",
            "-Im",
            "site",
        ]
        return run_python_command(command)

    def _run_uv(
        self, cmd: str, cmd_args: list[str], **kwds: Any
    ) -> subprocess.CompletedProcess[str]:
        # Always run `uv` via `python -Im` so it gets a valid default interpreter to use
        command = [
            str(self.tools_python_path),
            "-X",
            "utf8",
            "-Im",
            "uv",
            cmd,
            *cmd_args,
        ]
        return run_python_command(command, **kwds)

    def _run_uv_pip(
        self, cmd_args: list[str], **kwds: Any
    ) -> subprocess.CompletedProcess[str]:
        # Convenience wrapper for `uv pip` subcommands
        return self._run_uv("pip", cmd_args, **kwds)

    def _run_uv_pip_compile(
        self,
        requirements_path: StrPath,
        requirements_input_path: StrPath,
        constraints: Sequence[StrPath],
    ) -> subprocess.CompletedProcess[str]:
        # TODO: Explore whether resolution in `--universal` mode might eliminate the
        # need for 
        # per-platform lock files (it depends on whether `uv` is assuming
        # dependency declarations don't vary across wheels, which is not a valid
        # assumption for some Python packages, including `pytorch`)
        cli_lock_command = f"{Path(sys.executable).name} -Im {__package__} lock"
        uv_pip_args = [
            "compile",
            "-o",
            os.fspath(requirements_path),
            "--python",
            str(self.base_python_path),
            "--python-version",
            self.py_version,
            "--custom-compile-command",
            cli_lock_command,
            *self.index_config._get_uv_pip_compile_args(),
            "--quiet",
            "--no-color",
            "--no-config",
            "--no-annotate",  # Annotations include file paths, creating portability problems
            "--generate-hashes",
            "--strip-extras",
            "--no-upgrade",  # Delete the existing lock files to upgrade dependencies
            "--allow-unsafe",  # Despite the name, this turns off an unwanted legacy behaviour
            # that disallowed pinning some packaging related PyPI projects
        ]
        for constraint_path in constraints:
            uv_pip_args.extend(("-c", os.fspath(constraint_path)))
        uv_pip_args.append(os.fspath(requirements_input_path))
        return self._run_uv_pip(uv_pip_args)

    def _run_pip(
        self, cmd_args: list[str], **kwds: Any
    ) -> subprocess.CompletedProcess[str]:
        # Run `pip` from the tools environment against this layer's environment
        command = [
            str(self.tools_python_path),
            "-X",
            "utf8",
            "-Im",
            "pip",
            "--python",
            str(self.python_path),
            "--no-input",
            *cmd_args,
        ]
        return run_python_command(command, **kwds)

    def _run_pip_install(
        self, *pip_install_args: str, require_binary: bool | None = None
    ) -> subprocess.CompletedProcess[str]:
        # TODO: Switch to `uv pip install` once https://github.com/astral-sh/uv/issues/2500
        # is resolved (so environment layering is still handled correctly)
        pip_args = [
            "install",
            "--no-warn-script-location",
            *self.index_config._get_pip_install_args(require_binary),
            *pip_install_args,
        ]
        result = self._run_pip(pip_args)
        print(f"Dependencies installed and updated in {str(self.env_path)!r}")
        return result

    def _run_pip_sync(
        self,
        env_python_path: StrPath,
        requirements_paths: Sequence[StrPath],
    ) -> subprocess.CompletedProcess[str]:
        # TODO: Switch to `uv pip sync` once https://github.com/astral-sh/uv/issues/2500
        # is resolved (so environment layering is still handled correctly)
        # Work around https://github.com/jazzband/pip-tools/issues/2103 by clearing
        # `pip-sync`'s list of packages to leave behind before running the sync command
        command = [
            str(self.tools_python_path),
            "-X",
            "utf8",
            "-Ic",
            (
                "import piptools.sync, piptools.scripts.sync; "
                "piptools.sync.PACKAGES_TO_IGNORE.clear(); "
                "piptools.scripts.sync.cli()"
            ),
            "--quiet",
            "--no-config",
            *self.index_config._get_pip_sync_args(),
            "--python-executable",
            os.fspath(env_python_path),
            *(
                os.fspath(p) for p in requirements_paths
            ),  # `map` upsets typecheckers here
        ]
        return run_python_command(command)

    def get_constraint_paths(self) -> list[Path]:
        # No constraints files by default, subclasses override as necessary
        return []

    def lock_requirements(self) -> EnvironmentLock:
        """Lock this layer's transitive dependencies and return the updated lock."""
        spec = self.env_spec
        requirements_path = self.requirements_path
        if not self.want_lock and requirements_path.exists():
            print(f"Using existing {str(requirements_path)!r}")
            return self.env_lock
        print(f"Generating {str(requirements_path)!r}")
        requirements_path.parent.mkdir(parents=True, exist_ok=True)
        constraints = self.get_constraint_paths()
        requirements_input_path = requirements_path.with_suffix(".in")
        with requirements_input_path.open("w", encoding="utf-8", newline="\n") as f:
            lines = [
                "# DO NOT EDIT. Automatically generated by venvstacks.",
                "# Relock layer dependencies to update.",
                *spec.requirements,
                "",
            ]
            f.write("\n".join(lines))
        self._run_uv_pip_compile(
            requirements_path, requirements_input_path, constraints
        )
        if not requirements_path.exists():
            raise BuildEnvError(f"Failed to generate {str(requirements_path)!r}")
        if self.env_lock.update_lock_metadata():
            print(f"  Environment lock time set: {self.env_lock.locked_at!r}")
        return self.env_lock

    def install_build_requirements(self) -> subprocess.CompletedProcess[str] | None:
        """Install build-only dependencies inside the target environment."""
        # All build dependencies must be available as pre-built binary packages
        # TODO: drop support for implicit source builds (superseded by local wheel dirs)
        if not self.index_config.allow_source_builds:
            return None  # No implicit source builds -> never need build dependencies
        build_requirements = self.env_spec.build_requirements
        if not build_requirements:
            return None  # Nothing to install
        deprecation_msg = (
            "Support for implicit source builds is being removed (use local wheels)"
        )
        warnings.warn(deprecation_msg, DeprecationWarning)
        return self._run_pip_install(
            "--upgrade",
            *build_requirements,
            require_binary=True,
        )

    def install_requirements(self) -> subprocess.CompletedProcess[str]:
        """Install the locked layer requirements into the target environment."""
        # Run a pip dependency upgrade inside the target environment
        # Build isolation is intentionally turned off so source builds that need big
        # dependencies like `torch` can access the runtime environment
        # Requirements are fully transitively locked, so no implicit deps are allowed
        if not self.env_lock.is_locked:
            raise BuildEnvError(
                "Environment must be locked before installing dependencies"
            )
        if self.index_config.allow_source_builds:
            self.install_build_requirements()  # Ensure source dependencies can be built
        return self._run_pip_install(
            "--no-build-isolation",
            "--no-deps",
            "--upgrade",
            "-r",
            str(self.requirements_path),
        )

    def remove_build_only_packages(self) -> subprocess.CompletedProcess[str] | None:
        """Remove build-only dependencies before publishing the environment layer."""
        if not self.index_config.allow_source_builds:
            return None  # No implicit source builds -> never install build dependencies
        build_requirements = self.env_spec.build_requirements
        if not build_requirements or not self.was_built:
            return (
                None  # Nothing to remove (no build requirements, or env wasn't built)
            )
        requirements_paths = [
            self.requirements_path,
            *self.get_constraint_paths(),
        ]
        result = self._run_pip_sync(self.python_path, requirements_paths)
        print(f"Removed build-only packages from {str(self.env_path)!r}")
        return result

    def ensure_runtime_dependencies(self) -> subprocess.CompletedProcess[str] | None:
        """Reinstall runtime dependencies skipped during the initial build pass."""
        # Second pass reinstalling purely from local cache
        # This adds packages previously skipped due to build dependencies in lower layers
        # Ideally, hashes wouldn't be rechecked to allow for cached wheels built from source
        # artifacts, but `pip` doesn't allow the hash check to be turned off when hashes are
        # present in the requirements file
        if not self.was_built or not self.index_config.allow_source_builds:
            # Nothing to ensure (env wasn't built or build deps were never installed)
            return None
        return self._run_pip_install(
            # Downloads allowed to work around https://github.com/pypa/pip/issues/12807
            # "--no-index",
            "--quiet",
            "--no-deps",
            "-r",
            str(self.requirements_path),
            require_binary=True,
        )

    def _update_existing_environment(self, *, build_only: bool = False) -> None:
        # Default update: (re)install either the build or the runtime requirements
        if build_only:
            self.install_build_requirements()
        else:
            self.install_requirements()

    @abstractmethod
    def _create_new_environment(self, *, build_only: bool = False) -> None:
        """Create a new environment of this layer's kind (subclass responsibility)."""
        raise NotImplementedError

    def _ensure_portability(self) -> None:
        """Make the built environment safe to relocate to other machines."""
        # Wrapper and activation scripts are not used on deployment targets,
        # so drop them entirely rather than making them portable
        for executable in self.executables_path.iterdir():
            if not executable.name.lower().startswith("python"):
                print(f"  Dropping potentially non-portable file {str(executable)!r}")
                executable.unlink()
        # Symlinks within the build folder should be relative
        # Symlinks outside the build folder shouldn't exist
        build_path = self.build_path
        relative, external = pack_venv.convert_symlinks(self.env_path, build_path)
        if relative:
            msg_lines = ["Converted absolute internal symlinks to relative symlinks:\n"]
            for file_path, target_path in relative:
                link_info = f"{str(file_path)!r} -> {str(target_path)!r}"
                msg_lines.append(f"  {link_info}")
            print("\n".join(msg_lines))
        if external:
            msg_lines = ["Converted absolute external symlinks to hard links:\n"]
            for file_path, target_path in external:
                link_info = f"{str(file_path)!r} -> {str(target_path)!r}"
                msg_lines.append(f"  {link_info}")
            print("\n".join(msg_lines))

    def _update_output_metadata(self, metadata: LayerSpecMetadata) -> None:
        # Hook for subclasses to override
        assert metadata is not None  # Avoid linter complaints about unused parameters

    def define_archive_build(
        self,
        output_path: Path,
        target_platform: str,
        tag_output: bool = False,
        previous_metadata: ArchiveMetadata | None = None,
        force: bool = False,
    ) -> ArchiveBuildRequest:
        """Define an archive build request for this environment layer."""
        request = ArchiveBuildRequest.define_build(
            self.env_name,
            self.env_lock,
            output_path,
            target_platform,
            tag_output,
            previous_metadata,
            force,
        )
        self._update_output_metadata(request.build_metadata)
        return request

    def create_archive(
        self,
        output_path: Path,
        target_platform: str,
        tag_output: bool = False,
        previous_metadata: ArchiveMetadata | None = None,
        force: bool = False,
    ) -> tuple[ArchiveMetadata, Path]:
        """Build (if needed) and create the archive for this environment layer."""
        env_path = self.env_path
        if not env_path.exists():
            raise RuntimeError(
                "Must create environment before attempting to archive it"
            )

        # Define the input 
metadata that gets published in the archive manifest + build_request = self.define_archive_build( + output_path, target_platform, tag_output, previous_metadata, force + ) + work_path = self.build_path # /tmp is likely too small for ML environments + return build_request.create_archive(env_path, previous_metadata, work_path) + + def request_export( + self, + output_path: Path, + previous_metadata: ExportMetadata | None = None, + force: bool = False, + ) -> EnvironmentExportRequest: + request = EnvironmentExportRequest.define_export( + self.env_name, self.env_lock, output_path, previous_metadata, force + ) + self._update_output_metadata(request.export_metadata) + return request + + def export_environment( + self, + output_path: Path, + previous_metadata: ExportMetadata | None = None, + force: bool = False, + ) -> tuple[ExportMetadata, Path]: + env_path = self.env_path + if not env_path.exists(): + raise RuntimeError("Must create environment before attempting to export it") + + # Define the input metadata that gets published in the export manifest + export_request = self.request_export(output_path, previous_metadata, force) + return export_request.export_environment(env_path, previous_metadata) + + +class RuntimeEnv(_PythonEnvironment): + kind = LayerVariants.RUNTIME + category = LayerCategories.RUNTIMES + + def _get_python_dir_path(self) -> Path: + if _WINDOWS_BUILD: + # python-build-standalone Windows build doesn't put the binary in `Scripts` + return self.env_path + return super()._get_python_dir_path() + + def __post_init__(self) -> None: + self.py_version = py_version = self.env_spec.py_version + super().__post_init__() + tools_env_path = self.build_path / "build-tools" + if tools_env_path.exists(): + tools_bin_path = Path( + _get_py_scheme_path("scripts", tools_env_path, py_version) + ) + tools_python_path = tools_bin_path / _binary_with_extension("python") + else: + # No build tools environment created by wrapper script, so use the running Python + 
tools_python_path = Path(sys.executable) + # Runtimes have no base Python other than the build tools Python + self.base_python_path = tools_python_path + self.tools_python_path = tools_python_path + + @property + def env_spec(self) -> RuntimeSpec: + # Define property to allow covariance of the declared type of `env_spec` + assert isinstance(self._env_spec, RuntimeSpec) + return self._env_spec + + def _update_existing_environment(self, *, build_only: bool = False) -> None: + super()._update_existing_environment(build_only=build_only) + + def _remove_pip(self) -> subprocess.CompletedProcess[str] | None: + to_be_checked = ["pip", "wheel", "setuptools"] + to_be_removed = [] + for pylib in to_be_checked: + if (self.pylib_path / pylib).exists(): + to_be_removed.append(pylib) + if not to_be_removed: + return None + pip_args = ["uninstall", "-y", *to_be_removed] + return self._run_pip(pip_args) + + def _create_new_environment(self, *, build_only: bool = False) -> None: + python_runtime = self.env_spec.fully_versioned_name + install_path = _pdm_python_install(self.build_path, python_runtime) + if install_path is None: + raise BuildEnvError(f"Failed to install {python_runtime}") + shutil.move(install_path, self.env_path) + if not self.index_config.allow_source_builds: + # Only `pip-sync` needs `pip` to be installed in the target environment, + # and that step is skipped when implicit source builds are disabled + self._remove_pip() + fs_sync() + if build_only: + if self.index_config.allow_source_builds: + print( + f"Preparing {str(self.python_path)!r} build dependencies in {self}" + ) + self.install_build_requirements() + else: + print( + f"Using {str(self.python_path)!r} as runtime environment layer in {self}" + ) + self.install_requirements() + + def _update_output_metadata(self, metadata: LayerSpecMetadata) -> None: + super()._update_output_metadata(metadata) + # This *is* a runtime layer, so it needs to be updated on maintenance releases + metadata["runtime_name"] = 
self.env_spec.fully_versioned_name + + def create_build_environment(self, *, clean: bool = False) -> None: + """Create or update runtime build environment. Returns True if env is new or updated.""" + super()._create_environment(clean=clean, build_only=True) + + +class _VirtualEnvironment(_PythonEnvironment): + _include_system_site_packages = False + + linked_constraints_paths: list[Path] = field(init=False, repr=False) + + def __post_init__(self) -> None: + self.py_version = self.env_spec.runtime.py_version + super().__post_init__() + self.linked_constraints_paths = [] + + @property + def env_spec(self) -> _VirtualEnvironmentSpec: + # Define property to allow covariance of the declared type of `env_spec` + assert isinstance(self._env_spec, _VirtualEnvironmentSpec) + return self._env_spec + + def link_base_runtime_paths(self, runtime: RuntimeEnv) -> None: + # Link executable paths + self.base_python_path = runtime.python_path + self.tools_python_path = runtime.tools_python_path + if self.linked_constraints_paths: + raise BuildEnvError("Layered environment base runtime already linked") + self.linked_constraints_paths[:] = [runtime.requirements_path] + + def get_constraint_paths(self) -> list[Path]: + return self.linked_constraints_paths + + def _ensure_virtual_environment(self) -> subprocess.CompletedProcess[str]: + # Use the base Python installation to create a new virtual environment + if self.base_python_path is None: + raise RuntimeError("Base Python path not set in {self!r}") + options = ["--without-pip"] + if self._include_system_site_packages: + options.append("--system-site-packages") + if self.env_path.exists(): + options.append("--upgrade") + if _WINDOWS_BUILD: + options.append("--copies") + else: + options.append("--symlinks") + command = [ + str(self.base_python_path), + "-X", + "utf8", + "-Im", + "venv", + *options, + str(self.env_path), + ] + result = run_python_command(command) + self._link_layered_environment() + fs_sync() + print(f"Virtual 
environment configured in {str(self.env_path)!r}") + return result + + def _link_layered_environment(self) -> None: + pass # Nothing to do by default, subclasses override if necessary + + def _update_existing_environment(self, *, build_only: bool = False) -> None: + if build_only: + raise RuntimeError( + "Only runtime environments support build-only installation" + ) + self._ensure_virtual_environment() + super()._update_existing_environment() + + def _create_new_environment(self, *, build_only: bool = False) -> None: + self._update_existing_environment(build_only=build_only) + + def _update_output_metadata(self, metadata: LayerSpecMetadata) -> None: + super()._update_output_metadata(metadata) + # Non-windows platforms use symlinks, so only need updates on feature releases + # Windows copies the main Python binary and support libary, so always needs updates + if _WINDOWS_BUILD: + runtime_update_trigger = self.env_spec.runtime.fully_versioned_name + else: + runtime_update_trigger = self.env_spec.runtime.name + metadata["runtime_name"] = runtime_update_trigger + + +class FrameworkEnv(_VirtualEnvironment): + kind = LayerVariants.FRAMEWORK + category = LayerCategories.FRAMEWORKS + _include_system_site_packages = True + + @property + def env_spec(self) -> FrameworkSpec: + # Define property to allow covariance of the declared type of `env_spec` + assert isinstance(self._env_spec, FrameworkSpec) + return self._env_spec + + +class ApplicationEnv(_VirtualEnvironment): + kind = LayerVariants.APPLICATION + category = LayerCategories.APPLICATIONS + + launch_module_name: str = field(init=False, repr=False) + linked_pylib_paths: list[Path] = field(init=False, repr=False) + linked_dynlib_paths: list[Path] = field(init=False, repr=False) + linked_frameworks: list[FrameworkEnv] = field(init=False, repr=False) + + @property + def env_spec(self) -> ApplicationSpec: + # Define property to allow covariance of the declared type of `env_spec` + assert isinstance(self._env_spec, 
ApplicationSpec) + return self._env_spec + + def __post_init__(self) -> None: + super().__post_init__() + self.launch_module_name = self.env_spec.launch_module_path.stem + self.linked_pylib_paths = [] + self.linked_dynlib_paths = [] + self.linked_frameworks = [] + + def link_layered_environments( + self, runtime: RuntimeEnv, frameworks: Mapping[LayerBaseName, FrameworkEnv] + ) -> None: + self.link_base_runtime_paths(runtime) + constraints_paths = self.linked_constraints_paths + if not constraints_paths: + raise BuildEnvError("Failed to add base environment constraints path") + # The runtime site-packages folder is added here rather than via pyvenv.cfg + # to ensure it appears in sys.path after the framework site-packages folders + pylib_paths = self.linked_pylib_paths + dynlib_paths = self.linked_dynlib_paths + fw_envs = self.linked_frameworks + if pylib_paths or dynlib_paths or fw_envs: + raise BuildEnvError("Layered application environment already linked") + for env_spec in self.env_spec.frameworks: + env = frameworks[env_spec.name] + fw_envs.append(env) + constraints_paths.append(env.requirements_path) + install_target_path = Path(env.install_target) + + def _fw_env_path(build_path: Path) -> Path: + relative_path = build_path.relative_to(env.env_path) + return install_target_path / relative_path + + pylib_paths.append(_fw_env_path(env.pylib_path)) + if env.dynlib_path is not None: + dynlib_paths.append(_fw_env_path(env.pylib_path)) + runtime_target_path = Path(runtime.install_target) + + def _runtime_path(build_path: Path) -> Path: + relative_path = build_path.relative_to(runtime.env_path) + return runtime_target_path / relative_path + + pylib_paths.append(_runtime_path(runtime.pylib_path)) + if runtime.dynlib_path is not None: + dynlib_paths.append(_runtime_path(runtime.dynlib_path)) + + def _link_layered_environment(self) -> None: + # Create sitecustomize file + sc_dir_path = self.pylib_path + sc_contents = [ + "# Automatically generated by venvstacks", + 
"import site", + "import os", + "from os.path import abspath, dirname, join as joinpath", + "# Allow loading modules and packages from framework environments", + "this_dir = dirname(abspath(__file__))", + ] + # Add framework and runtime folders to sys.path + parent_path = self.env_path.parent + relative_prefix = Path( + os.path.relpath(str(parent_path), start=str(sc_dir_path)) + ) + for pylib_path in self.linked_pylib_paths: + relative_path = relative_prefix / pylib_path + sc_contents.extend( + [ + f"path_entry = abspath(joinpath(this_dir, {str(relative_path)!r}))", + "site.addsitedir(path_entry)", + ] + ) + # Add DLL search folders if needed + dynlib_paths = self.linked_dynlib_paths + if _WINDOWS_BUILD and dynlib_paths: + sc_contents.extend( + [ + "", + "# Allow loading misplaced DLLs on Windows", + ] + ) + for dynlib_path in dynlib_paths: + if not dynlib_path.exists(): + # Nothing added DLLs to this folder at build time, so skip it + continue + relative_path = relative_prefix / dynlib_path + sc_contents.extend( + [ + f"dll_dir = abspath(joinpath(this_dir, {str(relative_path)!r}))", + "os.add_dll_directory(dll_dir)", + ] + ) + sc_contents.append("") + sc_path = self.pylib_path / "sitecustomize.py" + print(f"Generating {sc_path!r}...") + with open(sc_path, "w", encoding="utf-8") as f: + f.write("\n".join(sc_contents)) + + def _update_existing_environment(self, *, build_only: bool = False) -> None: + super()._update_existing_environment(build_only=build_only) + # Also publish the specified launch module as an importable top level module + launch_module_source_path = self.env_spec.launch_module_path + launch_module_env_path = self.pylib_path / launch_module_source_path.name + print(f"Including launch module {launch_module_source_path!r}...") + if launch_module_source_path.is_file(): + shutil.copyfile(launch_module_source_path, launch_module_env_path) + else: + shutil.copytree( + launch_module_source_path, launch_module_env_path, dirs_exist_ok=True + ) + + def 
_update_output_metadata(self, metadata: LayerSpecMetadata) -> None: + super()._update_output_metadata(metadata) + metadata["app_launch_module"] = self.launch_module_name + if self.env_spec.launch_module_path.is_file(): + metadata["app_launch_module_hash"] = _hash_file( + self.env_spec.launch_module_path + ) + else: + metadata["app_launch_module_hash"] = _hash_directory( + self.env_spec.launch_module_path + ) + framework_env_names = [fw.install_target for fw in self.linked_frameworks] + metadata["required_layers"] = framework_env_names + + +###################################################### +# Building layered environments based on a TOML file +###################################################### + +BuildEnv = TypeVar("BuildEnv", bound=_PythonEnvironment) + + +@dataclass +class StackSpec: + # Specified on creation + spec_path: Path + runtimes: MutableMapping[LayerBaseName, RuntimeSpec] + frameworks: MutableMapping[LayerBaseName, FrameworkSpec] + applications: MutableMapping[LayerBaseName, ApplicationSpec] + requirements_dir_path: Path + + # Derived from runtime environment in __post_init__ + build_platform: str = field(init=False, repr=False) + + def __post_init__(self) -> None: + self.build_platform = get_build_platform() + self.spec_path = as_normalized_path(self.spec_path) + # Resolve requirements_dir_path relative to spec_path + self.requirements_dir_path = self.resolve_lexical_path( + self.requirements_dir_path + ) + + @classmethod + def load(cls, fname: StrPath) -> Self: + stack_spec_path = as_normalized_path(fname) + with open(stack_spec_path, "rb") as f: + data = tomllib.load(f) + spec_dir_path = stack_spec_path.parent + requirements_dir_path = spec_dir_path / "requirements" + # Collect the list of runtime specs + runtimes = {} + for rt in data["runtimes"]: + # No conversions needed for the runtime environment specs + name = rt["name"] + if name in runtimes: + msg = f"Runtime names must be distinct ({name!r} already defined)" + raise 
LayerSpecError(msg) + ensure_optional_env_spec_fields(rt) + runtimes[name] = RuntimeSpec(**rt) + # Collect the list of framework specs + frameworks = {} + for fw in data["frameworks"]: + name = fw["name"] + if name in frameworks: + msg = f"Framework names must be distinct ({name!r} already defined)" + raise LayerSpecError(msg) + runtime_name = fw["runtime"] + runtime = runtimes.get(runtime_name) + if runtime is None: + msg = f"Framework {name!r} references unknown runtime {runtime_name!r}" + raise LayerSpecError(msg) + fw["runtime"] = runtime + ensure_optional_env_spec_fields(fw) + frameworks[name] = FrameworkSpec(**fw) + # Collect the list of application specs + applications = {} + for app in data["applications"]: + name = app["name"] + if name in applications: + msg = f"Application names must be distinct ({name!r} already defined)" + raise LayerSpecError(msg) + launch_module_path = spec_dir_path / app.pop("launch_module") + if not launch_module_path.exists(): + msg = f"Specified launch module {launch_module_path!r} does not exist)" + raise LayerSpecError(msg) + runtime = None + app_frameworks = [] + for fw_name in app["frameworks"]: + app_fw = frameworks.get(fw_name) + if app_fw is None: + msg = ( + f"Application {name!r} references unknown framework {fw_name!r}" + ) + raise LayerSpecError(msg) + if runtime is None: + runtime = app_fw.runtime + elif app_fw.runtime is not runtime: + msg = ( + f"Application {name!r} references inconsistent frameworks. " + f"{app_frameworks[0].name!r} requires runtime {runtime.name!r}." + f"while {app_fw!r} requires runtime {app_fw.runtime.name!r}." 
+ ) + raise LayerSpecError(msg) + app_frameworks.append(app_fw) + app["runtime"] = runtime + app["frameworks"] = app_frameworks + app["launch_module_path"] = launch_module_path + ensure_optional_env_spec_fields(app) + applications[name] = ApplicationSpec(**app) + return cls( + stack_spec_path, runtimes, frameworks, applications, requirements_dir_path + ) + + def all_environment_specs(self) -> Iterable[_PythonEnvironmentSpec]: + """Iterate over the specifications for all defined environments. + + All runtimes are produced first, then frameworks, then applications. + """ + return chain( + self.runtimes.values(), self.frameworks.values(), self.applications.values() + ) + + def _define_envs( + self, + build_path: Path, + index_config: IndexConfig, + env_class: type[BuildEnv], + specs: Mapping[LayerBaseName, _PythonEnvironmentSpec], + ) -> MutableMapping[LayerBaseName, BuildEnv]: + requirements_dir = self.requirements_dir_path + build_environments: dict[LayerBaseName, BuildEnv] = {} + build_platform = self.build_platform + for name, spec in specs.items(): + if build_platform not in spec.platforms: + print(f" Skipping env {name!r} as it does not target this platform") + continue + requirements_path = spec.get_requirements_path( + build_platform, requirements_dir + ) + build_env = env_class(spec, build_path, requirements_path, index_config) + build_environments[name] = build_env + print(f" Defined {name!r}: {build_env!r}") + return build_environments + + def resolve_lexical_path(self, related_location: StrPath, /) -> Path: + """Resolve a path relative to the location of the stack specification""" + return _resolve_lexical_path(related_location, self.spec_path.parent) + + def define_build_environment( + self, + build_dir: StrPath = "", + index_config: IndexConfig | None = None, + ) -> "BuildEnvironment": + build_path = self.resolve_lexical_path(build_dir) + if index_config is None: + index_config = IndexConfig() + index_config.resolve_lexical_paths(self.spec_path.parent) + 
print("Defining runtime environments:") + runtimes = self._define_envs( + build_path, index_config, RuntimeEnv, self.runtimes + ) + print("Defining framework environments:") + frameworks = self._define_envs( + build_path, index_config, FrameworkEnv, self.frameworks + ) + for fw_env in frameworks.values(): + runtime = runtimes[fw_env.env_spec.runtime.name] + fw_env.link_base_runtime_paths(runtime) + print("Defining application environments:") + applications = self._define_envs( + build_path, index_config, ApplicationEnv, self.applications + ) + for app_env in applications.values(): + runtime = runtimes[app_env.env_spec.runtime.name] + app_env.link_layered_environments(runtime, frameworks) + return BuildEnvironment( + self, + runtimes, + frameworks, + applications, + build_path, + ) + + +@dataclass +class BuildEnvironment: + METADATA_DIR = "__venvstacks__" # Output subdirectory for the build metadata + METADATA_MANIFEST = "venvstacks.json" # File with full metadata for this build + METADATA_ENV_DIR = ( + "env_metadata" # Files with metadata snippets for each environment + ) + + # Specified on creation + stack_spec: StackSpec + runtimes: MutableMapping[LayerBaseName, RuntimeEnv] = field(repr=False) + frameworks: MutableMapping[LayerBaseName, FrameworkEnv] = field(repr=False) + applications: MutableMapping[LayerBaseName, ApplicationEnv] = field(repr=False) + build_path: Path + + def __post_init__(self) -> None: + # Resolve local config folders relative to spec path + stack_spec = self.stack_spec + self.build_path = stack_spec.resolve_lexical_path(self.build_path) + + # Provide more convenient access to selected stack_spec attributes + @property + def requirements_dir_path(self) -> Path: + return self.stack_spec.requirements_dir_path + + @property + def build_platform(self) -> str: + return self.stack_spec.build_platform + + # Iterators over various categories of included environments + def all_environments(self) -> Iterable[_PythonEnvironment]: + """Iterate over all 
defined environments. + + All runtimes are produced first, then frameworks, then applications. + """ + return chain( + self.runtimes.values(), self.frameworks.values(), self.applications.values() + ) + + def environments_to_lock(self) -> Iterable[_PythonEnvironment]: + """Iterate over all environments where locking is requested or allowed. + + Runtimes are produced first, then frameworks, then applications. + """ + for env in self.all_environments(): + if env.want_lock is not False: # Accepts `None` as meaning "lock if needed" + yield env + + def runtimes_to_lock(self) -> Iterable[RuntimeEnv]: + """Iterate over runtime environments where locking is requested or allowed.""" + for env in self.runtimes.values(): + if env.want_lock is not False: # Accepts `None` as meaning "lock if needed" + yield env + + def environments_to_build(self) -> Iterable[_PythonEnvironment]: + """Iterate over all environments where building is requested or allowed. + + Runtimes are produced first, then frameworks, then applications. + """ + for env in self.all_environments(): + if ( + env.want_build is not False + ): # Accepts `None` as meaning "build if needed" + yield env + + def runtimes_to_build(self) -> Iterable[RuntimeEnv]: + """Iterate over runtime environments where building is requested or allowed.""" + for env in self.runtimes.values(): + if ( + env.want_build is not False + ): # Accepts `None` as meaning "build if needed" + yield env + + def venvstacks_to_build(self) -> Iterable[_VirtualEnvironment]: + """Iterate over non-runtime environments where building is requested or allowed. + + Frameworks are produced first, then applications. + """ + for env in chain(self.frameworks.values(), self.applications.values()): + if ( + env.want_build is not False + ): # Accepts `None` as meaning "build if needed" + yield env + + def built_environments(self) -> Iterable[_PythonEnvironment]: + """Iterate over all environments that were built by this build process. 
+ + Runtimes are produced first, then frameworks, then applications. + """ + for env in self.all_environments(): + if env.was_built: + yield env + + def environments_to_publish(self) -> Iterable[_PythonEnvironment]: + """Iterate over all environments where publication is requested or allowed. + + Runtimes are produced first, then frameworks, then applications. + """ + for env in self.all_environments(): + if env.want_publish: # There's no "if needed" option for publication + yield env + + # Assign environments to the different operation categories + def select_operations( + self, + lock: bool | None = False, + build: bool | None = True, + publish: bool = True, + ) -> None: + """Configure the selected operations on all defined environments""" + for env in self.all_environments(): + env.select_operations(lock=lock, build=build, publish=publish) + + def get_unmatched_patterns(self, patterns: Iterable[str]) -> list[str]: + """Returns a list of the given patterns which do not match any environments""" + env_names = [env.env_name for env in self.all_environments()] + return [ + pattern + for pattern in patterns + if not any(fnmatch(env_name, pattern) for env_name in env_names) + ] + + def select_layers( + self, + include: Iterable[str], + lock: bool | None = False, + build: bool | None = True, + publish: bool = True, + lock_dependencies: bool = False, + build_dependencies: bool = False, + publish_dependencies: bool = False, + build_derived: bool = True, + publish_derived: bool = True, + ) -> None: + """Selectively configure operations only on the specified environments""" + # Ensure later pipeline stages are skipped when earlier ones are skipped + # Also update the related layer handling based on the enabled pipeline stages + if lock: + # When locking, locking derived layers is not optional + lock_derived = True + # Don't build or publish dependencies if they're not relocked + if not lock_dependencies: + build_dependencies = publish_dependencies = False + else: + # If 
the included layers aren't being locked, don't lock anything else + lock_derived = lock_dependencies = False + if build: + # When building, don't publish environments that haven't been built + if not build_dependencies: + publish_dependencies = False + if not build_derived: + publish_derived = False + else: + # If the included layers aren't being built, don't build anything else + build_derived = build_dependencies = False + if not publish: + # If the included layers aren't being published, don't publish anything else + publish_derived = publish_dependencies = False + # Identify explicitly included environments + envs_by_name: dict[EnvNameBuild, _PythonEnvironment] = { + env.env_name: env for env in self.all_environments() + } + inclusion_patterns = list(include) + included_envs: set[str] = set() + for env_name, env in envs_by_name.items(): + if any(fnmatch(env_name, pattern) for pattern in inclusion_patterns): + # Run all requested operations on this environment + included_envs.add(env_name) + env.select_operations(lock=lock, build=build, publish=publish) + else: + # Skip running operations on this environment + env.select_operations(lock=False, build=False, publish=False) + # Enable operations on related layers if requested + # Dependencies are always checked so they can be set to "if needed" locks & builds + check_derived = lock_derived or build_derived or publish_derived + derived_envs: set[EnvNameBuild] = set() + dependency_envs: set[EnvNameBuild] = set() + # Check frameworks + for fw_env in self.frameworks.values(): + env_name = fw_env.env_name + rt_env_name = fw_env.env_spec.runtime.env_name + if env_name in included_envs: + # This env is included, check if any of its dependencies need inclusion + if rt_env_name not in included_envs: + dependency_envs.add(rt_env_name) + elif check_derived: + # Check for the runtime this env depends on being included + if rt_env_name in included_envs: + derived_envs.add(env_name) + # Check applications + for app_env in 
self.applications.values(): + env_name = app_env.env_name + app_spec = app_env.env_spec + rt_env_name = app_spec.runtime.env_name + fw_env_names = [fw_spec.env_name for fw_spec in app_spec.frameworks] + if env_name in included_envs: + # This env is included, check if any of its dependencies need inclusion + for fw_env_name in fw_env_names: + if fw_env_name in included_envs: + continue + dependency_envs.add(fw_env_name) + if rt_env_name not in included_envs: + dependency_envs.add(rt_env_name) + elif check_derived: + # Check for any framework or the runtime this env depends on being included + if rt_env_name in included_envs: + derived_envs.add(env_name) + else: + for fw_env_name in fw_env_names: + if fw_env_name in included_envs: + derived_envs.add(env_name) + break + # Check if conflicting requirements have been given for any framework layers + potential_conflicts = derived_envs & dependency_envs + if potential_conflicts: + cause = None + if lock_derived != lock_dependencies: + cause = "lock" + elif build_derived != build_dependencies: + cause = "build" + elif publish_derived != publish_dependencies: + cause = "publication" + if cause is not None: + msg = f"Conflicting {cause} instructions for {sorted(potential_conflicts)}" + raise BuildEnvError(msg) + # Derived environments never need to be locked or built implicitly + for env_name in derived_envs: + env = envs_by_name[env_name] + env.select_operations( + lock=lock_derived, build=build_derived, publish=publish_derived + ) + # Dependencies are always allowed to be locked or built implicitly + # (this only happens if the operation's outputs don't exist yet) + for env_name in dependency_envs: + env = envs_by_name[env_name] + env.select_operations( + lock=lock_dependencies or None, # Allow locking if neeeded + build=build_dependencies or None, # Allow building if needed + publish=publish_dependencies, # No implicit publication + ) # fmt: skip + + # Define the various operations on the included environments + def 
_needs_lock(self) -> bool: + spec_dir = self.requirements_dir_path + build_platform = self.build_platform + + def lock_exists(spec: _PythonEnvironmentSpec) -> bool: + return spec.get_requirements_path(build_platform, spec_dir).exists() + + return not all(lock_exists(env.env_spec) for env in self.environments_to_lock()) + + def lock_environments(self, *, clean: bool = False) -> Sequence[EnvironmentLock]: + # Lock environments without fully building them + # Necessarily creates the runtime environments and + # installs any declared build dependencies + self.requirements_dir_path.mkdir(parents=True, exist_ok=True) + for rt_env in self.runtimes_to_lock(): + rt_env.create_build_environment(clean=clean) + return [env.lock_requirements() for env in self.environments_to_lock()] + + def create_environments(self, *, clean: bool = False, lock: bool = False) -> None: + # Base runtime environments need to exist before dependencies can be locked + self.build_path.mkdir(parents=True, exist_ok=True) + clean_runtime_envs = clean + if lock or self._needs_lock(): + clean_runtime_envs = False # Don't clean the runtime envs again below + self.lock_environments(clean=clean) + # Create base runtime environments + for rt_env in self.runtimes_to_build(): + rt_env.create_environment(clean=clean_runtime_envs) + # Create framework and application environments atop the base runtimes + for layered_env in self.venvstacks_to_build(): + layered_env.create_environment(clean=clean) + layered_env.report_python_site_details() + # Remove build packages that shouldn't be shipped + for env in reversed(list(self.built_environments())): + env.remove_build_only_packages() + # Fix up layered environments that were inadvertently relying on build dependencies + # Need to check all upper layers, not just those that declared the build-only dependencies + for env in self.built_environments(): + if env.kind == LayerVariants.RUNTIME: + # Runtimes have no dependencies, so nothing to check + continue + 
env.ensure_runtime_dependencies() + + @staticmethod + def _env_metadata_path( + env_metadata_dir: Path, env_name: EnvNameBuild, platform_tag: str = "" + ) -> Path: + return env_metadata_dir / f"{env_name}{platform_tag}.json" + + def _load_env_metadata( + self, env_metadata_dir: Path, env: _PythonEnvironment, platform_tag: str + ) -> Any: + metadata_path = self._env_metadata_path( + env_metadata_dir, env.env_name, platform_tag + ) + if not metadata_path.exists(): + return None + with metadata_path.open("r", encoding="utf-8") as f: + return json.load(f) + + def load_archive_metadata( + self, env_metadata_dir: Path, env: _PythonEnvironment, platform_tag: str = "" + ) -> ArchiveMetadata | None: + # mypy is right to complain that the JSON hasn't been validated to conform + # to the ArchiveMetadata interface, but we're OK with letting the runtime + # errors happen in that scenario. Longer term, explicit JSON schemas should be + # defined and used for validation when reading the metadata files. + metadata = self._load_env_metadata(env_metadata_dir, env, platform_tag) + return cast(ArchiveMetadata, metadata) + + def load_export_metadata( + self, env_metadata_dir: Path, env: _PythonEnvironment + ) -> ExportMetadata | None: + # mypy is right to complain that the JSON hasn't been validated to conform + # to the ExportMetadata interface, but we're OK with letting the runtime + # errors happen in that scenario. Longer term, explicit JSON schemas should be + # defined and used for validation when reading the metadata files. + metadata = self._load_env_metadata(env_metadata_dir, env, platform_tag="") + return cast(ExportMetadata, metadata) + + @overload + def publish_artifacts( + self, + output_dir: StrPath | None = ..., + *, + force: bool = ..., + tag_outputs: bool = ..., + dry_run: Literal[False] = ..., + ) -> PublishedArchivePaths: ... 
+ @overload + def publish_artifacts( + self, + output_dir: StrPath | None = ..., + *, + force: bool = ..., + tag_outputs: bool = ..., + dry_run: Literal[True] = ..., + ) -> tuple[Path, StackPublishingRequest]: ... + def publish_artifacts( + self, + output_dir: StrPath | None = None, + *, + force: bool = False, + tag_outputs: bool = False, + dry_run: bool = False, + ) -> PublishedArchivePaths | tuple[Path, StackPublishingRequest]: + layer_data: dict[ + LayerCategories, list[ArchiveMetadata | ArchiveBuildMetadata] + ] = { + RuntimeEnv.category: [], + FrameworkEnv.category: [], + ApplicationEnv.category: [], + } + archive_paths = [] + platform_tag = f"-{self.build_platform}" if tag_outputs else "" + if output_dir is None: + output_dir = self.build_path + output_path = self.stack_spec.resolve_lexical_path(output_dir) + metadata_dir = output_path / self.METADATA_DIR + env_metadata_dir = metadata_dir / self.METADATA_ENV_DIR + + if dry_run: + # Return metadata generated by a dry run rather than writing it to disk + for env in self.environments_to_publish(): + previous_metadata = self.load_archive_metadata( + env_metadata_dir, env, platform_tag + ) + build_request = env.define_archive_build( + output_path, + target_platform=self.build_platform, + tag_output=tag_outputs, + previous_metadata=previous_metadata, + ) + layer_data[env.category].append(build_request.build_metadata) + publishing_request: StackPublishingRequest = {"layers": layer_data} + return output_path, publishing_request + # Build all requested archives and export the corresponding manifests + output_path.mkdir(parents=True, exist_ok=True) + result_data = cast(dict[LayerCategories, list[ArchiveMetadata]], layer_data) + for env in self.environments_to_publish(): + previous_metadata = self.load_archive_metadata( + env_metadata_dir, env, platform_tag + ) + build_metadata, archive_path = env.create_archive( + output_path, + target_platform=self.build_platform, + tag_output=tag_outputs, + 
previous_metadata=previous_metadata, + force=force, + ) + archive_paths.append(archive_path) + result_data[env.category].append(build_metadata) + manifest_data: StackPublishingResult = {"layers": result_data} + manifest_path, snippet_paths = self.write_artifacts_manifest( + metadata_dir, manifest_data, platform_tag + ) + return PublishedArchivePaths(manifest_path, snippet_paths, archive_paths) + + def write_archive_metadata( + self, + env_metadata_dir: StrPath, + archive_metadata: ArchiveMetadata, + platform_tag: str = "", + ) -> Path: + env_metadata_dir_path = self.stack_spec.resolve_lexical_path(env_metadata_dir) + metadata_path = self._env_metadata_path( + env_metadata_dir_path, archive_metadata["layer_name"], platform_tag + ) + _write_json(metadata_path, archive_metadata) + return metadata_path + + def write_artifacts_manifest( + self, + metadata_dir: StrPath, + manifest_data: StackPublishingResult, + platform_tag: str = "", + ) -> tuple[Path, list[Path]]: + formatted_manifest = _format_json(manifest_data) + # Save the full build metadata + metadata_dir_path = self.stack_spec.resolve_lexical_path(metadata_dir) + metadata_dir_path.mkdir(parents=True, exist_ok=True) + manifest_path = metadata_dir_path / self.METADATA_MANIFEST + if platform_tag: + stem, sep, suffixes = manifest_path.name.partition(".") + tagged_manifest_name = f"{stem}{platform_tag}{sep}{suffixes}" + manifest_path = manifest_path.with_name(tagged_manifest_name) + with manifest_path.open("w", encoding="utf-8", newline="\n") as f: + f.write(formatted_manifest + "\n") + # Save the environment snippets (some of these may also have been loaded from disk) + env_metadata_dir = metadata_dir_path / self.METADATA_ENV_DIR + env_metadata_dir.mkdir(parents=True, exist_ok=True) + snippet_paths: list[Path] = [] + layer_metadata = manifest_data["layers"] + for category in ( + LayerCategories.RUNTIMES, + LayerCategories.FRAMEWORKS, + LayerCategories.APPLICATIONS, + ): + for env in layer_metadata[category]: + 
snippet_paths.append( + self.write_archive_metadata(env_metadata_dir, env, platform_tag) + ) + return manifest_path, snippet_paths + + @overload + def export_environments( + self, + output_dir: StrPath | None = ..., + *, + force: bool = ..., + dry_run: Literal[False] = ..., + ) -> ExportedEnvironmentPaths: ... + @overload + def export_environments( + self, + output_dir: StrPath | None = ..., + *, + force: bool = ..., + dry_run: Literal[True] = ..., + ) -> tuple[Path, StackExportRequest]: ... + def export_environments( + self, + output_dir: StrPath | None = None, + *, + force: bool = False, + dry_run: bool = False, + ) -> ExportedEnvironmentPaths | tuple[Path, StackExportRequest]: + export_data: dict[LayerCategories, list[ExportMetadata]] = { + RuntimeEnv.category: [], + FrameworkEnv.category: [], + ApplicationEnv.category: [], + } + export_paths = [] + if output_dir is None: + output_dir = self.build_path + output_path = self.stack_spec.resolve_lexical_path(output_dir) + metadata_dir = output_path / self.METADATA_DIR + env_metadata_dir = metadata_dir / self.METADATA_ENV_DIR + + if dry_run: + # Return metadata generated by a dry run rather than writing it to disk + for env in self.environments_to_publish(): + previous_metadata = self.load_export_metadata(env_metadata_dir, env) + export_request = env.request_export( + output_path, + previous_metadata=previous_metadata, + ) + export_data[env.category].append(export_request.export_metadata) + output_request: StackExportRequest = {"layers": export_data} + return output_path, output_request + # Export the requested environments and the corresponding manifests + output_path.mkdir(parents=True, exist_ok=True) + for env in self.environments_to_publish(): + previous_metadata = self.load_export_metadata(env_metadata_dir, env) + export_metadata, export_path = env.export_environment( + output_path, + previous_metadata=previous_metadata, + force=force, + ) + export_paths.append(export_path) + 
export_data[env.category].append(export_metadata) + manifest_data: StackExportRequest = {"layers": export_data} + manifest_path, snippet_paths = self.write_export_manifest( + metadata_dir, manifest_data + ) + return ExportedEnvironmentPaths(manifest_path, snippet_paths, export_paths) + + def write_env_metadata( + self, + env_metadata_dir: StrPath, + env_metadata: ExportMetadata, + platform_tag: str = "", + ) -> Path: + env_metadata_dir_path = self.stack_spec.resolve_lexical_path(env_metadata_dir) + metadata_path = self._env_metadata_path( + env_metadata_dir_path, env_metadata["layer_name"], platform_tag + ) + _write_json(metadata_path, env_metadata) + return metadata_path + + def write_export_manifest( + self, metadata_dir: StrPath, manifest_data: StackExportRequest + ) -> tuple[Path, list[Path]]: + formatted_manifest = _format_json(manifest_data) + # Save the full build metadata + metadata_dir_path = self.stack_spec.resolve_lexical_path(metadata_dir) + metadata_dir_path.mkdir(parents=True, exist_ok=True) + manifest_path = metadata_dir_path / self.METADATA_MANIFEST + with manifest_path.open("w", encoding="utf-8", newline="\n") as f: + f.write(formatted_manifest + "\n") + # Save the environment snippets (some of these may also have been loaded from disk) + env_metadata_dir = metadata_dir_path / self.METADATA_ENV_DIR + env_metadata_dir.mkdir(parents=True, exist_ok=True) + snippet_paths: list[Path] = [] + env_metadata = manifest_data["layers"] + for category in ( + LayerCategories.RUNTIMES, + LayerCategories.FRAMEWORKS, + LayerCategories.APPLICATIONS, + ): + for env in env_metadata[category]: + snippet_paths.append(self.write_env_metadata(env_metadata_dir, env)) + return manifest_path, snippet_paths diff --git a/tests/README.md b/tests/README.md new file mode 100644 index 0000000..e61a163 --- /dev/null +++ b/tests/README.md @@ -0,0 +1,86 @@ +Python layered environments test suite +====================================== + +Currently mostly a monolithic functional test 
suite checking that the `sample_project` +folder builds as expected on all supported platforms. + +Individual test cases can be written using either `pytest` or `unittest` based on which +makes the most sense for a given test case (managing the lifecycle of complex resources can +get confusing with `pytest`, so explicit class-based lifecycle management with `unittest` +may be easier in situations where `pytest` fixtures get annoying). + +Regardless of the specific framework used, the convention for binary assertions that can be +written in either order is to use `assert actual == expected` (pytest) or +`self.assertEqual(actual, expected)` (unittest) such that the actual value is on the left +and the expected value is on the right. + + +Running checks locally +---------------------- + +Static analysis: + + tox -e lint,typecheck + +Full test suite (py3.11 testing is also defined): + + tox -e py3.12 + +Skip slow tests (options after `--` are passed to `pytest`): + + tox -e py3.12 -- -m "not slow" + +Specific tests (options after `--` are passed to `pytest`): + + tox -e py3.12 -- -k test_minimal_project + +Refer to https://docs.pytest.org/en/6.2.x/usage.html#specifying-tests-selecting-tests for +additional details on how to select which tests to run. + + +Marking slow tests +------------------ + +Tests which take more than a few seconds to run should be marked as slow: + + @pytest.mark.slow + def test_locking_and_publishing(self) -> None: + ... + + +Updating metadata and examining built artifacts +----------------------------------------------- + +To generate a full local build to update metadata or to debug failures: + + $ cd /path/to/repo/src/ + $ ../.venv/bin/python -m venvstacks build --publish ../tests/sample_project/venvstacks.toml ~/path/to/output/folder + +(use `../.venv/Scripts/python` on Windows) + +This assumes `pdm sync --no-self --dev` has been used to set up a local development venv. 
+ +Alternatively, the following CI export variables may be set locally to export metadata and +built artifacts from the running test suite: + + VENVSTACKS_EXPORT_TEST_ARTIFACTS="~/path/to/output/folder" + VENVSTACKS_FORCE_TEST_EXPORT=1 + +The test suite can then be executed via `tox -e py3.11` or `tox -e py3.12` (the generated +metadata and artifacts should be identical regardless of which version of Python is used +to run `venvstacks`). + +If the forced export env var is not set or is set to the empty string, artifacts will only be +exported when test cases fail. Forcing exports can be useful for generating reference +artifacts and metadata when tests are passing locally but failing in pre-merge CI. + +The `misc/export_test_artifacts.sh` script can be used to simplify the creation of +reference artifacts for debugging purposes. + + +Debugging test suite failures related to artifact reproducibility +----------------------------------------------------------------- + +`diffoscope` is a very helpful utility when trying to track down artifact discrepancies +(only available for non-Windows systems, but can be used in WSL or another Linux environment +to examine artifacts produced on Windows). diff --git a/tests/expected-output-config.toml b/tests/expected-output-config.toml new file mode 100644 index 0000000..d105608 --- /dev/null +++ b/tests/expected-output-config.toml @@ -0,0 +1,37 @@ +######################################################## +# Expected output configuration for test cases +# +# Changes to this file will trigger a GitHub action that +# updates the relevant PR with the related changes to the +# expected lock file contents and output metadata. +######################################################## + +# Exact versions of packages which affect output details +# are pinned in the pyproject.toml dev dependencies. 
+# Those pins are repeated here and checked in the +# test suite so any attempts to update them will +# also trigger updates to the expected test results. +[pinned-dev-packages] +# Ensure the lockfiles don't unexpectedly change format +uv="0.4.21" +# Ensure the runtime layer hashes have the expected value +pbs-installer="2024.10.10" + +# Additional environment variable settings to ensure +# test_sample_project produces the expected metadata +[env] +UV_EXCLUDE_NEWER="2024-10-15 00:00:00+00:00" + +######################################################## +# Explicitly requested updates +######################################################## + +# While the metadata is not explicitly versioned, +# update the comment below to trigger metadata updates +# for new feature additions to the output metadata + +# Metadata updates can also be requested when the +# launch module content in the sample project changes + +# Last requested update: launch module autoformatting + diff --git a/tests/hash_fodder/different_file.txt b/tests/hash_fodder/different_file.txt new file mode 100644 index 0000000..80891fd --- /dev/null +++ b/tests/hash_fodder/different_file.txt @@ -0,0 +1 @@ +This file has different contents for the hashing tests diff --git a/tests/hash_fodder/file.txt b/tests/hash_fodder/file.txt new file mode 100644 index 0000000..c898f42 --- /dev/null +++ b/tests/hash_fodder/file.txt @@ -0,0 +1 @@ +Example file for hashing tests diff --git a/tests/hash_fodder/file_duplicate.txt b/tests/hash_fodder/file_duplicate.txt new file mode 100644 index 0000000..c898f42 --- /dev/null +++ b/tests/hash_fodder/file_duplicate.txt @@ -0,0 +1 @@ +Example file for hashing tests diff --git a/tests/hash_fodder/folder1/file.txt b/tests/hash_fodder/folder1/file.txt new file mode 100644 index 0000000..c898f42 --- /dev/null +++ b/tests/hash_fodder/folder1/file.txt @@ -0,0 +1 @@ +Example file for hashing tests diff --git a/tests/hash_fodder/folder1/subfolder/file.txt 
b/tests/hash_fodder/folder1/subfolder/file.txt new file mode 100644 index 0000000..c898f42 --- /dev/null +++ b/tests/hash_fodder/folder1/subfolder/file.txt @@ -0,0 +1 @@ +Example file for hashing tests diff --git a/tests/hash_fodder/folder2/file_duplicate.txt b/tests/hash_fodder/folder2/file_duplicate.txt new file mode 100644 index 0000000..c898f42 --- /dev/null +++ b/tests/hash_fodder/folder2/file_duplicate.txt @@ -0,0 +1 @@ +Example file for hashing tests diff --git a/tests/minimal_project/.gitignore b/tests/minimal_project/.gitignore new file mode 100644 index 0000000..a47ba99 --- /dev/null +++ b/tests/minimal_project/.gitignore @@ -0,0 +1,2 @@ +# Don't commit the locked requirements or the lock metadata for the minimal project +requirements/ diff --git a/tests/minimal_project/empty.py b/tests/minimal_project/empty.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/minimal_project/venvstacks.toml b/tests/minimal_project/venvstacks.toml new file mode 100644 index 0000000..b9ca72a --- /dev/null +++ b/tests/minimal_project/venvstacks.toml @@ -0,0 +1,22 @@ +# Minimal Python runtime, framework and application layers for venvstacks testing + +# See `sample_project` for a more complex environment build that covers more features. +# This minimal project is aimed at testing features like archive tagging where the +# actual content of the layers doesn't matter, the environments just need to exist. 
+ +[[runtimes]] +name = "cpython@3.11" +fully_versioned_name = "cpython@3.11.10" +requirements = [] +build_requirements = [] + +[[frameworks]] +name = "layer" +runtime = "cpython@3.11" +requirements = [] + +[[applications]] +name = "empty" +launch_module = "empty.py" +frameworks = ["layer"] +requirements = [] diff --git a/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/app-scipy-client.json b/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/app-scipy-client.json new file mode 100644 index 0000000..bbf76be --- /dev/null +++ b/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/app-scipy-client.json @@ -0,0 +1,21 @@ +{ + "app_launch_module": "scipy_client", + "app_launch_module_hash": "sha256/344b1be70920bd9635ce38fa14fca86b531ce2e334f54968321469c5fbb5b608", + "archive_build": 1, + "archive_hashes": { + "sha256": "dc50b3c60b5a3b0f8e9b4c8c746129f4f7b2bef94b65935026643c814e40fa74" + }, + "archive_name": "app-scipy-client.tar.xz", + "archive_size": 1424, + "install_target": "app-scipy-client", + "layer_name": "app-scipy-client", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:43.205589+00:00", + "required_layers": [ + "framework-scipy", + "framework-http-client" + ], + "requirements_hash": "sha256:fb8a843c694d03d7ee74b457cdac2bd82b6b439de0ed308d72fe698c6c9c6cf4", + "runtime_name": "cpython@3.11", + "target_platform": "linux_x86_64" +} diff --git a/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/app-scipy-import.json b/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/app-scipy-import.json new file mode 100644 index 0000000..1d645e8 --- /dev/null +++ b/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/app-scipy-import.json @@ -0,0 +1,20 @@ +{ + "app_launch_module": "scipy_import", + "app_launch_module_hash": "sha256:6278ff255372146d752518ffdf49f3432667d7c93997ed980b3676fdc75406ee", + "archive_build": 1, + "archive_hashes": { + "sha256": 
"bb52646bffa6cebadc37f8f0d7463b8b581c96116c89b4b77135c72b0b4099c4" + }, + "archive_name": "app-scipy-import.tar.xz", + "archive_size": 1336, + "install_target": "app-scipy-import", + "layer_name": "app-scipy-import", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:43.173589+00:00", + "required_layers": [ + "framework-scipy" + ], + "requirements_hash": "sha256:36b0dbfec94b7de6507f348f0823cd02fdca2ea79eeafe92d571c26ae347d150", + "runtime_name": "cpython@3.11", + "target_platform": "linux_x86_64" +} diff --git a/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/app-sklearn-import.json b/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/app-sklearn-import.json new file mode 100644 index 0000000..5d36d99 --- /dev/null +++ b/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/app-sklearn-import.json @@ -0,0 +1,20 @@ +{ + "app_launch_module": "sklearn_import", + "app_launch_module_hash": "sha256:b6de2b52093004bcc39df16d115929021937f77b5feda45d090b06116ea34f49", + "archive_build": 1, + "archive_hashes": { + "sha256": "20d8aa01d2a0c36b2cfc00c7cc1798025349add5eddb9f053b06427bc43ae928" + }, + "archive_name": "app-sklearn-import.tar.xz", + "archive_size": 1332, + "install_target": "app-sklearn-import", + "layer_name": "app-sklearn-import", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:43.237589+00:00", + "required_layers": [ + "framework-sklearn" + ], + "requirements_hash": "sha256:600b3bc658d940b756d5917e6fb7dec3431c5ce4ebc878f5d031e74f3ebdb7a9", + "runtime_name": "cpython@3.12", + "target_platform": "linux_x86_64" +} diff --git a/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/cpython@3.11.json b/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/cpython@3.11.json new file mode 100644 index 0000000..c7077ec --- /dev/null +++ b/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/cpython@3.11.json @@ -0,0 +1,15 @@ +{ + "archive_build": 1, + "archive_hashes": { + "sha256": 
"ab57f3a6fd70e1855773382bda7ddb3e4c2a520cd17ed7549e1983ca3410f47a" + }, + "archive_name": "cpython@3.11.tar.xz", + "archive_size": 29727160, + "install_target": "cpython@3.11", + "layer_name": "cpython@3.11", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:42.581585+00:00", + "requirements_hash": "sha256:8a2182bfe08ff2478e3fe614e4769fce8fa5fa2ea010a2976c8f74d5396fa706", + "runtime_name": "cpython@3.11.10", + "target_platform": "linux_x86_64" +} diff --git a/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/cpython@3.12.json b/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/cpython@3.12.json new file mode 100644 index 0000000..7cfc9e2 --- /dev/null +++ b/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/cpython@3.12.json @@ -0,0 +1,15 @@ +{ + "archive_build": 1, + "archive_hashes": { + "sha256": "bf3ad4fd54b7e0d5ec6f1df88838f3a0d809d4b4ea07ee37b6e073a96c2ac995" + }, + "archive_name": "cpython@3.12.tar.xz", + "archive_size": 42714592, + "install_target": "cpython@3.12", + "layer_name": "cpython@3.12", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:42.669585+00:00", + "requirements_hash": "sha256:8a2182bfe08ff2478e3fe614e4769fce8fa5fa2ea010a2976c8f74d5396fa706", + "runtime_name": "cpython@3.12.7", + "target_platform": "linux_x86_64" +} diff --git a/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/framework-http-client.json b/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/framework-http-client.json new file mode 100644 index 0000000..d7868bb --- /dev/null +++ b/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/framework-http-client.json @@ -0,0 +1,15 @@ +{ + "archive_build": 1, + "archive_hashes": { + "sha256": "66c677c7e2d823df72107f50ac0a3ce107d532947c9b24464561f9d74801851f" + }, + "archive_name": "framework-http-client.tar.xz", + "archive_size": 362600, + "install_target": "framework-http-client", + "layer_name": "framework-http-client", + 
"lock_version": 1, + "locked_at": "2024-10-15T10:23:43.145588+00:00", + "requirements_hash": "sha256:c9668bc44dcd9728f98686cb7d72b4cdfc3c3ed44512d29b279a484723c9525a", + "runtime_name": "cpython@3.11", + "target_platform": "linux_x86_64" +} diff --git a/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/framework-scipy.json b/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/framework-scipy.json new file mode 100644 index 0000000..6bc1209 --- /dev/null +++ b/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/framework-scipy.json @@ -0,0 +1,15 @@ +{ + "archive_build": 1, + "archive_hashes": { + "sha256": "b9cca34dcca0233adc912ab1cd637dc65e9bb2969dd74225a7a1c1951b61fd4d" + }, + "archive_name": "framework-scipy.tar.xz", + "archive_size": 23961856, + "install_target": "framework-scipy", + "layer_name": "framework-scipy", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:42.825586+00:00", + "requirements_hash": "sha256:36b0dbfec94b7de6507f348f0823cd02fdca2ea79eeafe92d571c26ae347d150", + "runtime_name": "cpython@3.11", + "target_platform": "linux_x86_64" +} diff --git a/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/framework-sklearn.json b/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/framework-sklearn.json new file mode 100644 index 0000000..ad01616 --- /dev/null +++ b/tests/sample_project/expected_manifests/linux_x86_64/env_metadata/framework-sklearn.json @@ -0,0 +1,15 @@ +{ + "archive_build": 1, + "archive_hashes": { + "sha256": "babdbccb6c127a897aadce388b6bd9afa7a23c391548b024cf7b0aa7a3a2bfb5" + }, + "archive_name": "framework-sklearn.tar.xz", + "archive_size": 30377392, + "install_target": "framework-sklearn", + "layer_name": "framework-sklearn", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:43.001587+00:00", + "requirements_hash": "sha256:600b3bc658d940b756d5917e6fb7dec3431c5ce4ebc878f5d031e74f3ebdb7a9", + "runtime_name": "cpython@3.12", + "target_platform": 
"linux_x86_64" +} diff --git a/tests/sample_project/expected_manifests/linux_x86_64/venvstacks.json b/tests/sample_project/expected_manifests/linux_x86_64/venvstacks.json new file mode 100644 index 0000000..b727e8b --- /dev/null +++ b/tests/sample_project/expected_manifests/linux_x86_64/venvstacks.json @@ -0,0 +1,146 @@ +{ + "layers": { + "applications": [ + { + "app_launch_module": "scipy_import", + "app_launch_module_hash": "sha256:6278ff255372146d752518ffdf49f3432667d7c93997ed980b3676fdc75406ee", + "archive_build": 1, + "archive_hashes": { + "sha256": "bb52646bffa6cebadc37f8f0d7463b8b581c96116c89b4b77135c72b0b4099c4" + }, + "archive_name": "app-scipy-import.tar.xz", + "archive_size": 1336, + "install_target": "app-scipy-import", + "layer_name": "app-scipy-import", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:43.173589+00:00", + "required_layers": [ + "framework-scipy" + ], + "requirements_hash": "sha256:36b0dbfec94b7de6507f348f0823cd02fdca2ea79eeafe92d571c26ae347d150", + "runtime_name": "cpython@3.11", + "target_platform": "linux_x86_64" + }, + { + "app_launch_module": "scipy_client", + "app_launch_module_hash": "sha256/344b1be70920bd9635ce38fa14fca86b531ce2e334f54968321469c5fbb5b608", + "archive_build": 1, + "archive_hashes": { + "sha256": "dc50b3c60b5a3b0f8e9b4c8c746129f4f7b2bef94b65935026643c814e40fa74" + }, + "archive_name": "app-scipy-client.tar.xz", + "archive_size": 1424, + "install_target": "app-scipy-client", + "layer_name": "app-scipy-client", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:43.205589+00:00", + "required_layers": [ + "framework-scipy", + "framework-http-client" + ], + "requirements_hash": "sha256:fb8a843c694d03d7ee74b457cdac2bd82b6b439de0ed308d72fe698c6c9c6cf4", + "runtime_name": "cpython@3.11", + "target_platform": "linux_x86_64" + }, + { + "app_launch_module": "sklearn_import", + "app_launch_module_hash": "sha256:b6de2b52093004bcc39df16d115929021937f77b5feda45d090b06116ea34f49", + "archive_build": 1, + "archive_hashes": 
{ + "sha256": "20d8aa01d2a0c36b2cfc00c7cc1798025349add5eddb9f053b06427bc43ae928" + }, + "archive_name": "app-sklearn-import.tar.xz", + "archive_size": 1332, + "install_target": "app-sklearn-import", + "layer_name": "app-sklearn-import", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:43.237589+00:00", + "required_layers": [ + "framework-sklearn" + ], + "requirements_hash": "sha256:600b3bc658d940b756d5917e6fb7dec3431c5ce4ebc878f5d031e74f3ebdb7a9", + "runtime_name": "cpython@3.12", + "target_platform": "linux_x86_64" + } + ], + "frameworks": [ + { + "archive_build": 1, + "archive_hashes": { + "sha256": "b9cca34dcca0233adc912ab1cd637dc65e9bb2969dd74225a7a1c1951b61fd4d" + }, + "archive_name": "framework-scipy.tar.xz", + "archive_size": 23961856, + "install_target": "framework-scipy", + "layer_name": "framework-scipy", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:42.825586+00:00", + "requirements_hash": "sha256:36b0dbfec94b7de6507f348f0823cd02fdca2ea79eeafe92d571c26ae347d150", + "runtime_name": "cpython@3.11", + "target_platform": "linux_x86_64" + }, + { + "archive_build": 1, + "archive_hashes": { + "sha256": "babdbccb6c127a897aadce388b6bd9afa7a23c391548b024cf7b0aa7a3a2bfb5" + }, + "archive_name": "framework-sklearn.tar.xz", + "archive_size": 30377392, + "install_target": "framework-sklearn", + "layer_name": "framework-sklearn", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:43.001587+00:00", + "requirements_hash": "sha256:600b3bc658d940b756d5917e6fb7dec3431c5ce4ebc878f5d031e74f3ebdb7a9", + "runtime_name": "cpython@3.12", + "target_platform": "linux_x86_64" + }, + { + "archive_build": 1, + "archive_hashes": { + "sha256": "66c677c7e2d823df72107f50ac0a3ce107d532947c9b24464561f9d74801851f" + }, + "archive_name": "framework-http-client.tar.xz", + "archive_size": 362600, + "install_target": "framework-http-client", + "layer_name": "framework-http-client", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:43.145588+00:00", + "requirements_hash": 
"sha256:c9668bc44dcd9728f98686cb7d72b4cdfc3c3ed44512d29b279a484723c9525a", + "runtime_name": "cpython@3.11", + "target_platform": "linux_x86_64" + } + ], + "runtimes": [ + { + "archive_build": 1, + "archive_hashes": { + "sha256": "ab57f3a6fd70e1855773382bda7ddb3e4c2a520cd17ed7549e1983ca3410f47a" + }, + "archive_name": "cpython@3.11.tar.xz", + "archive_size": 29727160, + "install_target": "cpython@3.11", + "layer_name": "cpython@3.11", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:42.581585+00:00", + "requirements_hash": "sha256:8a2182bfe08ff2478e3fe614e4769fce8fa5fa2ea010a2976c8f74d5396fa706", + "runtime_name": "cpython@3.11.10", + "target_platform": "linux_x86_64" + }, + { + "archive_build": 1, + "archive_hashes": { + "sha256": "bf3ad4fd54b7e0d5ec6f1df88838f3a0d809d4b4ea07ee37b6e073a96c2ac995" + }, + "archive_name": "cpython@3.12.tar.xz", + "archive_size": 42714592, + "install_target": "cpython@3.12", + "layer_name": "cpython@3.12", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:42.669585+00:00", + "requirements_hash": "sha256:8a2182bfe08ff2478e3fe614e4769fce8fa5fa2ea010a2976c8f74d5396fa706", + "runtime_name": "cpython@3.12.7", + "target_platform": "linux_x86_64" + } + ] + } +} diff --git a/tests/sample_project/expected_manifests/macosx_arm64/env_metadata/app-scipy-client.json b/tests/sample_project/expected_manifests/macosx_arm64/env_metadata/app-scipy-client.json new file mode 100644 index 0000000..531d8ab --- /dev/null +++ b/tests/sample_project/expected_manifests/macosx_arm64/env_metadata/app-scipy-client.json @@ -0,0 +1,21 @@ +{ + "app_launch_module": "scipy_client", + "app_launch_module_hash": "sha256/344b1be70920bd9635ce38fa14fca86b531ce2e334f54968321469c5fbb5b608", + "archive_build": 1, + "archive_hashes": { + "sha256": "b4f061f14023391f588940cb8f509ae8c46dd8fca552346c15df153f09eeb85d" + }, + "archive_name": "app-scipy-client.tar.xz", + "archive_size": 1400, + "install_target": "app-scipy-client", + "layer_name": "app-scipy-client", + 
"lock_version": 1, + "locked_at": "2024-10-15T10:23:33.147967+00:00", + "required_layers": [ + "framework-scipy", + "framework-http-client" + ], + "requirements_hash": "sha256:fb8a843c694d03d7ee74b457cdac2bd82b6b439de0ed308d72fe698c6c9c6cf4", + "runtime_name": "cpython@3.11", + "target_platform": "macosx_arm64" +} diff --git a/tests/sample_project/expected_manifests/macosx_arm64/env_metadata/app-scipy-import.json b/tests/sample_project/expected_manifests/macosx_arm64/env_metadata/app-scipy-import.json new file mode 100644 index 0000000..1e30333 --- /dev/null +++ b/tests/sample_project/expected_manifests/macosx_arm64/env_metadata/app-scipy-import.json @@ -0,0 +1,20 @@ +{ + "app_launch_module": "scipy_import", + "app_launch_module_hash": "sha256:6278ff255372146d752518ffdf49f3432667d7c93997ed980b3676fdc75406ee", + "archive_build": 1, + "archive_hashes": { + "sha256": "a7d51a74e9c5b370af3038bb5abb08fd81b49b446c02e6c16ff6b1fd9ac9025b" + }, + "archive_name": "app-scipy-import.tar.xz", + "archive_size": 1284, + "install_target": "app-scipy-import", + "layer_name": "app-scipy-import", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:33.121208+00:00", + "required_layers": [ + "framework-scipy" + ], + "requirements_hash": "sha256:36b0dbfec94b7de6507f348f0823cd02fdca2ea79eeafe92d571c26ae347d150", + "runtime_name": "cpython@3.11", + "target_platform": "macosx_arm64" +} diff --git a/tests/sample_project/expected_manifests/macosx_arm64/env_metadata/cpython@3.11.json b/tests/sample_project/expected_manifests/macosx_arm64/env_metadata/cpython@3.11.json new file mode 100644 index 0000000..383c281 --- /dev/null +++ b/tests/sample_project/expected_manifests/macosx_arm64/env_metadata/cpython@3.11.json @@ -0,0 +1,15 @@ +{ + "archive_build": 1, + "archive_hashes": { + "sha256": "bca47c4596578940d12523c4cdea9c54b80a4f3ff1bc74171320616d898ea11f" + }, + "archive_name": "cpython@3.11.tar.xz", + "archive_size": 14960344, + "install_target": "cpython@3.11", + "layer_name": 
"cpython@3.11", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:32.798085+00:00", + "requirements_hash": "sha256:8a2182bfe08ff2478e3fe614e4769fce8fa5fa2ea010a2976c8f74d5396fa706", + "runtime_name": "cpython@3.11.10", + "target_platform": "macosx_arm64" +} diff --git a/tests/sample_project/expected_manifests/macosx_arm64/env_metadata/cpython@3.12.json b/tests/sample_project/expected_manifests/macosx_arm64/env_metadata/cpython@3.12.json new file mode 100644 index 0000000..c0e3a43 --- /dev/null +++ b/tests/sample_project/expected_manifests/macosx_arm64/env_metadata/cpython@3.12.json @@ -0,0 +1,15 @@ +{ + "archive_build": 1, + "archive_hashes": { + "sha256": "0a6c9c612d2efee08d7f0d9b51ff2403f55c521826e7b135b1ab022911dbdf08" + }, + "archive_name": "cpython@3.12.tar.xz", + "archive_size": 13600552, + "install_target": "cpython@3.12", + "layer_name": "cpython@3.12", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:32.881474+00:00", + "requirements_hash": "sha256:8a2182bfe08ff2478e3fe614e4769fce8fa5fa2ea010a2976c8f74d5396fa706", + "runtime_name": "cpython@3.12.7", + "target_platform": "macosx_arm64" +} diff --git a/tests/sample_project/expected_manifests/macosx_arm64/env_metadata/framework-http-client.json b/tests/sample_project/expected_manifests/macosx_arm64/env_metadata/framework-http-client.json new file mode 100644 index 0000000..f6aa78d --- /dev/null +++ b/tests/sample_project/expected_manifests/macosx_arm64/env_metadata/framework-http-client.json @@ -0,0 +1,15 @@ +{ + "archive_build": 1, + "archive_hashes": { + "sha256": "ccaa6c54492390af869045894977bf7398b2922aad3622b6a6801ea2e02b1d23" + }, + "archive_name": "framework-http-client.tar.xz", + "archive_size": 362476, + "install_target": "framework-http-client", + "layer_name": "framework-http-client", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:33.094515+00:00", + "requirements_hash": "sha256:c9668bc44dcd9728f98686cb7d72b4cdfc3c3ed44512d29b279a484723c9525a", + "runtime_name": "cpython@3.11", + 
"target_platform": "macosx_arm64" +} diff --git a/tests/sample_project/expected_manifests/macosx_arm64/env_metadata/framework-scipy.json b/tests/sample_project/expected_manifests/macosx_arm64/env_metadata/framework-scipy.json new file mode 100644 index 0000000..331a486 --- /dev/null +++ b/tests/sample_project/expected_manifests/macosx_arm64/env_metadata/framework-scipy.json @@ -0,0 +1,15 @@ +{ + "archive_build": 1, + "archive_hashes": { + "sha256": "45d6b5abeddedc12978b54d8ff4a29db76495bd5ab7b7a051ed45b8aa8fc76b9" + }, + "archive_name": "framework-scipy.tar.xz", + "archive_size": 15077296, + "install_target": "framework-scipy", + "layer_name": "framework-scipy", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:32.960668+00:00", + "requirements_hash": "sha256:36b0dbfec94b7de6507f348f0823cd02fdca2ea79eeafe92d571c26ae347d150", + "runtime_name": "cpython@3.11", + "target_platform": "macosx_arm64" +} diff --git a/tests/sample_project/expected_manifests/macosx_arm64/env_metadata/framework-sklearn.json b/tests/sample_project/expected_manifests/macosx_arm64/env_metadata/framework-sklearn.json new file mode 100644 index 0000000..3acfd22 --- /dev/null +++ b/tests/sample_project/expected_manifests/macosx_arm64/env_metadata/framework-sklearn.json @@ -0,0 +1,15 @@ +{ + "archive_build": 1, + "archive_hashes": { + "sha256": "d7ec74fe1f72988bba8b6342e561037a01eee26b07039439b1fe92ad0f3594a1" + }, + "archive_name": "framework-sklearn.tar.xz", + "archive_size": 20687556, + "install_target": "framework-sklearn", + "layer_name": "framework-sklearn", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:33.041734+00:00", + "requirements_hash": "sha256:600b3bc658d940b756d5917e6fb7dec3431c5ce4ebc878f5d031e74f3ebdb7a9", + "runtime_name": "cpython@3.12", + "target_platform": "macosx_arm64" +} diff --git a/tests/sample_project/expected_manifests/macosx_arm64/venvstacks.json b/tests/sample_project/expected_manifests/macosx_arm64/venvstacks.json new file mode 100644 index 0000000..379b65f 
--- /dev/null +++ b/tests/sample_project/expected_manifests/macosx_arm64/venvstacks.json @@ -0,0 +1,126 @@ +{ + "layers": { + "applications": [ + { + "app_launch_module": "scipy_import", + "app_launch_module_hash": "sha256:6278ff255372146d752518ffdf49f3432667d7c93997ed980b3676fdc75406ee", + "archive_build": 1, + "archive_hashes": { + "sha256": "a7d51a74e9c5b370af3038bb5abb08fd81b49b446c02e6c16ff6b1fd9ac9025b" + }, + "archive_name": "app-scipy-import.tar.xz", + "archive_size": 1284, + "install_target": "app-scipy-import", + "layer_name": "app-scipy-import", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:33.121208+00:00", + "required_layers": [ + "framework-scipy" + ], + "requirements_hash": "sha256:36b0dbfec94b7de6507f348f0823cd02fdca2ea79eeafe92d571c26ae347d150", + "runtime_name": "cpython@3.11", + "target_platform": "macosx_arm64" + }, + { + "app_launch_module": "scipy_client", + "app_launch_module_hash": "sha256/344b1be70920bd9635ce38fa14fca86b531ce2e334f54968321469c5fbb5b608", + "archive_build": 1, + "archive_hashes": { + "sha256": "b4f061f14023391f588940cb8f509ae8c46dd8fca552346c15df153f09eeb85d" + }, + "archive_name": "app-scipy-client.tar.xz", + "archive_size": 1400, + "install_target": "app-scipy-client", + "layer_name": "app-scipy-client", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:33.147967+00:00", + "required_layers": [ + "framework-scipy", + "framework-http-client" + ], + "requirements_hash": "sha256:fb8a843c694d03d7ee74b457cdac2bd82b6b439de0ed308d72fe698c6c9c6cf4", + "runtime_name": "cpython@3.11", + "target_platform": "macosx_arm64" + } + ], + "frameworks": [ + { + "archive_build": 1, + "archive_hashes": { + "sha256": "45d6b5abeddedc12978b54d8ff4a29db76495bd5ab7b7a051ed45b8aa8fc76b9" + }, + "archive_name": "framework-scipy.tar.xz", + "archive_size": 15077296, + "install_target": "framework-scipy", + "layer_name": "framework-scipy", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:32.960668+00:00", + "requirements_hash": 
"sha256:36b0dbfec94b7de6507f348f0823cd02fdca2ea79eeafe92d571c26ae347d150", + "runtime_name": "cpython@3.11", + "target_platform": "macosx_arm64" + }, + { + "archive_build": 1, + "archive_hashes": { + "sha256": "d7ec74fe1f72988bba8b6342e561037a01eee26b07039439b1fe92ad0f3594a1" + }, + "archive_name": "framework-sklearn.tar.xz", + "archive_size": 20687556, + "install_target": "framework-sklearn", + "layer_name": "framework-sklearn", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:33.041734+00:00", + "requirements_hash": "sha256:600b3bc658d940b756d5917e6fb7dec3431c5ce4ebc878f5d031e74f3ebdb7a9", + "runtime_name": "cpython@3.12", + "target_platform": "macosx_arm64" + }, + { + "archive_build": 1, + "archive_hashes": { + "sha256": "ccaa6c54492390af869045894977bf7398b2922aad3622b6a6801ea2e02b1d23" + }, + "archive_name": "framework-http-client.tar.xz", + "archive_size": 362476, + "install_target": "framework-http-client", + "layer_name": "framework-http-client", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:33.094515+00:00", + "requirements_hash": "sha256:c9668bc44dcd9728f98686cb7d72b4cdfc3c3ed44512d29b279a484723c9525a", + "runtime_name": "cpython@3.11", + "target_platform": "macosx_arm64" + } + ], + "runtimes": [ + { + "archive_build": 1, + "archive_hashes": { + "sha256": "bca47c4596578940d12523c4cdea9c54b80a4f3ff1bc74171320616d898ea11f" + }, + "archive_name": "cpython@3.11.tar.xz", + "archive_size": 14960344, + "install_target": "cpython@3.11", + "layer_name": "cpython@3.11", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:32.798085+00:00", + "requirements_hash": "sha256:8a2182bfe08ff2478e3fe614e4769fce8fa5fa2ea010a2976c8f74d5396fa706", + "runtime_name": "cpython@3.11.10", + "target_platform": "macosx_arm64" + }, + { + "archive_build": 1, + "archive_hashes": { + "sha256": "0a6c9c612d2efee08d7f0d9b51ff2403f55c521826e7b135b1ab022911dbdf08" + }, + "archive_name": "cpython@3.12.tar.xz", + "archive_size": 13600552, + "install_target": "cpython@3.12", + 
"layer_name": "cpython@3.12", + "lock_version": 1, + "locked_at": "2024-10-15T10:23:32.881474+00:00", + "requirements_hash": "sha256:8a2182bfe08ff2478e3fe614e4769fce8fa5fa2ea010a2976c8f74d5396fa706", + "runtime_name": "cpython@3.12.7", + "target_platform": "macosx_arm64" + } + ] + } +} diff --git a/tests/sample_project/expected_manifests/win_amd64/env_metadata/app-scipy-client.json b/tests/sample_project/expected_manifests/win_amd64/env_metadata/app-scipy-client.json new file mode 100644 index 0000000..89a0d76 --- /dev/null +++ b/tests/sample_project/expected_manifests/win_amd64/env_metadata/app-scipy-client.json @@ -0,0 +1,21 @@ +{ + "app_launch_module": "scipy_client", + "app_launch_module_hash": "sha256/344b1be70920bd9635ce38fa14fca86b531ce2e334f54968321469c5fbb5b608", + "archive_build": 1, + "archive_hashes": { + "sha256": "a4229a9be2b24b0739b11729278d9acb68320b1d36b1f2a7d46191b359f4df74" + }, + "archive_name": "app-scipy-client.zip", + "archive_size": 255062, + "install_target": "app-scipy-client", + "layer_name": "app-scipy-client", + "lock_version": 1, + "locked_at": "2024-10-15T10:24:36.468633+00:00", + "required_layers": [ + "framework-scipy", + "framework-http-client" + ], + "requirements_hash": "sha256:3bff0428616a2f1724732e78e7788e753dd5f1aa10aa5d3b87707b8dbde121de", + "runtime_name": "cpython@3.11.10", + "target_platform": "win_amd64" +} diff --git a/tests/sample_project/expected_manifests/win_amd64/env_metadata/app-scipy-import.json b/tests/sample_project/expected_manifests/win_amd64/env_metadata/app-scipy-import.json new file mode 100644 index 0000000..75bff50 --- /dev/null +++ b/tests/sample_project/expected_manifests/win_amd64/env_metadata/app-scipy-import.json @@ -0,0 +1,20 @@ +{ + "app_launch_module": "scipy_import", + "app_launch_module_hash": "sha256:6278ff255372146d752518ffdf49f3432667d7c93997ed980b3676fdc75406ee", + "archive_build": 1, + "archive_hashes": { + "sha256": "3ebb818738bdd10e5eebdb48fda4be41ac73af88890ba9f749c7dbb23ce2aaa8" + }, + 
"archive_name": "app-scipy-import.zip", + "archive_size": 254578, + "install_target": "app-scipy-import", + "layer_name": "app-scipy-import", + "lock_version": 1, + "locked_at": "2024-10-15T10:24:36.386938+00:00", + "required_layers": [ + "framework-scipy" + ], + "requirements_hash": "sha256:9aba38b5efe287f35d58825dce6b1c47ed556b930056e6edc00ca9e1a165796b", + "runtime_name": "cpython@3.11.10", + "target_platform": "win_amd64" +} diff --git a/tests/sample_project/expected_manifests/win_amd64/env_metadata/cpython@3.11.json b/tests/sample_project/expected_manifests/win_amd64/env_metadata/cpython@3.11.json new file mode 100644 index 0000000..5346397 --- /dev/null +++ b/tests/sample_project/expected_manifests/win_amd64/env_metadata/cpython@3.11.json @@ -0,0 +1,15 @@ +{ + "archive_build": 1, + "archive_hashes": { + "sha256": "70a00f1e469d6c4ab82e0d74946ca6da0fb97ee480b294d93e6b217d8a9f3cd0" + }, + "archive_name": "cpython@3.11.zip", + "archive_size": 46592259, + "install_target": "cpython@3.11", + "layer_name": "cpython@3.11", + "lock_version": 1, + "locked_at": "2024-10-15T10:24:35.734694+00:00", + "requirements_hash": "sha256:5df8a095e5cc8c181fd46278ab782f5ae09ab0cba19b74f1aaa25f9fe5a9f100", + "runtime_name": "cpython@3.11.10", + "target_platform": "win_amd64" +} diff --git a/tests/sample_project/expected_manifests/win_amd64/env_metadata/cpython@3.12.json b/tests/sample_project/expected_manifests/win_amd64/env_metadata/cpython@3.12.json new file mode 100644 index 0000000..508e5a9 --- /dev/null +++ b/tests/sample_project/expected_manifests/win_amd64/env_metadata/cpython@3.12.json @@ -0,0 +1,15 @@ +{ + "archive_build": 1, + "archive_hashes": { + "sha256": "ab8860e56b834ff4b9eacd11e8a0c705c83edac6b1627990f5317e1514314d76" + }, + "archive_name": "cpython@3.12.zip", + "archive_size": 45864491, + "install_target": "cpython@3.12", + "layer_name": "cpython@3.12", + "lock_version": 1, + "locked_at": "2024-10-15T10:24:35.816641+00:00", + "requirements_hash": 
"sha256:49df6762bcf65e93f7202af1527f129bb55dbd06ff5dff17e5b4949d3fba98de", + "runtime_name": "cpython@3.12.7", + "target_platform": "win_amd64" +} diff --git a/tests/sample_project/expected_manifests/win_amd64/env_metadata/framework-http-client.json b/tests/sample_project/expected_manifests/win_amd64/env_metadata/framework-http-client.json new file mode 100644 index 0000000..6af3112 --- /dev/null +++ b/tests/sample_project/expected_manifests/win_amd64/env_metadata/framework-http-client.json @@ -0,0 +1,15 @@ +{ + "archive_build": 1, + "archive_hashes": { + "sha256": "b0193afc9d1f4a51b9fcb60903d335696804d2b35115986c68e4ebf5aaf1b621" + }, + "archive_name": "framework-http-client.zip", + "archive_size": 817522, + "install_target": "framework-http-client", + "layer_name": "framework-http-client", + "lock_version": 1, + "locked_at": "2024-10-15T10:24:36.316012+00:00", + "requirements_hash": "sha256:71ba32717956dcf9c0d4e18b69061efbe53816f4b591ceb50553d6d3d12e1960", + "runtime_name": "cpython@3.11.10", + "target_platform": "win_amd64" +} diff --git a/tests/sample_project/expected_manifests/win_amd64/env_metadata/framework-scipy.json b/tests/sample_project/expected_manifests/win_amd64/env_metadata/framework-scipy.json new file mode 100644 index 0000000..c86bc68 --- /dev/null +++ b/tests/sample_project/expected_manifests/win_amd64/env_metadata/framework-scipy.json @@ -0,0 +1,15 @@ +{ + "archive_build": 1, + "archive_hashes": { + "sha256": "132cd5e29dae8d082c453b276e5e207e5f82f29458db62d6c7df2a01b8225f92" + }, + "archive_name": "framework-scipy.zip", + "archive_size": 45078361, + "install_target": "framework-scipy", + "layer_name": "framework-scipy", + "lock_version": 1, + "locked_at": "2024-10-15T10:24:35.999197+00:00", + "requirements_hash": "sha256:9aba38b5efe287f35d58825dce6b1c47ed556b930056e6edc00ca9e1a165796b", + "runtime_name": "cpython@3.11.10", + "target_platform": "win_amd64" +} diff --git 
a/tests/sample_project/expected_manifests/win_amd64/env_metadata/framework-sklearn.json b/tests/sample_project/expected_manifests/win_amd64/env_metadata/framework-sklearn.json new file mode 100644 index 0000000..6bb7c4f --- /dev/null +++ b/tests/sample_project/expected_manifests/win_amd64/env_metadata/framework-sklearn.json @@ -0,0 +1,15 @@ +{ + "archive_build": 1, + "archive_hashes": { + "sha256": "eaaa57b8f636eb0258bf4b4ac11a7a76a4691b1659d9a14bd8b908129f75e936" + }, + "archive_name": "framework-sklearn.zip", + "archive_size": 56185753, + "install_target": "framework-sklearn", + "layer_name": "framework-sklearn", + "lock_version": 1, + "locked_at": "2024-10-15T10:24:36.193328+00:00", + "requirements_hash": "sha256:52d03445e70ccc0f6f3fe8523185f9f4b0b2795b83927d8703fba35fdc82bf62", + "runtime_name": "cpython@3.12.7", + "target_platform": "win_amd64" +} diff --git a/tests/sample_project/expected_manifests/win_amd64/venvstacks.json b/tests/sample_project/expected_manifests/win_amd64/venvstacks.json new file mode 100644 index 0000000..f9690d0 --- /dev/null +++ b/tests/sample_project/expected_manifests/win_amd64/venvstacks.json @@ -0,0 +1,126 @@ +{ + "layers": { + "applications": [ + { + "app_launch_module": "scipy_import", + "app_launch_module_hash": "sha256:6278ff255372146d752518ffdf49f3432667d7c93997ed980b3676fdc75406ee", + "archive_build": 1, + "archive_hashes": { + "sha256": "3ebb818738bdd10e5eebdb48fda4be41ac73af88890ba9f749c7dbb23ce2aaa8" + }, + "archive_name": "app-scipy-import.zip", + "archive_size": 254578, + "install_target": "app-scipy-import", + "layer_name": "app-scipy-import", + "lock_version": 1, + "locked_at": "2024-10-15T10:24:36.386938+00:00", + "required_layers": [ + "framework-scipy" + ], + "requirements_hash": "sha256:9aba38b5efe287f35d58825dce6b1c47ed556b930056e6edc00ca9e1a165796b", + "runtime_name": "cpython@3.11.10", + "target_platform": "win_amd64" + }, + { + "app_launch_module": "scipy_client", + "app_launch_module_hash": 
"sha256/344b1be70920bd9635ce38fa14fca86b531ce2e334f54968321469c5fbb5b608", + "archive_build": 1, + "archive_hashes": { + "sha256": "a4229a9be2b24b0739b11729278d9acb68320b1d36b1f2a7d46191b359f4df74" + }, + "archive_name": "app-scipy-client.zip", + "archive_size": 255062, + "install_target": "app-scipy-client", + "layer_name": "app-scipy-client", + "lock_version": 1, + "locked_at": "2024-10-15T10:24:36.468633+00:00", + "required_layers": [ + "framework-scipy", + "framework-http-client" + ], + "requirements_hash": "sha256:3bff0428616a2f1724732e78e7788e753dd5f1aa10aa5d3b87707b8dbde121de", + "runtime_name": "cpython@3.11.10", + "target_platform": "win_amd64" + } + ], + "frameworks": [ + { + "archive_build": 1, + "archive_hashes": { + "sha256": "132cd5e29dae8d082c453b276e5e207e5f82f29458db62d6c7df2a01b8225f92" + }, + "archive_name": "framework-scipy.zip", + "archive_size": 45078361, + "install_target": "framework-scipy", + "layer_name": "framework-scipy", + "lock_version": 1, + "locked_at": "2024-10-15T10:24:35.999197+00:00", + "requirements_hash": "sha256:9aba38b5efe287f35d58825dce6b1c47ed556b930056e6edc00ca9e1a165796b", + "runtime_name": "cpython@3.11.10", + "target_platform": "win_amd64" + }, + { + "archive_build": 1, + "archive_hashes": { + "sha256": "eaaa57b8f636eb0258bf4b4ac11a7a76a4691b1659d9a14bd8b908129f75e936" + }, + "archive_name": "framework-sklearn.zip", + "archive_size": 56185753, + "install_target": "framework-sklearn", + "layer_name": "framework-sklearn", + "lock_version": 1, + "locked_at": "2024-10-15T10:24:36.193328+00:00", + "requirements_hash": "sha256:52d03445e70ccc0f6f3fe8523185f9f4b0b2795b83927d8703fba35fdc82bf62", + "runtime_name": "cpython@3.12.7", + "target_platform": "win_amd64" + }, + { + "archive_build": 1, + "archive_hashes": { + "sha256": "b0193afc9d1f4a51b9fcb60903d335696804d2b35115986c68e4ebf5aaf1b621" + }, + "archive_name": "framework-http-client.zip", + "archive_size": 817522, + "install_target": "framework-http-client", + "layer_name": 
"framework-http-client", + "lock_version": 1, + "locked_at": "2024-10-15T10:24:36.316012+00:00", + "requirements_hash": "sha256:71ba32717956dcf9c0d4e18b69061efbe53816f4b591ceb50553d6d3d12e1960", + "runtime_name": "cpython@3.11.10", + "target_platform": "win_amd64" + } + ], + "runtimes": [ + { + "archive_build": 1, + "archive_hashes": { + "sha256": "70a00f1e469d6c4ab82e0d74946ca6da0fb97ee480b294d93e6b217d8a9f3cd0" + }, + "archive_name": "cpython@3.11.zip", + "archive_size": 46592259, + "install_target": "cpython@3.11", + "layer_name": "cpython@3.11", + "lock_version": 1, + "locked_at": "2024-10-15T10:24:35.734694+00:00", + "requirements_hash": "sha256:5df8a095e5cc8c181fd46278ab782f5ae09ab0cba19b74f1aaa25f9fe5a9f100", + "runtime_name": "cpython@3.11.10", + "target_platform": "win_amd64" + }, + { + "archive_build": 1, + "archive_hashes": { + "sha256": "ab8860e56b834ff4b9eacd11e8a0c705c83edac6b1627990f5317e1514314d76" + }, + "archive_name": "cpython@3.12.zip", + "archive_size": 45864491, + "install_target": "cpython@3.12", + "layer_name": "cpython@3.12", + "lock_version": 1, + "locked_at": "2024-10-15T10:24:35.816641+00:00", + "requirements_hash": "sha256:49df6762bcf65e93f7202af1527f129bb55dbd06ff5dff17e5b4949d3fba98de", + "runtime_name": "cpython@3.12.7", + "target_platform": "win_amd64" + } + ] + } +} diff --git a/tests/sample_project/launch_modules/scipy_client/__main__.py b/tests/sample_project/launch_modules/scipy_client/__main__.py new file mode 100644 index 0000000..419ab58 --- /dev/null +++ b/tests/sample_project/launch_modules/scipy_client/__main__.py @@ -0,0 +1,6 @@ +"""Sample launch package using a helper module""" + +if __name__ == "__main__": + from .cli import main + + main() diff --git a/tests/sample_project/launch_modules/scipy_client/cli.py b/tests/sample_project/launch_modules/scipy_client/cli.py new file mode 100644 index 0000000..a48054b --- /dev/null +++ b/tests/sample_project/launch_modules/scipy_client/cli.py @@ -0,0 +1,14 @@ +"""Sample CLI 
helper module importing scipy and httpx""" + +import scipy +import httpx + + +def main(): + # The app-scipy-client environment should NOT have access to pip, or sklearn + from importlib.util import find_spec + + for disallowed in ("pip", "sklearn"): + if find_spec(disallowed): + raise RuntimeError(f"Should not be able to import {disallowed!r}!") + print("Environment launch module executed successfully") diff --git a/tests/sample_project/launch_modules/scipy_import.py b/tests/sample_project/launch_modules/scipy_import.py new file mode 100644 index 0000000..17ae63d --- /dev/null +++ b/tests/sample_project/launch_modules/scipy_import.py @@ -0,0 +1,13 @@ +"""Sample launch module importing scipy""" + +import scipy + +if __name__ == "__main__": + # The app-scipy-import environment should NOT have access to pip, sklearn or httpx + from importlib.util import find_spec + + for disallowed in ("pip", "sklearn", "httpx"): + if find_spec(disallowed): + raise RuntimeError(f"Should not be able to import {disallowed!r}!") + + print("Environment launch module executed successfully") diff --git a/tests/sample_project/launch_modules/sklearn_import.py b/tests/sample_project/launch_modules/sklearn_import.py new file mode 100644 index 0000000..2196c19 --- /dev/null +++ b/tests/sample_project/launch_modules/sklearn_import.py @@ -0,0 +1,13 @@ +"""Sample launch module importing sklearn""" + +import sklearn + +if __name__ == "__main__": + # The app-sklearn-import environment should NOT have access to pip or httpx + from importlib.util import find_spec + + for disallowed in ("pip", "httpx"): + if find_spec(disallowed): + raise RuntimeError(f"Should not be able to import {disallowed!r}!") + + print("Environment launch module executed successfully") diff --git a/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-linux_x86_64.in b/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-linux_x86_64.in new file mode 100644 index 
0000000..26cc92e --- /dev/null +++ b/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-linux_x86_64.in @@ -0,0 +1,4 @@ +# DO NOT EDIT. Auto-generated as part of deployment bundle build. +# Relock bundle dependencies to update. +scipy +httpx diff --git a/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-linux_x86_64.txt b/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-linux_x86_64.txt new file mode 100644 index 0000000..dd35c26 --- /dev/null +++ b/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-linux_x86_64.txt @@ -0,0 +1,111 @@ +# This file was autogenerated by uv via the following command: +# python -Im layered_envs lock +anyio==4.6.2.post1 \ + --hash=sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c \ + --hash=sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d +certifi==2024.8.30 \ + --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ + --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 +h11==0.14.0 \ + --hash=sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d \ + --hash=sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761 +httpcore==1.0.6 \ + --hash=sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f \ + --hash=sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f +httpx==0.27.2 \ + --hash=sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0 \ + --hash=sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2 +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 +numpy==2.1.2 \ + --hash=sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8 \ + 
--hash=sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466 \ + --hash=sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35 \ + --hash=sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c \ + --hash=sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4 \ + --hash=sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6 \ + --hash=sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0 \ + --hash=sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7 \ + --hash=sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a \ + --hash=sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a \ + --hash=sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e \ + --hash=sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62 \ + --hash=sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2 \ + --hash=sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5 \ + --hash=sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee \ + --hash=sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe \ + --hash=sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a \ + --hash=sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e \ + --hash=sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf \ + --hash=sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c \ + --hash=sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3 \ + --hash=sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86 \ + --hash=sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df \ + --hash=sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98 \ + --hash=sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d \ + 
--hash=sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2 \ + --hash=sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146 \ + --hash=sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550 \ + --hash=sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8 \ + --hash=sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb \ + --hash=sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e \ + --hash=sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d \ + --hash=sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366 \ + --hash=sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0 \ + --hash=sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db \ + --hash=sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe \ + --hash=sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426 \ + --hash=sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952 \ + --hash=sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03 \ + --hash=sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f \ + --hash=sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7 \ + --hash=sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b \ + --hash=sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17 \ + --hash=sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5 \ + --hash=sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1 \ + --hash=sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142 \ + --hash=sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884 \ + --hash=sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a \ + --hash=sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9 \ + 
--hash=sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445 \ + --hash=sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1 \ + --hash=sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1 \ + --hash=sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648 +scipy==1.14.1 \ + --hash=sha256:0c2f95de3b04e26f5f3ad5bb05e74ba7f68b837133a4492414b3afd79dfe540e \ + --hash=sha256:1729560c906963fc8389f6aac023739ff3983e727b1a4d87696b7bf108316a79 \ + --hash=sha256:278266012eb69f4a720827bdd2dc54b2271c97d84255b2faaa8f161a158c3b37 \ + --hash=sha256:2843f2d527d9eebec9a43e6b406fb7266f3af25a751aa91d62ff416f54170bc5 \ + --hash=sha256:2da0469a4ef0ecd3693761acbdc20f2fdeafb69e6819cc081308cc978153c675 \ + --hash=sha256:2ff0a7e01e422c15739ecd64432743cf7aae2b03f3084288f399affcefe5222d \ + --hash=sha256:2ff38e22128e6c03ff73b6bb0f85f897d2362f8c052e3b8ad00532198fbdae3f \ + --hash=sha256:30ac8812c1d2aab7131a79ba62933a2a76f582d5dbbc695192453dae67ad6310 \ + --hash=sha256:3a1b111fac6baec1c1d92f27e76511c9e7218f1695d61b59e05e0fe04dc59617 \ + --hash=sha256:4079b90df244709e675cdc8b93bfd8a395d59af40b72e339c2287c91860deb8e \ + --hash=sha256:5149e3fd2d686e42144a093b206aef01932a0059c2a33ddfa67f5f035bdfe13e \ + --hash=sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417 \ + --hash=sha256:631f07b3734d34aced009aaf6fedfd0eb3498a97e581c3b1e5f14a04164a456d \ + --hash=sha256:716e389b694c4bb564b4fc0c51bc84d381735e0d39d3f26ec1af2556ec6aad94 \ + --hash=sha256:8426251ad1e4ad903a4514712d2fa8fdd5382c978010d1c6f5f37ef286a713ad \ + --hash=sha256:8475230e55549ab3f207bff11ebfc91c805dc3463ef62eda3ccf593254524ce8 \ + --hash=sha256:8bddf15838ba768bb5f5083c1ea012d64c9a444e16192762bd858f1e126196d0 \ + --hash=sha256:8e32dced201274bf96899e6491d9ba3e9a5f6b336708656466ad0522d8528f69 \ + --hash=sha256:8f9ea80f2e65bdaa0b7627fb00cbeb2daf163caa015e59b7516395fe3bd1e066 \ + 
--hash=sha256:97c5dddd5932bd2a1a31c927ba5e1463a53b87ca96b5c9bdf5dfd6096e27efc3 \ + --hash=sha256:a49f6ed96f83966f576b33a44257d869756df6cf1ef4934f59dd58b25e0327e5 \ + --hash=sha256:af29a935803cc707ab2ed7791c44288a682f9c8107bc00f0eccc4f92c08d6e07 \ + --hash=sha256:b05d43735bb2f07d689f56f7b474788a13ed8adc484a85aa65c0fd931cf9ccd2 \ + --hash=sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389 \ + --hash=sha256:b99722ea48b7ea25e8e015e8341ae74624f72e5f21fc2abd45f3a93266de4c5d \ + --hash=sha256:baff393942b550823bfce952bb62270ee17504d02a1801d7fd0719534dfb9c84 \ + --hash=sha256:c0ee987efa6737242745f347835da2cc5bb9f1b42996a4d97d5c7ff7928cb6f2 \ + --hash=sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3 \ + --hash=sha256:e0cf28db0f24a38b2a0ca33a85a54852586e43cf6fd876365c86e0657cfe7d73 \ + --hash=sha256:e4f5a7c49323533f9103d4dacf4e4f07078f360743dec7f7596949149efeec06 \ + --hash=sha256:eb58ca0abd96911932f688528977858681a59d61a7ce908ffd355957f7025cfc \ + --hash=sha256:edaf02b82cd7639db00dbff629995ef185c8df4c3ffa71a5562a595765a06ce1 \ + --hash=sha256:fef8c87f8abfb884dac04e97824b61299880c43f4ce675dd2cbeadd3c9b466d2 +sniffio==1.3.1 \ + --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \ + --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc diff --git a/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-linux_x86_64.txt.json b/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-linux_x86_64.txt.json new file mode 100644 index 0000000..2046c53 --- /dev/null +++ b/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-linux_x86_64.txt.json @@ -0,0 +1,4 @@ +{ + "locked_at": "2024-10-15T10:23:43.205589+00:00", + "requirements_hash": "sha256:fb8a843c694d03d7ee74b457cdac2bd82b6b439de0ed308d72fe698c6c9c6cf4" +} diff --git 
a/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-macosx_arm64.in b/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-macosx_arm64.in new file mode 100644 index 0000000..26cc92e --- /dev/null +++ b/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-macosx_arm64.in @@ -0,0 +1,4 @@ +# DO NOT EDIT. Auto-generated as part of deployment bundle build. +# Relock bundle dependencies to update. +scipy +httpx diff --git a/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-macosx_arm64.txt b/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-macosx_arm64.txt new file mode 100644 index 0000000..dd35c26 --- /dev/null +++ b/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-macosx_arm64.txt @@ -0,0 +1,111 @@ +# This file was autogenerated by uv via the following command: +# python -Im layered_envs lock +anyio==4.6.2.post1 \ + --hash=sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c \ + --hash=sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d +certifi==2024.8.30 \ + --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ + --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 +h11==0.14.0 \ + --hash=sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d \ + --hash=sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761 +httpcore==1.0.6 \ + --hash=sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f \ + --hash=sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f +httpx==0.27.2 \ + --hash=sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0 \ + --hash=sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2 +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + 
--hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 +numpy==2.1.2 \ + --hash=sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8 \ + --hash=sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466 \ + --hash=sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35 \ + --hash=sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c \ + --hash=sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4 \ + --hash=sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6 \ + --hash=sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0 \ + --hash=sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7 \ + --hash=sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a \ + --hash=sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a \ + --hash=sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e \ + --hash=sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62 \ + --hash=sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2 \ + --hash=sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5 \ + --hash=sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee \ + --hash=sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe \ + --hash=sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a \ + --hash=sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e \ + --hash=sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf \ + --hash=sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c \ + --hash=sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3 \ + --hash=sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86 \ + 
--hash=sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df \ + --hash=sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98 \ + --hash=sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d \ + --hash=sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2 \ + --hash=sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146 \ + --hash=sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550 \ + --hash=sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8 \ + --hash=sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb \ + --hash=sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e \ + --hash=sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d \ + --hash=sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366 \ + --hash=sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0 \ + --hash=sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db \ + --hash=sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe \ + --hash=sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426 \ + --hash=sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952 \ + --hash=sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03 \ + --hash=sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f \ + --hash=sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7 \ + --hash=sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b \ + --hash=sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17 \ + --hash=sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5 \ + --hash=sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1 \ + --hash=sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142 \ + 
--hash=sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884 \ + --hash=sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a \ + --hash=sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9 \ + --hash=sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445 \ + --hash=sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1 \ + --hash=sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1 \ + --hash=sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648 +scipy==1.14.1 \ + --hash=sha256:0c2f95de3b04e26f5f3ad5bb05e74ba7f68b837133a4492414b3afd79dfe540e \ + --hash=sha256:1729560c906963fc8389f6aac023739ff3983e727b1a4d87696b7bf108316a79 \ + --hash=sha256:278266012eb69f4a720827bdd2dc54b2271c97d84255b2faaa8f161a158c3b37 \ + --hash=sha256:2843f2d527d9eebec9a43e6b406fb7266f3af25a751aa91d62ff416f54170bc5 \ + --hash=sha256:2da0469a4ef0ecd3693761acbdc20f2fdeafb69e6819cc081308cc978153c675 \ + --hash=sha256:2ff0a7e01e422c15739ecd64432743cf7aae2b03f3084288f399affcefe5222d \ + --hash=sha256:2ff38e22128e6c03ff73b6bb0f85f897d2362f8c052e3b8ad00532198fbdae3f \ + --hash=sha256:30ac8812c1d2aab7131a79ba62933a2a76f582d5dbbc695192453dae67ad6310 \ + --hash=sha256:3a1b111fac6baec1c1d92f27e76511c9e7218f1695d61b59e05e0fe04dc59617 \ + --hash=sha256:4079b90df244709e675cdc8b93bfd8a395d59af40b72e339c2287c91860deb8e \ + --hash=sha256:5149e3fd2d686e42144a093b206aef01932a0059c2a33ddfa67f5f035bdfe13e \ + --hash=sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417 \ + --hash=sha256:631f07b3734d34aced009aaf6fedfd0eb3498a97e581c3b1e5f14a04164a456d \ + --hash=sha256:716e389b694c4bb564b4fc0c51bc84d381735e0d39d3f26ec1af2556ec6aad94 \ + --hash=sha256:8426251ad1e4ad903a4514712d2fa8fdd5382c978010d1c6f5f37ef286a713ad \ + --hash=sha256:8475230e55549ab3f207bff11ebfc91c805dc3463ef62eda3ccf593254524ce8 \ + 
--hash=sha256:8bddf15838ba768bb5f5083c1ea012d64c9a444e16192762bd858f1e126196d0 \ + --hash=sha256:8e32dced201274bf96899e6491d9ba3e9a5f6b336708656466ad0522d8528f69 \ + --hash=sha256:8f9ea80f2e65bdaa0b7627fb00cbeb2daf163caa015e59b7516395fe3bd1e066 \ + --hash=sha256:97c5dddd5932bd2a1a31c927ba5e1463a53b87ca96b5c9bdf5dfd6096e27efc3 \ + --hash=sha256:a49f6ed96f83966f576b33a44257d869756df6cf1ef4934f59dd58b25e0327e5 \ + --hash=sha256:af29a935803cc707ab2ed7791c44288a682f9c8107bc00f0eccc4f92c08d6e07 \ + --hash=sha256:b05d43735bb2f07d689f56f7b474788a13ed8adc484a85aa65c0fd931cf9ccd2 \ + --hash=sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389 \ + --hash=sha256:b99722ea48b7ea25e8e015e8341ae74624f72e5f21fc2abd45f3a93266de4c5d \ + --hash=sha256:baff393942b550823bfce952bb62270ee17504d02a1801d7fd0719534dfb9c84 \ + --hash=sha256:c0ee987efa6737242745f347835da2cc5bb9f1b42996a4d97d5c7ff7928cb6f2 \ + --hash=sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3 \ + --hash=sha256:e0cf28db0f24a38b2a0ca33a85a54852586e43cf6fd876365c86e0657cfe7d73 \ + --hash=sha256:e4f5a7c49323533f9103d4dacf4e4f07078f360743dec7f7596949149efeec06 \ + --hash=sha256:eb58ca0abd96911932f688528977858681a59d61a7ce908ffd355957f7025cfc \ + --hash=sha256:edaf02b82cd7639db00dbff629995ef185c8df4c3ffa71a5562a595765a06ce1 \ + --hash=sha256:fef8c87f8abfb884dac04e97824b61299880c43f4ce675dd2cbeadd3c9b466d2 +sniffio==1.3.1 \ + --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \ + --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc diff --git a/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-macosx_arm64.txt.json b/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-macosx_arm64.txt.json new file mode 100644 index 0000000..812ed61 --- /dev/null +++ b/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-macosx_arm64.txt.json @@ -0,0 +1,4 @@ +{ + 
"locked_at": "2024-10-15T10:23:33.147967+00:00", + "requirements_hash": "sha256:fb8a843c694d03d7ee74b457cdac2bd82b6b439de0ed308d72fe698c6c9c6cf4" +} diff --git a/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-win_amd64.in b/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-win_amd64.in new file mode 100644 index 0000000..26cc92e --- /dev/null +++ b/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-win_amd64.in @@ -0,0 +1,4 @@ +# DO NOT EDIT. Auto-generated as part of deployment bundle build. +# Relock bundle dependencies to update. +scipy +httpx diff --git a/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-win_amd64.txt b/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-win_amd64.txt new file mode 100644 index 0000000..0d12996 --- /dev/null +++ b/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-win_amd64.txt @@ -0,0 +1,111 @@ +# This file was autogenerated by uv via the following command: +# python.exe -Im layered_envs lock +anyio==4.6.2.post1 \ + --hash=sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c \ + --hash=sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d +certifi==2024.8.30 \ + --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ + --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 +h11==0.14.0 \ + --hash=sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d \ + --hash=sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761 +httpcore==1.0.6 \ + --hash=sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f \ + --hash=sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f +httpx==0.27.2 \ + --hash=sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0 \ + 
--hash=sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2 +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 +numpy==2.1.2 \ + --hash=sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8 \ + --hash=sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466 \ + --hash=sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35 \ + --hash=sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c \ + --hash=sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4 \ + --hash=sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6 \ + --hash=sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0 \ + --hash=sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7 \ + --hash=sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a \ + --hash=sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a \ + --hash=sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e \ + --hash=sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62 \ + --hash=sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2 \ + --hash=sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5 \ + --hash=sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee \ + --hash=sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe \ + --hash=sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a \ + --hash=sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e \ + --hash=sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf \ + --hash=sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c \ + 
--hash=sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3 \ + --hash=sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86 \ + --hash=sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df \ + --hash=sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98 \ + --hash=sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d \ + --hash=sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2 \ + --hash=sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146 \ + --hash=sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550 \ + --hash=sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8 \ + --hash=sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb \ + --hash=sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e \ + --hash=sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d \ + --hash=sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366 \ + --hash=sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0 \ + --hash=sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db \ + --hash=sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe \ + --hash=sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426 \ + --hash=sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952 \ + --hash=sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03 \ + --hash=sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f \ + --hash=sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7 \ + --hash=sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b \ + --hash=sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17 \ + --hash=sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5 \ + 
--hash=sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1 \ + --hash=sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142 \ + --hash=sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884 \ + --hash=sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a \ + --hash=sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9 \ + --hash=sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445 \ + --hash=sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1 \ + --hash=sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1 \ + --hash=sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648 +scipy==1.14.1 \ + --hash=sha256:0c2f95de3b04e26f5f3ad5bb05e74ba7f68b837133a4492414b3afd79dfe540e \ + --hash=sha256:1729560c906963fc8389f6aac023739ff3983e727b1a4d87696b7bf108316a79 \ + --hash=sha256:278266012eb69f4a720827bdd2dc54b2271c97d84255b2faaa8f161a158c3b37 \ + --hash=sha256:2843f2d527d9eebec9a43e6b406fb7266f3af25a751aa91d62ff416f54170bc5 \ + --hash=sha256:2da0469a4ef0ecd3693761acbdc20f2fdeafb69e6819cc081308cc978153c675 \ + --hash=sha256:2ff0a7e01e422c15739ecd64432743cf7aae2b03f3084288f399affcefe5222d \ + --hash=sha256:2ff38e22128e6c03ff73b6bb0f85f897d2362f8c052e3b8ad00532198fbdae3f \ + --hash=sha256:30ac8812c1d2aab7131a79ba62933a2a76f582d5dbbc695192453dae67ad6310 \ + --hash=sha256:3a1b111fac6baec1c1d92f27e76511c9e7218f1695d61b59e05e0fe04dc59617 \ + --hash=sha256:4079b90df244709e675cdc8b93bfd8a395d59af40b72e339c2287c91860deb8e \ + --hash=sha256:5149e3fd2d686e42144a093b206aef01932a0059c2a33ddfa67f5f035bdfe13e \ + --hash=sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417 \ + --hash=sha256:631f07b3734d34aced009aaf6fedfd0eb3498a97e581c3b1e5f14a04164a456d \ + --hash=sha256:716e389b694c4bb564b4fc0c51bc84d381735e0d39d3f26ec1af2556ec6aad94 \ + 
--hash=sha256:8426251ad1e4ad903a4514712d2fa8fdd5382c978010d1c6f5f37ef286a713ad \ + --hash=sha256:8475230e55549ab3f207bff11ebfc91c805dc3463ef62eda3ccf593254524ce8 \ + --hash=sha256:8bddf15838ba768bb5f5083c1ea012d64c9a444e16192762bd858f1e126196d0 \ + --hash=sha256:8e32dced201274bf96899e6491d9ba3e9a5f6b336708656466ad0522d8528f69 \ + --hash=sha256:8f9ea80f2e65bdaa0b7627fb00cbeb2daf163caa015e59b7516395fe3bd1e066 \ + --hash=sha256:97c5dddd5932bd2a1a31c927ba5e1463a53b87ca96b5c9bdf5dfd6096e27efc3 \ + --hash=sha256:a49f6ed96f83966f576b33a44257d869756df6cf1ef4934f59dd58b25e0327e5 \ + --hash=sha256:af29a935803cc707ab2ed7791c44288a682f9c8107bc00f0eccc4f92c08d6e07 \ + --hash=sha256:b05d43735bb2f07d689f56f7b474788a13ed8adc484a85aa65c0fd931cf9ccd2 \ + --hash=sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389 \ + --hash=sha256:b99722ea48b7ea25e8e015e8341ae74624f72e5f21fc2abd45f3a93266de4c5d \ + --hash=sha256:baff393942b550823bfce952bb62270ee17504d02a1801d7fd0719534dfb9c84 \ + --hash=sha256:c0ee987efa6737242745f347835da2cc5bb9f1b42996a4d97d5c7ff7928cb6f2 \ + --hash=sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3 \ + --hash=sha256:e0cf28db0f24a38b2a0ca33a85a54852586e43cf6fd876365c86e0657cfe7d73 \ + --hash=sha256:e4f5a7c49323533f9103d4dacf4e4f07078f360743dec7f7596949149efeec06 \ + --hash=sha256:eb58ca0abd96911932f688528977858681a59d61a7ce908ffd355957f7025cfc \ + --hash=sha256:edaf02b82cd7639db00dbff629995ef185c8df4c3ffa71a5562a595765a06ce1 \ + --hash=sha256:fef8c87f8abfb884dac04e97824b61299880c43f4ce675dd2cbeadd3c9b466d2 +sniffio==1.3.1 \ + --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \ + --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc diff --git a/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-win_amd64.txt.json b/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-win_amd64.txt.json new file mode 100644 index 
0000000..657549d --- /dev/null +++ b/tests/sample_project/requirements/app-scipy-client/requirements-app-scipy-client-win_amd64.txt.json @@ -0,0 +1,4 @@ +{ + "locked_at": "2024-10-15T10:24:36.468633+00:00", + "requirements_hash": "sha256:3bff0428616a2f1724732e78e7788e753dd5f1aa10aa5d3b87707b8dbde121de" +} diff --git a/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-linux_x86_64.in b/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-linux_x86_64.in new file mode 100644 index 0000000..a0fa455 --- /dev/null +++ b/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-linux_x86_64.in @@ -0,0 +1,3 @@ +# DO NOT EDIT. Auto-generated as part of deployment bundle build. +# Relock bundle dependencies to update. +scipy diff --git a/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-linux_x86_64.txt b/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-linux_x86_64.txt new file mode 100644 index 0000000..7434c19 --- /dev/null +++ b/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-linux_x86_64.txt @@ -0,0 +1,90 @@ +# This file was autogenerated by uv via the following command: +# python -Im layered_envs lock +numpy==2.1.2 \ + --hash=sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8 \ + --hash=sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466 \ + --hash=sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35 \ + --hash=sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c \ + --hash=sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4 \ + --hash=sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6 \ + --hash=sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0 \ + --hash=sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7 \ + 
--hash=sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a \ + --hash=sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a \ + --hash=sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e \ + --hash=sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62 \ + --hash=sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2 \ + --hash=sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5 \ + --hash=sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee \ + --hash=sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe \ + --hash=sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a \ + --hash=sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e \ + --hash=sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf \ + --hash=sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c \ + --hash=sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3 \ + --hash=sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86 \ + --hash=sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df \ + --hash=sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98 \ + --hash=sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d \ + --hash=sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2 \ + --hash=sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146 \ + --hash=sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550 \ + --hash=sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8 \ + --hash=sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb \ + --hash=sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e \ + --hash=sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d \ + 
--hash=sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366 \ + --hash=sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0 \ + --hash=sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db \ + --hash=sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe \ + --hash=sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426 \ + --hash=sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952 \ + --hash=sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03 \ + --hash=sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f \ + --hash=sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7 \ + --hash=sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b \ + --hash=sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17 \ + --hash=sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5 \ + --hash=sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1 \ + --hash=sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142 \ + --hash=sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884 \ + --hash=sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a \ + --hash=sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9 \ + --hash=sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445 \ + --hash=sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1 \ + --hash=sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1 \ + --hash=sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648 +scipy==1.14.1 \ + --hash=sha256:0c2f95de3b04e26f5f3ad5bb05e74ba7f68b837133a4492414b3afd79dfe540e \ + --hash=sha256:1729560c906963fc8389f6aac023739ff3983e727b1a4d87696b7bf108316a79 \ + 
--hash=sha256:278266012eb69f4a720827bdd2dc54b2271c97d84255b2faaa8f161a158c3b37 \ + --hash=sha256:2843f2d527d9eebec9a43e6b406fb7266f3af25a751aa91d62ff416f54170bc5 \ + --hash=sha256:2da0469a4ef0ecd3693761acbdc20f2fdeafb69e6819cc081308cc978153c675 \ + --hash=sha256:2ff0a7e01e422c15739ecd64432743cf7aae2b03f3084288f399affcefe5222d \ + --hash=sha256:2ff38e22128e6c03ff73b6bb0f85f897d2362f8c052e3b8ad00532198fbdae3f \ + --hash=sha256:30ac8812c1d2aab7131a79ba62933a2a76f582d5dbbc695192453dae67ad6310 \ + --hash=sha256:3a1b111fac6baec1c1d92f27e76511c9e7218f1695d61b59e05e0fe04dc59617 \ + --hash=sha256:4079b90df244709e675cdc8b93bfd8a395d59af40b72e339c2287c91860deb8e \ + --hash=sha256:5149e3fd2d686e42144a093b206aef01932a0059c2a33ddfa67f5f035bdfe13e \ + --hash=sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417 \ + --hash=sha256:631f07b3734d34aced009aaf6fedfd0eb3498a97e581c3b1e5f14a04164a456d \ + --hash=sha256:716e389b694c4bb564b4fc0c51bc84d381735e0d39d3f26ec1af2556ec6aad94 \ + --hash=sha256:8426251ad1e4ad903a4514712d2fa8fdd5382c978010d1c6f5f37ef286a713ad \ + --hash=sha256:8475230e55549ab3f207bff11ebfc91c805dc3463ef62eda3ccf593254524ce8 \ + --hash=sha256:8bddf15838ba768bb5f5083c1ea012d64c9a444e16192762bd858f1e126196d0 \ + --hash=sha256:8e32dced201274bf96899e6491d9ba3e9a5f6b336708656466ad0522d8528f69 \ + --hash=sha256:8f9ea80f2e65bdaa0b7627fb00cbeb2daf163caa015e59b7516395fe3bd1e066 \ + --hash=sha256:97c5dddd5932bd2a1a31c927ba5e1463a53b87ca96b5c9bdf5dfd6096e27efc3 \ + --hash=sha256:a49f6ed96f83966f576b33a44257d869756df6cf1ef4934f59dd58b25e0327e5 \ + --hash=sha256:af29a935803cc707ab2ed7791c44288a682f9c8107bc00f0eccc4f92c08d6e07 \ + --hash=sha256:b05d43735bb2f07d689f56f7b474788a13ed8adc484a85aa65c0fd931cf9ccd2 \ + --hash=sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389 \ + --hash=sha256:b99722ea48b7ea25e8e015e8341ae74624f72e5f21fc2abd45f3a93266de4c5d \ + --hash=sha256:baff393942b550823bfce952bb62270ee17504d02a1801d7fd0719534dfb9c84 \ + 
--hash=sha256:c0ee987efa6737242745f347835da2cc5bb9f1b42996a4d97d5c7ff7928cb6f2 \ + --hash=sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3 \ + --hash=sha256:e0cf28db0f24a38b2a0ca33a85a54852586e43cf6fd876365c86e0657cfe7d73 \ + --hash=sha256:e4f5a7c49323533f9103d4dacf4e4f07078f360743dec7f7596949149efeec06 \ + --hash=sha256:eb58ca0abd96911932f688528977858681a59d61a7ce908ffd355957f7025cfc \ + --hash=sha256:edaf02b82cd7639db00dbff629995ef185c8df4c3ffa71a5562a595765a06ce1 \ + --hash=sha256:fef8c87f8abfb884dac04e97824b61299880c43f4ce675dd2cbeadd3c9b466d2 diff --git a/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-linux_x86_64.txt.json b/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-linux_x86_64.txt.json new file mode 100644 index 0000000..a8cd465 --- /dev/null +++ b/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-linux_x86_64.txt.json @@ -0,0 +1,4 @@ +{ + "locked_at": "2024-10-15T10:23:43.173589+00:00", + "requirements_hash": "sha256:36b0dbfec94b7de6507f348f0823cd02fdca2ea79eeafe92d571c26ae347d150" +} diff --git a/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-macosx_arm64.in b/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-macosx_arm64.in new file mode 100644 index 0000000..a0fa455 --- /dev/null +++ b/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-macosx_arm64.in @@ -0,0 +1,3 @@ +# DO NOT EDIT. Auto-generated as part of deployment bundle build. +# Relock bundle dependencies to update. 
+scipy diff --git a/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-macosx_arm64.txt b/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-macosx_arm64.txt new file mode 100644 index 0000000..7434c19 --- /dev/null +++ b/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-macosx_arm64.txt @@ -0,0 +1,90 @@ +# This file was autogenerated by uv via the following command: +# python -Im layered_envs lock +numpy==2.1.2 \ + --hash=sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8 \ + --hash=sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466 \ + --hash=sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35 \ + --hash=sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c \ + --hash=sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4 \ + --hash=sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6 \ + --hash=sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0 \ + --hash=sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7 \ + --hash=sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a \ + --hash=sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a \ + --hash=sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e \ + --hash=sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62 \ + --hash=sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2 \ + --hash=sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5 \ + --hash=sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee \ + --hash=sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe \ + --hash=sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a \ + 
--hash=sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e \ + --hash=sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf \ + --hash=sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c \ + --hash=sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3 \ + --hash=sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86 \ + --hash=sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df \ + --hash=sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98 \ + --hash=sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d \ + --hash=sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2 \ + --hash=sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146 \ + --hash=sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550 \ + --hash=sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8 \ + --hash=sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb \ + --hash=sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e \ + --hash=sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d \ + --hash=sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366 \ + --hash=sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0 \ + --hash=sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db \ + --hash=sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe \ + --hash=sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426 \ + --hash=sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952 \ + --hash=sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03 \ + --hash=sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f \ + --hash=sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7 \ + 
--hash=sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b \ + --hash=sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17 \ + --hash=sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5 \ + --hash=sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1 \ + --hash=sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142 \ + --hash=sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884 \ + --hash=sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a \ + --hash=sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9 \ + --hash=sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445 \ + --hash=sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1 \ + --hash=sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1 \ + --hash=sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648 +scipy==1.14.1 \ + --hash=sha256:0c2f95de3b04e26f5f3ad5bb05e74ba7f68b837133a4492414b3afd79dfe540e \ + --hash=sha256:1729560c906963fc8389f6aac023739ff3983e727b1a4d87696b7bf108316a79 \ + --hash=sha256:278266012eb69f4a720827bdd2dc54b2271c97d84255b2faaa8f161a158c3b37 \ + --hash=sha256:2843f2d527d9eebec9a43e6b406fb7266f3af25a751aa91d62ff416f54170bc5 \ + --hash=sha256:2da0469a4ef0ecd3693761acbdc20f2fdeafb69e6819cc081308cc978153c675 \ + --hash=sha256:2ff0a7e01e422c15739ecd64432743cf7aae2b03f3084288f399affcefe5222d \ + --hash=sha256:2ff38e22128e6c03ff73b6bb0f85f897d2362f8c052e3b8ad00532198fbdae3f \ + --hash=sha256:30ac8812c1d2aab7131a79ba62933a2a76f582d5dbbc695192453dae67ad6310 \ + --hash=sha256:3a1b111fac6baec1c1d92f27e76511c9e7218f1695d61b59e05e0fe04dc59617 \ + --hash=sha256:4079b90df244709e675cdc8b93bfd8a395d59af40b72e339c2287c91860deb8e \ + --hash=sha256:5149e3fd2d686e42144a093b206aef01932a0059c2a33ddfa67f5f035bdfe13e \ + 
--hash=sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417 \ + --hash=sha256:631f07b3734d34aced009aaf6fedfd0eb3498a97e581c3b1e5f14a04164a456d \ + --hash=sha256:716e389b694c4bb564b4fc0c51bc84d381735e0d39d3f26ec1af2556ec6aad94 \ + --hash=sha256:8426251ad1e4ad903a4514712d2fa8fdd5382c978010d1c6f5f37ef286a713ad \ + --hash=sha256:8475230e55549ab3f207bff11ebfc91c805dc3463ef62eda3ccf593254524ce8 \ + --hash=sha256:8bddf15838ba768bb5f5083c1ea012d64c9a444e16192762bd858f1e126196d0 \ + --hash=sha256:8e32dced201274bf96899e6491d9ba3e9a5f6b336708656466ad0522d8528f69 \ + --hash=sha256:8f9ea80f2e65bdaa0b7627fb00cbeb2daf163caa015e59b7516395fe3bd1e066 \ + --hash=sha256:97c5dddd5932bd2a1a31c927ba5e1463a53b87ca96b5c9bdf5dfd6096e27efc3 \ + --hash=sha256:a49f6ed96f83966f576b33a44257d869756df6cf1ef4934f59dd58b25e0327e5 \ + --hash=sha256:af29a935803cc707ab2ed7791c44288a682f9c8107bc00f0eccc4f92c08d6e07 \ + --hash=sha256:b05d43735bb2f07d689f56f7b474788a13ed8adc484a85aa65c0fd931cf9ccd2 \ + --hash=sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389 \ + --hash=sha256:b99722ea48b7ea25e8e015e8341ae74624f72e5f21fc2abd45f3a93266de4c5d \ + --hash=sha256:baff393942b550823bfce952bb62270ee17504d02a1801d7fd0719534dfb9c84 \ + --hash=sha256:c0ee987efa6737242745f347835da2cc5bb9f1b42996a4d97d5c7ff7928cb6f2 \ + --hash=sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3 \ + --hash=sha256:e0cf28db0f24a38b2a0ca33a85a54852586e43cf6fd876365c86e0657cfe7d73 \ + --hash=sha256:e4f5a7c49323533f9103d4dacf4e4f07078f360743dec7f7596949149efeec06 \ + --hash=sha256:eb58ca0abd96911932f688528977858681a59d61a7ce908ffd355957f7025cfc \ + --hash=sha256:edaf02b82cd7639db00dbff629995ef185c8df4c3ffa71a5562a595765a06ce1 \ + --hash=sha256:fef8c87f8abfb884dac04e97824b61299880c43f4ce675dd2cbeadd3c9b466d2 diff --git a/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-macosx_arm64.txt.json 
b/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-macosx_arm64.txt.json new file mode 100644 index 0000000..4d5b5ab --- /dev/null +++ b/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-macosx_arm64.txt.json @@ -0,0 +1,4 @@ +{ + "locked_at": "2024-10-15T10:23:33.121208+00:00", + "requirements_hash": "sha256:36b0dbfec94b7de6507f348f0823cd02fdca2ea79eeafe92d571c26ae347d150" +} diff --git a/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-win_amd64.in b/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-win_amd64.in new file mode 100644 index 0000000..a0fa455 --- /dev/null +++ b/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-win_amd64.in @@ -0,0 +1,3 @@ +# DO NOT EDIT. Auto-generated as part of deployment bundle build. +# Relock bundle dependencies to update. +scipy diff --git a/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-win_amd64.txt b/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-win_amd64.txt new file mode 100644 index 0000000..3c1be97 --- /dev/null +++ b/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-win_amd64.txt @@ -0,0 +1,90 @@ +# This file was autogenerated by uv via the following command: +# python.exe -Im layered_envs lock +numpy==2.1.2 \ + --hash=sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8 \ + --hash=sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466 \ + --hash=sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35 \ + --hash=sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c \ + --hash=sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4 \ + --hash=sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6 \ + 
--hash=sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0 \ + --hash=sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7 \ + --hash=sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a \ + --hash=sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a \ + --hash=sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e \ + --hash=sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62 \ + --hash=sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2 \ + --hash=sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5 \ + --hash=sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee \ + --hash=sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe \ + --hash=sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a \ + --hash=sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e \ + --hash=sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf \ + --hash=sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c \ + --hash=sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3 \ + --hash=sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86 \ + --hash=sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df \ + --hash=sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98 \ + --hash=sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d \ + --hash=sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2 \ + --hash=sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146 \ + --hash=sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550 \ + --hash=sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8 \ + --hash=sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb \ + 
--hash=sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e \ + --hash=sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d \ + --hash=sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366 \ + --hash=sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0 \ + --hash=sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db \ + --hash=sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe \ + --hash=sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426 \ + --hash=sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952 \ + --hash=sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03 \ + --hash=sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f \ + --hash=sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7 \ + --hash=sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b \ + --hash=sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17 \ + --hash=sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5 \ + --hash=sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1 \ + --hash=sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142 \ + --hash=sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884 \ + --hash=sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a \ + --hash=sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9 \ + --hash=sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445 \ + --hash=sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1 \ + --hash=sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1 \ + --hash=sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648 +scipy==1.14.1 \ + 
--hash=sha256:0c2f95de3b04e26f5f3ad5bb05e74ba7f68b837133a4492414b3afd79dfe540e \ + --hash=sha256:1729560c906963fc8389f6aac023739ff3983e727b1a4d87696b7bf108316a79 \ + --hash=sha256:278266012eb69f4a720827bdd2dc54b2271c97d84255b2faaa8f161a158c3b37 \ + --hash=sha256:2843f2d527d9eebec9a43e6b406fb7266f3af25a751aa91d62ff416f54170bc5 \ + --hash=sha256:2da0469a4ef0ecd3693761acbdc20f2fdeafb69e6819cc081308cc978153c675 \ + --hash=sha256:2ff0a7e01e422c15739ecd64432743cf7aae2b03f3084288f399affcefe5222d \ + --hash=sha256:2ff38e22128e6c03ff73b6bb0f85f897d2362f8c052e3b8ad00532198fbdae3f \ + --hash=sha256:30ac8812c1d2aab7131a79ba62933a2a76f582d5dbbc695192453dae67ad6310 \ + --hash=sha256:3a1b111fac6baec1c1d92f27e76511c9e7218f1695d61b59e05e0fe04dc59617 \ + --hash=sha256:4079b90df244709e675cdc8b93bfd8a395d59af40b72e339c2287c91860deb8e \ + --hash=sha256:5149e3fd2d686e42144a093b206aef01932a0059c2a33ddfa67f5f035bdfe13e \ + --hash=sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417 \ + --hash=sha256:631f07b3734d34aced009aaf6fedfd0eb3498a97e581c3b1e5f14a04164a456d \ + --hash=sha256:716e389b694c4bb564b4fc0c51bc84d381735e0d39d3f26ec1af2556ec6aad94 \ + --hash=sha256:8426251ad1e4ad903a4514712d2fa8fdd5382c978010d1c6f5f37ef286a713ad \ + --hash=sha256:8475230e55549ab3f207bff11ebfc91c805dc3463ef62eda3ccf593254524ce8 \ + --hash=sha256:8bddf15838ba768bb5f5083c1ea012d64c9a444e16192762bd858f1e126196d0 \ + --hash=sha256:8e32dced201274bf96899e6491d9ba3e9a5f6b336708656466ad0522d8528f69 \ + --hash=sha256:8f9ea80f2e65bdaa0b7627fb00cbeb2daf163caa015e59b7516395fe3bd1e066 \ + --hash=sha256:97c5dddd5932bd2a1a31c927ba5e1463a53b87ca96b5c9bdf5dfd6096e27efc3 \ + --hash=sha256:a49f6ed96f83966f576b33a44257d869756df6cf1ef4934f59dd58b25e0327e5 \ + --hash=sha256:af29a935803cc707ab2ed7791c44288a682f9c8107bc00f0eccc4f92c08d6e07 \ + --hash=sha256:b05d43735bb2f07d689f56f7b474788a13ed8adc484a85aa65c0fd931cf9ccd2 \ + --hash=sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389 \ + 
--hash=sha256:b99722ea48b7ea25e8e015e8341ae74624f72e5f21fc2abd45f3a93266de4c5d \ + --hash=sha256:baff393942b550823bfce952bb62270ee17504d02a1801d7fd0719534dfb9c84 \ + --hash=sha256:c0ee987efa6737242745f347835da2cc5bb9f1b42996a4d97d5c7ff7928cb6f2 \ + --hash=sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3 \ + --hash=sha256:e0cf28db0f24a38b2a0ca33a85a54852586e43cf6fd876365c86e0657cfe7d73 \ + --hash=sha256:e4f5a7c49323533f9103d4dacf4e4f07078f360743dec7f7596949149efeec06 \ + --hash=sha256:eb58ca0abd96911932f688528977858681a59d61a7ce908ffd355957f7025cfc \ + --hash=sha256:edaf02b82cd7639db00dbff629995ef185c8df4c3ffa71a5562a595765a06ce1 \ + --hash=sha256:fef8c87f8abfb884dac04e97824b61299880c43f4ce675dd2cbeadd3c9b466d2 diff --git a/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-win_amd64.txt.json b/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-win_amd64.txt.json new file mode 100644 index 0000000..0d36911 --- /dev/null +++ b/tests/sample_project/requirements/app-scipy-import/requirements-app-scipy-import-win_amd64.txt.json @@ -0,0 +1,4 @@ +{ + "locked_at": "2024-10-15T10:24:36.386938+00:00", + "requirements_hash": "sha256:9aba38b5efe287f35d58825dce6b1c47ed556b930056e6edc00ca9e1a165796b" +} diff --git a/tests/sample_project/requirements/app-sklearn-import/requirements-app-sklearn-import-linux_x86_64.in b/tests/sample_project/requirements/app-sklearn-import/requirements-app-sklearn-import-linux_x86_64.in new file mode 100644 index 0000000..1ee073a --- /dev/null +++ b/tests/sample_project/requirements/app-sklearn-import/requirements-app-sklearn-import-linux_x86_64.in @@ -0,0 +1,3 @@ +# DO NOT EDIT. Auto-generated as part of deployment bundle build. +# Relock bundle dependencies to update. 
+scikit-learn diff --git a/tests/sample_project/requirements/app-sklearn-import/requirements-app-sklearn-import-linux_x86_64.txt b/tests/sample_project/requirements/app-sklearn-import/requirements-app-sklearn-import-linux_x86_64.txt new file mode 100644 index 0000000..7cbe722 --- /dev/null +++ b/tests/sample_project/requirements/app-sklearn-import/requirements-app-sklearn-import-linux_x86_64.txt @@ -0,0 +1,123 @@ +# This file was autogenerated by uv via the following command: +# python -Im layered_envs lock +joblib==1.4.2 \ + --hash=sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6 \ + --hash=sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e +numpy==2.1.2 \ + --hash=sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8 \ + --hash=sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466 \ + --hash=sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35 \ + --hash=sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c \ + --hash=sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4 \ + --hash=sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6 \ + --hash=sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0 \ + --hash=sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7 \ + --hash=sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a \ + --hash=sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a \ + --hash=sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e \ + --hash=sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62 \ + --hash=sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2 \ + --hash=sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5 \ + --hash=sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee \ + 
--hash=sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe \ + --hash=sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a \ + --hash=sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e \ + --hash=sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf \ + --hash=sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c \ + --hash=sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3 \ + --hash=sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86 \ + --hash=sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df \ + --hash=sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98 \ + --hash=sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d \ + --hash=sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2 \ + --hash=sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146 \ + --hash=sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550 \ + --hash=sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8 \ + --hash=sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb \ + --hash=sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e \ + --hash=sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d \ + --hash=sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366 \ + --hash=sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0 \ + --hash=sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db \ + --hash=sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe \ + --hash=sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426 \ + --hash=sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952 \ + --hash=sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03 \ + 
--hash=sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f \ + --hash=sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7 \ + --hash=sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b \ + --hash=sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17 \ + --hash=sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5 \ + --hash=sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1 \ + --hash=sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142 \ + --hash=sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884 \ + --hash=sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a \ + --hash=sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9 \ + --hash=sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445 \ + --hash=sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1 \ + --hash=sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1 \ + --hash=sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648 +scikit-learn==1.5.2 \ + --hash=sha256:03b6158efa3faaf1feea3faa884c840ebd61b6484167c711548fce208ea09445 \ + --hash=sha256:178ddd0a5cb0044464fc1bfc4cca5b1833bfc7bb022d70b05db8530da4bb3dd3 \ + --hash=sha256:1ff45e26928d3b4eb767a8f14a9a6efbf1cbff7c05d1fb0f95f211a89fd4f5de \ + --hash=sha256:299406827fb9a4f862626d0fe6c122f5f87f8910b86fe5daa4c32dcd742139b6 \ + --hash=sha256:2d4cad1119c77930b235579ad0dc25e65c917e756fe80cab96aa3b9428bd3fb0 \ + --hash=sha256:394397841449853c2290a32050382edaec3da89e35b3e03d6cc966aebc6a8ae6 \ + --hash=sha256:3a686885a4b3818d9e62904d91b57fa757fc2bed3e465c8b177be652f4dd37c8 \ + --hash=sha256:3b923d119d65b7bd555c73be5423bf06c0105678ce7e1f558cb4b40b0a5502b1 \ + --hash=sha256:3bed4909ba187aca80580fe2ef370d9180dcf18e621a27c4cf2ef10d279a7efe \ + 
--hash=sha256:52788f48b5d8bca5c0736c175fa6bdaab2ef00a8f536cda698db61bd89c551c1 \ + --hash=sha256:57cc1786cfd6bd118220a92ede80270132aa353647684efa385a74244a41e3b1 \ + --hash=sha256:643964678f4b5fbdc95cbf8aec638acc7aa70f5f79ee2cdad1eec3df4ba6ead8 \ + --hash=sha256:6c16d84a0d45e4894832b3c4d0bf73050939e21b99b01b6fd59cbb0cf39163b6 \ + --hash=sha256:757c7d514ddb00ae249832fe87100d9c73c6ea91423802872d9e74970a0e40b9 \ + --hash=sha256:8c412ccc2ad9bf3755915e3908e677b367ebc8d010acbb3f182814524f2e5540 \ + --hash=sha256:b0768ad641981f5d3a198430a1d31c3e044ed2e8a6f22166b4d546a5116d7908 \ + --hash=sha256:b4237ed7b3fdd0a4882792e68ef2545d5baa50aca3bb45aa7df468138ad8f94d \ + --hash=sha256:b7b0f9a0b1040830d38c39b91b3a44e1b643f4b36e36567b80b7c6bd2202a27f \ + --hash=sha256:c15b1ca23d7c5f33cc2cb0a0d6aaacf893792271cddff0edbd6a40e8319bc113 \ + --hash=sha256:ca64b3089a6d9b9363cd3546f8978229dcbb737aceb2c12144ee3f70f95684b7 \ + --hash=sha256:e9a702e2de732bbb20d3bad29ebd77fc05a6b427dc49964300340e4c9328b3f5 \ + --hash=sha256:f60021ec1574e56632be2a36b946f8143bf4e5e6af4a06d85281adc22938e0dd \ + --hash=sha256:f7284ade780084d94505632241bf78c44ab3b6f1e8ccab3d2af58e0e950f9c12 \ + --hash=sha256:f763897fe92d0e903aa4847b0aec0e68cadfff77e8a0687cabd946c89d17e675 \ + --hash=sha256:f8b0ccd4a902836493e026c03256e8b206656f91fbcc4fde28c57a5b752561f1 \ + --hash=sha256:f932a02c3f4956dfb981391ab24bda1dbd90fe3d628e4b42caef3e041c67707a +scipy==1.14.1 \ + --hash=sha256:0c2f95de3b04e26f5f3ad5bb05e74ba7f68b837133a4492414b3afd79dfe540e \ + --hash=sha256:1729560c906963fc8389f6aac023739ff3983e727b1a4d87696b7bf108316a79 \ + --hash=sha256:278266012eb69f4a720827bdd2dc54b2271c97d84255b2faaa8f161a158c3b37 \ + --hash=sha256:2843f2d527d9eebec9a43e6b406fb7266f3af25a751aa91d62ff416f54170bc5 \ + --hash=sha256:2da0469a4ef0ecd3693761acbdc20f2fdeafb69e6819cc081308cc978153c675 \ + --hash=sha256:2ff0a7e01e422c15739ecd64432743cf7aae2b03f3084288f399affcefe5222d \ + 
--hash=sha256:2ff38e22128e6c03ff73b6bb0f85f897d2362f8c052e3b8ad00532198fbdae3f \ + --hash=sha256:30ac8812c1d2aab7131a79ba62933a2a76f582d5dbbc695192453dae67ad6310 \ + --hash=sha256:3a1b111fac6baec1c1d92f27e76511c9e7218f1695d61b59e05e0fe04dc59617 \ + --hash=sha256:4079b90df244709e675cdc8b93bfd8a395d59af40b72e339c2287c91860deb8e \ + --hash=sha256:5149e3fd2d686e42144a093b206aef01932a0059c2a33ddfa67f5f035bdfe13e \ + --hash=sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417 \ + --hash=sha256:631f07b3734d34aced009aaf6fedfd0eb3498a97e581c3b1e5f14a04164a456d \ + --hash=sha256:716e389b694c4bb564b4fc0c51bc84d381735e0d39d3f26ec1af2556ec6aad94 \ + --hash=sha256:8426251ad1e4ad903a4514712d2fa8fdd5382c978010d1c6f5f37ef286a713ad \ + --hash=sha256:8475230e55549ab3f207bff11ebfc91c805dc3463ef62eda3ccf593254524ce8 \ + --hash=sha256:8bddf15838ba768bb5f5083c1ea012d64c9a444e16192762bd858f1e126196d0 \ + --hash=sha256:8e32dced201274bf96899e6491d9ba3e9a5f6b336708656466ad0522d8528f69 \ + --hash=sha256:8f9ea80f2e65bdaa0b7627fb00cbeb2daf163caa015e59b7516395fe3bd1e066 \ + --hash=sha256:97c5dddd5932bd2a1a31c927ba5e1463a53b87ca96b5c9bdf5dfd6096e27efc3 \ + --hash=sha256:a49f6ed96f83966f576b33a44257d869756df6cf1ef4934f59dd58b25e0327e5 \ + --hash=sha256:af29a935803cc707ab2ed7791c44288a682f9c8107bc00f0eccc4f92c08d6e07 \ + --hash=sha256:b05d43735bb2f07d689f56f7b474788a13ed8adc484a85aa65c0fd931cf9ccd2 \ + --hash=sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389 \ + --hash=sha256:b99722ea48b7ea25e8e015e8341ae74624f72e5f21fc2abd45f3a93266de4c5d \ + --hash=sha256:baff393942b550823bfce952bb62270ee17504d02a1801d7fd0719534dfb9c84 \ + --hash=sha256:c0ee987efa6737242745f347835da2cc5bb9f1b42996a4d97d5c7ff7928cb6f2 \ + --hash=sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3 \ + --hash=sha256:e0cf28db0f24a38b2a0ca33a85a54852586e43cf6fd876365c86e0657cfe7d73 \ + --hash=sha256:e4f5a7c49323533f9103d4dacf4e4f07078f360743dec7f7596949149efeec06 \ + 
--hash=sha256:eb58ca0abd96911932f688528977858681a59d61a7ce908ffd355957f7025cfc \ + --hash=sha256:edaf02b82cd7639db00dbff629995ef185c8df4c3ffa71a5562a595765a06ce1 \ + --hash=sha256:fef8c87f8abfb884dac04e97824b61299880c43f4ce675dd2cbeadd3c9b466d2 +threadpoolctl==3.5.0 \ + --hash=sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107 \ + --hash=sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467 diff --git a/tests/sample_project/requirements/app-sklearn-import/requirements-app-sklearn-import-linux_x86_64.txt.json b/tests/sample_project/requirements/app-sklearn-import/requirements-app-sklearn-import-linux_x86_64.txt.json new file mode 100644 index 0000000..e11b5ff --- /dev/null +++ b/tests/sample_project/requirements/app-sklearn-import/requirements-app-sklearn-import-linux_x86_64.txt.json @@ -0,0 +1,4 @@ +{ + "locked_at": "2024-10-15T10:23:43.237589+00:00", + "requirements_hash": "sha256:600b3bc658d940b756d5917e6fb7dec3431c5ce4ebc878f5d031e74f3ebdb7a9" +} diff --git a/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-linux_x86_64.in b/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-linux_x86_64.in new file mode 100644 index 0000000..f0f818e --- /dev/null +++ b/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-linux_x86_64.in @@ -0,0 +1,4 @@ +# DO NOT EDIT. Auto-generated as part of deployment bundle build. +# Relock bundle dependencies to update. 
+numpy +dlltracer; sys_platform == 'win32' diff --git a/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-linux_x86_64.txt b/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-linux_x86_64.txt new file mode 100644 index 0000000..aaa2ba1 --- /dev/null +++ b/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-linux_x86_64.txt @@ -0,0 +1,56 @@ +# This file was autogenerated by uv via the following command: +# python -Im layered_envs lock +numpy==2.1.2 \ + --hash=sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8 \ + --hash=sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466 \ + --hash=sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35 \ + --hash=sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c \ + --hash=sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4 \ + --hash=sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6 \ + --hash=sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0 \ + --hash=sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7 \ + --hash=sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a \ + --hash=sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a \ + --hash=sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e \ + --hash=sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62 \ + --hash=sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2 \ + --hash=sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5 \ + --hash=sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee \ + --hash=sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe \ + --hash=sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a \ + 
--hash=sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e \ + --hash=sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf \ + --hash=sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c \ + --hash=sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3 \ + --hash=sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86 \ + --hash=sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df \ + --hash=sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98 \ + --hash=sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d \ + --hash=sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2 \ + --hash=sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146 \ + --hash=sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550 \ + --hash=sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8 \ + --hash=sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb \ + --hash=sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e \ + --hash=sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d \ + --hash=sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366 \ + --hash=sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0 \ + --hash=sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db \ + --hash=sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe \ + --hash=sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426 \ + --hash=sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952 \ + --hash=sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03 \ + --hash=sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f \ + --hash=sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7 \ + 
--hash=sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b \ + --hash=sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17 \ + --hash=sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5 \ + --hash=sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1 \ + --hash=sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142 \ + --hash=sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884 \ + --hash=sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a \ + --hash=sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9 \ + --hash=sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445 \ + --hash=sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1 \ + --hash=sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1 \ + --hash=sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648 diff --git a/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-linux_x86_64.txt.json b/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-linux_x86_64.txt.json new file mode 100644 index 0000000..21bdd18 --- /dev/null +++ b/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-linux_x86_64.txt.json @@ -0,0 +1,4 @@ +{ + "locked_at": "2024-10-15T10:23:42.581585+00:00", + "requirements_hash": "sha256:8a2182bfe08ff2478e3fe614e4769fce8fa5fa2ea010a2976c8f74d5396fa706" +} diff --git a/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-macosx_arm64.in b/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-macosx_arm64.in new file mode 100644 index 0000000..f0f818e --- /dev/null +++ b/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-macosx_arm64.in @@ -0,0 +1,4 @@ +# DO NOT EDIT. Auto-generated as part of deployment bundle build. +# Relock bundle dependencies to update. 
+numpy +dlltracer; sys_platform == 'win32' diff --git a/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-macosx_arm64.txt b/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-macosx_arm64.txt new file mode 100644 index 0000000..aaa2ba1 --- /dev/null +++ b/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-macosx_arm64.txt @@ -0,0 +1,56 @@ +# This file was autogenerated by uv via the following command: +# python -Im layered_envs lock +numpy==2.1.2 \ + --hash=sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8 \ + --hash=sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466 \ + --hash=sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35 \ + --hash=sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c \ + --hash=sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4 \ + --hash=sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6 \ + --hash=sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0 \ + --hash=sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7 \ + --hash=sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a \ + --hash=sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a \ + --hash=sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e \ + --hash=sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62 \ + --hash=sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2 \ + --hash=sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5 \ + --hash=sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee \ + --hash=sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe \ + --hash=sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a \ + 
--hash=sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e \ + --hash=sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf \ + --hash=sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c \ + --hash=sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3 \ + --hash=sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86 \ + --hash=sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df \ + --hash=sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98 \ + --hash=sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d \ + --hash=sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2 \ + --hash=sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146 \ + --hash=sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550 \ + --hash=sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8 \ + --hash=sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb \ + --hash=sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e \ + --hash=sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d \ + --hash=sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366 \ + --hash=sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0 \ + --hash=sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db \ + --hash=sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe \ + --hash=sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426 \ + --hash=sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952 \ + --hash=sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03 \ + --hash=sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f \ + --hash=sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7 \ + 
--hash=sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b \ + --hash=sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17 \ + --hash=sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5 \ + --hash=sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1 \ + --hash=sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142 \ + --hash=sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884 \ + --hash=sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a \ + --hash=sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9 \ + --hash=sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445 \ + --hash=sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1 \ + --hash=sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1 \ + --hash=sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648 diff --git a/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-macosx_arm64.txt.json b/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-macosx_arm64.txt.json new file mode 100644 index 0000000..1168b6a --- /dev/null +++ b/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-macosx_arm64.txt.json @@ -0,0 +1,4 @@ +{ + "locked_at": "2024-10-15T10:23:32.798085+00:00", + "requirements_hash": "sha256:8a2182bfe08ff2478e3fe614e4769fce8fa5fa2ea010a2976c8f74d5396fa706" +} diff --git a/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-win_amd64.in b/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-win_amd64.in new file mode 100644 index 0000000..f0f818e --- /dev/null +++ b/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-win_amd64.in @@ -0,0 +1,4 @@ +# DO NOT EDIT. Auto-generated as part of deployment bundle build. +# Relock bundle dependencies to update. 
+numpy +dlltracer; sys_platform == 'win32' diff --git a/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-win_amd64.txt b/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-win_amd64.txt new file mode 100644 index 0000000..5d4b135 --- /dev/null +++ b/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-win_amd64.txt @@ -0,0 +1,63 @@ +# This file was autogenerated by uv via the following command: +# python.exe -Im layered_envs lock +dlltracer==1.0.2 \ + --hash=sha256:0a6468533a58553d7a8ff1874c2d9c4de473a83e9c12b550cc60c7a6e65b6302 \ + --hash=sha256:36d705fdd8bcc29b21045c3eceec75831099b95d14c547166a16635297b6e0e4 \ + --hash=sha256:7749c30d6372bfb3e5c1d624b0d5d05580b7fc3de92dfaa4f41ef1e5180d219c \ + --hash=sha256:92f382b11f76fd75f87e2d3dcc8f22c27f7f765c56251fc1c970bb403bb2334c \ + --hash=sha256:cef14f1390b9b85c25277a4798ce6653650005908267296f3b4db153d5842e66 \ + --hash=sha256:d75f0923f44f115a94e396d4ddde01b46733296cd18a5e14151f937cf96b7139 +numpy==2.1.2 \ + --hash=sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8 \ + --hash=sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466 \ + --hash=sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35 \ + --hash=sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c \ + --hash=sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4 \ + --hash=sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6 \ + --hash=sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0 \ + --hash=sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7 \ + --hash=sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a \ + --hash=sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a \ + --hash=sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e \ + 
--hash=sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62 \ + --hash=sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2 \ + --hash=sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5 \ + --hash=sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee \ + --hash=sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe \ + --hash=sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a \ + --hash=sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e \ + --hash=sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf \ + --hash=sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c \ + --hash=sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3 \ + --hash=sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86 \ + --hash=sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df \ + --hash=sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98 \ + --hash=sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d \ + --hash=sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2 \ + --hash=sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146 \ + --hash=sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550 \ + --hash=sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8 \ + --hash=sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb \ + --hash=sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e \ + --hash=sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d \ + --hash=sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366 \ + --hash=sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0 \ + --hash=sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db \ + 
--hash=sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe \ + --hash=sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426 \ + --hash=sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952 \ + --hash=sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03 \ + --hash=sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f \ + --hash=sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7 \ + --hash=sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b \ + --hash=sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17 \ + --hash=sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5 \ + --hash=sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1 \ + --hash=sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142 \ + --hash=sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884 \ + --hash=sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a \ + --hash=sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9 \ + --hash=sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445 \ + --hash=sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1 \ + --hash=sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1 \ + --hash=sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648 diff --git a/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-win_amd64.txt.json b/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-win_amd64.txt.json new file mode 100644 index 0000000..1242109 --- /dev/null +++ b/tests/sample_project/requirements/cpython@3.11/requirements-cpython@3.11-win_amd64.txt.json @@ -0,0 +1,4 @@ +{ + "locked_at": "2024-10-15T10:24:35.734694+00:00", + "requirements_hash": 
"sha256:5df8a095e5cc8c181fd46278ab782f5ae09ab0cba19b74f1aaa25f9fe5a9f100" +} diff --git a/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-linux_x86_64.in b/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-linux_x86_64.in new file mode 100644 index 0000000..5781ffe --- /dev/null +++ b/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-linux_x86_64.in @@ -0,0 +1,3 @@ +# DO NOT EDIT. Auto-generated as part of deployment bundle build. +# Relock bundle dependencies to update. +numpy diff --git a/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-linux_x86_64.txt b/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-linux_x86_64.txt new file mode 100644 index 0000000..aaa2ba1 --- /dev/null +++ b/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-linux_x86_64.txt @@ -0,0 +1,56 @@ +# This file was autogenerated by uv via the following command: +# python -Im layered_envs lock +numpy==2.1.2 \ + --hash=sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8 \ + --hash=sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466 \ + --hash=sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35 \ + --hash=sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c \ + --hash=sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4 \ + --hash=sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6 \ + --hash=sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0 \ + --hash=sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7 \ + --hash=sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a \ + --hash=sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a \ + --hash=sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e \ + 
--hash=sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62 \ + --hash=sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2 \ + --hash=sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5 \ + --hash=sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee \ + --hash=sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe \ + --hash=sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a \ + --hash=sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e \ + --hash=sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf \ + --hash=sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c \ + --hash=sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3 \ + --hash=sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86 \ + --hash=sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df \ + --hash=sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98 \ + --hash=sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d \ + --hash=sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2 \ + --hash=sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146 \ + --hash=sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550 \ + --hash=sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8 \ + --hash=sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb \ + --hash=sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e \ + --hash=sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d \ + --hash=sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366 \ + --hash=sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0 \ + --hash=sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db \ + 
--hash=sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe \ + --hash=sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426 \ + --hash=sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952 \ + --hash=sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03 \ + --hash=sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f \ + --hash=sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7 \ + --hash=sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b \ + --hash=sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17 \ + --hash=sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5 \ + --hash=sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1 \ + --hash=sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142 \ + --hash=sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884 \ + --hash=sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a \ + --hash=sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9 \ + --hash=sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445 \ + --hash=sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1 \ + --hash=sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1 \ + --hash=sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648 diff --git a/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-linux_x86_64.txt.json b/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-linux_x86_64.txt.json new file mode 100644 index 0000000..150c914 --- /dev/null +++ b/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-linux_x86_64.txt.json @@ -0,0 +1,4 @@ +{ + "locked_at": "2024-10-15T10:23:42.669585+00:00", + "requirements_hash": 
"sha256:8a2182bfe08ff2478e3fe614e4769fce8fa5fa2ea010a2976c8f74d5396fa706" +} diff --git a/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-macosx_arm64.in b/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-macosx_arm64.in new file mode 100644 index 0000000..5781ffe --- /dev/null +++ b/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-macosx_arm64.in @@ -0,0 +1,3 @@ +# DO NOT EDIT. Auto-generated as part of deployment bundle build. +# Relock bundle dependencies to update. +numpy diff --git a/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-macosx_arm64.txt b/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-macosx_arm64.txt new file mode 100644 index 0000000..aaa2ba1 --- /dev/null +++ b/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-macosx_arm64.txt @@ -0,0 +1,56 @@ +# This file was autogenerated by uv via the following command: +# python -Im layered_envs lock +numpy==2.1.2 \ + --hash=sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8 \ + --hash=sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466 \ + --hash=sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35 \ + --hash=sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c \ + --hash=sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4 \ + --hash=sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6 \ + --hash=sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0 \ + --hash=sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7 \ + --hash=sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a \ + --hash=sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a \ + --hash=sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e \ + 
--hash=sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62 \ + --hash=sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2 \ + --hash=sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5 \ + --hash=sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee \ + --hash=sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe \ + --hash=sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a \ + --hash=sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e \ + --hash=sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf \ + --hash=sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c \ + --hash=sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3 \ + --hash=sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86 \ + --hash=sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df \ + --hash=sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98 \ + --hash=sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d \ + --hash=sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2 \ + --hash=sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146 \ + --hash=sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550 \ + --hash=sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8 \ + --hash=sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb \ + --hash=sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e \ + --hash=sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d \ + --hash=sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366 \ + --hash=sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0 \ + --hash=sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db \ + 
--hash=sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe \ + --hash=sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426 \ + --hash=sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952 \ + --hash=sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03 \ + --hash=sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f \ + --hash=sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7 \ + --hash=sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b \ + --hash=sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17 \ + --hash=sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5 \ + --hash=sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1 \ + --hash=sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142 \ + --hash=sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884 \ + --hash=sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a \ + --hash=sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9 \ + --hash=sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445 \ + --hash=sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1 \ + --hash=sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1 \ + --hash=sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648 diff --git a/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-macosx_arm64.txt.json b/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-macosx_arm64.txt.json new file mode 100644 index 0000000..98e85f6 --- /dev/null +++ b/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-macosx_arm64.txt.json @@ -0,0 +1,4 @@ +{ + "locked_at": "2024-10-15T10:23:32.881474+00:00", + "requirements_hash": 
"sha256:8a2182bfe08ff2478e3fe614e4769fce8fa5fa2ea010a2976c8f74d5396fa706" +} diff --git a/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-win_amd64.in b/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-win_amd64.in new file mode 100644 index 0000000..5781ffe --- /dev/null +++ b/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-win_amd64.in @@ -0,0 +1,3 @@ +# DO NOT EDIT. Auto-generated as part of deployment bundle build. +# Relock bundle dependencies to update. +numpy diff --git a/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-win_amd64.txt b/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-win_amd64.txt new file mode 100644 index 0000000..674a718 --- /dev/null +++ b/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-win_amd64.txt @@ -0,0 +1,56 @@ +# This file was autogenerated by uv via the following command: +# python.exe -Im layered_envs lock +numpy==2.1.2 \ + --hash=sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8 \ + --hash=sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466 \ + --hash=sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35 \ + --hash=sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c \ + --hash=sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4 \ + --hash=sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6 \ + --hash=sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0 \ + --hash=sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7 \ + --hash=sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a \ + --hash=sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a \ + --hash=sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e \ + 
--hash=sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62 \ + --hash=sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2 \ + --hash=sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5 \ + --hash=sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee \ + --hash=sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe \ + --hash=sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a \ + --hash=sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e \ + --hash=sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf \ + --hash=sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c \ + --hash=sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3 \ + --hash=sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86 \ + --hash=sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df \ + --hash=sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98 \ + --hash=sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d \ + --hash=sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2 \ + --hash=sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146 \ + --hash=sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550 \ + --hash=sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8 \ + --hash=sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb \ + --hash=sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e \ + --hash=sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d \ + --hash=sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366 \ + --hash=sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0 \ + --hash=sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db \ + 
--hash=sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe \ + --hash=sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426 \ + --hash=sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952 \ + --hash=sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03 \ + --hash=sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f \ + --hash=sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7 \ + --hash=sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b \ + --hash=sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17 \ + --hash=sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5 \ + --hash=sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1 \ + --hash=sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142 \ + --hash=sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884 \ + --hash=sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a \ + --hash=sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9 \ + --hash=sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445 \ + --hash=sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1 \ + --hash=sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1 \ + --hash=sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648 diff --git a/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-win_amd64.txt.json b/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-win_amd64.txt.json new file mode 100644 index 0000000..1aa884c --- /dev/null +++ b/tests/sample_project/requirements/cpython@3.12/requirements-cpython@3.12-win_amd64.txt.json @@ -0,0 +1,4 @@ +{ + "locked_at": "2024-10-15T10:24:35.816641+00:00", + "requirements_hash": 
"sha256:49df6762bcf65e93f7202af1527f129bb55dbd06ff5dff17e5b4949d3fba98de" +} diff --git a/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-linux_x86_64.in b/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-linux_x86_64.in new file mode 100644 index 0000000..016ed8a --- /dev/null +++ b/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-linux_x86_64.in @@ -0,0 +1,3 @@ +# DO NOT EDIT. Auto-generated as part of deployment bundle build. +# Relock bundle dependencies to update. +httpx diff --git a/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-linux_x86_64.txt b/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-linux_x86_64.txt new file mode 100644 index 0000000..b14c474 --- /dev/null +++ b/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-linux_x86_64.txt @@ -0,0 +1,23 @@ +# This file was autogenerated by uv via the following command: +# python -Im layered_envs lock +anyio==4.6.2.post1 \ + --hash=sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c \ + --hash=sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d +certifi==2024.8.30 \ + --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ + --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 +h11==0.14.0 \ + --hash=sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d \ + --hash=sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761 +httpcore==1.0.6 \ + --hash=sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f \ + --hash=sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f +httpx==0.27.2 \ + --hash=sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0 \ + 
--hash=sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2 +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 +sniffio==1.3.1 \ + --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \ + --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc diff --git a/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-linux_x86_64.txt.json b/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-linux_x86_64.txt.json new file mode 100644 index 0000000..dca9a28 --- /dev/null +++ b/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-linux_x86_64.txt.json @@ -0,0 +1,4 @@ +{ + "locked_at": "2024-10-15T10:23:43.145588+00:00", + "requirements_hash": "sha256:c9668bc44dcd9728f98686cb7d72b4cdfc3c3ed44512d29b279a484723c9525a" +} diff --git a/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-macosx_arm64.in b/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-macosx_arm64.in new file mode 100644 index 0000000..016ed8a --- /dev/null +++ b/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-macosx_arm64.in @@ -0,0 +1,3 @@ +# DO NOT EDIT. Auto-generated as part of deployment bundle build. +# Relock bundle dependencies to update. 
+httpx diff --git a/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-macosx_arm64.txt b/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-macosx_arm64.txt new file mode 100644 index 0000000..b14c474 --- /dev/null +++ b/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-macosx_arm64.txt @@ -0,0 +1,23 @@ +# This file was autogenerated by uv via the following command: +# python -Im layered_envs lock +anyio==4.6.2.post1 \ + --hash=sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c \ + --hash=sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d +certifi==2024.8.30 \ + --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ + --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 +h11==0.14.0 \ + --hash=sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d \ + --hash=sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761 +httpcore==1.0.6 \ + --hash=sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f \ + --hash=sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f +httpx==0.27.2 \ + --hash=sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0 \ + --hash=sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2 +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 +sniffio==1.3.1 \ + --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \ + --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc diff --git a/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-macosx_arm64.txt.json 
b/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-macosx_arm64.txt.json new file mode 100644 index 0000000..fddc092 --- /dev/null +++ b/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-macosx_arm64.txt.json @@ -0,0 +1,4 @@ +{ + "locked_at": "2024-10-15T10:23:33.094515+00:00", + "requirements_hash": "sha256:c9668bc44dcd9728f98686cb7d72b4cdfc3c3ed44512d29b279a484723c9525a" +} diff --git a/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-win_amd64.in b/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-win_amd64.in new file mode 100644 index 0000000..016ed8a --- /dev/null +++ b/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-win_amd64.in @@ -0,0 +1,3 @@ +# DO NOT EDIT. Auto-generated as part of deployment bundle build. +# Relock bundle dependencies to update. +httpx diff --git a/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-win_amd64.txt b/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-win_amd64.txt new file mode 100644 index 0000000..719a08f --- /dev/null +++ b/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-win_amd64.txt @@ -0,0 +1,23 @@ +# This file was autogenerated by uv via the following command: +# python.exe -Im layered_envs lock +anyio==4.6.2.post1 \ + --hash=sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c \ + --hash=sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d +certifi==2024.8.30 \ + --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ + --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 +h11==0.14.0 \ + --hash=sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d \ + 
--hash=sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761 +httpcore==1.0.6 \ + --hash=sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f \ + --hash=sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f +httpx==0.27.2 \ + --hash=sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0 \ + --hash=sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2 +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 +sniffio==1.3.1 \ + --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \ + --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc diff --git a/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-win_amd64.txt.json b/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-win_amd64.txt.json new file mode 100644 index 0000000..a168473 --- /dev/null +++ b/tests/sample_project/requirements/framework-http-client/requirements-framework-http-client-win_amd64.txt.json @@ -0,0 +1,4 @@ +{ + "locked_at": "2024-10-15T10:24:36.316012+00:00", + "requirements_hash": "sha256:71ba32717956dcf9c0d4e18b69061efbe53816f4b591ceb50553d6d3d12e1960" +} diff --git a/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-linux_x86_64.in b/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-linux_x86_64.in new file mode 100644 index 0000000..a0fa455 --- /dev/null +++ b/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-linux_x86_64.in @@ -0,0 +1,3 @@ +# DO NOT EDIT. Auto-generated as part of deployment bundle build. +# Relock bundle dependencies to update. 
+scipy diff --git a/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-linux_x86_64.txt b/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-linux_x86_64.txt new file mode 100644 index 0000000..7434c19 --- /dev/null +++ b/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-linux_x86_64.txt @@ -0,0 +1,90 @@ +# This file was autogenerated by uv via the following command: +# python -Im layered_envs lock +numpy==2.1.2 \ + --hash=sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8 \ + --hash=sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466 \ + --hash=sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35 \ + --hash=sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c \ + --hash=sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4 \ + --hash=sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6 \ + --hash=sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0 \ + --hash=sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7 \ + --hash=sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a \ + --hash=sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a \ + --hash=sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e \ + --hash=sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62 \ + --hash=sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2 \ + --hash=sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5 \ + --hash=sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee \ + --hash=sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe \ + --hash=sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a \ + --hash=sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e \ + 
--hash=sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf \ + --hash=sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c \ + --hash=sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3 \ + --hash=sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86 \ + --hash=sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df \ + --hash=sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98 \ + --hash=sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d \ + --hash=sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2 \ + --hash=sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146 \ + --hash=sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550 \ + --hash=sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8 \ + --hash=sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb \ + --hash=sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e \ + --hash=sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d \ + --hash=sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366 \ + --hash=sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0 \ + --hash=sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db \ + --hash=sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe \ + --hash=sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426 \ + --hash=sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952 \ + --hash=sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03 \ + --hash=sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f \ + --hash=sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7 \ + --hash=sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b \ + 
--hash=sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17 \ + --hash=sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5 \ + --hash=sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1 \ + --hash=sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142 \ + --hash=sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884 \ + --hash=sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a \ + --hash=sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9 \ + --hash=sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445 \ + --hash=sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1 \ + --hash=sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1 \ + --hash=sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648 +scipy==1.14.1 \ + --hash=sha256:0c2f95de3b04e26f5f3ad5bb05e74ba7f68b837133a4492414b3afd79dfe540e \ + --hash=sha256:1729560c906963fc8389f6aac023739ff3983e727b1a4d87696b7bf108316a79 \ + --hash=sha256:278266012eb69f4a720827bdd2dc54b2271c97d84255b2faaa8f161a158c3b37 \ + --hash=sha256:2843f2d527d9eebec9a43e6b406fb7266f3af25a751aa91d62ff416f54170bc5 \ + --hash=sha256:2da0469a4ef0ecd3693761acbdc20f2fdeafb69e6819cc081308cc978153c675 \ + --hash=sha256:2ff0a7e01e422c15739ecd64432743cf7aae2b03f3084288f399affcefe5222d \ + --hash=sha256:2ff38e22128e6c03ff73b6bb0f85f897d2362f8c052e3b8ad00532198fbdae3f \ + --hash=sha256:30ac8812c1d2aab7131a79ba62933a2a76f582d5dbbc695192453dae67ad6310 \ + --hash=sha256:3a1b111fac6baec1c1d92f27e76511c9e7218f1695d61b59e05e0fe04dc59617 \ + --hash=sha256:4079b90df244709e675cdc8b93bfd8a395d59af40b72e339c2287c91860deb8e \ + --hash=sha256:5149e3fd2d686e42144a093b206aef01932a0059c2a33ddfa67f5f035bdfe13e \ + --hash=sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417 \ + 
--hash=sha256:631f07b3734d34aced009aaf6fedfd0eb3498a97e581c3b1e5f14a04164a456d \ + --hash=sha256:716e389b694c4bb564b4fc0c51bc84d381735e0d39d3f26ec1af2556ec6aad94 \ + --hash=sha256:8426251ad1e4ad903a4514712d2fa8fdd5382c978010d1c6f5f37ef286a713ad \ + --hash=sha256:8475230e55549ab3f207bff11ebfc91c805dc3463ef62eda3ccf593254524ce8 \ + --hash=sha256:8bddf15838ba768bb5f5083c1ea012d64c9a444e16192762bd858f1e126196d0 \ + --hash=sha256:8e32dced201274bf96899e6491d9ba3e9a5f6b336708656466ad0522d8528f69 \ + --hash=sha256:8f9ea80f2e65bdaa0b7627fb00cbeb2daf163caa015e59b7516395fe3bd1e066 \ + --hash=sha256:97c5dddd5932bd2a1a31c927ba5e1463a53b87ca96b5c9bdf5dfd6096e27efc3 \ + --hash=sha256:a49f6ed96f83966f576b33a44257d869756df6cf1ef4934f59dd58b25e0327e5 \ + --hash=sha256:af29a935803cc707ab2ed7791c44288a682f9c8107bc00f0eccc4f92c08d6e07 \ + --hash=sha256:b05d43735bb2f07d689f56f7b474788a13ed8adc484a85aa65c0fd931cf9ccd2 \ + --hash=sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389 \ + --hash=sha256:b99722ea48b7ea25e8e015e8341ae74624f72e5f21fc2abd45f3a93266de4c5d \ + --hash=sha256:baff393942b550823bfce952bb62270ee17504d02a1801d7fd0719534dfb9c84 \ + --hash=sha256:c0ee987efa6737242745f347835da2cc5bb9f1b42996a4d97d5c7ff7928cb6f2 \ + --hash=sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3 \ + --hash=sha256:e0cf28db0f24a38b2a0ca33a85a54852586e43cf6fd876365c86e0657cfe7d73 \ + --hash=sha256:e4f5a7c49323533f9103d4dacf4e4f07078f360743dec7f7596949149efeec06 \ + --hash=sha256:eb58ca0abd96911932f688528977858681a59d61a7ce908ffd355957f7025cfc \ + --hash=sha256:edaf02b82cd7639db00dbff629995ef185c8df4c3ffa71a5562a595765a06ce1 \ + --hash=sha256:fef8c87f8abfb884dac04e97824b61299880c43f4ce675dd2cbeadd3c9b466d2 diff --git a/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-linux_x86_64.txt.json b/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-linux_x86_64.txt.json new file mode 100644 index 0000000..ecd2341 
--- /dev/null +++ b/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-linux_x86_64.txt.json @@ -0,0 +1,4 @@ +{ + "locked_at": "2024-10-15T10:23:42.825586+00:00", + "requirements_hash": "sha256:36b0dbfec94b7de6507f348f0823cd02fdca2ea79eeafe92d571c26ae347d150" +} diff --git a/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-macosx_arm64.in b/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-macosx_arm64.in new file mode 100644 index 0000000..a0fa455 --- /dev/null +++ b/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-macosx_arm64.in @@ -0,0 +1,3 @@ +# DO NOT EDIT. Auto-generated as part of deployment bundle build. +# Relock bundle dependencies to update. +scipy diff --git a/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-macosx_arm64.txt b/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-macosx_arm64.txt new file mode 100644 index 0000000..7434c19 --- /dev/null +++ b/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-macosx_arm64.txt @@ -0,0 +1,90 @@ +# This file was autogenerated by uv via the following command: +# python -Im layered_envs lock +numpy==2.1.2 \ + --hash=sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8 \ + --hash=sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466 \ + --hash=sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35 \ + --hash=sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c \ + --hash=sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4 \ + --hash=sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6 \ + --hash=sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0 \ + --hash=sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7 \ + 
--hash=sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a \ + --hash=sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a \ + --hash=sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e \ + --hash=sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62 \ + --hash=sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2 \ + --hash=sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5 \ + --hash=sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee \ + --hash=sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe \ + --hash=sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a \ + --hash=sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e \ + --hash=sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf \ + --hash=sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c \ + --hash=sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3 \ + --hash=sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86 \ + --hash=sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df \ + --hash=sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98 \ + --hash=sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d \ + --hash=sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2 \ + --hash=sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146 \ + --hash=sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550 \ + --hash=sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8 \ + --hash=sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb \ + --hash=sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e \ + --hash=sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d \ + 
--hash=sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366 \ + --hash=sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0 \ + --hash=sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db \ + --hash=sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe \ + --hash=sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426 \ + --hash=sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952 \ + --hash=sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03 \ + --hash=sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f \ + --hash=sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7 \ + --hash=sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b \ + --hash=sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17 \ + --hash=sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5 \ + --hash=sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1 \ + --hash=sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142 \ + --hash=sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884 \ + --hash=sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a \ + --hash=sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9 \ + --hash=sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445 \ + --hash=sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1 \ + --hash=sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1 \ + --hash=sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648 +scipy==1.14.1 \ + --hash=sha256:0c2f95de3b04e26f5f3ad5bb05e74ba7f68b837133a4492414b3afd79dfe540e \ + --hash=sha256:1729560c906963fc8389f6aac023739ff3983e727b1a4d87696b7bf108316a79 \ + 
--hash=sha256:278266012eb69f4a720827bdd2dc54b2271c97d84255b2faaa8f161a158c3b37 \ + --hash=sha256:2843f2d527d9eebec9a43e6b406fb7266f3af25a751aa91d62ff416f54170bc5 \ + --hash=sha256:2da0469a4ef0ecd3693761acbdc20f2fdeafb69e6819cc081308cc978153c675 \ + --hash=sha256:2ff0a7e01e422c15739ecd64432743cf7aae2b03f3084288f399affcefe5222d \ + --hash=sha256:2ff38e22128e6c03ff73b6bb0f85f897d2362f8c052e3b8ad00532198fbdae3f \ + --hash=sha256:30ac8812c1d2aab7131a79ba62933a2a76f582d5dbbc695192453dae67ad6310 \ + --hash=sha256:3a1b111fac6baec1c1d92f27e76511c9e7218f1695d61b59e05e0fe04dc59617 \ + --hash=sha256:4079b90df244709e675cdc8b93bfd8a395d59af40b72e339c2287c91860deb8e \ + --hash=sha256:5149e3fd2d686e42144a093b206aef01932a0059c2a33ddfa67f5f035bdfe13e \ + --hash=sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417 \ + --hash=sha256:631f07b3734d34aced009aaf6fedfd0eb3498a97e581c3b1e5f14a04164a456d \ + --hash=sha256:716e389b694c4bb564b4fc0c51bc84d381735e0d39d3f26ec1af2556ec6aad94 \ + --hash=sha256:8426251ad1e4ad903a4514712d2fa8fdd5382c978010d1c6f5f37ef286a713ad \ + --hash=sha256:8475230e55549ab3f207bff11ebfc91c805dc3463ef62eda3ccf593254524ce8 \ + --hash=sha256:8bddf15838ba768bb5f5083c1ea012d64c9a444e16192762bd858f1e126196d0 \ + --hash=sha256:8e32dced201274bf96899e6491d9ba3e9a5f6b336708656466ad0522d8528f69 \ + --hash=sha256:8f9ea80f2e65bdaa0b7627fb00cbeb2daf163caa015e59b7516395fe3bd1e066 \ + --hash=sha256:97c5dddd5932bd2a1a31c927ba5e1463a53b87ca96b5c9bdf5dfd6096e27efc3 \ + --hash=sha256:a49f6ed96f83966f576b33a44257d869756df6cf1ef4934f59dd58b25e0327e5 \ + --hash=sha256:af29a935803cc707ab2ed7791c44288a682f9c8107bc00f0eccc4f92c08d6e07 \ + --hash=sha256:b05d43735bb2f07d689f56f7b474788a13ed8adc484a85aa65c0fd931cf9ccd2 \ + --hash=sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389 \ + --hash=sha256:b99722ea48b7ea25e8e015e8341ae74624f72e5f21fc2abd45f3a93266de4c5d \ + --hash=sha256:baff393942b550823bfce952bb62270ee17504d02a1801d7fd0719534dfb9c84 \ + 
--hash=sha256:c0ee987efa6737242745f347835da2cc5bb9f1b42996a4d97d5c7ff7928cb6f2 \ + --hash=sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3 \ + --hash=sha256:e0cf28db0f24a38b2a0ca33a85a54852586e43cf6fd876365c86e0657cfe7d73 \ + --hash=sha256:e4f5a7c49323533f9103d4dacf4e4f07078f360743dec7f7596949149efeec06 \ + --hash=sha256:eb58ca0abd96911932f688528977858681a59d61a7ce908ffd355957f7025cfc \ + --hash=sha256:edaf02b82cd7639db00dbff629995ef185c8df4c3ffa71a5562a595765a06ce1 \ + --hash=sha256:fef8c87f8abfb884dac04e97824b61299880c43f4ce675dd2cbeadd3c9b466d2 diff --git a/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-macosx_arm64.txt.json b/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-macosx_arm64.txt.json new file mode 100644 index 0000000..d689680 --- /dev/null +++ b/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-macosx_arm64.txt.json @@ -0,0 +1,4 @@ +{ + "locked_at": "2024-10-15T10:23:32.960668+00:00", + "requirements_hash": "sha256:36b0dbfec94b7de6507f348f0823cd02fdca2ea79eeafe92d571c26ae347d150" +} diff --git a/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-win_amd64.in b/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-win_amd64.in new file mode 100644 index 0000000..a0fa455 --- /dev/null +++ b/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-win_amd64.in @@ -0,0 +1,3 @@ +# DO NOT EDIT. Auto-generated as part of deployment bundle build. +# Relock bundle dependencies to update. 
+scipy diff --git a/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-win_amd64.txt b/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-win_amd64.txt new file mode 100644 index 0000000..3c1be97 --- /dev/null +++ b/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-win_amd64.txt @@ -0,0 +1,90 @@ +# This file was autogenerated by uv via the following command: +# python.exe -Im layered_envs lock +numpy==2.1.2 \ + --hash=sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8 \ + --hash=sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466 \ + --hash=sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35 \ + --hash=sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c \ + --hash=sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4 \ + --hash=sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6 \ + --hash=sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0 \ + --hash=sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7 \ + --hash=sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a \ + --hash=sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a \ + --hash=sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e \ + --hash=sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62 \ + --hash=sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2 \ + --hash=sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5 \ + --hash=sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee \ + --hash=sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe \ + --hash=sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a \ + --hash=sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e \ + 
--hash=sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf \ + --hash=sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c \ + --hash=sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3 \ + --hash=sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86 \ + --hash=sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df \ + --hash=sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98 \ + --hash=sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d \ + --hash=sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2 \ + --hash=sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146 \ + --hash=sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550 \ + --hash=sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8 \ + --hash=sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb \ + --hash=sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e \ + --hash=sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d \ + --hash=sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366 \ + --hash=sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0 \ + --hash=sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db \ + --hash=sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe \ + --hash=sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426 \ + --hash=sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952 \ + --hash=sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03 \ + --hash=sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f \ + --hash=sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7 \ + --hash=sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b \ + 
--hash=sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17 \ + --hash=sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5 \ + --hash=sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1 \ + --hash=sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142 \ + --hash=sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884 \ + --hash=sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a \ + --hash=sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9 \ + --hash=sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445 \ + --hash=sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1 \ + --hash=sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1 \ + --hash=sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648 +scipy==1.14.1 \ + --hash=sha256:0c2f95de3b04e26f5f3ad5bb05e74ba7f68b837133a4492414b3afd79dfe540e \ + --hash=sha256:1729560c906963fc8389f6aac023739ff3983e727b1a4d87696b7bf108316a79 \ + --hash=sha256:278266012eb69f4a720827bdd2dc54b2271c97d84255b2faaa8f161a158c3b37 \ + --hash=sha256:2843f2d527d9eebec9a43e6b406fb7266f3af25a751aa91d62ff416f54170bc5 \ + --hash=sha256:2da0469a4ef0ecd3693761acbdc20f2fdeafb69e6819cc081308cc978153c675 \ + --hash=sha256:2ff0a7e01e422c15739ecd64432743cf7aae2b03f3084288f399affcefe5222d \ + --hash=sha256:2ff38e22128e6c03ff73b6bb0f85f897d2362f8c052e3b8ad00532198fbdae3f \ + --hash=sha256:30ac8812c1d2aab7131a79ba62933a2a76f582d5dbbc695192453dae67ad6310 \ + --hash=sha256:3a1b111fac6baec1c1d92f27e76511c9e7218f1695d61b59e05e0fe04dc59617 \ + --hash=sha256:4079b90df244709e675cdc8b93bfd8a395d59af40b72e339c2287c91860deb8e \ + --hash=sha256:5149e3fd2d686e42144a093b206aef01932a0059c2a33ddfa67f5f035bdfe13e \ + --hash=sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417 \ + 
--hash=sha256:631f07b3734d34aced009aaf6fedfd0eb3498a97e581c3b1e5f14a04164a456d \ + --hash=sha256:716e389b694c4bb564b4fc0c51bc84d381735e0d39d3f26ec1af2556ec6aad94 \ + --hash=sha256:8426251ad1e4ad903a4514712d2fa8fdd5382c978010d1c6f5f37ef286a713ad \ + --hash=sha256:8475230e55549ab3f207bff11ebfc91c805dc3463ef62eda3ccf593254524ce8 \ + --hash=sha256:8bddf15838ba768bb5f5083c1ea012d64c9a444e16192762bd858f1e126196d0 \ + --hash=sha256:8e32dced201274bf96899e6491d9ba3e9a5f6b336708656466ad0522d8528f69 \ + --hash=sha256:8f9ea80f2e65bdaa0b7627fb00cbeb2daf163caa015e59b7516395fe3bd1e066 \ + --hash=sha256:97c5dddd5932bd2a1a31c927ba5e1463a53b87ca96b5c9bdf5dfd6096e27efc3 \ + --hash=sha256:a49f6ed96f83966f576b33a44257d869756df6cf1ef4934f59dd58b25e0327e5 \ + --hash=sha256:af29a935803cc707ab2ed7791c44288a682f9c8107bc00f0eccc4f92c08d6e07 \ + --hash=sha256:b05d43735bb2f07d689f56f7b474788a13ed8adc484a85aa65c0fd931cf9ccd2 \ + --hash=sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389 \ + --hash=sha256:b99722ea48b7ea25e8e015e8341ae74624f72e5f21fc2abd45f3a93266de4c5d \ + --hash=sha256:baff393942b550823bfce952bb62270ee17504d02a1801d7fd0719534dfb9c84 \ + --hash=sha256:c0ee987efa6737242745f347835da2cc5bb9f1b42996a4d97d5c7ff7928cb6f2 \ + --hash=sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3 \ + --hash=sha256:e0cf28db0f24a38b2a0ca33a85a54852586e43cf6fd876365c86e0657cfe7d73 \ + --hash=sha256:e4f5a7c49323533f9103d4dacf4e4f07078f360743dec7f7596949149efeec06 \ + --hash=sha256:eb58ca0abd96911932f688528977858681a59d61a7ce908ffd355957f7025cfc \ + --hash=sha256:edaf02b82cd7639db00dbff629995ef185c8df4c3ffa71a5562a595765a06ce1 \ + --hash=sha256:fef8c87f8abfb884dac04e97824b61299880c43f4ce675dd2cbeadd3c9b466d2 diff --git a/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-win_amd64.txt.json b/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-win_amd64.txt.json new file mode 100644 index 0000000..7b22c21 --- 
/dev/null +++ b/tests/sample_project/requirements/framework-scipy/requirements-framework-scipy-win_amd64.txt.json @@ -0,0 +1,4 @@ +{ + "locked_at": "2024-10-15T10:24:35.999197+00:00", + "requirements_hash": "sha256:9aba38b5efe287f35d58825dce6b1c47ed556b930056e6edc00ca9e1a165796b" +} diff --git a/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-linux_x86_64.in b/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-linux_x86_64.in new file mode 100644 index 0000000..1ee073a --- /dev/null +++ b/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-linux_x86_64.in @@ -0,0 +1,3 @@ +# DO NOT EDIT. Auto-generated as part of deployment bundle build. +# Relock bundle dependencies to update. +scikit-learn diff --git a/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-linux_x86_64.txt b/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-linux_x86_64.txt new file mode 100644 index 0000000..7cbe722 --- /dev/null +++ b/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-linux_x86_64.txt @@ -0,0 +1,123 @@ +# This file was autogenerated by uv via the following command: +# python -Im layered_envs lock +joblib==1.4.2 \ + --hash=sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6 \ + --hash=sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e +numpy==2.1.2 \ + --hash=sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8 \ + --hash=sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466 \ + --hash=sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35 \ + --hash=sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c \ + --hash=sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4 \ + --hash=sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6 \ + 
--hash=sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0 \ + --hash=sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7 \ + --hash=sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a \ + --hash=sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a \ + --hash=sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e \ + --hash=sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62 \ + --hash=sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2 \ + --hash=sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5 \ + --hash=sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee \ + --hash=sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe \ + --hash=sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a \ + --hash=sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e \ + --hash=sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf \ + --hash=sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c \ + --hash=sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3 \ + --hash=sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86 \ + --hash=sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df \ + --hash=sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98 \ + --hash=sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d \ + --hash=sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2 \ + --hash=sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146 \ + --hash=sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550 \ + --hash=sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8 \ + --hash=sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb \ + 
--hash=sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e \ + --hash=sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d \ + --hash=sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366 \ + --hash=sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0 \ + --hash=sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db \ + --hash=sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe \ + --hash=sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426 \ + --hash=sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952 \ + --hash=sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03 \ + --hash=sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f \ + --hash=sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7 \ + --hash=sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b \ + --hash=sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17 \ + --hash=sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5 \ + --hash=sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1 \ + --hash=sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142 \ + --hash=sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884 \ + --hash=sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a \ + --hash=sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9 \ + --hash=sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445 \ + --hash=sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1 \ + --hash=sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1 \ + --hash=sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648 +scikit-learn==1.5.2 \ + 
--hash=sha256:03b6158efa3faaf1feea3faa884c840ebd61b6484167c711548fce208ea09445 \ + --hash=sha256:178ddd0a5cb0044464fc1bfc4cca5b1833bfc7bb022d70b05db8530da4bb3dd3 \ + --hash=sha256:1ff45e26928d3b4eb767a8f14a9a6efbf1cbff7c05d1fb0f95f211a89fd4f5de \ + --hash=sha256:299406827fb9a4f862626d0fe6c122f5f87f8910b86fe5daa4c32dcd742139b6 \ + --hash=sha256:2d4cad1119c77930b235579ad0dc25e65c917e756fe80cab96aa3b9428bd3fb0 \ + --hash=sha256:394397841449853c2290a32050382edaec3da89e35b3e03d6cc966aebc6a8ae6 \ + --hash=sha256:3a686885a4b3818d9e62904d91b57fa757fc2bed3e465c8b177be652f4dd37c8 \ + --hash=sha256:3b923d119d65b7bd555c73be5423bf06c0105678ce7e1f558cb4b40b0a5502b1 \ + --hash=sha256:3bed4909ba187aca80580fe2ef370d9180dcf18e621a27c4cf2ef10d279a7efe \ + --hash=sha256:52788f48b5d8bca5c0736c175fa6bdaab2ef00a8f536cda698db61bd89c551c1 \ + --hash=sha256:57cc1786cfd6bd118220a92ede80270132aa353647684efa385a74244a41e3b1 \ + --hash=sha256:643964678f4b5fbdc95cbf8aec638acc7aa70f5f79ee2cdad1eec3df4ba6ead8 \ + --hash=sha256:6c16d84a0d45e4894832b3c4d0bf73050939e21b99b01b6fd59cbb0cf39163b6 \ + --hash=sha256:757c7d514ddb00ae249832fe87100d9c73c6ea91423802872d9e74970a0e40b9 \ + --hash=sha256:8c412ccc2ad9bf3755915e3908e677b367ebc8d010acbb3f182814524f2e5540 \ + --hash=sha256:b0768ad641981f5d3a198430a1d31c3e044ed2e8a6f22166b4d546a5116d7908 \ + --hash=sha256:b4237ed7b3fdd0a4882792e68ef2545d5baa50aca3bb45aa7df468138ad8f94d \ + --hash=sha256:b7b0f9a0b1040830d38c39b91b3a44e1b643f4b36e36567b80b7c6bd2202a27f \ + --hash=sha256:c15b1ca23d7c5f33cc2cb0a0d6aaacf893792271cddff0edbd6a40e8319bc113 \ + --hash=sha256:ca64b3089a6d9b9363cd3546f8978229dcbb737aceb2c12144ee3f70f95684b7 \ + --hash=sha256:e9a702e2de732bbb20d3bad29ebd77fc05a6b427dc49964300340e4c9328b3f5 \ + --hash=sha256:f60021ec1574e56632be2a36b946f8143bf4e5e6af4a06d85281adc22938e0dd \ + --hash=sha256:f7284ade780084d94505632241bf78c44ab3b6f1e8ccab3d2af58e0e950f9c12 \ + --hash=sha256:f763897fe92d0e903aa4847b0aec0e68cadfff77e8a0687cabd946c89d17e675 \ + 
--hash=sha256:f8b0ccd4a902836493e026c03256e8b206656f91fbcc4fde28c57a5b752561f1 \ + --hash=sha256:f932a02c3f4956dfb981391ab24bda1dbd90fe3d628e4b42caef3e041c67707a +scipy==1.14.1 \ + --hash=sha256:0c2f95de3b04e26f5f3ad5bb05e74ba7f68b837133a4492414b3afd79dfe540e \ + --hash=sha256:1729560c906963fc8389f6aac023739ff3983e727b1a4d87696b7bf108316a79 \ + --hash=sha256:278266012eb69f4a720827bdd2dc54b2271c97d84255b2faaa8f161a158c3b37 \ + --hash=sha256:2843f2d527d9eebec9a43e6b406fb7266f3af25a751aa91d62ff416f54170bc5 \ + --hash=sha256:2da0469a4ef0ecd3693761acbdc20f2fdeafb69e6819cc081308cc978153c675 \ + --hash=sha256:2ff0a7e01e422c15739ecd64432743cf7aae2b03f3084288f399affcefe5222d \ + --hash=sha256:2ff38e22128e6c03ff73b6bb0f85f897d2362f8c052e3b8ad00532198fbdae3f \ + --hash=sha256:30ac8812c1d2aab7131a79ba62933a2a76f582d5dbbc695192453dae67ad6310 \ + --hash=sha256:3a1b111fac6baec1c1d92f27e76511c9e7218f1695d61b59e05e0fe04dc59617 \ + --hash=sha256:4079b90df244709e675cdc8b93bfd8a395d59af40b72e339c2287c91860deb8e \ + --hash=sha256:5149e3fd2d686e42144a093b206aef01932a0059c2a33ddfa67f5f035bdfe13e \ + --hash=sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417 \ + --hash=sha256:631f07b3734d34aced009aaf6fedfd0eb3498a97e581c3b1e5f14a04164a456d \ + --hash=sha256:716e389b694c4bb564b4fc0c51bc84d381735e0d39d3f26ec1af2556ec6aad94 \ + --hash=sha256:8426251ad1e4ad903a4514712d2fa8fdd5382c978010d1c6f5f37ef286a713ad \ + --hash=sha256:8475230e55549ab3f207bff11ebfc91c805dc3463ef62eda3ccf593254524ce8 \ + --hash=sha256:8bddf15838ba768bb5f5083c1ea012d64c9a444e16192762bd858f1e126196d0 \ + --hash=sha256:8e32dced201274bf96899e6491d9ba3e9a5f6b336708656466ad0522d8528f69 \ + --hash=sha256:8f9ea80f2e65bdaa0b7627fb00cbeb2daf163caa015e59b7516395fe3bd1e066 \ + --hash=sha256:97c5dddd5932bd2a1a31c927ba5e1463a53b87ca96b5c9bdf5dfd6096e27efc3 \ + --hash=sha256:a49f6ed96f83966f576b33a44257d869756df6cf1ef4934f59dd58b25e0327e5 \ + 
--hash=sha256:af29a935803cc707ab2ed7791c44288a682f9c8107bc00f0eccc4f92c08d6e07 \ + --hash=sha256:b05d43735bb2f07d689f56f7b474788a13ed8adc484a85aa65c0fd931cf9ccd2 \ + --hash=sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389 \ + --hash=sha256:b99722ea48b7ea25e8e015e8341ae74624f72e5f21fc2abd45f3a93266de4c5d \ + --hash=sha256:baff393942b550823bfce952bb62270ee17504d02a1801d7fd0719534dfb9c84 \ + --hash=sha256:c0ee987efa6737242745f347835da2cc5bb9f1b42996a4d97d5c7ff7928cb6f2 \ + --hash=sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3 \ + --hash=sha256:e0cf28db0f24a38b2a0ca33a85a54852586e43cf6fd876365c86e0657cfe7d73 \ + --hash=sha256:e4f5a7c49323533f9103d4dacf4e4f07078f360743dec7f7596949149efeec06 \ + --hash=sha256:eb58ca0abd96911932f688528977858681a59d61a7ce908ffd355957f7025cfc \ + --hash=sha256:edaf02b82cd7639db00dbff629995ef185c8df4c3ffa71a5562a595765a06ce1 \ + --hash=sha256:fef8c87f8abfb884dac04e97824b61299880c43f4ce675dd2cbeadd3c9b466d2 +threadpoolctl==3.5.0 \ + --hash=sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107 \ + --hash=sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467 diff --git a/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-linux_x86_64.txt.json b/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-linux_x86_64.txt.json new file mode 100644 index 0000000..c92e3f3 --- /dev/null +++ b/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-linux_x86_64.txt.json @@ -0,0 +1,4 @@ +{ + "locked_at": "2024-10-15T10:23:43.001587+00:00", + "requirements_hash": "sha256:600b3bc658d940b756d5917e6fb7dec3431c5ce4ebc878f5d031e74f3ebdb7a9" +} diff --git a/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-macosx_arm64.in b/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-macosx_arm64.in new file mode 100644 index 
0000000..1ee073a --- /dev/null +++ b/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-macosx_arm64.in @@ -0,0 +1,3 @@ +# DO NOT EDIT. Auto-generated as part of deployment bundle build. +# Relock bundle dependencies to update. +scikit-learn diff --git a/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-macosx_arm64.txt b/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-macosx_arm64.txt new file mode 100644 index 0000000..7cbe722 --- /dev/null +++ b/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-macosx_arm64.txt @@ -0,0 +1,123 @@ +# This file was autogenerated by uv via the following command: +# python -Im layered_envs lock +joblib==1.4.2 \ + --hash=sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6 \ + --hash=sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e +numpy==2.1.2 \ + --hash=sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8 \ + --hash=sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466 \ + --hash=sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35 \ + --hash=sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c \ + --hash=sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4 \ + --hash=sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6 \ + --hash=sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0 \ + --hash=sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7 \ + --hash=sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a \ + --hash=sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a \ + --hash=sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e \ + --hash=sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62 \ + 
--hash=sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2 \ + --hash=sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5 \ + --hash=sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee \ + --hash=sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe \ + --hash=sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a \ + --hash=sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e \ + --hash=sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf \ + --hash=sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c \ + --hash=sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3 \ + --hash=sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86 \ + --hash=sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df \ + --hash=sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98 \ + --hash=sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d \ + --hash=sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2 \ + --hash=sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146 \ + --hash=sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550 \ + --hash=sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8 \ + --hash=sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb \ + --hash=sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e \ + --hash=sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d \ + --hash=sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366 \ + --hash=sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0 \ + --hash=sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db \ + --hash=sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe \ + 
--hash=sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426 \ + --hash=sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952 \ + --hash=sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03 \ + --hash=sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f \ + --hash=sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7 \ + --hash=sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b \ + --hash=sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17 \ + --hash=sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5 \ + --hash=sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1 \ + --hash=sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142 \ + --hash=sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884 \ + --hash=sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a \ + --hash=sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9 \ + --hash=sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445 \ + --hash=sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1 \ + --hash=sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1 \ + --hash=sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648 +scikit-learn==1.5.2 \ + --hash=sha256:03b6158efa3faaf1feea3faa884c840ebd61b6484167c711548fce208ea09445 \ + --hash=sha256:178ddd0a5cb0044464fc1bfc4cca5b1833bfc7bb022d70b05db8530da4bb3dd3 \ + --hash=sha256:1ff45e26928d3b4eb767a8f14a9a6efbf1cbff7c05d1fb0f95f211a89fd4f5de \ + --hash=sha256:299406827fb9a4f862626d0fe6c122f5f87f8910b86fe5daa4c32dcd742139b6 \ + --hash=sha256:2d4cad1119c77930b235579ad0dc25e65c917e756fe80cab96aa3b9428bd3fb0 \ + --hash=sha256:394397841449853c2290a32050382edaec3da89e35b3e03d6cc966aebc6a8ae6 \ + 
--hash=sha256:3a686885a4b3818d9e62904d91b57fa757fc2bed3e465c8b177be652f4dd37c8 \ + --hash=sha256:3b923d119d65b7bd555c73be5423bf06c0105678ce7e1f558cb4b40b0a5502b1 \ + --hash=sha256:3bed4909ba187aca80580fe2ef370d9180dcf18e621a27c4cf2ef10d279a7efe \ + --hash=sha256:52788f48b5d8bca5c0736c175fa6bdaab2ef00a8f536cda698db61bd89c551c1 \ + --hash=sha256:57cc1786cfd6bd118220a92ede80270132aa353647684efa385a74244a41e3b1 \ + --hash=sha256:643964678f4b5fbdc95cbf8aec638acc7aa70f5f79ee2cdad1eec3df4ba6ead8 \ + --hash=sha256:6c16d84a0d45e4894832b3c4d0bf73050939e21b99b01b6fd59cbb0cf39163b6 \ + --hash=sha256:757c7d514ddb00ae249832fe87100d9c73c6ea91423802872d9e74970a0e40b9 \ + --hash=sha256:8c412ccc2ad9bf3755915e3908e677b367ebc8d010acbb3f182814524f2e5540 \ + --hash=sha256:b0768ad641981f5d3a198430a1d31c3e044ed2e8a6f22166b4d546a5116d7908 \ + --hash=sha256:b4237ed7b3fdd0a4882792e68ef2545d5baa50aca3bb45aa7df468138ad8f94d \ + --hash=sha256:b7b0f9a0b1040830d38c39b91b3a44e1b643f4b36e36567b80b7c6bd2202a27f \ + --hash=sha256:c15b1ca23d7c5f33cc2cb0a0d6aaacf893792271cddff0edbd6a40e8319bc113 \ + --hash=sha256:ca64b3089a6d9b9363cd3546f8978229dcbb737aceb2c12144ee3f70f95684b7 \ + --hash=sha256:e9a702e2de732bbb20d3bad29ebd77fc05a6b427dc49964300340e4c9328b3f5 \ + --hash=sha256:f60021ec1574e56632be2a36b946f8143bf4e5e6af4a06d85281adc22938e0dd \ + --hash=sha256:f7284ade780084d94505632241bf78c44ab3b6f1e8ccab3d2af58e0e950f9c12 \ + --hash=sha256:f763897fe92d0e903aa4847b0aec0e68cadfff77e8a0687cabd946c89d17e675 \ + --hash=sha256:f8b0ccd4a902836493e026c03256e8b206656f91fbcc4fde28c57a5b752561f1 \ + --hash=sha256:f932a02c3f4956dfb981391ab24bda1dbd90fe3d628e4b42caef3e041c67707a +scipy==1.14.1 \ + --hash=sha256:0c2f95de3b04e26f5f3ad5bb05e74ba7f68b837133a4492414b3afd79dfe540e \ + --hash=sha256:1729560c906963fc8389f6aac023739ff3983e727b1a4d87696b7bf108316a79 \ + --hash=sha256:278266012eb69f4a720827bdd2dc54b2271c97d84255b2faaa8f161a158c3b37 \ + 
--hash=sha256:2843f2d527d9eebec9a43e6b406fb7266f3af25a751aa91d62ff416f54170bc5 \ + --hash=sha256:2da0469a4ef0ecd3693761acbdc20f2fdeafb69e6819cc081308cc978153c675 \ + --hash=sha256:2ff0a7e01e422c15739ecd64432743cf7aae2b03f3084288f399affcefe5222d \ + --hash=sha256:2ff38e22128e6c03ff73b6bb0f85f897d2362f8c052e3b8ad00532198fbdae3f \ + --hash=sha256:30ac8812c1d2aab7131a79ba62933a2a76f582d5dbbc695192453dae67ad6310 \ + --hash=sha256:3a1b111fac6baec1c1d92f27e76511c9e7218f1695d61b59e05e0fe04dc59617 \ + --hash=sha256:4079b90df244709e675cdc8b93bfd8a395d59af40b72e339c2287c91860deb8e \ + --hash=sha256:5149e3fd2d686e42144a093b206aef01932a0059c2a33ddfa67f5f035bdfe13e \ + --hash=sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417 \ + --hash=sha256:631f07b3734d34aced009aaf6fedfd0eb3498a97e581c3b1e5f14a04164a456d \ + --hash=sha256:716e389b694c4bb564b4fc0c51bc84d381735e0d39d3f26ec1af2556ec6aad94 \ + --hash=sha256:8426251ad1e4ad903a4514712d2fa8fdd5382c978010d1c6f5f37ef286a713ad \ + --hash=sha256:8475230e55549ab3f207bff11ebfc91c805dc3463ef62eda3ccf593254524ce8 \ + --hash=sha256:8bddf15838ba768bb5f5083c1ea012d64c9a444e16192762bd858f1e126196d0 \ + --hash=sha256:8e32dced201274bf96899e6491d9ba3e9a5f6b336708656466ad0522d8528f69 \ + --hash=sha256:8f9ea80f2e65bdaa0b7627fb00cbeb2daf163caa015e59b7516395fe3bd1e066 \ + --hash=sha256:97c5dddd5932bd2a1a31c927ba5e1463a53b87ca96b5c9bdf5dfd6096e27efc3 \ + --hash=sha256:a49f6ed96f83966f576b33a44257d869756df6cf1ef4934f59dd58b25e0327e5 \ + --hash=sha256:af29a935803cc707ab2ed7791c44288a682f9c8107bc00f0eccc4f92c08d6e07 \ + --hash=sha256:b05d43735bb2f07d689f56f7b474788a13ed8adc484a85aa65c0fd931cf9ccd2 \ + --hash=sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389 \ + --hash=sha256:b99722ea48b7ea25e8e015e8341ae74624f72e5f21fc2abd45f3a93266de4c5d \ + --hash=sha256:baff393942b550823bfce952bb62270ee17504d02a1801d7fd0719534dfb9c84 \ + --hash=sha256:c0ee987efa6737242745f347835da2cc5bb9f1b42996a4d97d5c7ff7928cb6f2 \ + 
--hash=sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3 \ + --hash=sha256:e0cf28db0f24a38b2a0ca33a85a54852586e43cf6fd876365c86e0657cfe7d73 \ + --hash=sha256:e4f5a7c49323533f9103d4dacf4e4f07078f360743dec7f7596949149efeec06 \ + --hash=sha256:eb58ca0abd96911932f688528977858681a59d61a7ce908ffd355957f7025cfc \ + --hash=sha256:edaf02b82cd7639db00dbff629995ef185c8df4c3ffa71a5562a595765a06ce1 \ + --hash=sha256:fef8c87f8abfb884dac04e97824b61299880c43f4ce675dd2cbeadd3c9b466d2 +threadpoolctl==3.5.0 \ + --hash=sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107 \ + --hash=sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467 diff --git a/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-macosx_arm64.txt.json b/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-macosx_arm64.txt.json new file mode 100644 index 0000000..835d426 --- /dev/null +++ b/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-macosx_arm64.txt.json @@ -0,0 +1,4 @@ +{ + "locked_at": "2024-10-15T10:23:33.041734+00:00", + "requirements_hash": "sha256:600b3bc658d940b756d5917e6fb7dec3431c5ce4ebc878f5d031e74f3ebdb7a9" +} diff --git a/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-win_amd64.in b/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-win_amd64.in new file mode 100644 index 0000000..1ee073a --- /dev/null +++ b/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-win_amd64.in @@ -0,0 +1,3 @@ +# DO NOT EDIT. Auto-generated as part of deployment bundle build. +# Relock bundle dependencies to update. 
+scikit-learn diff --git a/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-win_amd64.txt b/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-win_amd64.txt new file mode 100644 index 0000000..b82584f --- /dev/null +++ b/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-win_amd64.txt @@ -0,0 +1,123 @@ +# This file was autogenerated by uv via the following command: +# python.exe -Im layered_envs lock +joblib==1.4.2 \ + --hash=sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6 \ + --hash=sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e +numpy==2.1.2 \ + --hash=sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8 \ + --hash=sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466 \ + --hash=sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35 \ + --hash=sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c \ + --hash=sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4 \ + --hash=sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6 \ + --hash=sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0 \ + --hash=sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7 \ + --hash=sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a \ + --hash=sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a \ + --hash=sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e \ + --hash=sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62 \ + --hash=sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2 \ + --hash=sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5 \ + --hash=sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee \ + 
--hash=sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe \ + --hash=sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a \ + --hash=sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e \ + --hash=sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf \ + --hash=sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c \ + --hash=sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3 \ + --hash=sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86 \ + --hash=sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df \ + --hash=sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98 \ + --hash=sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d \ + --hash=sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2 \ + --hash=sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146 \ + --hash=sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550 \ + --hash=sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8 \ + --hash=sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb \ + --hash=sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e \ + --hash=sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d \ + --hash=sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366 \ + --hash=sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0 \ + --hash=sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db \ + --hash=sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe \ + --hash=sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426 \ + --hash=sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952 \ + --hash=sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03 \ + 
--hash=sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f \ + --hash=sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7 \ + --hash=sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b \ + --hash=sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17 \ + --hash=sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5 \ + --hash=sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1 \ + --hash=sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142 \ + --hash=sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884 \ + --hash=sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a \ + --hash=sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9 \ + --hash=sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445 \ + --hash=sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1 \ + --hash=sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1 \ + --hash=sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648 +scikit-learn==1.5.2 \ + --hash=sha256:03b6158efa3faaf1feea3faa884c840ebd61b6484167c711548fce208ea09445 \ + --hash=sha256:178ddd0a5cb0044464fc1bfc4cca5b1833bfc7bb022d70b05db8530da4bb3dd3 \ + --hash=sha256:1ff45e26928d3b4eb767a8f14a9a6efbf1cbff7c05d1fb0f95f211a89fd4f5de \ + --hash=sha256:299406827fb9a4f862626d0fe6c122f5f87f8910b86fe5daa4c32dcd742139b6 \ + --hash=sha256:2d4cad1119c77930b235579ad0dc25e65c917e756fe80cab96aa3b9428bd3fb0 \ + --hash=sha256:394397841449853c2290a32050382edaec3da89e35b3e03d6cc966aebc6a8ae6 \ + --hash=sha256:3a686885a4b3818d9e62904d91b57fa757fc2bed3e465c8b177be652f4dd37c8 \ + --hash=sha256:3b923d119d65b7bd555c73be5423bf06c0105678ce7e1f558cb4b40b0a5502b1 \ + --hash=sha256:3bed4909ba187aca80580fe2ef370d9180dcf18e621a27c4cf2ef10d279a7efe \ + 
--hash=sha256:52788f48b5d8bca5c0736c175fa6bdaab2ef00a8f536cda698db61bd89c551c1 \ + --hash=sha256:57cc1786cfd6bd118220a92ede80270132aa353647684efa385a74244a41e3b1 \ + --hash=sha256:643964678f4b5fbdc95cbf8aec638acc7aa70f5f79ee2cdad1eec3df4ba6ead8 \ + --hash=sha256:6c16d84a0d45e4894832b3c4d0bf73050939e21b99b01b6fd59cbb0cf39163b6 \ + --hash=sha256:757c7d514ddb00ae249832fe87100d9c73c6ea91423802872d9e74970a0e40b9 \ + --hash=sha256:8c412ccc2ad9bf3755915e3908e677b367ebc8d010acbb3f182814524f2e5540 \ + --hash=sha256:b0768ad641981f5d3a198430a1d31c3e044ed2e8a6f22166b4d546a5116d7908 \ + --hash=sha256:b4237ed7b3fdd0a4882792e68ef2545d5baa50aca3bb45aa7df468138ad8f94d \ + --hash=sha256:b7b0f9a0b1040830d38c39b91b3a44e1b643f4b36e36567b80b7c6bd2202a27f \ + --hash=sha256:c15b1ca23d7c5f33cc2cb0a0d6aaacf893792271cddff0edbd6a40e8319bc113 \ + --hash=sha256:ca64b3089a6d9b9363cd3546f8978229dcbb737aceb2c12144ee3f70f95684b7 \ + --hash=sha256:e9a702e2de732bbb20d3bad29ebd77fc05a6b427dc49964300340e4c9328b3f5 \ + --hash=sha256:f60021ec1574e56632be2a36b946f8143bf4e5e6af4a06d85281adc22938e0dd \ + --hash=sha256:f7284ade780084d94505632241bf78c44ab3b6f1e8ccab3d2af58e0e950f9c12 \ + --hash=sha256:f763897fe92d0e903aa4847b0aec0e68cadfff77e8a0687cabd946c89d17e675 \ + --hash=sha256:f8b0ccd4a902836493e026c03256e8b206656f91fbcc4fde28c57a5b752561f1 \ + --hash=sha256:f932a02c3f4956dfb981391ab24bda1dbd90fe3d628e4b42caef3e041c67707a +scipy==1.14.1 \ + --hash=sha256:0c2f95de3b04e26f5f3ad5bb05e74ba7f68b837133a4492414b3afd79dfe540e \ + --hash=sha256:1729560c906963fc8389f6aac023739ff3983e727b1a4d87696b7bf108316a79 \ + --hash=sha256:278266012eb69f4a720827bdd2dc54b2271c97d84255b2faaa8f161a158c3b37 \ + --hash=sha256:2843f2d527d9eebec9a43e6b406fb7266f3af25a751aa91d62ff416f54170bc5 \ + --hash=sha256:2da0469a4ef0ecd3693761acbdc20f2fdeafb69e6819cc081308cc978153c675 \ + --hash=sha256:2ff0a7e01e422c15739ecd64432743cf7aae2b03f3084288f399affcefe5222d \ + 
--hash=sha256:2ff38e22128e6c03ff73b6bb0f85f897d2362f8c052e3b8ad00532198fbdae3f \ + --hash=sha256:30ac8812c1d2aab7131a79ba62933a2a76f582d5dbbc695192453dae67ad6310 \ + --hash=sha256:3a1b111fac6baec1c1d92f27e76511c9e7218f1695d61b59e05e0fe04dc59617 \ + --hash=sha256:4079b90df244709e675cdc8b93bfd8a395d59af40b72e339c2287c91860deb8e \ + --hash=sha256:5149e3fd2d686e42144a093b206aef01932a0059c2a33ddfa67f5f035bdfe13e \ + --hash=sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417 \ + --hash=sha256:631f07b3734d34aced009aaf6fedfd0eb3498a97e581c3b1e5f14a04164a456d \ + --hash=sha256:716e389b694c4bb564b4fc0c51bc84d381735e0d39d3f26ec1af2556ec6aad94 \ + --hash=sha256:8426251ad1e4ad903a4514712d2fa8fdd5382c978010d1c6f5f37ef286a713ad \ + --hash=sha256:8475230e55549ab3f207bff11ebfc91c805dc3463ef62eda3ccf593254524ce8 \ + --hash=sha256:8bddf15838ba768bb5f5083c1ea012d64c9a444e16192762bd858f1e126196d0 \ + --hash=sha256:8e32dced201274bf96899e6491d9ba3e9a5f6b336708656466ad0522d8528f69 \ + --hash=sha256:8f9ea80f2e65bdaa0b7627fb00cbeb2daf163caa015e59b7516395fe3bd1e066 \ + --hash=sha256:97c5dddd5932bd2a1a31c927ba5e1463a53b87ca96b5c9bdf5dfd6096e27efc3 \ + --hash=sha256:a49f6ed96f83966f576b33a44257d869756df6cf1ef4934f59dd58b25e0327e5 \ + --hash=sha256:af29a935803cc707ab2ed7791c44288a682f9c8107bc00f0eccc4f92c08d6e07 \ + --hash=sha256:b05d43735bb2f07d689f56f7b474788a13ed8adc484a85aa65c0fd931cf9ccd2 \ + --hash=sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389 \ + --hash=sha256:b99722ea48b7ea25e8e015e8341ae74624f72e5f21fc2abd45f3a93266de4c5d \ + --hash=sha256:baff393942b550823bfce952bb62270ee17504d02a1801d7fd0719534dfb9c84 \ + --hash=sha256:c0ee987efa6737242745f347835da2cc5bb9f1b42996a4d97d5c7ff7928cb6f2 \ + --hash=sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3 \ + --hash=sha256:e0cf28db0f24a38b2a0ca33a85a54852586e43cf6fd876365c86e0657cfe7d73 \ + --hash=sha256:e4f5a7c49323533f9103d4dacf4e4f07078f360743dec7f7596949149efeec06 \ + 
--hash=sha256:eb58ca0abd96911932f688528977858681a59d61a7ce908ffd355957f7025cfc \ + --hash=sha256:edaf02b82cd7639db00dbff629995ef185c8df4c3ffa71a5562a595765a06ce1 \ + --hash=sha256:fef8c87f8abfb884dac04e97824b61299880c43f4ce675dd2cbeadd3c9b466d2 +threadpoolctl==3.5.0 \ + --hash=sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107 \ + --hash=sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467 diff --git a/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-win_amd64.txt.json b/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-win_amd64.txt.json new file mode 100644 index 0000000..e65351c --- /dev/null +++ b/tests/sample_project/requirements/framework-sklearn/requirements-framework-sklearn-win_amd64.txt.json @@ -0,0 +1,4 @@ +{ + "locked_at": "2024-10-15T10:24:36.193328+00:00", + "requirements_hash": "sha256:52d03445e70ccc0f6f3fe8523185f9f4b0b2795b83927d8703fba35fdc82bf62" +} diff --git a/tests/sample_project/venvstacks.toml b/tests/sample_project/venvstacks.toml new file mode 100644 index 0000000..818e440 --- /dev/null +++ b/tests/sample_project/venvstacks.toml @@ -0,0 +1,126 @@ +# Sample Python runtime, framework and application layers for venvstacks testing +# +# While it is recommended that all layers use the same runtime, allowance is made for +# multiple runtimes to handle incremental migrations to newer Python versions (e.g. shipping +# both Python 3.11 and 3.12 as different frameworks and applications are migrated rather than +# having to migrate everything all at once) + +# By default, inplace runtime and framework updates are permitted without requiring layers +# that depend on them to be updated (this allows lower impact security and maintenance +# updates to the runtime and framework layers). +# +# If an update does imply compatibility issues for higher layers, then either it needs to +# be handled as a new layer with an updated name (e.g. 
appending a version number), or +# else affected higher layer components need to be updated at the same time. + +# Each layer definition may include a `platforms` list that specifies which platforms the +# layer should be built for. This is intended for use when particular frameworks only +# support a subset of an overall application's target platforms (for example, LMStudio +# supports MLX, but only on macOS with Apple silicon). +# +# Permitted entries in the `platforms` list are: +# +# * "win_amd64": Windows on x86-64 +# * "linux_x86_64": Linux on x86_64 +# * "macosx_arm64": macOS on Apple silicon +# * "macosx_x86_64": macOS on Intel silicon (not currently tested in CI) +# +# The specific platform strings used are chosen for convenience in comparison with the +# platform identifying strings emitted by Python's `sysconfig.get_platform()` API. +# +# Omitting the field entirely means "build for all platforms", setting it to an empty +# list allows a layer to be defined for shared testing purposes without adding it +# to the default build set yet. + +###################################### +# Python runtime layers +###################################### + +[[runtimes]] +# Runtime naming convention is inspired by the approach used in the PDM project +# It is relaxed slightly to allow lower impact runtime security and maintenance updates +name = "cpython@3.11" +fully_versioned_name = "cpython@3.11.10" +requirements = [ + "numpy", + # `dlltracer` helps to debug any binary wheel DLL import failures on Windows.
+ # It is used here just as an example of a minimal platform dependent library + "dlltracer; sys_platform == 'win32'", +] + +[[runtimes]] +# Same spec as the Python 3.11 runtime, just for a newer Python version +name = "cpython@3.12" +fully_versioned_name = "cpython@3.12.7" +requirements = [ + # Omit dlltracer, as it doesn't publish 3.12 binaries (as of 2024-10-15) + "numpy", +] + +# Add more [[runtimes]] sections to define additional runtimes (TOML array of tables) + +###################################### +# Sample project framework layers +###################################### + +[[frameworks]] +name = "scipy" +# Automatic versioning currently breaks venv layering +# https://github.com/lmstudio-ai/venvstacks/issues/24 +# versioned = true +runtime = "cpython@3.11" +requirements = [ + "scipy", +] + +[[frameworks]] +name = "sklearn" +runtime = "cpython@3.12" +requirements = [ + "scikit-learn", +] + +[[frameworks]] +name = "http-client" +runtime = "cpython@3.11" +requirements = [ + # Framework layer for non-ML app layers that just do HTTP requests + "httpx", +] + +# Add more [[frameworks]] sections to define additional frameworks (TOML array of tables) + +###################################### +# Sample project application layers +###################################### + +[[applications]] +name = "scipy-import" +# Automatic versioning currently breaks venv layering +# https://github.com/lmstudio-ai/venvstacks/issues/24 +# versioned = true +launch_module = "launch_modules/scipy_import.py" +frameworks = ["scipy"] +requirements = [ + "scipy" +] + +[[applications]] +name = "scipy-client" +launch_module = "launch_modules/scipy_client" +frameworks = ["scipy", "http-client"] +requirements = [ + "scipy", + "httpx", +] + +[[applications]] +name = "sklearn-import" +launch_module = "launch_modules/sklearn_import.py" +frameworks = ["sklearn"] +requirements = [ + "scikit-learn", +] +platforms = ["linux_x86_64"] + +# Add more [[applications]] sections to define additional 
applications (TOML array of tables) diff --git a/tests/support.py b/tests/support.py new file mode 100644 index 0000000..9327cb7 --- /dev/null +++ b/tests/support.py @@ -0,0 +1,199 @@ +"""Test support for venvstacks testing""" + +import json +import os +import subprocess +import tomllib + +from dataclasses import dataclass, fields +from pathlib import Path +from typing import Any, cast, Mapping +from unittest.mock import create_autospec + +from venvstacks._util import run_python_command +from venvstacks.stacks import ( + BuildEnvironment, + EnvNameDeploy, + LayerBaseName, + IndexConfig, +) + +_THIS_DIR = Path(__file__).parent + +################################## +# Exporting test artifacts +################################## + +TEST_EXPORT_ENV_VAR = ( + "VENVSTACKS_EXPORT_TEST_ARTIFACTS" # Output directory for artifacts +) +FORCED_EXPORT_ENV_VAR = "VENVSTACKS_FORCE_TEST_EXPORT" # Force export if non-empty + + +def get_artifact_export_path() -> Path | None: + """Location to export notable artifacts generated during test execution""" + export_dir = os.environ.get(TEST_EXPORT_ENV_VAR) + if not export_dir: + return None + export_path = Path(export_dir) + if not export_path.exists(): + return None + return export_path + + +def force_artifact_export() -> bool: + """Indicate artifacts should be exported even if a test case passes""" + # Export is forced if the environment var is defined and non-empty + return bool(os.environ.get(FORCED_EXPORT_ENV_VAR)) + + +#################################### +# Ensuring predictable test output +#################################### + +# Note: tests that rely on the expected output config should be +# marked as "expected_output" tests so they're executed +# when regenerating the expected output files + +_OUTPUT_CONFIG_PATH = _THIS_DIR / "expected-output-config.toml" +_OUTPUT_CONFIG: Mapping[str, Any] | None = None + + +def _cast_config(config_mapping: Any) -> Mapping[str, str]: + return cast(Mapping[str, str], config_mapping) + + +def 
get_output_config() -> Mapping[str, Any]: + global _OUTPUT_CONFIG + if _OUTPUT_CONFIG is None: + data = _OUTPUT_CONFIG_PATH.read_text() + _OUTPUT_CONFIG = tomllib.loads(data) + return _OUTPUT_CONFIG + + +def get_pinned_dev_packages() -> Mapping[str, str]: + return _cast_config(get_output_config()["pinned-dev-packages"]) + + +def get_os_environ_settings() -> Mapping[str, str]: + return _cast_config(get_output_config()["env"]) + + +################################## +# Expected layer definitions +################################## + + +# Runtimes +@dataclass(frozen=True) +class EnvSummary: + _spec_name: str + env_prefix: str + + @property + def spec_name(self) -> LayerBaseName: + return LayerBaseName(self._spec_name) + + @property + def env_name(self) -> EnvNameDeploy: + return EnvNameDeploy(self.env_prefix + self._spec_name) + + +# Frameworks +@dataclass(frozen=True) +class LayeredEnvSummary(EnvSummary): + runtime_spec_name: str + + +# Applications +@dataclass(frozen=True) +class ApplicationEnvSummary(LayeredEnvSummary): + framework_spec_names: tuple[str, ...] + + +############################################ +# Reading published and exported manifests +############################################ + + +class ManifestData: + # Speculative: should this helper class be part of the public venvstacks API? 
    # Combined metadata manifest (contents of the build's metadata manifest file;
    # empty dict when no manifest has been written yet)
    combined_data: dict[str, Any]
    # Per-environment metadata snippets (one dict per JSON snippet file)
    snippet_data: list[dict[str, Any]]

    def __init__(self, metadata_path: Path, snippet_paths: list[Path] | None = None):
        """Load build metadata for comparison against the expected test output.

        *metadata_path* may be either the metadata directory or the manifest
        JSON file itself. When *snippet_paths* is not given, per-environment
        snippets are discovered under the metadata directory's env subdirectory.

        Raises TypeError if any loaded JSON document is not a JSON object.
        """
        if metadata_path.suffix == ".json":
            # Given the manifest file directly; the metadata dir is its parent
            manifest_path = metadata_path
            metadata_path = metadata_path.parent
        else:
            manifest_path = metadata_path / BuildEnvironment.METADATA_MANIFEST
        if manifest_path.exists():
            manifest_data = json.loads(manifest_path.read_text("utf-8"))
            if not isinstance(manifest_data, dict):
                msg = f"{manifest_path!r} data is not a dict: {manifest_data!r}"
                raise TypeError(msg)
            self.combined_data = manifest_data
        else:
            # Tolerate a missing manifest (e.g. before a first full build)
            self.combined_data = {}
        self.snippet_data = snippet_data = []
        if snippet_paths is None:
            snippet_base_path = metadata_path / BuildEnvironment.METADATA_ENV_DIR
            if snippet_base_path.exists():
                # Sorted for a deterministic snippet order across platforms
                snippet_paths = sorted(snippet_base_path.iterdir())
            else:
                snippet_paths = []
        for snippet_path in snippet_paths:
            metadata_snippet = json.loads(snippet_path.read_text("utf-8"))
            if not isinstance(metadata_snippet, dict):
                msg = f"{snippet_path!r} data is not a dict: {metadata_snippet!r}"
                raise TypeError(msg)
            snippet_data.append(metadata_snippet)


##################################
# Expected package index access
##################################


def make_mock_index_config(reference_config: IndexConfig | None = None) -> Any:
    """Create an autospec'd ``IndexConfig`` mock that still behaves like the original.

    Boolean/length checks and iteration on the mocked fields reflect the real
    field values, and the CLI argument retrieval methods delegate to the real
    implementations, while all accesses remain recorded by the mock.
    """
    if reference_config is None:
        reference_config = IndexConfig()
    mock_config = create_autospec(reference_config, spec_set=True)
    # Make conditional checks and iteration reflect the actual field values
    checked_methods = ("__bool__", "__len__", "__iter__")
    for field in fields(reference_config):
        attr_name = field.name
        mock_field = getattr(mock_config, attr_name)
        field_value = getattr(reference_config, attr_name)
        for method_name in checked_methods:
            mock_method = getattr(mock_field, method_name, None)
            if mock_method is None:
                continue
            mock_method.side_effect = getattr(field_value, method_name)
    # Still call the actual CLI arg retrieval methods
    for attr_name in dir(reference_config):
        if not attr_name.startswith(("_get_pip_", "_get_uv_")):
            continue
        mock_method = getattr(mock_config, attr_name)
        mock_method.side_effect = getattr(reference_config, attr_name)
    return mock_config


##############################################
# Running commands in a deployed environment
##############################################


def capture_python_output(command: list[str]) -> subprocess.CompletedProcess[str]:
    """Run the given Python command, capturing stdout and stderr."""
    # NOTE(review): delegates to `run_python_command`, defined earlier in this
    # module (outside this chunk) — presumably a checked subprocess wrapper
    return run_python_command(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)


def get_sys_path(env_python: Path) -> list[str]:
    """Return ``sys.path`` as reported by the given environment's Python binary."""
    # -I runs isolated mode so the result reflects only the environment itself
    command = [str(env_python), "-Ic", "import json, sys; print(json.dumps(sys.path))"]
    result = capture_python_output(command)
    return cast(list[str], json.loads(result.stdout))


def run_module(env_python: Path, module_name: str) -> subprocess.CompletedProcess[str]:
    """Run the named module with the given environment's Python binary."""
    command = [str(env_python), "-Im", module_name]
    return capture_python_output(command)


# --- tests/test_basics.py (separate new file in the original patch) ---

"""Basic tests for venvstacks package components"""

from importlib.metadata import version as pkg_version

import pytest

from support import get_pinned_dev_packages


def test_python_api_import() -> None:
    """The public Python API module should be importable."""
    from venvstacks import stacks

    assert hasattr(stacks, "StackSpec")


PINNED_DEV_PACKAGES = sorted(get_pinned_dev_packages().items())


@pytest.mark.parametrize("pkg_name,version", PINNED_DEV_PACKAGES)
def test_pinned_dev_packages(pkg_name: str, version: str) -> None:
    """Installed dev dependencies should match their pinned versions."""
    assert pkg_version(pkg_name) == version


# TODO: The assorted utility classes and functions added to stacks.py when it was
# a mostly standalone script should be separated out and unit tested

# TODO: the sample project is intentionally well-formed, there should be test cases for the
# assorted incorrect layer
# specs that the stack builder detects and rejects (e.g.
# missing launch modules, applications depending on frameworks with inconsistent
# runtime requirements)


# --- tests/test_cli_invocation.py (separate new file in the original patch) ---

"""Test cases for CLI invocation"""

from contextlib import contextmanager
from dataclasses import dataclass, field
from pathlib import Path
from traceback import format_exception
from types import ModuleType
from typing import Any, cast, Generator, get_type_hints, Iterator, Self, Sequence
from unittest.mock import create_autospec, MagicMock, patch

import pytest

import click.testing
import typer
from typer.models import ArgumentInfo, OptionInfo
from typer.testing import CliRunner

from venvstacks import cli
from venvstacks.stacks import BuildEnvironment, EnvironmentLock, IndexConfig


def report_traceback(exc: BaseException | None) -> str:
    """Format the given exception for display in an assertion failure message."""
    if exc is None:
        return "Expected exception was not raised"
    return "\n".join(format_exception(exc))


def _mock_path(contents: str | None = None) -> Any:
    """Create a mocked ``Path`` that reports itself as existing.

    With no *contents*, the mock behaves like a directory; otherwise it
    behaves like a readable file whose ``read_text`` returns *contents*.
    """
    mock_path = create_autospec(Path, spec_set=True, instance=True)
    mock_path.exists.return_value = True
    if contents is None:
        # Pretend this is a directory path
        mock_path.is_dir.return_value = True
    else:
        # Pretend this is a readable file path
        mock_path.read_text.return_value = contents
        mock_path.is_dir.return_value = False
    return mock_path


def mock_environment_locking(*, clean: bool = False) -> Sequence[EnvironmentLock]:
    """Stand-in for ``BuildEnvironment.lock_environments`` (locks nothing)."""
    return []


def mock_output_generation(output_dir: Path, *, dry_run: bool = False, **_: Any) -> Any:
    """Stand-in for the artifact publication and environment export methods."""
    # NOTE(review): dry runs return a 2-tuple while real runs return a 3-tuple —
    # presumably mirroring the real API's return shapes; confirm against stacks.py
    if dry_run:
        return _mock_path(), {}
    return _mock_path("{}"), [], []


@dataclass(repr=False, eq=False)
class MockedRunner:
    """CLI test runner wired up to a fully mocked stack spec and build environment."""

    app: typer.Typer
    mocked_stack_spec: MagicMock

    runner: CliRunner = field(init=False)
    mocked_build_env: MagicMock = field(init=False)

    def __post_init__(self) -> None:
        # Note: `mock` doesn't quite handle `dataclass` instances correctly
        # (see https://github.com/python/cpython/issues/124176 for details)
        # However, the CLI doesn't currently try to access any of the missing
        # attributes, so the autospec mocking here is sufficient in practice.
        self.runner = CliRunner()
        mocked_stack_spec = self.mocked_stack_spec
        # Use the patched in mock as the sole defined spec instance
        mocked_stack_spec.load.return_value = mocked_stack_spec
        # Patch in a mocked build environment
        mocked_build_env = create_autospec(
            BuildEnvironment, spec_set=True, instance=True
        )
        # use the mocked build environment in test cases
        mocked_stack_spec.define_build_environment.return_value = mocked_build_env
        self.mocked_build_env = mocked_build_env
        # Control the result of the environment locking step
        mocked_build_env.lock_environments.side_effect = mock_environment_locking
        # Control the result of artifact publication and environment exports
        mocked_build_env.publish_artifacts.side_effect = mock_output_generation
        mocked_build_env.export_environments.side_effect = mock_output_generation

    @classmethod
    @contextmanager
    def cli_patch_installed(cls, cli_module: ModuleType) -> Iterator[Self]:
        """Patch the given CLI module to invoke a mocked StackSpec instance"""
        app = cli_module._cli
        patch_cm = patch.object(cli_module, "StackSpec", autospec=True, spec_set=True)
        with patch_cm as mocked_stack_spec:
            yield cls(app, mocked_stack_spec)

    def invoke(self, cli_args: list[str]) -> click.testing.Result:
        """Invoke the mocked CLI app with the given arguments."""
        return self.runner.invoke(self.app, cli_args)

    def assert_build_config(
        self, expected_build_dir: str, expected_index_config: IndexConfig
    ) -> None:
        """Check environment build path and index configuration details"""
        env_definition = self.mocked_stack_spec.define_build_environment
        env_definition.assert_called_with(expected_build_dir, expected_index_config)
_OUTPUT_METHODS = { + "build": "publish_artifacts", + "publish": "publish_artifacts", + "local-export": "export_environments", + } + + def get_output_method(self, command: str) -> MagicMock: + """Return the Mock expected to be called for the given output command""" + output_method_name = self._OUTPUT_METHODS[command] + return cast(MagicMock, getattr(self.mocked_build_env, output_method_name)) + + _DEFAULT_OUTPUT_DIRS = { + "build": "_artifacts", + "publish": "_artifacts", + "local-export": "_export", + } + + def get_default_output_dir(self, command: str) -> str: + """Return the Mock expected to be called for the given output command""" + return self._DEFAULT_OUTPUT_DIRS[command] + + +@pytest.fixture +def mocked_runner() -> Generator[MockedRunner, None, None]: + with MockedRunner.cli_patch_installed(cli) as mocked_app: + yield mocked_app + + +class TestTopLevelCommand: + def test_implicit_help(self, mocked_runner: MockedRunner) -> None: + result = mocked_runner.invoke([]) + # Top-level callback docstring is used as the overall CLI help text + cli_help = cli.handle_app_options.__doc__ + assert cli_help is not None + assert cli_help.strip() in result.stdout + # Subcommands are listed in the top level help + assert str(cli.build.__name__) in result.stdout + # No stack spec should be created + mocked_stack_spec = mocked_runner.mocked_stack_spec + mocked_stack_spec.assert_not_called() + mocked_stack_spec.load.assert_not_called() + # Check operation result last to ensure test results are as informative as possible + assert result.exception is None, report_traceback(result.exception) + assert result.exit_code == 0 + + +EXPECTED_USAGE_PREFIX = "Usage: python -m venvstacks" +EXPECTED_SUBCOMMANDS = ["lock", "build", "local-export", "publish"] +NO_SPEC_PATH: list[str] = [] +NEEDS_SPEC_PATH = sorted(set(EXPECTED_SUBCOMMANDS) - set(NO_SPEC_PATH)) +ACCEPTS_BUILD_DIR = ["lock", "build", "local-export", "publish"] +ACCEPTS_OUTPUT_DIR = ["build", "local-export", "publish"] 
ACCEPTS_INDEX_CONFIG = ["lock", "build"]


def _get_default_index_config(command: str) -> IndexConfig:
    """Return the index configuration a subcommand uses when no options are given."""
    if command in ACCEPTS_INDEX_CONFIG:
        return IndexConfig()
    # Commands that don't support index access should turn it off in their config
    return IndexConfig.disabled()


# Naming conventions linking CLI parameter annotations in `cli` to their types
ARGUMENT_PREFIX = "_CLI_ARG"
OPTION_PREFIXES = {
    bool: "_CLI_OPT_FLAG",
    bool | None: "_CLI_OPT_TRISTATE",
    list[str] | None: "_CLI_OPT_STRLIST",
    str: "_CLI_OPT_STR",
}


class TestSubcommands:
    """Behaviour of the individual CLI subcommands (against mocked build machinery)."""

    @pytest.mark.parametrize("command", EXPECTED_SUBCOMMANDS)
    def test_internal_consistency(self, command: str) -> None:
        """Annotation naming conventions in the CLI module should be consistent."""
        # Check all CLI annotations are internally consistent:
        # * ensures all used _CLI prefixes are consistent with their types
        # * ensures all arg names are consistent with annotation suffixes
        command_impl_name = command.replace("-", "_")
        command_impl = getattr(cli, command_impl_name)
        annotations = get_type_hints(command_impl, include_extras=True)
        for arg_name, arg_annotation in annotations.items():
            if arg_name == "return":
                assert arg_annotation is type(None)
                continue
            arg_kind = type(arg_annotation.__metadata__[0])
            assert arg_kind is ArgumentInfo or arg_kind is OptionInfo
            arg_type = arg_annotation.__origin__
            if arg_kind is ArgumentInfo:
                expected_text_prefix = ARGUMENT_PREFIX
            else:
                option_prefix = OPTION_PREFIXES.get(arg_type)
                if option_prefix is None:
                    # NOTE(review): `pytest.fail(...)` would survive `python -O`;
                    # `assert False` is stripped when assertions are disabled
                    assert False, f"No CLI option prefix defined for {arg_type!r}"
                expected_text_prefix = option_prefix
            expected_annotation_name = f"{expected_text_prefix}_{arg_name}"
            named_annotation = getattr(cli, expected_annotation_name)
            assert named_annotation == arg_annotation

    @pytest.mark.parametrize("command", EXPECTED_SUBCOMMANDS)
    def test_help_option(self, mocked_runner: MockedRunner, command: str) -> None:
        """`--help` should print the subcommand help without touching the stack spec."""
        result = mocked_runner.invoke([command, "--help"])
        # Command implementation docstring is used as the subcommand help text
        command_impl_name = command.replace("-", "_")
        command_impl = getattr(cli, command_impl_name)
        cli_help = command_impl.__doc__
        assert cli_help is not None
        assert cli_help.strip() in result.stdout
        # No stack spec should be created
        mocked_stack_spec = mocked_runner.mocked_stack_spec
        mocked_stack_spec.assert_not_called()
        mocked_stack_spec.load.assert_not_called()
        mocked_stack_spec.define_build_environment.assert_not_called()
        # Check operation result last to ensure test results are as informative as possible
        assert result.exception is None, report_traceback(result.exception)
        assert result.exit_code == 0

    @pytest.mark.parametrize("command", NEEDS_SPEC_PATH)
    def test_usage_error(self, mocked_runner: MockedRunner, command: str) -> None:
        """Omitting the required spec path should produce a usage error."""
        result = mocked_runner.invoke([command])
        # No overall help in a usage error
        command_impl_name = command.replace("-", "_")
        command_impl = getattr(cli, command_impl_name)
        cli_help = command_impl.__doc__
        assert cli_help is not None
        assert cli_help.strip() not in result.stdout
        # Should complain about the missing required argument
        assert result.stdout[: len(EXPECTED_USAGE_PREFIX)] == EXPECTED_USAGE_PREFIX
        assert "Missing argument 'SPEC_PATH'" in result.stdout
        # No stack spec should be created
        mocked_stack_spec = mocked_runner.mocked_stack_spec
        mocked_stack_spec.assert_not_called()
        mocked_stack_spec.load.assert_not_called()
        mocked_stack_spec.define_build_environment.assert_not_called()
        # Check operation result last to ensure test results are as informative as possible
        assert isinstance(result.exception, SystemExit), report_traceback(
            result.exception
        )
        assert result.exit_code == 2

    @pytest.mark.parametrize("command", ACCEPTS_BUILD_DIR)
    def test_build_dir_configuration(
        self, mocked_runner: MockedRunner, command: str
    ) -> None:
        """`--build-dir` should override the default build directory."""
        spec_path_to_mock = "/no/such/path/spec"
        result = mocked_runner.invoke(
            [command, "--build-dir", "custom", spec_path_to_mock]
        )
        # Always loads the stack spec and creates the build environment
        mocked_stack_spec = mocked_runner.mocked_stack_spec
        mocked_stack_spec.assert_not_called()
        expected_build_dir = "custom"
        mocked_stack_spec.load.assert_called_once_with(spec_path_to_mock)
        expected_index_config = _get_default_index_config(command)
        mocked_runner.assert_build_config(expected_build_dir, expected_index_config)
        if command in ACCEPTS_OUTPUT_DIR:
            # Only check the output path (other tests check the other parameters)
            output_method = mocked_runner.get_output_method(command)
            expected_output_dir = mocked_runner.get_default_output_dir(command)
            output_method.assert_called_once()
            assert output_method.call_args.args == (expected_output_dir,)
        # Check operation result last to ensure test results are as informative as possible
        assert result.exception is None, report_traceback(result.exception)
        assert result.exit_code == 0

    @pytest.mark.parametrize("command", ACCEPTS_OUTPUT_DIR)
    def test_output_dir_configuration(
        self, mocked_runner: MockedRunner, command: str
    ) -> None:
        """`--output-dir` should override the default output directory."""
        spec_path_to_mock = "/no/such/path/spec"
        result = mocked_runner.invoke(
            [command, "--output-dir", "custom", spec_path_to_mock]
        )
        # Always loads the stack spec and creates the build environment
        mocked_stack_spec = mocked_runner.mocked_stack_spec
        mocked_stack_spec.assert_not_called()
        expected_build_dir = "_build"
        expected_output_dir = "custom"
        mocked_stack_spec.load.assert_called_once_with(spec_path_to_mock)
        expected_index_config = _get_default_index_config(command)
        mocked_runner.assert_build_config(expected_build_dir, expected_index_config)
        # Only check the output path (other tests check the other parameters)
        output_method = mocked_runner.get_output_method(command)
        output_method.assert_called_once()
        assert output_method.call_args.args == (expected_output_dir,)
        # Check operation result last to ensure test results are as informative as possible
        assert result.exception is None, report_traceback(result.exception)
        assert result.exit_code == 0

    @staticmethod
    def _cli_args_case_id(cli_args_test_case: tuple[Any, ...]) -> str:
        """Derive a readable parametrized test case ID from the extra CLI args."""
        extra_cli_args: tuple[str, ...] = cli_args_test_case[0]
        return f"({' '.join(extra_cli_args)})"

    # CLI option handling test cases for package index access configuration
    IndexConfigCase = tuple[tuple[str, ...], IndexConfig]
    _INDEX_CONFIG_ARGS = (
        # Define how the relevant CLI options map to build environment config settings
        (
            (),
            IndexConfig(
                query_default_index=True,
                allow_source_builds=False,
                local_wheel_dirs=None,
            ),
        ),
        (
            ("--index", "--no-allow-source"),
            IndexConfig(
                query_default_index=True,
                allow_source_builds=False,
                local_wheel_dirs=None,
            ),
        ),
        (
            ("--no-index",),
            IndexConfig(
                query_default_index=False,
                allow_source_builds=False,
                local_wheel_dirs=None,
            ),
        ),
        (
            ("--allow-source",),
            IndexConfig(
                query_default_index=True,
                allow_source_builds=True,
                local_wheel_dirs=None,
            ),
        ),
        (
            ("--local-wheels", "/some_dir", "--local-wheels", "some/other/dir"),
            IndexConfig(
                query_default_index=True,
                allow_source_builds=False,
                local_wheel_dirs=["/some_dir", "some/other/dir"],
            ),
        ),
    )

    @pytest.mark.parametrize("cli_test_case", _INDEX_CONFIG_ARGS, ids=_cli_args_case_id)
    @pytest.mark.parametrize("command", ACCEPTS_INDEX_CONFIG)
    def test_index_options(
        self, mocked_runner: MockedRunner, command: str, cli_test_case: IndexConfigCase
    ) -> None:
        """Index access options should map to the expected IndexConfig settings."""
        extra_cli_args, expected_index_config = cli_test_case
        spec_path_to_mock = "/no/such/path/spec"
        result = mocked_runner.invoke([command, *extra_cli_args, spec_path_to_mock])
        # Always loads the stack spec
        mocked_stack_spec = mocked_runner.mocked_stack_spec
        mocked_stack_spec.assert_not_called()
        expected_build_dir = "_build"
        mocked_stack_spec.load.assert_called_once_with(spec_path_to_mock)
        # Check build environment is created with the expected options
        mocked_runner.assert_build_config(expected_build_dir, expected_index_config)
        # Check operation result last to ensure test results are as informative as possible
        assert result.exception is None, report_traceback(result.exception)
        assert result.exit_code == 0

    # Specific CLI option handling test cases for the "build" subcommand
    BuildFlagCase = tuple[
        tuple[str, ...], dict[str, bool], dict[str, bool], dict[str, bool]
    ]
    _BUILD_OPTIONS = (
        # Define how the various flags in the CLI build subcommand map to API method options
        # CLI vs API distinction: `--[no-]publish` controls the `dry_run` publication flag
        #                         rather than controlling the `publish` operation selection
        # `--include` and its related options are tested separately
        (
            (),
            dict(lock=False, build=True, publish=True),  # select_operations
            dict(clean=False, lock=False),  # create_environments
            dict(dry_run=True, tag_outputs=False),  # publish_artifacts
        ),
        (
            ("--lock",),
            dict(lock=True, build=True, publish=True),  # select_operations
            dict(clean=False, lock=True),  # create_environments
            dict(dry_run=True, tag_outputs=False),  # publish_artifacts
        ),
        (
            ("--publish",),
            dict(lock=False, build=True, publish=True),  # select_operations
            dict(clean=False, lock=False),  # create_environments
            dict(force=False, tag_outputs=False),  # publish_artifacts
        ),
        (
            ("--clean",),
            dict(lock=False, build=True, publish=True),  # select_operations
            dict(clean=True, lock=False),  # create_environments
            dict(dry_run=True, tag_outputs=False),  # publish_artifacts
        ),
        (
            (
                "--publish",
                "--clean",
            ),
            dict(lock=False, build=True, publish=True),  # select_operations
            dict(clean=True, lock=False),  # create_environments
            dict(force=True, tag_outputs=False),  # publish_artifacts
        ),
        (
            ("--tag-outputs",),
            dict(lock=False, build=True, publish=True),  # select_operations
            dict(clean=False, lock=False),  # create_environments
            dict(dry_run=True, tag_outputs=True),  # publish_artifacts
        ),
        (
            ("--lock", "--build", "--publish", "--clean", "--tag-outputs"),
            dict(lock=True, build=True, publish=True),  # select_operations
            dict(clean=True, lock=True),  # create_environments
            dict(force=True, tag_outputs=True),  # publish_artifacts
        ),
        (
            (
                "--no-lock",
                "--no-build",
                "--no-publish",
                "--no-clean",
                "--no-tag-outputs",
            ),
            dict(lock=False, build=False, publish=True),  # select_operations
            dict(clean=False, lock=False),  # create_environments
            dict(dry_run=True, tag_outputs=False),  # publish_artifacts
        ),
    )

    @pytest.mark.parametrize("cli_test_case", _BUILD_OPTIONS, ids=_cli_args_case_id)
    def test_mock_build_op_selection(
        self, mocked_runner: MockedRunner, cli_test_case: BuildFlagCase
    ) -> None:
        """`build` flags should select the expected operations and API arguments."""
        cli_flags, expected_select_args, expected_create_args, expected_publish_args = (
            cli_test_case
        )
        spec_path_to_mock = "/no/such/path/spec"
        result = mocked_runner.invoke(["build", *cli_flags, spec_path_to_mock])
        # Always loads the stack spec and creates the build environment
        mocked_stack_spec = mocked_runner.mocked_stack_spec
        mocked_stack_spec.assert_not_called()
        expected_build_dir = "_build"
        expected_output_dir = "_artifacts"
        mocked_stack_spec.load.assert_called_once_with(spec_path_to_mock)
        mocked_runner.assert_build_config(expected_build_dir, IndexConfig())
        # Defaults to selecting operations rather than stacks
        mocked_build_env = mocked_runner.mocked_build_env
        mocked_build_env.select_operations.assert_called_once_with(
            **expected_select_args
        )
        mocked_build_env.select_layers.assert_not_called()
        # Always creates the environments to perform the requested operations
        mocked_build_env.create_environments.assert_called_once_with(
            **expected_create_args
        )
        # "Disabling" artifact publication triggers a dry run rather than skipping it completely
        mocked_build_env.publish_artifacts.assert_called_once_with(
            expected_output_dir, **expected_publish_args
        )
        # Check operation result last to ensure test results are as informative as possible
        assert result.exception is None, report_traceback(result.exception)
        assert result.exit_code == 0

    # Specific CLI option handling test cases for the "lock" subcommand
    LockFlagCase = tuple[tuple[str, ...], dict[str, bool], dict[str, bool]]
    _LOCK_OPTIONS = (
        # Define how the various flags in the CLI lock subcommand map to API method options
        # `--include` and its related options are tested separately
        (
            (),
            dict(lock=True, build=False, publish=False),  # select_operations
            dict(clean=False),  # lock_environments
        ),
        (
            ("--clean",),
            dict(lock=True, build=False, publish=False),  # select_operations
            dict(clean=True),  # lock_environments
        ),
        (
            ("--no-clean",),
            dict(lock=True, build=False, publish=False),  # select_operations
            dict(clean=False),  # lock_environments
        ),
    )

    @pytest.mark.parametrize("cli_test_case", _LOCK_OPTIONS, ids=_cli_args_case_id)
    def test_mock_lock_op_selection(
        self, mocked_runner: MockedRunner, cli_test_case: LockFlagCase
    ) -> None:
        """`lock` flags should lock the environments without building or publishing."""
        cli_flags, expected_select_args, expected_lock_args = cli_test_case
        spec_path_to_mock = "/no/such/path/spec"
        result = mocked_runner.invoke(["lock", *cli_flags, spec_path_to_mock])
        # Always loads the stack spec and creates the build environment
        mocked_stack_spec = mocked_runner.mocked_stack_spec
        mocked_stack_spec.assert_not_called()
        expected_build_dir = "_build"
        mocked_stack_spec.load.assert_called_once_with(spec_path_to_mock)
        mocked_runner.assert_build_config(expected_build_dir, IndexConfig())
        # Defaults to selecting operations rather than stacks
        mocked_build_env = mocked_runner.mocked_build_env
        mocked_build_env.select_operations.assert_called_once_with(
            **expected_select_args
        )
        mocked_build_env.select_layers.assert_not_called()
        # Only locks the environments without fully building them
        mocked_build_env.lock_environments.assert_called_once_with(**expected_lock_args)
        mocked_build_env.create_environments.assert_not_called()
        # The lock subcommand doesn't even attempt the publication step
        mocked_build_env.publish_artifacts.assert_not_called()
        # Check operation result last to ensure test results are as informative as possible
        assert result.exception is None, report_traceback(result.exception)
        assert result.exit_code == 0

    @staticmethod
    def get_invalid_lock_options() -> Generator[str, None, None]:
        """Yield build-only flags that the `lock` subcommand must reject."""
        # List of flags to check was defined when 'lock' was extracted from 'build'
        invalid_flag_names = ("lock", "build", "publish", "tag-outputs")
        for name in invalid_flag_names:
            yield f"--{name}"
            yield f"--no-{name}"

    @pytest.mark.parametrize("invalid_flag", list(get_invalid_lock_options()))
    def test_mock_lock_usage_error(
        self, mocked_runner: MockedRunner, invalid_flag: str
    ) -> None:
        """Unsupported flags on `lock` should produce a usage error."""
        mocked_spec_path = "/no/such/path/spec"
        result = mocked_runner.invoke(["lock", invalid_flag, mocked_spec_path])
        # Should complain about the invalid flag
        assert result.stdout[: len(EXPECTED_USAGE_PREFIX)] == EXPECTED_USAGE_PREFIX
        assert "Try 'python -m venvstacks lock --help' for help." in result.stdout
        # No stack spec should be created
        mocked_stack_spec = mocked_runner.mocked_stack_spec
        mocked_stack_spec.assert_not_called()
        mocked_stack_spec.load.assert_not_called()
        mocked_stack_spec.define_build_environment.assert_not_called()
        # Check operation result last to ensure test results are as informative as possible
        assert isinstance(result.exception, SystemExit), report_traceback(
            result.exception
        )
        assert result.exit_code == 2

    # Specific CLI option handling test cases for the "publish" subcommand
    PublishFlagCase = tuple[tuple[str, ...], dict[str, bool], dict[str, bool]]
    _PUBLISH_OPTIONS = (
        # Define how the various flags in the CLI publish subcommand map to API method options
        # `--include` and its related options are tested separately
        (
            (),
            dict(lock=False, build=False, publish=True),  # select_operations
            dict(force=False, tag_outputs=False),  # publish_artifacts
        ),
        (
            ("--force",),
            dict(lock=False, build=False, publish=True),  # select_operations
            dict(force=True, tag_outputs=False),  # publish_artifacts
        ),
        (
            ("--dry-run",),
            dict(lock=False, build=False, publish=True),  # select_operations
            dict(dry_run=True, tag_outputs=False),  # publish_artifacts
        ),
        (
            ("--tag-outputs",),
            dict(lock=False, build=False, publish=True),  # select_operations
            dict(force=False, tag_outputs=True),  # publish_artifacts
        ),
        (
            (
                "--no-force",
                "--no-dry-run",
                "--no-tag-outputs",
            ),
            dict(lock=False, build=False, publish=True),  # select_operations
            dict(force=False, tag_outputs=False),  # publish_artifacts
        ),
    )

    @pytest.mark.parametrize("cli_test_case", _PUBLISH_OPTIONS, ids=_cli_args_case_id)
    def test_mock_publish_op_selection(
        self, mocked_runner: MockedRunner, cli_test_case: PublishFlagCase
    ) -> None:
        """`publish` flags should publish artifacts without locking or building."""
        cli_flags, expected_select_args, expected_publish_args = cli_test_case
        spec_path_to_mock = "/no/such/path/spec"
        result = mocked_runner.invoke(["publish", *cli_flags, spec_path_to_mock])
        # Always loads the stack spec and creates the build environment
        mocked_stack_spec = mocked_runner.mocked_stack_spec
        mocked_stack_spec.assert_not_called()
        expected_build_dir = "_build"
        expected_output_dir = "_artifacts"
        mocked_stack_spec.load.assert_called_once_with(spec_path_to_mock)
        mocked_runner.assert_build_config(expected_build_dir, IndexConfig.disabled())
        # Defaults to selecting operations rather than stacks
        mocked_build_env = mocked_runner.mocked_build_env
        mocked_build_env.select_operations.assert_called_once_with(
            **expected_select_args
        )
        mocked_build_env.select_layers.assert_not_called()
        # The publish subcommand assumes the environments are already created
        mocked_build_env.create_environments.assert_not_called()
        # The publish subcommand always attempts to publish the artifacts
        mocked_build_env.publish_artifacts.assert_called_once_with(
            expected_output_dir, **expected_publish_args
        )
        # Check operation result last to ensure test results are as informative as possible
        assert result.exception is None, report_traceback(result.exception)
        assert result.exit_code == 0

    @staticmethod
    def get_invalid_publish_options() -> Generator[str, None, None]:
        """Yield build-only flags that the `publish` subcommand must reject."""
        # List of flags to check was defined when 'publish' was extracted from 'build'
        invalid_flag_names = ("lock", "build", "publish", "clean")
        for name in invalid_flag_names:
            yield f"--{name}"
            yield f"--no-{name}"

    @pytest.mark.parametrize("invalid_flag", list(get_invalid_publish_options()))
    def test_mock_publish_usage_error(
        self, mocked_runner: MockedRunner, invalid_flag: str
    ) -> None:
        """Unsupported flags on `publish` should produce a usage error."""
        spec_path_to_mock = "/no/such/path/spec"
        result = mocked_runner.invoke(["publish", invalid_flag, spec_path_to_mock])
        # Should complain about the invalid flag
        assert result.stdout[: len(EXPECTED_USAGE_PREFIX)] == EXPECTED_USAGE_PREFIX
        assert "Try 'python -m venvstacks publish --help' for help." in result.stdout
        # No stack spec should be created
        mocked_stack_spec = mocked_runner.mocked_stack_spec
        mocked_stack_spec.assert_not_called()
        mocked_stack_spec.load.assert_not_called()
        mocked_stack_spec.define_build_environment.assert_not_called()
        # Check operation result last to ensure test results are as informative as possible
        assert isinstance(result.exception, SystemExit), report_traceback(
            result.exception
        )
        assert result.exit_code == 2

    # Specific CLI option handling test cases for the "local-export" subcommand
    ExportFlagCase = tuple[tuple[str, ...], dict[str, bool], dict[str, bool]]
    _EXPORT_OPTIONS = (
        # Define how the various flags in the CLI local-export subcommand map to API method options
        # `--include` and its related options are tested separately
        (
            (),
            dict(lock=False, build=False, publish=True),  # select_operations
            dict(force=False),  # export_environments
        ),
        (
            ("--force",),
            dict(lock=False, build=False, publish=True),  # select_operations
            dict(force=True),  # export_environments
        ),
        (
            ("--dry-run",),
            dict(lock=False, build=False, publish=True),  # select_operations
            dict(dry_run=True),  # export_environments
        ),
        (
            (
                "--no-force",
                "--no-dry-run",
            ),
            dict(lock=False, build=False, publish=True),  # select_operations
            dict(force=False),  # export_environments
        ),
    )

    @pytest.mark.parametrize("cli_test_case", _EXPORT_OPTIONS, ids=_cli_args_case_id)
    def test_mock_export_op_selection(
        self, mocked_runner: MockedRunner, cli_test_case: ExportFlagCase
    ) -> None:
        """`local-export` flags should export environments without locking or building."""
        cli_flags, expected_select_args, expected_publish_args = cli_test_case
        spec_path_to_mock = "/no/such/path/spec"
        result = mocked_runner.invoke(["local-export", *cli_flags, spec_path_to_mock])
        # Always loads the stack spec and creates the build environment
        mocked_stack_spec = mocked_runner.mocked_stack_spec
        mocked_stack_spec.assert_not_called()
        expected_build_dir = "_build"
        expected_output_dir = "_export"
        mocked_stack_spec.load.assert_called_once_with(spec_path_to_mock)
        mocked_runner.assert_build_config(expected_build_dir, IndexConfig.disabled())
        # Defaults to selecting operations rather than stacks
        mocked_build_env = mocked_runner.mocked_build_env
        mocked_build_env.select_operations.assert_called_once_with(
            **expected_select_args
        )
        mocked_build_env.select_layers.assert_not_called()
        # The local-export subcommand assumes the environments are already created
        mocked_build_env.create_environments.assert_not_called()
        # The local-export subcommand always attempts to export the environments
        mocked_build_env.export_environments.assert_called_once_with(
            expected_output_dir, **expected_publish_args
        )
        # Check operation result last to ensure test results are as informative as possible
        assert result.exception is None, report_traceback(result.exception)
        assert result.exit_code == 0

    @staticmethod
    def get_invalid_export_options() -> Generator[str, None, None]:
        """Yield build/publish-only flags that `local-export` must reject."""
        # List of flags to check was defined when 'local-export' was derived from 'publish'
        invalid_flag_names = ("lock", "build", "publish", "clean", "tag-outputs")
        for name in invalid_flag_names:
            yield f"--{name}"
            yield f"--no-{name}"

    @pytest.mark.parametrize("invalid_flag", list(get_invalid_export_options()))
    def test_mock_export_usage_error(
        self, mocked_runner: MockedRunner, invalid_flag: str
    ) -> None:
        """Unsupported flags on `local-export` should produce a usage error."""
        spec_path_to_mock = "/no/such/path/spec"
        result = mocked_runner.invoke(["local-export", invalid_flag, spec_path_to_mock])
        # Should complain about the invalid flag
        assert result.stdout[: len(EXPECTED_USAGE_PREFIX)] == EXPECTED_USAGE_PREFIX
        assert (
            "Try 'python -m venvstacks local-export --help' for help." in result.stdout
        )
        # No stack spec should be created
        mocked_stack_spec = mocked_runner.mocked_stack_spec
        mocked_stack_spec.assert_not_called()
        mocked_stack_spec.load.assert_not_called()
        mocked_stack_spec.define_build_environment.assert_not_called()
        # Check operation result last to ensure test results are as informative as possible
        assert isinstance(result.exception, SystemExit), report_traceback(
            result.exception
        )
        assert result.exit_code == 2


# --- tests/test_hashing.py (separate new file in the original patch) ---

"""Test cases for hashing utility functions"""

import hashlib
import shutil
import tempfile

import pytest

from pathlib import Path
from typing import Generator, Mapping

from venvstacks.stacks import _hash_directory, _hash_file

_THIS_PATH = Path(__file__)
HASH_FODDER_PATH = _THIS_PATH.parent / "hash_fodder"

# Expected hashes generated with `sha256sum` rather than Python
# Examples ensure that file names don't affect the hash, but the file contents do
SHA256_ALGORITHM = "sha256"
EXPECTED_FILE_HASHES_SHA256: Mapping[str, str] = {
    "file.txt": "84dae841773532dcc56da3a65a4c992534c385649645bf0340873da2e2ce7d6a",
    "file_duplicate.txt": "84dae841773532dcc56da3a65a4c992534c385649645bf0340873da2e2ce7d6a",
    "different_file.txt": "43691ae21f1fd9540bb5b9a6f2ab07fd5be4c2a0545231dc505a5f33a1619337",
}

# Expected hashes generated with `b2sum` rather than Python
# Examples ensure that file names don't affect the hash, but the file contents do
BLAKE2_ALGORITHM = "blake2b"
EXPECTED_FILE_HASHES_BLAKE2: Mapping[str, str] = {
    "file.txt": "bf4d9de4092670662fe8985f38880ce2d1b34ee74a4a110ea6dde23903388bc4fb18b233cc5fb027a2b374731ed6cc9274e244af5605040aa59882a7d6b68b0d",
    "file_duplicate.txt": "bf4d9de4092670662fe8985f38880ce2d1b34ee74a4a110ea6dde23903388bc4fb18b233cc5fb027a2b374731ed6cc9274e244af5605040aa59882a7d6b68b0d",
"different_file.txt": "4783a95cdf9b6d0ebc4fe0d553ed6424b0a55400d9ead89b7c5b2dff26fb210aa1f7f9f8b809e58c7f2c79b4e046eea1b52c3a19032d2b861e792814b4ad0782", +} + +# Default algorithm is SHA256 +DEFAULT_ALGORITHM = SHA256_ALGORITHM +EXPECTED_FILE_HASHES_DEFAULT = EXPECTED_FILE_HASHES_SHA256 + + +# Flatten the content hash mappings into 3-tuples for easier test parameterisation +def _all_expected_file_hashes() -> Generator[tuple[str, str, str], None, None]: + for fname, expected_hash in EXPECTED_FILE_HASHES_SHA256.items(): + yield SHA256_ALGORITHM, fname, expected_hash + for fname, expected_hash in EXPECTED_FILE_HASHES_BLAKE2.items(): + yield BLAKE2_ALGORITHM, fname, expected_hash + + +EXPECTED_FILE_HASHES = [*_all_expected_file_hashes()] + + +class TestFileHashing: + @pytest.mark.parametrize( + "fname,expected_hash", EXPECTED_FILE_HASHES_DEFAULT.items() + ) + def test_default_hash(self, fname: str, expected_hash: str) -> None: + file_path = HASH_FODDER_PATH / fname + assert _hash_file(file_path) == f"{DEFAULT_ALGORITHM}:{expected_hash}" + assert _hash_file(file_path, omit_prefix=True) == expected_hash + + @pytest.mark.parametrize("algorithm,fname,expected_hash", EXPECTED_FILE_HASHES) + def test_algorithm_selection( + self, algorithm: str, fname: str, expected_hash: str + ) -> None: + file_path = HASH_FODDER_PATH / fname + assert _hash_file(file_path, algorithm) == f"{algorithm}:{expected_hash}" + assert _hash_file(file_path, algorithm, omit_prefix=True) == expected_hash + + +# Directory hashing uses a custom algorithm (hence the non-standard prefix separator). 
+# However, the expected hashes for the `hash_fodder` folder can be calculated by specifying +# the expected order that different components of the hash are added to the algorithm: +# +# * directories are visited top down in sorted order +# * directory names are added to the hash when they are visited +# * file content hashes are added to the hash in sorted order after the directory name + +EXPECTED_DIR_HASH_SEQUENCE = [ + ("dirname", "hash_fodder"), + ("filename", "different_file.txt"), + ("contents_hash", "different_file.txt"), + ("filename", "file.txt"), + ("contents_hash", "file.txt"), + ("filename", "file_duplicate.txt"), + ("contents_hash", "file_duplicate.txt"), + ("dirname", "folder1"), + ("filename", "file.txt"), + ("contents_hash", "file.txt"), + ("dirname", "subfolder"), + ("filename", "file.txt"), + ("contents_hash", "file.txt"), + ("dirname", "folder2"), + ("filename", "file_duplicate.txt"), + ("contents_hash", "file_duplicate.txt"), +] + + +def _make_expected_dir_hash(algorithm: str, content_hashes: Mapping[str, str]) -> str: + incremental_hash = hashlib.new(algorithm) + for component_kind, component_text in EXPECTED_DIR_HASH_SEQUENCE: + match component_kind: + case "dirname" | "filename": + hash_component = component_text.encode() + case "contents_hash": + # Directory hashing includes the algorithm prefix (at least for now) + hash_component = ( + f"{algorithm}:{content_hashes[component_text]}".encode() + ) + print(component_text, hash_component) + incremental_hash.update(hash_component) + return incremental_hash.hexdigest() + + +EXPECTED_DIR_HASHES = { + "sha256": _make_expected_dir_hash("sha256", EXPECTED_FILE_HASHES_SHA256), + "blake2b": _make_expected_dir_hash("blake2b", EXPECTED_FILE_HASHES_BLAKE2), +} + + +@pytest.fixture +def cloned_dir_path() -> Generator[Path, None, None]: + with tempfile.TemporaryDirectory() as dir_name: + temp_dir_path = Path(dir_name) + cloned_hash_fodder_path = temp_dir_path / HASH_FODDER_PATH.name + 
shutil.copytree(HASH_FODDER_PATH, cloned_hash_fodder_path) + yield cloned_hash_fodder_path + + +class TestDirectoryHashing: + def test_default_hash(self) -> None: + dir_path = HASH_FODDER_PATH + expected_hash = EXPECTED_DIR_HASHES[DEFAULT_ALGORITHM] + assert _hash_directory(dir_path) == f"{DEFAULT_ALGORITHM}/{expected_hash}" + assert _hash_directory(dir_path, omit_prefix=True) == expected_hash + + @pytest.mark.parametrize("algorithm,expected_hash", EXPECTED_DIR_HASHES.items()) + def test_algorithm_selection(self, algorithm: str, expected_hash: str) -> None: + dir_path = HASH_FODDER_PATH + assert _hash_directory(dir_path, algorithm) == f"{algorithm}/{expected_hash}" + assert _hash_directory(dir_path, algorithm, omit_prefix=True) == expected_hash + + def test_root_dir_name_change_detected(self, cloned_dir_path: Path) -> None: + renamed_dir_path = cloned_dir_path.with_name("something_completely_different") + cloned_dir_path.rename(renamed_dir_path) + unmodified_hash = EXPECTED_DIR_HASHES[DEFAULT_ALGORITHM] + assert _hash_directory(renamed_dir_path, omit_prefix=True) != unmodified_hash + + def test_subdir_name_change_detected(self, cloned_dir_path: Path) -> None: + subfolder_path = cloned_dir_path / "folder1" + renamed_dir_path = subfolder_path.with_name("something_completely_different") + subfolder_path.rename(renamed_dir_path) + unmodified_hash = EXPECTED_DIR_HASHES[DEFAULT_ALGORITHM] + assert _hash_directory(cloned_dir_path, omit_prefix=True) != unmodified_hash + + def test_file_name_change_detected(self, cloned_dir_path: Path) -> None: + file_path = cloned_dir_path / "folder1/subfolder/file.txt" + renamed_file_path = file_path.with_name("something_completely_different") + file_path.rename(renamed_file_path) + unmodified_hash = EXPECTED_DIR_HASHES[DEFAULT_ALGORITHM] + assert _hash_directory(cloned_dir_path, omit_prefix=True) != unmodified_hash + + def test_file_contents_change_detected(self, cloned_dir_path: Path) -> None: + file_path = cloned_dir_path / 
"folder1/subfolder/file.txt" + file_path.write_text("This changes the directory hash") + unmodified_hash = EXPECTED_DIR_HASHES[DEFAULT_ALGORITHM] + assert _hash_directory(cloned_dir_path, omit_prefix=True) != unmodified_hash diff --git a/tests/test_index_config.py b/tests/test_index_config.py new file mode 100644 index 0000000..ca1ad69 --- /dev/null +++ b/tests/test_index_config.py @@ -0,0 +1,105 @@ +"""Test for package index access configuration""" + +import os + +from pathlib import Path + +import pytest + +from venvstacks.stacks import IndexConfig + + +class TestDefaultOptions: + TEST_CONFIG = IndexConfig() + + def test_uv_pip_compile(self) -> None: + # Nominal config is always used when locking + assert self.TEST_CONFIG._get_uv_pip_compile_args() == ["--only-binary", ":all:"] + + def test_pip_install(self) -> None: + # Nominal config can be overridden for package installation commands + allow_source_config: list[str] = [] + binary_only_config = ["--only-binary", ":all:"] + assert self.TEST_CONFIG._get_pip_install_args(None) == binary_only_config + assert self.TEST_CONFIG._get_pip_install_args(False) == allow_source_config + assert self.TEST_CONFIG._get_pip_install_args(True) == binary_only_config + + def test_pip_sync(self) -> None: + # Final sync to remove source build dependencies is always binary-only + assert self.TEST_CONFIG._get_pip_sync_args() == [ + "--pip-args", + "--only-binary :all:", + ] + + +class TestConfiguredOptions: + TEST_CONFIG = IndexConfig( + query_default_index=False, + allow_source_builds=True, + local_wheel_dirs=["/some_dir"], + ) + WHEEL_DIR = f"{os.sep}some_dir" + + def test_uv_pip_compile(self) -> None: + # Nominal config is always used when locking + assert self.TEST_CONFIG._get_uv_pip_compile_args() == [ + "--no-index", + "--find-links", + self.WHEEL_DIR, + ] + + def test_pip_install(self) -> None: + # Nominal config can be overridden for package installation commands + allow_source_config: list[str] = [ + "--no-index", + 
"--find-links", + self.WHEEL_DIR, + ] + binary_only_config = [ + "--no-index", + "--only-binary", + ":all:", + "--find-links", + self.WHEEL_DIR, + ] + assert self.TEST_CONFIG._get_pip_install_args(None) == allow_source_config + assert self.TEST_CONFIG._get_pip_install_args(False) == allow_source_config + assert self.TEST_CONFIG._get_pip_install_args(True) == binary_only_config + + def test_pip_sync(self) -> None: + # Final sync to remove source build dependencies is always binary-only + assert self.TEST_CONFIG._get_pip_sync_args() == [ + "--no-index", + "--find-links", + self.WHEEL_DIR, + "--pip-args", + "--only-binary :all:", + ] + + +# Miscellaneous test cases +def test_wheel_dir_not_in_sequence() -> None: + with pytest.raises(TypeError): + IndexConfig(local_wheel_dirs="/some_dir") + + +def test_lexical_path_resolution() -> None: + paths_to_resolve = [ + "/some/path", + "/some/absolute/../path", + "some/path", + "some/relative/../path", + "~/some/path", + "~/some/user/../path", + ] + expected_paths = [ + Path("/some/path").absolute(), + Path("/some/path").absolute(), + Path("/base_path/some/path").absolute(), + Path("/base_path/some/path").absolute(), + Path.home() / "some/path", + Path.home() / "some/path", + ] + config = IndexConfig(local_wheel_dirs=paths_to_resolve) + config.resolve_lexical_paths("/base_path") + assert config.local_wheel_paths == expected_paths diff --git a/tests/test_minimal_project.py b/tests/test_minimal_project.py new file mode 100644 index 0000000..10c6e24 --- /dev/null +++ b/tests/test_minimal_project.py @@ -0,0 +1,847 @@ +"""Test building the minimal project produces the expected results""" + +import json +import shutil +import tempfile + +from datetime import datetime, timezone +from pathlib import Path +from typing import Any, Callable, cast, Mapping, Sequence, TypeVar + +# Use unittest for consistency with test_sample_project (which needs the better diff support) +import unittest +from unittest.mock import Mock, call as expect_call + 
+import pytest # To mark slow test cases + +from support import ( + EnvSummary, + LayeredEnvSummary, + ApplicationEnvSummary, + ManifestData, + make_mock_index_config, + get_sys_path, + run_module, +) + +from venvstacks.stacks import ( + ArchiveBuildMetadata, + ArchiveMetadata, + StackPublishingRequest, + BuildEnvironment, + EnvNameDeploy, + StackSpec, + LayerVariants, + ExportedEnvironmentPaths, + ExportMetadata, + IndexConfig, + PublishedArchivePaths, + get_build_platform, +) +from venvstacks._util import get_env_python, run_python_command, WINDOWS_BUILD + +################################## +# Minimal project test helpers +################################## + +_THIS_PATH = Path(__file__) +MINIMAL_PROJECT_PATH = _THIS_PATH.parent / "minimal_project" +MINIMAL_PROJECT_STACK_SPEC_PATH = MINIMAL_PROJECT_PATH / "venvstacks.toml" +MINIMAL_PROJECT_PATHS = ( + MINIMAL_PROJECT_STACK_SPEC_PATH, + MINIMAL_PROJECT_PATH / "empty.py", +) + + +def _define_build_env(working_path: Path) -> BuildEnvironment: + """Define a build environment for the sample project in a temporary folder""" + # To avoid side effects from lock file creation, copy input files to the working path + for src_path in MINIMAL_PROJECT_PATHS: + dest_path = working_path / src_path.name + shutil.copyfile(src_path, dest_path) + # Include "/../" in the spec path in order to test relative path resolution when + # accessing the Python executables (that can be temperamental, especially on macOS). + # The subdirectory won't be used for anything, so it being missing shouldn't matter. 
+ working_spec_path = working_path / "_unused_dir/../venvstacks.toml" + stack_spec = StackSpec.load(working_spec_path) + build_path = working_path / "_build🐸" + return stack_spec.define_build_environment(build_path) + + +################################## +# Expected stack definitions +################################## + +EXPECTED_RUNTIMES = [ + EnvSummary("cpython@3.11", ""), +] + +EXPECTED_FRAMEWORKS = [ + LayeredEnvSummary("layer", "framework-", "cpython@3.11"), +] + +EXPECTED_APPLICATIONS = [ + ApplicationEnvSummary("empty", "app-", "cpython@3.11", ("layer",)), +] + +EXPECTED_ENVIRONMENTS = EXPECTED_RUNTIMES.copy() +EXPECTED_ENVIRONMENTS.extend(EXPECTED_FRAMEWORKS) +EXPECTED_ENVIRONMENTS.extend(EXPECTED_APPLICATIONS) + +# The expected manifest here omits all content dependent fields +# (those are checked when testing the full sample project) +ArchiveSummary = dict[str, Any] +ArchiveSummaries = dict[str, list[ArchiveSummary]] +BuildManifest = dict[str, ArchiveSummaries] +ARCHIVE_SUFFIX = ".zip" if WINDOWS_BUILD else ".tar.xz" +BUILD_PLATFORM = str(get_build_platform()) +EXPECTED_MANIFEST: BuildManifest = { + "layers": { + "applications": [ + { + "app_launch_module": "empty", + "archive_build": 1, + "install_target": "app-empty", + "archive_name": f"app-empty{ARCHIVE_SUFFIX}", + "required_layers": [ + "framework-layer", + ], + "target_platform": BUILD_PLATFORM, + }, + ], + "frameworks": [ + { + "archive_build": 1, + "install_target": "framework-layer", + "archive_name": f"framework-layer{ARCHIVE_SUFFIX}", + "target_platform": BUILD_PLATFORM, + }, + ], + "runtimes": [ + { + "archive_build": 1, + "install_target": "cpython@3.11", + "archive_name": f"cpython@3.11{ARCHIVE_SUFFIX}", + "target_platform": BUILD_PLATFORM, + }, + ], + } +} + +LastLockedTimes = dict[str, datetime] # Mapping from install target names to lock times +_CHECKED_KEYS = frozenset(EXPECTED_MANIFEST["layers"]["applications"][0]) + + +def _filter_archive_manifest(archive_manifest: 
ArchiveBuildMetadata) -> ArchiveSummary: + """Drop archive manifest fields that aren't covered by this set of test cases""" + summary: ArchiveSummary = {} + for key in _CHECKED_KEYS: + value = archive_manifest.get(key) + if value is not None: + summary[key] = value + return summary + + +def _filter_manifest( + manifest: StackPublishingRequest, +) -> tuple[BuildManifest, LastLockedTimes]: + """Extract manifest fields that are relevant to this set of test cases""" + filtered_summaries: ArchiveSummaries = {} + last_locked_times: LastLockedTimes = {} + for kind, archive_manifests in manifest["layers"].items(): + filtered_summaries[kind] = summaries = [] + for archive_manifest in archive_manifests: + summaries.append(_filter_archive_manifest(archive_manifest)) + last_locked_times[archive_manifest["install_target"]] = ( + datetime.fromisoformat(archive_manifest["locked_at"]) + ) + return {"layers": filtered_summaries}, last_locked_times + + +def _tag_manifest(manifest: BuildManifest, expected_tag: str) -> BuildManifest: + """Add expected build tag to fields that are expected to include the build tag""" + tagged_summaries: ArchiveSummaries = {} + for kind, summaries in manifest["layers"].items(): + tagged_summaries[kind] = new_summaries = [] + for summary in summaries: + new_summary = summary.copy() + new_summaries.append(new_summary) + # Archive name has the build tag inserted before the extension + install_target = summary["install_target"] + new_summary["archive_name"] = ( + f"{install_target}{expected_tag}{ARCHIVE_SUFFIX}" + ) + return {"layers": tagged_summaries} + + +########################## +# Test cases +########################## + + +class TestMinimalSpec(unittest.TestCase): + # Test cases that only need the stack specification file + + def test_spec_loading(self) -> None: + expected_spec_path = MINIMAL_PROJECT_STACK_SPEC_PATH + stack_spec = StackSpec.load(expected_spec_path) + runtime_keys = list(stack_spec.runtimes) + framework_keys = 
list(stack_spec.frameworks) + application_keys = list(stack_spec.applications) + spec_keys = runtime_keys + framework_keys + application_keys + self.assertCountEqual(spec_keys, set(spec_keys)) + expected_spec_names = [env.spec_name for env in EXPECTED_ENVIRONMENTS] + self.assertCountEqual(spec_keys, expected_spec_names) + spec_names = [env.name for env in stack_spec.all_environment_specs()] + self.assertCountEqual(spec_names, expected_spec_names) + expected_env_names = [env.env_name for env in EXPECTED_ENVIRONMENTS] + env_names = [env.env_name for env in stack_spec.all_environment_specs()] + self.assertCountEqual(env_names, expected_env_names) + for rt_summary in EXPECTED_RUNTIMES: + spec_name = rt_summary.spec_name + rt_env = stack_spec.runtimes[spec_name] + self.assertEqual(rt_env.name, spec_name) + self.assertEqual(rt_env.env_name, rt_summary.env_name) + for fw_summary in EXPECTED_FRAMEWORKS: + spec_name = fw_summary.spec_name + fw_env = stack_spec.frameworks[spec_name] + self.assertEqual(fw_env.name, spec_name) + self.assertEqual(fw_env.env_name, fw_summary.env_name) + for app_summary in EXPECTED_APPLICATIONS: + spec_name = app_summary.spec_name + app_env = stack_spec.applications[spec_name] + self.assertEqual(app_env.name, spec_name) + self.assertEqual(app_env.env_name, app_summary.env_name) + # Check path attributes + self.assertEqual(stack_spec.spec_path, expected_spec_path) + expected_requirements_dir_path = expected_spec_path.parent / "requirements" + self.assertEqual( + stack_spec.requirements_dir_path, expected_requirements_dir_path + ) + + +class TestMinimalBuildDirectoryResolution(unittest.TestCase): + # These test cases don't need the build environment to actually exist + + def setUp(self) -> None: + # No files are created, so no need to use a temporary directory + self.stack_spec = StackSpec.load(MINIMAL_PROJECT_STACK_SPEC_PATH) + + def test_default_build_directory(self) -> None: + stack_spec = self.stack_spec + build_env = 
stack_spec.define_build_environment() + expected_build_path = stack_spec.spec_path.parent + self.assertEqual(build_env.build_path, expected_build_path) + # The spec directory necessarily already exists + self.assertTrue(expected_build_path.exists()) + + def test_custom_build_directory_relative(self) -> None: + stack_spec = self.stack_spec + build_env = stack_spec.define_build_environment("custom") + expected_build_path = stack_spec.spec_path.parent / "custom" + self.assertEqual(build_env.build_path, expected_build_path) + # Build directory is only created when needed, not immediately + self.assertFalse(expected_build_path.exists()) + + def test_custom_build_directory_user(self) -> None: + build_env = self.stack_spec.define_build_environment("~/custom") + expected_build_path = Path.home() / "custom" + self.assertEqual(build_env.build_path, expected_build_path) + # Build directory is only created when needed, not immediately + self.assertFalse(expected_build_path.exists()) + + def test_custom_build_directory_absolute(self) -> None: + expected_build_path = Path("/custom").absolute() # Add drive info on Windows + build_env = self.stack_spec.define_build_environment(expected_build_path) + self.assertEqual(build_env.build_path, expected_build_path) + # Build directory is only created when needed, not immediately + self.assertFalse(expected_build_path.exists()) + + +class TestMinimalOutputDirectoryResolution(unittest.TestCase): + # These test cases don't need the build environment to actually exist + + def setUp(self) -> None: + # Need a temporary directory to avoid cross-test side effects + working_dir = tempfile.TemporaryDirectory() + self.addCleanup(working_dir.cleanup) + self.working_path = working_path = Path(working_dir.name) + self.build_env = build_env = _define_build_env(working_path) + build_env.select_operations(lock=False, build=False, publish=True) + self.expected_build_path = working_path / "_build🐸" + # Mimic the environments already being locked + 
build_platform = build_env.build_platform + lock_dir_path = build_env.requirements_dir_path + for env in build_env.all_environments(): + env_spec = env.env_spec + requirements_path = env_spec.get_requirements_path( + build_platform, lock_dir_path + ) + requirements_path.parent.mkdir(parents=True, exist_ok=True) + requirements_path.write_text("") + env.env_lock.update_lock_metadata() + # Path diffs can get surprisingly long + self.maxDiff = None + + def check_publishing_request( + self, publishing_request: StackPublishingRequest + ) -> None: + self.assertEqual(_filter_manifest(publishing_request)[0], EXPECTED_MANIFEST) + + def test_default_output_directory(self) -> None: + build_env = self.build_env + output_path, publishing_request = build_env.publish_artifacts(dry_run=True) + # Build folder is used as the default output directory + expected_output_path = self.expected_build_path + self.assertEqual(output_path, expected_output_path) + self.check_publishing_request(publishing_request) + # The build directory necessarily already exists + self.assertFalse(expected_output_path.exists()) + + def test_custom_output_directory_relative(self) -> None: + build_env = self.build_env + output_path, publishing_request = build_env.publish_artifacts( + "custom", dry_run=True + ) + expected_output_path = self.working_path / "custom" + self.assertEqual(output_path, expected_output_path) + self.check_publishing_request(publishing_request) + # Dry run doesn't create the output directory + self.assertFalse(expected_output_path.exists()) + + def test_custom_output_directory_user(self) -> None: + build_env = self.build_env + output_path, publishing_request = build_env.publish_artifacts( + "~/custom", dry_run=True + ) + expected_output_path = Path.home() / "custom" + self.assertEqual(output_path, expected_output_path) + self.check_publishing_request(publishing_request) + # Dry run doesn't create the output directory + self.assertFalse(expected_output_path.exists()) + + def 
test_custom_output_directory_absolute(self) -> None: + build_env = self.build_env + expected_output_path = Path("/custom").absolute() # Add drive info on Windows + output_path, publishing_request = build_env.publish_artifacts( + expected_output_path, dry_run=True + ) + self.assertEqual(output_path, expected_output_path) + self.check_publishing_request(publishing_request) + # Dry run doesn't create the output directory + self.assertFalse(expected_output_path.exists()) + + +class TestMinimalBuild(unittest.TestCase): + # Test cases that actually create the build environment folders + + working_path: Path + build_env: BuildEnvironment + + def setUp(self) -> None: + # Need a temporary directory to avoid cross-test side effects + working_dir = tempfile.TemporaryDirectory() + self.addCleanup(working_dir.cleanup) + self.working_path = working_path = Path(working_dir.name) + self.build_env = _define_build_env(working_path) + + def assertRecentlyLocked( + self, last_locked_times: LastLockedTimes, minimum_lock_time: datetime + ) -> None: + for install_target, last_locked in last_locked_times.items(): + # Use a tuple comparison so the install_target value gets + # reported without needing to define nested subtests + self.assertGreaterEqual( + (install_target, last_locked), (install_target, minimum_lock_time) + ) + + @staticmethod + def _load_archive_summary(metadata_path: Path) -> ArchiveSummary: + with metadata_path.open("r", encoding="utf-8") as f: + return _filter_archive_manifest(json.load(f)) + + @staticmethod + def _load_build_manifest(metadata_path: Path) -> BuildManifest: + with metadata_path.open("r", encoding="utf-8") as f: + return _filter_manifest(json.load(f))[0] + + def mock_index_config_options( + self, reference_config: IndexConfig | None = None + ) -> None: + # Mock the index configs in order to check for + # expected CLI argument lookups + for env in self.build_env.all_environments(): + if reference_config is None: + env_reference_config = env.index_config + 
else: + env_reference_config = reference_config + env.index_config = make_mock_index_config(env_reference_config) + + def check_publication_result( + self, + publication_result: PublishedArchivePaths, + dry_run_result: BuildManifest, + expected_tag: str | None, + ) -> None: + # Build dir is used as the default output path + expected_output_path = self.build_env.build_path + expected_metadata_path = expected_output_path / BuildEnvironment.METADATA_DIR + expected_env_metadata_path = ( + expected_metadata_path / BuildEnvironment.METADATA_ENV_DIR + ) + if expected_tag is None: + expected_metadata_name = "venvstacks.json" + expected_snippet_suffix = ".json" + else: + expected_metadata_name = f"venvstacks{expected_tag}.json" + expected_snippet_suffix = f"{expected_tag}.json" + manifest_path, snippet_paths, archive_paths = publication_result + # Check overall manifest file + expected_manifest_path = expected_metadata_path / expected_metadata_name + self.assertEqual(manifest_path, expected_manifest_path) + manifest_data = self._load_build_manifest(manifest_path) + self.assertEqual(manifest_data, dry_run_result) + # Check individual archive manifests + expected_summaries: dict[str, ArchiveSummary] = {} + for archive_summaries in dry_run_result["layers"].values(): + for archive_summary in archive_summaries: + install_target = archive_summary["install_target"] + expected_summaries[install_target] = archive_summary + for snippet_path in snippet_paths: + archive_summary = self._load_archive_summary(snippet_path) + install_target = archive_summary["install_target"] + expected_snippet_name = f"{install_target}{expected_snippet_suffix}" + expected_snippet_path = expected_env_metadata_path / expected_snippet_name + self.assertEqual(snippet_path, expected_snippet_path) + self.assertEqual(archive_summary, expected_summaries[install_target]) + # Check the names and location of the generated archives + expected_archive_paths: list[Path] = [] + for archive_summaries in 
dry_run_result["layers"].values(): + for archive_summary in archive_summaries: + expected_archive_path = ( + expected_output_path / archive_summary["archive_name"] + ) + expected_archive_paths.append(expected_archive_path) + expected_archive_paths.sort() + self.assertEqual(sorted(archive_paths), expected_archive_paths) + + # TODO: Refactor to share the environment checking code with test_sample_project + def assertSysPathEntry(self, expected: str, env_sys_path: Sequence[str]) -> None: + self.assertTrue( + any(expected in path_entry for path_entry in env_sys_path), + f"No entry containing {expected!r} found in {env_sys_path}", + ) + + T = TypeVar("T", bound=Mapping[str, Any]) + + def check_deployed_environments( + self, + layered_metadata: dict[str, Sequence[T]], + get_exported_python: Callable[[T], tuple[str, Path, list[str]]], + ) -> None: + for rt_env in layered_metadata["runtimes"]: + env_name, _, env_sys_path = get_exported_python(rt_env) + self.assertTrue(env_sys_path) # Environment should have sys.path entries + # Runtime environment layer should be completely self-contained + self.assertTrue( + all(env_name in path_entry for path_entry in env_sys_path), + f"Path outside {env_name} in {env_sys_path}", + ) + for fw_env in layered_metadata["frameworks"]: + env_name, _, env_sys_path = get_exported_python(fw_env) + self.assertTrue(env_sys_path) # Environment should have sys.path entries + # Framework and runtime should both appear in sys.path + runtime_name = fw_env["runtime_name"] + short_runtime_name = ".".join(runtime_name.split(".")[:2]) + self.assertSysPathEntry(env_name, env_sys_path) + self.assertSysPathEntry(short_runtime_name, env_sys_path) + for app_env in layered_metadata["applications"]: + env_name, env_python, env_sys_path = get_exported_python(app_env) + self.assertTrue(env_sys_path) # Environment should have sys.path entries + # Application, frameworks and runtime should all appear in sys.path + runtime_name = app_env["runtime_name"] + 
short_runtime_name = ".".join(runtime_name.split(".")[:2]) + self.assertSysPathEntry(env_name, env_sys_path) + self.assertTrue( + any(env_name in path_entry for path_entry in env_sys_path), + f"No entry containing {env_name} found in {env_sys_path}", + ) + for fw_env_name in app_env["required_layers"]: + self.assertSysPathEntry(fw_env_name, env_sys_path) + self.assertSysPathEntry(short_runtime_name, env_sys_path) + # Launch module should be executable + launch_module = app_env["app_launch_module"] + launch_result = run_module(env_python, launch_module) + self.assertEqual(launch_result.stdout, "") + self.assertEqual(launch_result.stderr, "") + + @staticmethod + def _run_postinstall(base_python_path: Path, env_path: Path) -> None: + postinstall_script = env_path / "postinstall.py" + if postinstall_script.exists(): + run_python_command([str(base_python_path), str(postinstall_script)]) + + def check_archive_deployment(self, published_paths: PublishedArchivePaths) -> None: + metadata_path, snippet_paths, archive_paths = published_paths + published_manifests = ManifestData(metadata_path, snippet_paths) + # TODO: read the base Python path for each environment from the metadata + # https://github.com/lmstudio-ai/venvstacks/issues/19 + with tempfile.TemporaryDirectory() as deployment_dir: + # Extract archives + deployment_path = Path(deployment_dir) + env_name_to_path: dict[EnvNameDeploy, Path] = {} + expected_dirs: list[str] = [] + for archive_metadata, archive_path in zip( + published_manifests.snippet_data, archive_paths + ): + if ".tar" in archive_path.suffixes: + # Layered env tar archives typically have symlinks to their runtime environment + shutil.unpack_archive( + archive_path, deployment_path, filter="fully_trusted" + ) + else: + shutil.unpack_archive(archive_path, deployment_path) + env_name = EnvNameDeploy(archive_metadata["install_target"]) + self.assertEqual(archive_path.name[: len(env_name)], env_name) + expected_dirs.append(env_name) + env_path = 
deployment_path / env_name + env_name_to_path[env_name] = env_path + self.assertCountEqual( + [p.name for p in deployment_path.iterdir()], expected_dirs + ) + # Run the post install scripts + self.assertTrue(published_manifests.combined_data) + layered_metadata = published_manifests.combined_data["layers"] + base_runtime_env_name = layered_metadata["runtimes"][0]["install_target"] + base_runtime_env_path = env_name_to_path[base_runtime_env_name] + base_python_path = get_env_python(base_runtime_env_path) + self._run_postinstall(base_python_path, env_path) + for env_name, env_path in env_name_to_path.items(): + if env_name == base_runtime_env_name: + # Already configured + continue + self._run_postinstall(base_python_path, env_path) + + def get_exported_python( + env: ArchiveMetadata, + ) -> tuple[EnvNameDeploy, Path, list[str]]: + env_name = env["install_target"] + env_path = env_name_to_path[env_name] + env_python = get_env_python(env_path) + env_sys_path = get_sys_path(env_python) + return env_name, env_python, env_sys_path + + self.check_deployed_environments(layered_metadata, get_exported_python) + + def check_environment_exports(self, export_paths: ExportedEnvironmentPaths) -> None: + metadata_path, snippet_paths, env_paths = export_paths + exported_manifests = ManifestData(metadata_path, snippet_paths) + env_name_to_path: dict[str, Path] = {} + for env_metadata, env_path in zip(exported_manifests.snippet_data, env_paths): + # TODO: Check more details regarding expected metadata contents + self.assertTrue(env_path.exists()) + env_name = EnvNameDeploy(env_metadata["install_target"]) + self.assertEqual(env_path.name, env_name) + env_name_to_path[env_name] = env_path + layered_metadata = exported_manifests.combined_data["layers"] + + def get_exported_python( + env: ExportMetadata, + ) -> tuple[EnvNameDeploy, Path, list[str]]: + env_name = env["install_target"] + env_path = env_name_to_path[env_name] + env_python = get_env_python(env_path) + env_sys_path = 
get_sys_path(env_python) + return env_name, env_python, env_sys_path + + self.check_deployed_environments(layered_metadata, get_exported_python) + + @pytest.mark.slow + def test_locking_and_publishing(self) -> None: + # This is organised as subtests in a monolothic test sequence to reduce CI overhead + # Separating the tests wouldn't really make them independent, unless the outputs of + # the earlier steps were checked in for use when testing the later steps. + # Actually configuring and building the environments is executed outside the subtest + # declarations, since actual build failures need to fail the entire test method. + subtests_started = subtests_passed = 0 # Track subtest failures + build_env = self.build_env + self.mock_index_config_options() + platform_tag = build_env.build_platform + expected_tag = f"-{platform_tag}" + versioned_tag = ( + f"{expected_tag}-1" # No previous metadata when running the test + ) + expected_dry_run_result = EXPECTED_MANIFEST + expected_tagged_dry_run_result = _tag_manifest(EXPECTED_MANIFEST, versioned_tag) + # Ensure the locking and publication steps always run for all environments + build_env.select_operations(lock=True, build=True, publish=True) + # Handle running this test case repeatedly in a local checkout + for env in build_env.all_environments(): + env.env_lock._purge_lock() + # Test stage: check dry run metadata results are as expected + minimum_lock_time = datetime.now(timezone.utc) + build_env.create_environments() + subtests_started += 1 + with self.subTest("Check untagged dry run"): + dry_run_result, dry_run_last_locked_times = _filter_manifest( + build_env.publish_artifacts(dry_run=True)[1] + ) + self.assertEqual(dry_run_result, expected_dry_run_result) + self.assertRecentlyLocked(dry_run_last_locked_times, minimum_lock_time) + # Check for expected subprocess argument lookups + for env in self.build_env.all_environments(): + # First binary only build: lock with uv, install with pip + # sync is never called for 
binary only builds + mock_compile = cast(Mock, env.index_config._get_uv_pip_compile_args) + mock_compile.assert_called_once() + mock_compile.reset_mock() + mock_install = cast(Mock, env.index_config._get_pip_install_args) + mock_install.assert_called_once_with(None) + mock_install.reset_mock() + mock_sync = cast(Mock, env.index_config._get_pip_sync_args) + mock_sync.assert_not_called() + subtests_passed += 1 + subtests_started += 1 + with self.subTest("Check tagged dry run"): + tagged_dry_run_result, tagged_last_locked_times = _filter_manifest( + build_env.publish_artifacts(dry_run=True, tag_outputs=True)[1] + ) + self.assertEqual(tagged_dry_run_result, expected_tagged_dry_run_result) + self.assertEqual(tagged_last_locked_times, dry_run_last_locked_times) + subtests_passed += 1 + # Test stage: ensure lock timestamps don't change when requirements don't change + build_env.lock_environments() + subtests_started += 1 + with self.subTest("Check lock timestamps don't change for stable requirements"): + stable_dry_run_result, stable_last_locked_times = _filter_manifest( + build_env.publish_artifacts(dry_run=True)[1] + ) + self.assertEqual(stable_dry_run_result, expected_dry_run_result) + self.assertEqual(stable_last_locked_times, dry_run_last_locked_times) + # Check for expected subprocess argument lookups + for env in self.build_env.all_environments(): + # The lock file is recreated, the timestamp metadata just doesn't + # get updated if the hash of the contents doesn't change + mock_compile = cast(Mock, env.index_config._get_uv_pip_compile_args) + mock_compile.assert_called_once() + mock_compile.reset_mock() + mock_install = cast(Mock, env.index_config._get_pip_install_args) + mock_install.assert_not_called() + mock_sync = cast(Mock, env.index_config._get_pip_sync_args) + mock_sync.assert_not_called() + subtests_passed += 1 + # Test stage: ensure lock timestamps *do* change when the requirements "change" + for env in build_env.all_environments(): + # Rather than 
actually make the hash change, instead change the hash *records* + env_lock = env.env_lock + env_lock._requirements_hash = "ensure requirements appear to have changed" + env_lock._write_lock_metadata() + minimum_relock_time = datetime.now(timezone.utc) + build_env.lock_environments() + subtests_started += 1 + with self.subTest("Check lock timestamps change for updated requirements"): + relocked_dry_run_result, relocked_last_locked_times = _filter_manifest( + build_env.publish_artifacts(dry_run=True)[1] + ) + self.assertEqual(relocked_dry_run_result, expected_dry_run_result) + self.assertGreater(minimum_relock_time, minimum_lock_time) + self.assertRecentlyLocked(relocked_last_locked_times, minimum_relock_time) + # Check for expected subprocess argument lookups + for env in self.build_env.all_environments(): + # Locked, but not rebuilt, so only uv should be called + mock_compile = cast(Mock, env.index_config._get_uv_pip_compile_args) + mock_compile.assert_called_once() + mock_compile.reset_mock() + mock_install = cast(Mock, env.index_config._get_pip_install_args) + mock_install.assert_not_called() + mock_sync = cast(Mock, env.index_config._get_pip_sync_args) + mock_sync.assert_not_called() + subtests_passed += 1 + # Test stage: ensure exported environments allow launch module execution + subtests_started += 1 + with self.subTest("Check environment export"): + export_path = self.working_path / "_export🦎" + export_result = build_env.export_environments(export_path) + self.check_environment_exports(export_result) + subtests_passed += 1 + # Test stage: ensure published archives and manifests have the expected name + # and that unpacking them allows launch module execution + subtests_started += 1 + with self.subTest("Check untagged publication"): + publication_result = build_env.publish_artifacts() + self.check_publication_result( + publication_result, dry_run_result, expected_tag=None + ) + self.check_archive_deployment(publication_result) + subtests_passed += 1 + 
subtests_started += 1 + with self.subTest("Check tagged publication"): + tagged_publication_result = build_env.publish_artifacts(tag_outputs=True) + self.check_publication_result( + tagged_publication_result, tagged_dry_run_result, expected_tag + ) + self.check_archive_deployment(tagged_publication_result) + subtests_passed += 1 + # TODO: Add another test stage that confirms build versions increment as expected + + # Work around pytest-subtests not failing the test case when subtests fail + # https://github.com/pytest-dev/pytest-subtests/issues/76 + self.assertEqual( + subtests_passed, subtests_started, "Fail due to failed subtest(s)" + ) + + @pytest.mark.slow + def test_implicit_source_builds(self) -> None: + # TODO: Completely drop support for implicit source builds (use local wheel dirs instead) + # This is organised as subtests in a monolithic test sequence to reduce CI overhead + # Separating the tests wouldn't really make them independent, unless the outputs of + # the earlier steps were checked in for use when testing the later steps. + # Actually configuring and building the environments is executed outside the subtest + # declarations, since actual build failures need to fail the entire test method.
+ subtests_started = subtests_passed = 0 # Track subtest failures + build_env = self.build_env + source_build_index_config = IndexConfig(allow_source_builds=True) + self.mock_index_config_options(source_build_index_config) + platform_tag = build_env.build_platform + expected_tag = f"-{platform_tag}" + versioned_tag = ( + f"{expected_tag}-1" # No previous metadata when running the test + ) + expected_dry_run_result = EXPECTED_MANIFEST + expected_tagged_dry_run_result = _tag_manifest(EXPECTED_MANIFEST, versioned_tag) + # Ensure the locking and publication steps always run for all environments + build_env.select_operations(lock=True, build=True, publish=True) + # Handle running this test case repeatedly in a local checkout + # Also inject a cheap-to-install build dependency in all environments + for env in build_env.all_environments(): + env.env_lock._purge_lock() + env.env_spec.build_requirements = ["uv"] + # Test stage: check dry run metadata results are as expected + minimum_lock_time = datetime.now(timezone.utc) + with pytest.deprecated_call(): + build_env.create_environments() + subtests_started += 1 + with self.subTest("Check untagged dry run"): + dry_run_result, dry_run_last_locked_times = _filter_manifest( + build_env.publish_artifacts(dry_run=True)[1] + ) + self.assertEqual(dry_run_result, expected_dry_run_result) + self.assertRecentlyLocked(dry_run_last_locked_times, minimum_lock_time) + # Check for expected subprocess argument lookups + for env in self.build_env.all_environments(): + # Source allowed lock & build invocation: + # * install build deps with pip prior to locking + # * lock with uv + # * install build deps and runtime deps with pip + # * remove build deps with pip-sync + # * ensure runtime deps are installed in upper layers with pip + mock_compile = cast(Mock, env.index_config._get_uv_pip_compile_args) + mock_compile.assert_called_once() + mock_compile.reset_mock() + mock_install = cast(Mock, env.index_config._get_pip_install_args) + if env.kind 
== LayerVariants.RUNTIME: + expected_install_calls = [ + expect_call(True), # Pre-lock install_build_requirements() + expect_call(True), # install_build_requirements() + expect_call(None), # install_requirements() + ] + else: + expected_install_calls = [ + expect_call(True), # install_build_requirements() + expect_call(None), # install_requirements() + expect_call(True), # ensure_runtime_dependencies() + ] + self.assertEqual(mock_install.call_args_list, expected_install_calls) + mock_install.reset_mock() + mock_sync = cast(Mock, env.index_config._get_pip_sync_args) + mock_sync.assert_called_once() + mock_sync.reset_mock() + subtests_passed += 1 + subtests_started += 1 + with self.subTest("Check tagged dry run"): + tagged_dry_run_result, tagged_last_locked_times = _filter_manifest( + build_env.publish_artifacts(dry_run=True, tag_outputs=True)[1] + ) + self.assertEqual(tagged_dry_run_result, expected_tagged_dry_run_result) + self.assertEqual(tagged_last_locked_times, dry_run_last_locked_times) + subtests_passed += 1 + # Test stage: ensure lock timestamps don't change when requirements don't change + build_env.lock_environments() + subtests_started += 1 + with self.subTest("Check lock timestamps don't change for stable requirements"): + stable_dry_run_result, stable_last_locked_times = _filter_manifest( + build_env.publish_artifacts(dry_run=True)[1] + ) + self.assertEqual(stable_dry_run_result, expected_dry_run_result) + self.assertEqual(stable_last_locked_times, dry_run_last_locked_times) + # Check for expected subprocess argument lookups + for env in self.build_env.all_environments(): + # The lock file is recreated, the timestamp metadata just doesn't + # get updated if the hash of the contents doesn't change + mock_compile = cast(Mock, env.index_config._get_uv_pip_compile_args) + mock_compile.assert_called_once() + mock_compile.reset_mock() + mock_install = cast(Mock, env.index_config._get_pip_install_args) + if env.kind == LayerVariants.RUNTIME: + # Pre-lock 
install_build_requirements() + mock_install.assert_called_once_with(True) + mock_install.reset_mock() + else: + mock_install.assert_not_called() + mock_sync = cast(Mock, env.index_config._get_pip_sync_args) + mock_sync.assert_not_called() + subtests_passed += 1 + # Test stage: ensure lock timestamps *do* change when the requirements "change" + for env in build_env.all_environments(): + # Rather than actually make the hash change, instead change the hash *records* + env_lock = env.env_lock + env_lock._requirements_hash = "ensure requirements appear to have changed" + env_lock._write_lock_metadata() + minimum_relock_time = datetime.now(timezone.utc) + build_env.lock_environments() + subtests_started += 1 + with self.subTest("Check lock timestamps change for updated requirements"): + relocked_dry_run_result, relocked_last_locked_times = _filter_manifest( + build_env.publish_artifacts(dry_run=True)[1] + ) + self.assertEqual(relocked_dry_run_result, expected_dry_run_result) + self.assertGreater(minimum_relock_time, minimum_lock_time) + self.assertRecentlyLocked(relocked_last_locked_times, minimum_relock_time) + # Check for expected subprocess argument lookups + for env in self.build_env.all_environments(): + # Locked, but not rebuilt, so only uv should be called + mock_compile = cast(Mock, env.index_config._get_uv_pip_compile_args) + mock_compile.assert_called_once() + mock_compile.reset_mock() + mock_install = cast(Mock, env.index_config._get_pip_install_args) + if env.kind == LayerVariants.RUNTIME: + # Pre-lock install_build_requirements() + mock_install.assert_called_once_with(True) + mock_install.reset_mock() + else: + mock_install.assert_not_called() + mock_sync = cast(Mock, env.index_config._get_pip_sync_args) + mock_sync.assert_not_called() + subtests_passed += 1 + # Test stage: ensure published archives and manifests have the expected name + subtests_started += 1 + with self.subTest("Check untagged publication"): + publication_result = 
build_env.publish_artifacts() + self.check_publication_result( + publication_result, dry_run_result, expected_tag=None + ) + subtests_passed += 1 + subtests_started += 1 + with self.subTest("Check tagged publication"): + tagged_publication_result = build_env.publish_artifacts(tag_outputs=True) + self.check_publication_result( + tagged_publication_result, tagged_dry_run_result, expected_tag + ) + subtests_passed += 1 + # TODO: Add another test stage that confirms build versions increment as expected + + # Work aroung pytest-subtests not failing the test case when subtests fail + # https://github.com/pytest-dev/pytest-subtests/issues/76 + self.assertEqual( + subtests_passed, subtests_started, "Fail due to failed subtest(s)" + ) diff --git a/tests/test_sample_project.py b/tests/test_sample_project.py new file mode 100644 index 0000000..ff4487d --- /dev/null +++ b/tests/test_sample_project.py @@ -0,0 +1,571 @@ +"""Test building the sample project produces the expected results""" + +import os.path +import shutil +import tempfile + +from itertools import chain +from pathlib import Path +from typing import Any, Callable, Mapping, Sequence, TypeVar + + +# Use unittest for the actual test implementations due to the diff-handling in pytest being +# atrociously bad, as discussed in https://github.com/pytest-dev/pytest/issues/6682 +import unittest +from unittest import mock + +import pytest # To mark slow test cases + +from support import ( + EnvSummary, + LayeredEnvSummary, + ApplicationEnvSummary, + ManifestData, + get_artifact_export_path, + force_artifact_export, + get_os_environ_settings, + get_sys_path, + run_module, +) + +from venvstacks.stacks import ( + ArchiveBuildMetadata, + ArchiveMetadata, + BuildEnvironment, + EnvNameDeploy, + StackSpec, + LayerCategories, + ExportedEnvironmentPaths, + ExportMetadata, +) +from venvstacks._util import get_env_python + +################################## +# Sample project test helpers +################################## + 
+_THIS_PATH = Path(__file__) +SAMPLE_PROJECT_EXPORT_DIR = _THIS_PATH.stem +SAMPLE_PROJECT_PATH = _THIS_PATH.parent / "sample_project" +SAMPLE_PROJECT_STACK_SPEC_PATH = SAMPLE_PROJECT_PATH / "venvstacks.toml" +SAMPLE_PROJECT_REQUIREMENTS_PATH = SAMPLE_PROJECT_PATH / "requirements" +SAMPLE_PROJECT_MANIFESTS_PATH = SAMPLE_PROJECT_PATH / "expected_manifests" + + +def _define_build_env(working_path: Path) -> BuildEnvironment: + """Define a build environment for the sample project in a temporary folder""" + # To simplify regeneration of committed lockfiles and metadata, + # use the spec file directly from its checked out location + stack_spec = StackSpec.load(SAMPLE_PROJECT_STACK_SPEC_PATH) + build_path = working_path / "_build🐸" + return stack_spec.define_build_environment(build_path) + + +def _get_expected_metadata(build_env: BuildEnvironment) -> ManifestData: + """Path to the expected sample project archive metadata for the current platform""" + return ManifestData(SAMPLE_PROJECT_MANIFESTS_PATH / build_env.build_platform) + + +def _get_expected_dry_run_result( + build_env: BuildEnvironment, expect_tagged_outputs: bool = False +) -> dict[str, Any]: + # Dry run results report LayerCategories instances rather than plain strings + untagged_metadata = _get_expected_metadata(build_env).combined_data + all_layer_manifests = untagged_metadata["layers"] + filtered_layer_manifests: dict[LayerCategories, Any] = {} + for category, archive_manifests in all_layer_manifests.items(): + filtered_layer_manifests[LayerCategories(category)] = archive_manifests + # Dry run results omit any metadata keys relating solely to the built archives + build_request_keys = ( + ArchiveBuildMetadata.__required_keys__ | ArchiveBuildMetadata.__optional_keys__ + ) + archive_keys = ArchiveMetadata.__required_keys__ | ArchiveMetadata.__optional_keys__ + archive_only_keys = archive_keys - build_request_keys + platform_tag = build_env.build_platform + for archive_metadata in 
chain(*all_layer_manifests.values()): + for key in archive_only_keys: + archive_metadata.pop(key, None) + if expect_tagged_outputs: + # Saved metadata is for untagged builds, so the tagged output dry run + # will always indicate that a new build is needed + # Inputs haven't changed, so the iteration number won't be increased + install_target = archive_metadata["install_target"] + build_iteration = archive_metadata["archive_build"] + expected_tag = f"{platform_tag}-{build_iteration}" + tagged_build_name = f"{install_target}-{expected_tag}" + archive_name: str = archive_metadata["archive_name"] + archive_suffix = archive_name.removeprefix(install_target) + archive_metadata["archive_name"] = f"{tagged_build_name}{archive_suffix}" + return {"layers": filtered_layer_manifests} + + +def _collect_locked_requirements(build_env: BuildEnvironment) -> dict[Path, str]: + locked_requirements: dict[Path, str] = {} + lock_dir_path = build_env.requirements_dir_path + build_platform = build_env.build_platform + for env in build_env.all_environments(): + env_spec = env.env_spec + env_requirements_path = env_spec.get_requirements_path( + build_platform, lock_dir_path + ) + env_requirements_text = "" + if env_requirements_path.exists(): + env_requirements_text = env_requirements_path.read_text() + locked_requirements[env_requirements_path] = env_requirements_text + return locked_requirements + + +def _export_locked_requirements( + artifact_export_path: Path | None, + build_env: BuildEnvironment, + lock_paths: list[Path], +) -> None: + if artifact_export_path is None: + # Artifact export has not been enabled + return + export_dir_path = artifact_export_path / SAMPLE_PROJECT_EXPORT_DIR / "requirements" + export_dir_path.mkdir(parents=True, exist_ok=True) + print(f"Exporting locked requirements files to {str(export_dir_path)!r}") + spec_dir_path = build_env.requirements_dir_path + for locked_requirements_path in lock_paths: + export_path = export_dir_path / 
locked_requirements_path.relative_to( + spec_dir_path + ) + export_path.parent.mkdir(parents=True, exist_ok=True) + shutil.copyfile(locked_requirements_path, export_path) + + +def _export_manifests( + manifests_export_path: Path, manifest_path: Path, archive_metadata_path: Path +) -> None: + manifests_export_path.mkdir(parents=True, exist_ok=True) + shutil.copyfile(manifest_path, manifests_export_path / manifest_path.name) + shutil.copytree( + archive_metadata_path, + manifests_export_path / archive_metadata_path.name, + dirs_exist_ok=True, + ) + + +def _export_archives( + artifact_export_path: Path | None, + build_env: BuildEnvironment, + manifest_path: Path, + archive_metadata_paths: list[Path], + archive_paths: list[Path], +) -> None: + print("Copying generated artifact manifest files back to source tree") + metadata_path = SAMPLE_PROJECT_MANIFESTS_PATH / build_env.build_platform + archive_metadata_path = Path(os.path.commonpath(archive_metadata_paths)) + _export_manifests(metadata_path, manifest_path, archive_metadata_path) + if artifact_export_path is None: + # Artifact export has not been enabled + return + # Export manifests from CI + test_export_dir_path = artifact_export_path / SAMPLE_PROJECT_EXPORT_DIR + export_manifests_dir_path = test_export_dir_path / "manifests" + print(f"Exporting manifest files to {str(export_manifests_dir_path)!r}") + _export_manifests(export_manifests_dir_path, manifest_path, archive_metadata_path) + # Export archives from CI + export_archives_dir_path = test_export_dir_path / "archives" + print(f"Exporting archive files to {str(export_archives_dir_path)!r}") + export_archives_dir_path.mkdir(parents=True, exist_ok=True) + archive_dir_path = build_env.build_path + for archive_path in archive_paths: + relative_archive_path = archive_path.relative_to(archive_dir_path) + export_archive_path = export_archives_dir_path / relative_archive_path + export_archive_path.parent.mkdir(parents=True, exist_ok=True) + shutil.copyfile(archive_path, 
export_archive_path) + + +################################## +# Expected layer definitions +################################## + +EXPECTED_RUNTIMES = [ + EnvSummary("cpython@3.11", ""), + EnvSummary("cpython@3.12", ""), +] + +EXPECTED_FRAMEWORKS = [ + LayeredEnvSummary("scipy", "framework-", "cpython@3.11"), + LayeredEnvSummary("sklearn", "framework-", "cpython@3.12"), + LayeredEnvSummary("http-client", "framework-", "cpython@3.11"), +] + +EXPECTED_APPLICATIONS = [ + ApplicationEnvSummary("scipy-import", "app-", "cpython@3.11", ("scipy",)), + ApplicationEnvSummary( + "scipy-client", + "app-", + "cpython@3.11", + ( + "scipy", + "http-client", + ), + ), + ApplicationEnvSummary("sklearn-import", "app-", "cpython@3.12", ("sklearn",)), +] + +EXPECTED_ENVIRONMENTS = EXPECTED_RUNTIMES.copy() +EXPECTED_ENVIRONMENTS.extend(EXPECTED_FRAMEWORKS) +EXPECTED_ENVIRONMENTS.extend(EXPECTED_APPLICATIONS) + +########################## +# Test cases +########################## + + +class TestStackSpec(unittest.TestCase): + # Test cases that only need the stack specification file + + def test_spec_loading(self) -> None: + stack_spec = StackSpec.load(SAMPLE_PROJECT_STACK_SPEC_PATH) + runtime_keys = list(stack_spec.runtimes) + framework_keys = list(stack_spec.frameworks) + application_keys = list(stack_spec.applications) + spec_keys = runtime_keys + framework_keys + application_keys + self.assertCountEqual(spec_keys, set(spec_keys)) + expected_spec_names = [env.spec_name for env in EXPECTED_ENVIRONMENTS] + self.assertCountEqual(spec_keys, expected_spec_names) + spec_names = [env.name for env in stack_spec.all_environment_specs()] + self.assertCountEqual(spec_names, expected_spec_names) + expected_env_names = [env.env_name for env in EXPECTED_ENVIRONMENTS] + env_names = [env.env_name for env in stack_spec.all_environment_specs()] + self.assertCountEqual(env_names, expected_env_names) + for rt_summary in EXPECTED_RUNTIMES: + spec_name = rt_summary.spec_name + rt_env = 
stack_spec.runtimes[spec_name] + self.assertEqual(rt_env.name, spec_name) + self.assertEqual(rt_env.env_name, rt_summary.env_name) + for fw_summary in EXPECTED_FRAMEWORKS: + spec_name = fw_summary.spec_name + fw_env = stack_spec.frameworks[spec_name] + self.assertEqual(fw_env.name, spec_name) + self.assertEqual(fw_env.env_name, fw_summary.env_name) + for app_summary in EXPECTED_APPLICATIONS: + spec_name = app_summary.spec_name + app_env = stack_spec.applications[spec_name] + self.assertEqual(app_env.name, spec_name) + self.assertEqual(app_env.env_name, app_summary.env_name) + + +class TestBuildEnvironment(unittest.TestCase): + # Test cases that need the full build environment to exist + + working_path: Path + build_env: BuildEnvironment + + def setUp(self) -> None: + working_dir = tempfile.TemporaryDirectory() + self.addCleanup(working_dir.cleanup) + working_path = Path(working_dir.name) + self.working_path = working_path + self.build_env = _define_build_env(working_path) + os_env_updates = get_os_environ_settings() + os_env_patch = mock.patch.dict("os.environ", os_env_updates) + os_env_patch.start() + self.addCleanup(os_env_patch.stop) + self.artifact_export_path = get_artifact_export_path() + self.export_on_success = force_artifact_export() + + # TODO: Refactor to share the environment checking code with test_minimal_project + def assertSysPathEntry(self, expected: str, env_sys_path: Sequence[str]) -> None: + self.assertTrue( + any(expected in path_entry for path_entry in env_sys_path), + f"No entry containing {expected!r} found in {env_sys_path}", + ) + + T = TypeVar("T", bound=Mapping[str, Any]) + + def check_deployed_environments( + self, + layered_metadata: dict[str, Sequence[T]], + get_exported_python: Callable[[T], tuple[str, Path, list[str]]], + ) -> None: + for rt_env in layered_metadata["runtimes"]: + deployed_name, _, env_sys_path = get_exported_python(rt_env) + self.assertTrue(env_sys_path) # Environment should have sys.path entries + # Runtime 
environment layer should be completely self-contained + self.assertTrue( + all(deployed_name in path_entry for path_entry in env_sys_path), + f"Path outside {deployed_name} in {env_sys_path}", + ) + for fw_env in layered_metadata["frameworks"]: + deployed_name, _, env_sys_path = get_exported_python(fw_env) + self.assertTrue(env_sys_path) # Environment should have sys.path entries + # Framework and runtime should both appear in sys.path + runtime_name = fw_env["runtime_name"] + short_runtime_name = ".".join(runtime_name.split(".")[:2]) + self.assertSysPathEntry(deployed_name, env_sys_path) + self.assertSysPathEntry(short_runtime_name, env_sys_path) + for app_env in layered_metadata["applications"]: + deployed_name, env_python, env_sys_path = get_exported_python(app_env) + self.assertTrue(env_sys_path) # Environment should have sys.path entries + # Application, frameworks and runtime should all appear in sys.path + runtime_name = app_env["runtime_name"] + short_runtime_name = ".".join(runtime_name.split(".")[:2]) + self.assertSysPathEntry(deployed_name, env_sys_path) + self.assertTrue( + any(deployed_name in path_entry for path_entry in env_sys_path), + f"No entry containing {deployed_name} found in {env_sys_path}", + ) + for fw_env_name in app_env["required_layers"]: + self.assertSysPathEntry(fw_env_name, env_sys_path) + self.assertSysPathEntry(short_runtime_name, env_sys_path) + # Launch module should be executable + launch_module = app_env["app_launch_module"] + launch_result = run_module(env_python, launch_module) + self.assertEqual( + launch_result.stdout.strip(), + "Environment launch module executed successfully", + ) + self.assertEqual(launch_result.stderr, "") + + def check_environment_exports(self, export_paths: ExportedEnvironmentPaths) -> None: + metadata_path, snippet_paths, env_paths = export_paths + exported_manifests = ManifestData(metadata_path, snippet_paths) + deployed_name_to_path: dict[str, Path] = {} + for env_metadata, env_path in 
zip(exported_manifests.snippet_data, env_paths): + self.assertTrue(env_path.exists()) + deployed_name = EnvNameDeploy(env_metadata["install_target"]) + self.assertEqual(env_path.name, deployed_name) + deployed_name_to_path[deployed_name] = env_path + layered_metadata = exported_manifests.combined_data["layers"] + + def get_exported_python( + env: ExportMetadata, + ) -> tuple[EnvNameDeploy, Path, list[str]]: + deployed_name = env["install_target"] + env_path = deployed_name_to_path[deployed_name] + env_python = get_env_python(env_path) + env_sys_path = get_sys_path(env_python) + return deployed_name, env_python, env_sys_path + + self.check_deployed_environments(layered_metadata, get_exported_python) + + @pytest.mark.slow + @pytest.mark.expected_output + def test_build_is_reproducible(self) -> None: + # Need long diffs to get useful output from metadata checks + self.maxDiff = None + # This is organised as subtests in a monolithic test sequence to reduce CI overhead + # Separating the tests wouldn't really make them independent, unless the outputs of + # the initial intermediate steps were checked in for use when testing the later steps. + # Actually configuring and building the environments is executed outside the subtest + # declarations, since actual build failures need to fail the entire test method.
+ subtests_started = subtests_passed = 0 # Track subtest failures + build_env = self.build_env + artifact_export_path = self.artifact_export_path + # Read expected results from committed test data + expected_archive_metadata = _get_expected_metadata(build_env) + expected_dry_run_result = _get_expected_dry_run_result(build_env) + expected_tagged_dry_run_result = _get_expected_dry_run_result( + build_env, expect_tagged_outputs=True + ) + # Test stage 1: ensure lock files can be regenerated without alteration + committed_locked_requirements = _collect_locked_requirements(build_env) + build_env.create_environments(lock=True) + generated_locked_requirements = _collect_locked_requirements(build_env) + export_locked_requirements = True + subtests_started += 1 + with self.subTest("Ensure lock files are reproducible"): + self.assertEqual( + generated_locked_requirements, committed_locked_requirements + ) + export_locked_requirements = self.export_on_success # Only export if forced + subtests_passed += 1 + if export_locked_requirements: + # Lock files will already have been written back to the source tree location + # Also export them to the CI test artifact upload path (if set) + _export_locked_requirements( + artifact_export_path, + build_env, + list(generated_locked_requirements.keys()), + ) + # Test stage 2: ensure environments can be populated without building the artifacts + build_env.create_environments() # Use committed lock files + subtests_started += 1 + with self.subTest("Ensure archive publication requests are reproducible"): + # Check generation of untagged archive names + dry_run_result = build_env.publish_artifacts(dry_run=True)[1] + self.assertEqual(dry_run_result, expected_dry_run_result) + # Check generation of tagged archive names + tagged_dry_run_result = build_env.publish_artifacts( + dry_run=True, tag_outputs=True + )[1] + self.assertEqual(tagged_dry_run_result, expected_tagged_dry_run_result) + # Dry run metadata may be incorrect because the expected 
outputs are being updated, + # so always continue on and execute the full archive publication subtest + subtests_passed += 1 + subtests_started += 1 + with self.subTest( + "Ensure dry run builds do not update lock files or manifests" + ): + # No changes to lock files + post_rebuild_locked_requirements = _collect_locked_requirements(build_env) + self.assertEqual( + post_rebuild_locked_requirements, generated_locked_requirements + ) + subtests_passed += 1 + # Test stage 3: ensure built artifacts have the expected manifest contents + manifest_path, snippet_paths, archive_paths = build_env.publish_artifacts() + export_published_archives = True + subtests_started += 1 + with self.subTest("Ensure artifact metadata is reproducible"): + # Generated metadata should match committed reference metadata + generated_archive_metadata = ManifestData( + manifest_path.parent, snippet_paths + ) + self.assertEqual( + generated_archive_metadata.combined_data, + expected_archive_metadata.combined_data, + ) + self.assertCountEqual( + generated_archive_metadata.snippet_data, + expected_archive_metadata.snippet_data, + ) + # Archive should be emitted for every environment defined for this platform + num_environments = len(list(build_env.all_environments())) + self.assertEqual(len(archive_paths), num_environments) + export_published_archives = self.export_on_success # Only export if forced + # No changes to lock files + post_publish_locked_requirements = _collect_locked_requirements(build_env) + self.assertEqual( + post_publish_locked_requirements, generated_locked_requirements + ) + subtests_passed += 1 + if export_published_archives: + # Export manifests and archives to the CI test artifact upload path (if set) + # Also write manifests back to the source tree location for local updates + _export_archives( + artifact_export_path, + build_env, + manifest_path, + snippet_paths, + archive_paths, + ) + # Test stage: ensure exported environments allow launch module execution + subtests_started 
+= 1 + with self.subTest("Check environment export"): + export_path = self.working_path / "_export🦎" + export_result = build_env.export_environments(export_path) + self.check_environment_exports(export_result) + subtests_passed += 1 + + # Work around pytest-subtests not failing the test case when subtests fail + # https://github.com/pytest-dev/pytest-subtests/issues/76 + self.assertEqual( + subtests_passed, subtests_started, "Fail due to failed subtest(s)" + ) + + def test_default_operation_selection(self) -> None: + subtests_started = subtests_passed = 0  # Track subtest failures + build_env = self.build_env + # Test default state + for env in build_env.all_environments(): + subtests_started += 1 + with self.subTest(env=env.env_name): + self.assertIsNone(env.want_lock, "want_lock should be None") + self.assertTrue(env.want_build, "want_build should be True") + self.assertTrue(env.want_publish, "want_publish should be True") + subtests_passed += 1 + self.assertEqual( + subtests_passed, subtests_started, "Fail due to failed subtest(s)" + ) + + def test_operation_selection(self) -> None: + subtests_started = subtests_passed = 0  # Track subtest failures + requested_operations = ( + (False, False, False),  # Don't actually do anything + (True, False, False),  # Just lock + (True, True, False),  # Lock and build + (None, None, True),  # Publish (locking and building if needed) + (False, False, True),  # Publish (without modification to current state) + (True, True, True),  # Lock, build, and publish + ) + build_env = self.build_env + for requested in requested_operations: + want_lock, want_build, want_publish = requested + build_env.select_operations(want_lock, want_build, want_publish) + for env in build_env.all_environments(): + subtests_started += 1 + with self.subTest(env=env.env_name, requested=requested): + self.assertEqual(env.want_lock, want_lock, "want_lock mismatch") + self.assertEqual(env.want_build, want_build, "want_build mismatch") + self.assertEqual(
env.want_publish, want_publish, "want_publish mismatch" + ) + subtests_passed += 1 + self.assertEqual( + subtests_passed, subtests_started, "Fail due to failed subtest(s)" + ) + + def test_get_unmatched_patterns(self) -> None: + build_env = self.build_env + matching = ["app-*", "*@*", "framework-*", "app-scipy-import"] + self.assertEqual(build_env.get_unmatched_patterns(matching), []) + unknown = ["unknown", "app-?", "*-app"] + self.assertEqual(build_env.get_unmatched_patterns(unknown), unknown) + combined = sorted(matching + unknown) + self.assertEqual(build_env.get_unmatched_patterns(combined), sorted(unknown)) + + def test_layer_selection(self) -> None: + subtests_started = subtests_passed = 0 # Track subtest failures + included = ["framework-sklearn"] + dependencies = ["cpython@3.12"] + derived = ["app-sklearn-import"] + build_env = self.build_env + + build_env.select_layers(included, lock=True) + for env in build_env.all_environments(): + subtests_started += 1 + env_name = env.env_name + with self.subTest(env=env_name): + if env_name in included: + self.assertTrue( + env.want_lock, "want_lock not set for included layer" + ) + self.assertTrue( + env.want_build, "want_build not set for included layer" + ) + self.assertTrue( + env.want_publish, "want_publish not set for included layer" + ) + elif env_name in dependencies: + self.assertIsNone( + env.want_lock, "want_lock is not None for dependency" + ) + self.assertIsNone( + env.want_build, "want_build is not None for dependency" + ) + self.assertFalse( + env.want_publish, "want_publish is set for dependency" + ) + elif env_name in derived: + self.assertTrue( + env.want_lock, "want_lock not set for derived layer" + ) + self.assertTrue( + env.want_build, "want_build not set for derived layer" + ) + self.assertTrue( + env.want_publish, "want_publish not set for derived layer" + ) + else: + self.assertFalse(env.want_lock, "want_lock set for excluded layer") + self.assertFalse( + env.want_build, "want_build set for 
excluded layer" + ) + self.assertFalse( + env.want_publish, "want_publish set for excluded layer" + ) + subtests_passed += 1 + self.assertEqual( + subtests_passed, subtests_started, "Fail due to failed subtest(s)" + ) + + +# TODO: Add test case for cleaning an existing build environment +# TODO: Add test case that confirms operation & layer selection has the desired effect +# TODO: Add more layer selection test cases beyond the current one (including derivation) diff --git a/tests/update-expected-output.sh b/tests/update-expected-output.sh new file mode 100755 index 0000000..0deb92d --- /dev/null +++ b/tests/update-expected-output.sh @@ -0,0 +1,30 @@ +#!/bin/bash + +# See http://redsymbol.net/articles/unofficial-bash-strict-mode/ for benefit of these options +set -euo pipefail +IFS=$'\n\t' + +# Where to save the list of changed filenames +output_target="${1:?}" + +# Running the test suite updates the committed output files in place, +# allowing any discrepancies to be detected via "git status". +# Tests that update output files are specifically marked. +# Ignore return code, as the tests will fail when updates are needed. 
+tox -m test -- -m "expected_output" || true + +# Emit the list of changed files (if any) to the specified output file +# Avoids setting a non-zero return code if `grep` doesn't match any lines +project_dir="tests/sample_project" +requirements_dir="$project_dir/requirements" +metadata_dir="$project_dir/expected_manifests" +changed_files="$(git status -uall --porcelain=1 -- "$requirements_dir" "$metadata_dir" | (grep -v '^ D' || true))" +if [ -n "$changed_files" ]; then + echo "$changed_files" | sed -E 's/^ ?[^ ]* //' | tee "$output_target" + path_anchor="tests/expected-output-config.toml" + echo "Including '$path_anchor' to ensure paths are relative to test folder" + echo "$path_anchor" >> "$output_target" +else + echo "No changes to expected output detected" + echo > "$output_target" +fi diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..06a64ab --- /dev/null +++ b/tox.ini @@ -0,0 +1,39 @@ +[tox] +env_list = py{3.11,3.12},format,lint,typecheck +skip_missing_interpreters = False +isolated_build = True +labels = + test = py3.12 + test_all = py3.11,py3.12 + static = lint,typecheck + +[testenv] +groups = dev +allowlist_externals = pytest +passenv = + VENVSTACKS_* +commands = + pytest {posargs} tests/ + +[testenv:format] +groups = dev +allowlist_externals = ruff +commands = + ruff format {posargs} src/ tests/ misc/ + +[testenv:lint] +groups = dev +allowlist_externals = ruff +commands = + ruff check --exclude 'tests/sample_project' {posargs} src/ tests/ misc/ + +[testenv:typecheck] +groups = dev,git +allowlist_externals = mypy +commands = + mypy --strict --exclude 'tests/sample_project' {posargs} src/ tests/ misc/ + +[gh] +python = + 3.11 = py3.11 + 3.12 = py3.12 From 1475bbd677441d96f0ac9fec64295e13782fa486 Mon Sep 17 00:00:00 2001 From: Alyssa Coghlan Date: Mon, 21 Oct 2024 17:50:57 +1000 Subject: [PATCH 2/3] Include optional dev groups --- ci-constraints.txt | 105 ++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 104 insertions(+), 1 
deletion(-) diff --git a/ci-constraints.txt b/ci-constraints.txt index 42a5616..673a74e 100644 --- a/ci-constraints.txt +++ b/ci-constraints.txt @@ -4,18 +4,44 @@ anyio==4.6.2.post1 \ --hash=sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c \ --hash=sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d +attrs==24.2.0 \ + --hash=sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346 \ + --hash=sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 blinker==1.8.2 \ --hash=sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01 \ --hash=sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83 +build==1.2.2.post1 \ + --hash=sha256:1d61c0887fa860c01971625baae8bdd338e517b836a2f70dd1f7aa3a6b2fc5b5 \ + --hash=sha256:b36993e92ca9375a219c99e606a122ff365a760a2d4bba0caa09bd5278b608b7 +cachetools==5.5.0 \ + --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ + --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a certifi==2024.8.30 \ --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 +chardet==5.2.0 \ + --hash=sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7 \ + --hash=sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970 +click==8.1.7 \ + --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ + --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de +colorama==0.4.6 \ + --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ + --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 dep-logic==0.4.9 \ --hash=sha256:06faa33814e5ff881922f644284a608d7da7946462760f710217d829ae864a0e \ --hash=sha256:5d455ea2a3da4fea2be6186d886905c57eeeebe3ea7fa967f599cb8e0f01d5c9 distlib==0.3.9 \ 
--hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 +dulwich==0.22.1 \ + --hash=sha256:12482e318895da9acabea7c0cc70b35d36833e7cb2def511ab3a63617f5c1af3 \ + --hash=sha256:6dc42afedc8cda4f2fd15a06d2e9e41281074a02cdf31bb2e0dde4d80766a408 \ + --hash=sha256:82f26e592e9a36ab33bcdb419c7d53320e26c85dfc254cdb84f5f561a2fcaabf \ + --hash=sha256:9d19f04ecd4628a0e4587b4c4e98e040b87924c1362ae5aa27420435f05d5dd8 \ + --hash=sha256:a18d1392eabd02f337dcba23d723a4dcca87274ce8693cf88e6320f38bc3fdcd \ + --hash=sha256:e36d85967cfbf25da1c7bc3d6921adc5baa976969d926aaf1582bd5fd7e94758 \ + --hash=sha256:e90b8a2f24149c5803b733a24f1a016a2943b1f5a9ab2360db545e4638354c35 filelock==3.16.1 \ --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 @@ -31,12 +57,15 @@ hishel==0.0.33 \ httpcore==1.0.6 \ --hash=sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f \ --hash=sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f -httpx[socks]==0.27.2 \ +httpx==0.27.2 \ --hash=sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0 \ --hash=sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2 idna==3.10 \ --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 +iniconfig==2.0.0 \ + --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ + --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 installer==0.7.0 \ --hash=sha256:05d1933f0a5ba7d8d6296bb6d5018e7c94fa473ceb10cf198a92ccea19c27b53 \ --hash=sha256:a26d3e3116289bb08216e0d0f7d925fcef0b0194eedfa0c944bcaaa106c4b631 @@ -63,6 +92,27 @@ msgpack==1.1.0 \ 
--hash=sha256:d46cf9e3705ea9485687aa4001a76e44748b609d260af21c4ceea7f2212a501d \ --hash=sha256:dd432ccc2c72b914e4cb77afce64aab761c1137cc698be3984eee260bcb2896e \ --hash=sha256:fd2906780f25c8ed5d7b323379f6138524ba793428db5d0e9d226d3fa6aa1788 +mypy==1.12.0 \ + --hash=sha256:1ebf9e796521f99d61864ed89d1fb2926d9ab6a5fab421e457cd9c7e4dd65aa9 \ + --hash=sha256:20c7c5ce0c1be0b0aea628374e6cf68b420bcc772d85c3c974f675b88e3e6e57 \ + --hash=sha256:233e11b3f73ee1f10efada2e6da0f555b2f3a5316e9d8a4a1224acc10e7181d3 \ + --hash=sha256:2f106db5ccb60681b622ac768455743ee0e6a857724d648c9629a9bd2ac3f721 \ + --hash=sha256:48d3e37dd7d9403e38fa86c46191de72705166d40b8c9f91a3de77350daa0893 \ + --hash=sha256:4ae8959c21abcf9d73aa6c74a313c45c0b5a188752bf37dace564e29f06e9c1b \ + --hash=sha256:4b86de37a0da945f6d48cf110d5206c5ed514b1ca2614d7ad652d4bf099c7de7 \ + --hash=sha256:52b9e1492e47e1790360a43755fa04101a7ac72287b1a53ce817f35899ba0521 \ + --hash=sha256:5bc81701d52cc8767005fdd2a08c19980de9ec61a25dbd2a937dfb1338a826f9 \ + --hash=sha256:65a22d87e757ccd95cbbf6f7e181e6caa87128255eb2b6be901bb71b26d8a99d \ + --hash=sha256:8462655b6694feb1c99e433ea905d46c478041a8b8f0c33f1dab00ae881b2164 \ + --hash=sha256:923ea66d282d8af9e0f9c21ffc6653643abb95b658c3a8a32dca1eff09c06475 \ + --hash=sha256:a64ee25f05fc2d3d8474985c58042b6759100a475f8237da1f4faf7fcd7e6309 \ + --hash=sha256:c72861b7139a4f738344faa0e150834467521a3fba42dc98264e5aa9507dd601 \ + --hash=sha256:e478601cc3e3fa9d6734d255a59c7a2e5c2934da4378f3dd1e3411ea8a248642 \ + --hash=sha256:faca7ab947c9f457a08dcb8d9a8664fd438080e002b0fa3e41b0535335edcf7f \ + --hash=sha256:fd313226af375d52e1e36c383f39bf3836e1f192801116b31b090dfcd3ec5266 +mypy-extensions==1.0.0 \ + --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ + --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782 packaging==24.1 \ --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ 
--hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 @@ -72,15 +122,33 @@ pbs-installer==2024.10.10 \ pdm==2.19.2 \ --hash=sha256:42af4e0897b139656e003767e99c4f77014bf36d9a7b759d3e09b49ee5979143 \ --hash=sha256:efb39264569181d0375536ef81c556648f16b540d429a53715730490a2283567 +pip==24.2 \ + --hash=sha256:2cd581cf58ab7fcfca4ce8efa6dcacd0de5bf8d0a3eb9ec927e07405f4d9e2a2 \ + --hash=sha256:5b5e490b5e9cb275c879595064adce9ebd31b854e3e803740b72f9ccf34a45b8 +pip-tools==7.4.1 \ + --hash=sha256:4c690e5fbae2f21e87843e89c26191f0d9454f362d8acdbd695716493ec8b3a9 \ + --hash=sha256:864826f5073864450e24dbeeb85ce3920cdfb09848a3d69ebf537b521f14bcc9 platformdirs==4.3.6 \ --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb +pluggy==1.5.0 \ + --hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \ + --hash=sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669 pygments==2.18.0 \ --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \ --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a +pyproject-api==1.8.0 \ + --hash=sha256:3d7d347a047afe796fd5d1885b1e391ba29be7169bd2f102fcd378f04273d228 \ + --hash=sha256:77b8049f2feb5d33eefcc21b57f1e279636277a8ac8ad6b5871037b243778496 pyproject-hooks==1.2.0 \ --hash=sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8 \ --hash=sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913 +pytest==8.3.3 \ + --hash=sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181 \ + --hash=sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2 +pytest-subtests==0.13.1 \ + --hash=sha256:989e38f0f1c01bc7c6b2e04db7d9fd859db35d77c2c1a430c831a70cbf3fde2d \ + --hash=sha256:ab616a22f64cd17c1aee65f18af94dbc30c444f8683de2b30895c3778265e3bd python-dotenv==1.0.1 \ 
--hash=sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca \ --hash=sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a @@ -90,6 +158,16 @@ resolvelib==1.0.1 \ rich==13.9.2 \ --hash=sha256:51a2c62057461aaf7152b4d611168f93a9fc73068f8ded2790f29fe2b5366d0c \ --hash=sha256:8c82a3d3f8dcfe9e734771313e606b39d8247bb6b826e196f4914b333b743cf1 +ruff==0.6.9 \ + --hash=sha256:140d4b5c9f5fc7a7b074908a78ab8d384dd7f6510402267bc76c37195c02a7ec \ + --hash=sha256:53fd8ca5e82bdee8da7f506d7b03a261f24cd43d090ea9db9a1dc59d9313914c \ + --hash=sha256:785d31851c1ae91f45b3d8fe23b8ae4b5170089021fbb42402d811135f0b7117 \ + --hash=sha256:a67267654edc23c97335586774790cde402fb6bbdb3c2314f1fc087dee320bfa \ + --hash=sha256:a9641e31476d601f83cd602608739a0840e348bda93fec9f1ee816f8b6798b93 \ + --hash=sha256:b076ef717a8e5bc819514ee1d602bbdca5b4420ae13a9cf61a0c0a4f53a2baa2 +setuptools==75.1.0 \ + --hash=sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2 \ + --hash=sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538 shellingham==1.5.4 \ --hash=sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686 \ --hash=sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de @@ -102,15 +180,40 @@ socksio==1.0.0 \ tomlkit==0.13.2 \ --hash=sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde \ --hash=sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79 +tox==4.21.2 \ + --hash=sha256:13d996adcd792e7c82994b0e116d85efd84f0c6d185254d83d156f73f86b2038 \ + --hash=sha256:49381ff102296753e378fa5ff30e42a35e695f149b4dbf8a2c49d15fdb5797b2 +tox-gh==1.4.4 \ + --hash=sha256:4ea585f66585b90f5826b1677cfc9453747792a0f9ff83d468603bc17556e07b \ + --hash=sha256:b962e0f8c4619e98d11c2a135939876691e148b843b7dac4cff7de1dc4f7c215 +tox-pdm==0.7.2 \ + --hash=sha256:12f6215416b7acd00a80a9e7128f3dc3e3c89308d60707f5d0a24abdf83ac104 \ + 
--hash=sha256:a841a7e1e942a71805624703b9a6d286663bd6af79bba6130ba756975c315308 truststore==0.9.2 \ --hash=sha256:04559916f8810cc1a5ecc41f215eddc988746067b754fc0995da7a2ceaf54735 \ --hash=sha256:a1dee0d0575ff22d2875476343783a5d64575419974e228f3248772613c3d993 +typer-slim==0.12.5 \ + --hash=sha256:9a994f721b828783dbf144e17461b1c720bb4598e0d5eff7c1b3f08ee58cb062 \ + --hash=sha256:c8e3fcf93cc7dd584036df8755d2e2363f85f8a4dd028c7911eed3f00cf0ebb1 typing-extensions==4.12.2 \ --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 unearth==0.17.2 \ --hash=sha256:0b8a2afd3476f1ab6155fc579501ac47fffe43547d88a70e5a5b76a7fe6caa2c \ --hash=sha256:4d21af1238a583835fca156322f7225382e718cdcc42d6278050a88e605c4ad5 +urllib3==2.2.3 \ + --hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \ + --hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9 +uv==0.4.21 \ + --hash=sha256:19607da8ee024e4ff060804efb8251e3b821cbd7f830b58612600ffe739fd33d \ + --hash=sha256:45df47a4f43db730bea72bd3150c206d00d1a4d854137ed63dc04bb73032f280 \ + --hash=sha256:9c08b01f8571d2c64d45d569990aa7bffad5eb259cf64bc329d40d8c787fb9ba \ + --hash=sha256:9dcddbb3b6e1662c6db41d63db539742450e2ce17d6c746329c016e3651bfb4a \ + --hash=sha256:a1a9a126ce48f0f0893891adb5a9749220425169092f3e4da1216168736ac16d \ + --hash=sha256:ba3e3b40cc1d5a980d36589775d6a7e4defa1b33e7e06423af0e395b8e4d9505 virtualenv==20.26.6 \ --hash=sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48 \ --hash=sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2 +wheel==0.44.0 \ + --hash=sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f \ + --hash=sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49 From 3ba7772673377096dd5a6e12ceb59bab13662db3 Mon Sep 17 00:00:00 2001 From: Alyssa Coghlan Date: Mon, 21 Oct 2024 18:00:46 +1000 
Subject: [PATCH 3/3] Keep LMStudio team email in metadata --- pyproject.toml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 7303c7e..3e9bf8e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,6 +5,10 @@ description = "Use layered Python virtual environment stacks to share large depe authors = [ {name = "Alyssa Coghlan", email = "ncoghlan@gmail.com"}, ] +maintainers = [ + {name = "LMStudio", email = "team@lmstudio.ai"}, +] + dependencies = [ # Environment package installation is run externally (from the build tools environment) "pip>=24.1.1",