From 775889122385cd5cfb5cdeb95d3d85cc23ca48d2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sondre=20Lilleb=C3=B8=20Gundersen?= Date: Mon, 24 Jun 2024 19:50:29 +0200 Subject: [PATCH] Remove Rewrite the action in Rust This rewrites the action from the ground-up to solve most outstanding issues and missing features. The API is consolidated and streamlined (+ largely broken); the performance and logging is improved; there's support for new token types; we can now handle multi-arch images; etc. See the v3 release notes when they're out, for more details. --- .github/dependabot.yaml | 8 + .github/get_version.py | 20 - .github/workflows/clean_packages.yml | 80 - .../workflows/{build.yml => live_test.yaml} | 11 +- .github/workflows/release.yaml | 116 ++ .github/workflows/tag.yml | 21 - .github/workflows/test.yaml | 31 + .github/workflows/test.yml | 57 - .gitignore | 1 + .pre-commit-config.yaml | 97 +- Cargo.lock | 1797 +++++++++++++++++ Cargo.toml | 40 + DEVELOPMENT.md | 38 + Dockerfile | 39 + LICENSE | 7 + MIGRATION.md | 108 + README.md | 627 +++--- action.yaml | 70 + action.yml | 112 - deny.toml | 28 + justfile | 64 + main.py | 637 ------ main_tests.py | 672 ------ poetry.lock | 924 --------- pyproject.toml | 46 - rustfmt.toml | 1 + setup.cfg | 34 - src/cli/args.rs | 247 +++ src/cli/mod.rs | 2 + src/cli/models.rs | 108 + src/client/builder.rs | 245 +++ src/client/client.rs | 662 ++++++ src/client/headers.rs | 87 + src/client/mod.rs | 5 + src/client/models.rs | 39 + src/client/urls.rs | 77 + src/core/delete_package_versions.rs | 95 + src/core/mod.rs | 3 + src/core/select_package_versions.rs | 670 ++++++ src/core/select_packages.rs | 128 ++ src/main.rs | 158 ++ src/matchers.rs | 128 ++ 42 files changed, 5470 insertions(+), 2870 deletions(-) create mode 100644 .github/dependabot.yaml delete mode 100644 .github/get_version.py delete mode 100644 .github/workflows/clean_packages.yml rename .github/workflows/{build.yml => live_test.yaml} (93%) create mode 100644 
.github/workflows/release.yaml delete mode 100644 .github/workflows/tag.yml create mode 100644 .github/workflows/test.yaml delete mode 100644 .github/workflows/test.yml create mode 100644 Cargo.lock create mode 100644 Cargo.toml create mode 100644 DEVELOPMENT.md create mode 100644 Dockerfile create mode 100644 LICENSE create mode 100644 MIGRATION.md create mode 100644 action.yaml delete mode 100644 action.yml create mode 100644 deny.toml create mode 100644 justfile delete mode 100644 main.py delete mode 100644 main_tests.py delete mode 100644 poetry.lock delete mode 100644 pyproject.toml create mode 100644 rustfmt.toml delete mode 100644 setup.cfg create mode 100644 src/cli/args.rs create mode 100644 src/cli/mod.rs create mode 100644 src/cli/models.rs create mode 100644 src/client/builder.rs create mode 100644 src/client/client.rs create mode 100644 src/client/headers.rs create mode 100644 src/client/mod.rs create mode 100644 src/client/models.rs create mode 100644 src/client/urls.rs create mode 100644 src/core/delete_package_versions.rs create mode 100644 src/core/mod.rs create mode 100644 src/core/select_package_versions.rs create mode 100644 src/core/select_packages.rs create mode 100644 src/main.rs create mode 100644 src/matchers.rs diff --git a/.github/dependabot.yaml b/.github/dependabot.yaml new file mode 100644 index 0000000..815839d --- /dev/null +++ b/.github/dependabot.yaml @@ -0,0 +1,8 @@ +version: 2 +updates: + - package-ecosystem: github-actions + directory: / + schedule: + interval: weekly + reviewers: + - sondrelg diff --git a/.github/get_version.py b/.github/get_version.py deleted file mode 100644 index da76ee8..0000000 --- a/.github/get_version.py +++ /dev/null @@ -1,20 +0,0 @@ -""" -Simple script used to tag our releases with major and minor git tags. - -This lets users use the action with @v1 or @v1.1 references, and not have -to use the complete tag (with patch version specified). 
-""" - -import sys - -from packaging import version - -if __name__ == '__main__': - ref = sys.argv[1] # ref will usually look like refs/tags/v1.0.1 - major = sys.argv[2] == 'major' - version = version.parse(ref.split('refs/tags/v')[1]) - - if major: - print(f'v{version.major}') - else: - print(f'v{version.major}.{version.minor}') diff --git a/.github/workflows/clean_packages.yml b/.github/workflows/clean_packages.yml deleted file mode 100644 index b68ede7..0000000 --- a/.github/workflows/clean_packages.yml +++ /dev/null @@ -1,80 +0,0 @@ -name: Delete old container images - -on: workflow_dispatch - -jobs: - build-containers: - name: Build a few images - runs-on: ubuntu-latest - env: - IMAGE: ghcr.io/sondrelg/ghcr-retention-policy-test - steps: - - uses: actions/checkout@v3 - - uses: docker/setup-buildx-action@v1 - - run: docker login ghcr.io -u sondrelg --password-stdin <<< ${{ secrets.PAT }} - # Each build should be different because of the $RANDOM addition - - name: Build latest - run: | - randomString=$(LC_ALL=C tr -dc A-Za-z Dockerfile.test - cat Dockerfile.test - docker buildx build . -f Dockerfile.test -t "${{ env.IMAGE }}:latest" --cache-to=type=inline --cache-from="ghcr.io/sondrelg/ghcr-retention-policy-test:latest" --push - - name: Build latest - run: | - randomString=$(LC_ALL=C tr -dc A-Za-z Dockerfile.test - cat Dockerfile.test - docker buildx build . -f Dockerfile.test -t "${{ env.IMAGE }}:latest" --cache-to=type=inline --cache-from="ghcr.io/sondrelg/ghcr-retention-policy-test:latest" --push - - name: Build latest - run: | - randomString=$(LC_ALL=C tr -dc A-Za-z Dockerfile.test - cat Dockerfile.test - docker buildx build . -f Dockerfile.test -t "${{ env.IMAGE }}:latest" --cache-to=type=inline --cache-from="ghcr.io/sondrelg/ghcr-retention-policy-test:latest" --push - - name: Build latest - run: | - randomString=$(LC_ALL=C tr -dc A-Za-z Dockerfile.test - cat Dockerfile.test - docker buildx build . 
-f Dockerfile.test -t "${{ env.IMAGE }}:latest" --cache-to=type=inline --cache-from="ghcr.io/sondrelg/ghcr-retention-policy-test:latest" --push - - clean-ghcr: - needs: build-containers - name: Then delete them - runs-on: ubuntu-latest - steps: - - name: Cache image versions to skip - uses: actions/cache@v3 - id: cache - with: - path: skip-image-versions.txt - # key will always *not* match for a new commit, but - # restore-key will match if there's a previously stored - # file; so cache will *both be loaded and stored*. - key: image-versions-to-skip-${{ github.sha }} - restore-keys: image-versions-to-skip - - - name: Create file if it doesn't exist - run: touch skip-image-versions.txt - if: steps.cache.outputs.cache-hit != 'true' - - - name: Set image-versions output - id: image-version - run: | - images="$(cat skip-image-versions.txt)" - echo $images - echo "image-versions=$images" >> $GITHUB_OUTPUT - - - name: Delete images more than 2 seconds old - uses: snok/container-retention-policy@${{ github.ref_name }} - id: delete-images - with: - image-names: ghcr-retention-policy-test - cut-off: 2 seconds ago UTC+2 - account-type: personal - token: ${{ secrets.PAT }} - skip-tags: ${{ steps.image-version.outputs.image-versions }} - - - name: Write skipped tags to cache - run: | - echo "${{ steps.delete-images.outputs.failed }}" > skip-image-versions.txt diff --git a/.github/workflows/build.yml b/.github/workflows/live_test.yaml similarity index 93% rename from .github/workflows/build.yml rename to .github/workflows/live_test.yaml index ac45c7f..81b7b66 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/live_test.yaml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest name: Ubuntu with classic personal access token steps: - - uses: snok/container-retention-policy@v3-develop + - uses: snok/container-retention-policy@main name: Delete test-1-* images with a temporal token with: account: snok @@ -30,7 +30,7 @@ jobs: app-id: 911530 private-key: ${{ 
secrets.GH_APP_PRIVATE_KEY }} - - uses: snok/container-retention-policy@v3-develop + - uses: snok/container-retention-policy@main name: Delete test-2-* images with an Github app token with: account: snok @@ -43,7 +43,7 @@ jobs: dry-run: false rust-log: container_retention_policy=debug - - uses: snok/container-retention-policy@v3-develop + - uses: snok/container-retention-policy@main name: Delete remaining test images with a PAT with: account: snok @@ -77,11 +77,14 @@ jobs: echo "FROM scratch" echo "COPY --from=builder /test.txt ." } > Dockerfile + imageName="ghcr.io/snok/container-retention-policy:test-${i}" + docker build -f Dockerfile -t "$imageName" --push . + for ((j=1; j<=3; j++)) do docker tag "$imageName" "ghcr.io/snok/container-retention-policy:test-${i}-${j}" docker push "ghcr.io/snok/container-retention-policy:test-${i}-${j}" done - done \ No newline at end of file + done diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml new file mode 100644 index 0000000..13ba460 --- /dev/null +++ b/.github/workflows/release.yaml @@ -0,0 +1,116 @@ +# See https://docs.docker.com/build/ci/github-actions/multi-platform/ for docs + +name: Create multi-platform container image + +on: + workflow_dispatch: + inputs: + version-tag: + description: The image tag to build for + required: true + type: string + push: + tags: + - v3* +concurrency: + group: ${{ github.ref }} + cancel-in-progress: true + +env: + REGISTRY_IMAGE: ghcr.io/snok/container-retention-policy + +jobs: + build: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + platform: + - linux/amd64 + - linux/arm64 + include: + - platform: linux/amd64 + arch: amd64 + target_dir: x86_64-unknown-linux-musl + - platform: linux/arm64 + arch: arm64 + target_dir: aarch64-unknown-linux-musl + steps: + - name: Prepare + run: | + platform="${{ matrix.platform }}" + echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV + + - uses: actions/checkout@v4 + - uses: docker/setup-qemu-action@v3 + - 
uses: docker/setup-buildx-action@v3 + - uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Docker meta + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY_IMAGE }} + + - name: Build and push by digest + id: build + uses: docker/build-push-action@v5 + with: + context: . + platforms: ${{ matrix.platform }} + labels: ${{ steps.meta.outputs.labels }} + outputs: type=image,name=${{ env.REGISTRY_IMAGE }},push-by-digest=true,name-canonical=true,push=true + + - name: Export digest + run: | + mkdir -p /tmp/digests + digest="${{ steps.build.outputs.digest }}" + touch "/tmp/digests/${digest#sha256:}" + + - name: Upload digest + uses: actions/upload-artifact@v4 + with: + name: digests-${{ env.PLATFORM_PAIR }} + path: /tmp/digests/* + if-no-files-found: error + retention-days: 1 + + merge: + runs-on: ubuntu-latest + needs: + - build + steps: + - name: Download digests + uses: actions/download-artifact@v4 + with: + path: /tmp/digests + pattern: digests-* + merge-multiple: true + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Docker meta + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY_IMAGE }} + + - name: Create manifest list and push + working-directory: /tmp/digests + run: | + docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) 
| join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ + $(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *) + + - name: Inspect image + run: docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }} diff --git a/.github/workflows/tag.yml b/.github/workflows/tag.yml deleted file mode 100644 index d35c450..0000000 --- a/.github/workflows/tag.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Tag releases with minor and major version - -on: - release: - types: [published, edited] - -jobs: - tag-v1: - name: Tag v1 - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Update tag - run: | - major_tag="$(python .github/get_version.py "${GITHUB_REF}" major)" - minor_tag="$(python .github/get_version.py "${GITHUB_REF}" minor)" - git tag $major_tag - git tag $minor_tag - git push origin HEAD:refs/heads/master --tags --force - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml new file mode 100644 index 0000000..12497b3 --- /dev/null +++ b/.github/workflows/test.yaml @@ -0,0 +1,31 @@ +name: Test + +on: + pull_request: + push: + branches: + - main + +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: swatinem/rust-cache@v2 + + # This is already installed on GitHub's runners + - run: cargo fmt --check + + - run: rustup toolchain install stable --profile minimal && rustup component add clippy + - run: rustup toolchain install nightly --profile minimal + - uses: cargo-bins/cargo-binstall@main + - run: cargo binstall cargo-udeps --locked --no-confirm --force + - run: cargo binstall cargo-deny --locked --no-confirm --force + - run: cargo binstall cargo-audit --locked --no-confirm + - run: pip install pre-commit && pre-commit install + - uses: actions/cache@v4 + with: + path: ~/.cache/pre-commit/ + key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }} + - run: cargo build # needed for `assert_cmd` tests + - run: pre-commit run 
--all-files diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml deleted file mode 100644 index 0d0f649..0000000 --- a/.github/workflows/test.yml +++ /dev/null @@ -1,57 +0,0 @@ -name: Test - -on: - pull_request: - push: - branches: - - main - -jobs: - linting: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v5 - with: - python-version: "3.11.2" - - uses: actions/cache@v3 - id: cache - with: - path: | - .venv - ~/.cache/pre-commit - key: venv-1 - - run: | - python -m venv .venv --upgrade-deps - source .venv/bin/activate - pip install pre-commit - if: steps.cache.outputs.cache-hit != 'true' - - run: | - source .venv/bin/activate - pre-commit run --all-files - - test: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v5 - with: - python-version: "3.11.2" - - uses: actions/cache@v3 - id: cache - with: - path: | - ~/.local - .venv - key: ${{ hashFiles('**/poetry.lock') }}-1 - - uses: snok/install-poetry@v1 - with: - virtualenvs-in-project: true - - run: poetry install --no-interaction --no-root - if: steps.cache.outputs.cache-hit != 'true' - - run: source $VENV && pytest main_tests.py --cov-report=xml - - uses: codecov/codecov-action@v3 - with: - file: ./coverage.xml - fail_ci_if_error: true - token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.gitignore b/.gitignore index 1d2b3aa..6028328 100644 --- a/.gitignore +++ b/.gitignore @@ -4,3 +4,4 @@ .mypy_cache/ __pycache__/ .coverage +target diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0a048f9..e61eed4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,46 +1,73 @@ repos: - - repo: https://github.com/ambv/black - rev: 23.7.0 + - repo: local hooks: - - id: black - args: ['--quiet'] - - repo: https://github.com/pycqa/isort - rev: 5.12.0 + - id: test + name: cargo fmt + entry: cargo fmt + language: system + pass_filenames: false + + - repo: local hooks: - - id: isort + - id: 
clippy + name: cargo clippy + entry: cargo clippy + language: system + pass_filenames: false + + - repo: local + hooks: + - id: test + name: cargo test + entry: cargo test + language: system + pass_filenames: false + + - repo: local + hooks: + - id: cargo-deny + name: cargo deny + entry: cargo deny check --hide-inclusion-graph + language: system + pass_filenames: false + + - repo: local + hooks: + - id: cargo-udeps + name: cargo udeps + entry: cargo +nightly udeps + language: system + pass_filenames: false + + - repo: local + hooks: + - id: cargo-audit + name: cargo audit + entry: cargo audit + language: system + pass_filenames: false + - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.6.0 hooks: - - id: check-ast - - id: check-added-large-files - id: check-merge-conflict - id: check-case-conflict - - id: check-docstring-first - id: check-json - - id: check-yaml - - id: double-quote-string-fixer - id: end-of-file-fixer - id: trailing-whitespace - id: mixed-line-ending - - id: trailing-whitespace - - repo: https://github.com/asottile/pyupgrade - rev: v3.10.1 - hooks: - - id: pyupgrade - args: ['--py3-plus', '--py36-plus', '--py37-plus', '--py38-plus', '--py39-plus', '--py310-plus', '--py311-plus'] - - repo: https://github.com/pycqa/flake8 - rev: 6.1.0 - hooks: - - id: flake8 - additional_dependencies: [ - 'flake8-bugbear', - 'flake8-comprehensions', - 'flake8-deprecated', - 'flake8-use-fstring', - 'flake8-type-checking', - ] - - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.4.1' - hooks: - - id: mypy - additional_dependencies: [types-dateparser] + - id: double-quote-string-fixer + + - repo: https://github.com/sondrelg/disallow-file-endings + rev: v0.1.0 + hooks: + - id: disallow-file-extensions + args: + - --extensions=.yml + + - repo: https://github.com/rhysd/actionlint + rev: v1.7.1 + hooks: + - id: actionlint + args: + - --shellcheck= diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 0000000..43340d7 --- 
/dev/null +++ b/Cargo.lock @@ -0,0 +1,1797 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "addr2line" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anstream" +version = "0.6.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" + +[[package]] +name = "anstyle-parse" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad186efb764318d35165f1758e7dcef3b10628e26d41a44bc5550652e6804391" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" +dependencies = [ + "anstyle", + "windows-sys 0.52.0", +] + +[[package]] +name = "arrayvec" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" + +[[package]] +name = "assert_cmd" +version = "2.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed72493ac66d5804837f480ab3766c72bdfab91a65e565fc54fa9e42db0073a8" +dependencies = [ + "anstyle", + "bstr", + "doc-comment", + "predicates", + "predicates-core", + "predicates-tree", + "wait-timeout", +] + +[[package]] +name = "autocfg" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" + +[[package]] +name = "backtrace" +version = "0.3.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "bstr" +version = "1.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"05efc5cfd9110c8416e471df0e96702d58690178e206e61b7173706673c93706" +dependencies = [ + "memchr", + "regex-automata 0.4.7", + "serde", +] + +[[package]] +name = "bumpalo" +version = "3.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" + +[[package]] +name = "bytes" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" + +[[package]] +name = "cc" +version = "1.0.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c891175c3fb232128f48de6590095e59198bbeb8620c310be349bfc3afd12c7b" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "num-traits", + "serde", + "windows-targets 0.52.5", +] + +[[package]] +name = "clap" +version = "4.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5db83dced34638ad474f39f250d7fea9598bdd239eaced1bdf45d597da0f433f" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7e204572485eb3fbf28f871612191521df159bc3e15a9f5064c66dba3a8c05f" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c780290ccf4fb26629baa7a1081e68ced113f1d3ec302fa5948f1c381ebf06c6" +dependencies = [ + "heck", + 
"proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "clap_lex" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70" + +[[package]] +name = "color-eyre" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55146f5e46f237f7423d74111267d4597b59b0dad0ffaf7303bce9945d843ad5" +dependencies = [ + "backtrace", + "eyre", + "indenter", + "once_cell", + "owo-colors", +] + +[[package]] +name = "colorchoice" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" + +[[package]] +name = "console" +version = "0.15.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" +dependencies = [ + "encode_unicode", + "lazy_static", + "libc", + "unicode-width", + "windows-sys 0.52.0", +] + +[[package]] +name = "container-retention-policy" +version = "3.0.0" +dependencies = [ + "assert_cmd", + "chrono", + "clap", + "color-eyre", + "humantime", + "indicatif", + "lazy_static", + "regex", + "reqwest", + "secrecy", + "serde", + "serde_json", + "tokio", + "tower", + "tracing", + "tracing-indicatif", + "tracing-subscriber", + "tracing-test", + "url", + "urlencoding", + "wildmatch", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" + +[[package]] +name = "difflib" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" + +[[package]] +name = "doc-comment" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" + +[[package]] +name = "encode_unicode" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" + +[[package]] +name = "eyre" +version = "0.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec" +dependencies = [ + "indenter", + "once_cell", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures-channel" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +dependencies = [ + "futures-core", +] + +[[package]] +name = "futures-core" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" + +[[package]] +name = "futures-sink" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" + +[[package]] +name = "futures-task" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" + +[[package]] +name = "futures-util" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +dependencies = [ + "futures-core", + "futures-task", + "pin-project-lite", + "pin-utils", +] + +[[package]] +name = "getrandom" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "gimli" +version = "0.28.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hermit-abi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" + +[[package]] +name = "http" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +dependencies = [ + "bytes", + "http", +] + +[[package]] +name = "http-body-util" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +dependencies = [ + "bytes", + "futures-util", + "http", + "http-body", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fcc0b4a115bf80b728eb8ea024ad5bd707b615bfed49e0665b6e0f86fd082d9" + 
+[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "hyper" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe575dd17d0862a9a33781c8c4696a55c320909004a67a00fb286ba8b1bc496d" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http", + "http-body", + "httparse", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ee4be2c948921a1a5320b629c4193916ed787a7f7f293fd3f7f5a6c9de74155" +dependencies = [ + "futures-util", + "http", + "hyper", + "hyper-util", + "rustls", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tower-service", + "webpki-roots", +] + +[[package]] +name = "hyper-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b875924a60b96e5d7b9ae7b066540b1dd1cbd90d1828f54c92e02a283351c56" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http", + "http-body", + "hyper", + "pin-project-lite", + "socket2", + "tokio", + "tower", + "tower-service", + "tracing", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "idna" +version = "0.5.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "indenter" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" + +[[package]] +name = "indicatif" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3" +dependencies = [ + "console", + "instant", + "number_prefix", + "portable-atomic", + "unicode-width", + "vt100", +] + +[[package]] +name = "instant" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "ipnet" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" + +[[package]] +name = "itoa" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" + +[[package]] +name = "js-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] 
+name = "libc" +version = "0.2.155" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" + +[[package]] +name = "log" +version = "0.4.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "memchr" +version = "2.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "miniz_oxide" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.48.0", +] + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + 
+[[package]] +name = "num_cpus" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "number_prefix" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" + +[[package]] +name = "object" +version = "0.32.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "owo-colors" +version = "3.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f" + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pin-project" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + 
+[[package]] +name = "pin-project-lite" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "portable-atomic" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "predicates" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68b87bfd4605926cdfefc1c3b5f8fe560e3feca9d5552cf68c466d3d8236c7e8" +dependencies = [ + "anstyle", + "difflib", + "predicates-core", +] + +[[package]] +name = "predicates-core" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b794032607612e7abeb4db69adb4e33590fa6cf1149e95fd7cb00e634b92f174" + +[[package]] +name = "predicates-tree" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "368ba315fb8c5052ab692e68a0eefec6ec57b23a36959c14496f0b0df2c0cecf" +dependencies = [ + "predicates-core", + "termtree", +] + +[[package]] +name = "proc-macro2" +version = "1.0.86" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quinn" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4ceeeeabace7857413798eb1ffa1e9c905a9946a57d81fb69b4b71c4d8eb3ad" +dependencies = [ 
+ "bytes", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash", + "rustls", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "quinn-proto" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddf517c03a109db8100448a4be38d498df8a210a99fe0e1b9eaf39e78c640efe" +dependencies = [ + "bytes", + "rand", + "ring", + "rustc-hash", + "rustls", + "slab", + "thiserror", + "tinyvec", + "tracing", +] + +[[package]] +name = "quinn-udp" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9096629c45860fc7fb143e125eb826b5e721e10be3263160c7d60ca832cf8c46" +dependencies = [ + "libc", + "once_cell", + "socket2", + "tracing", + "windows-sys 0.52.0", +] + +[[package]] +name = "quote" +version = "1.0.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "regex" +version = "1.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b91213439dad192326a0d7c6ee3955910425f441d7038e0d6933b0aec5c4517f" +dependencies = [ + "aho-corasick", + "memchr", + 
"regex-automata 0.4.7", + "regex-syntax 0.8.4", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] + +[[package]] +name = "regex-automata" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax 0.8.4", +] + +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "regex-syntax" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" + +[[package]] +name = "reqwest" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7d6d2a27d57148378eb5e111173f4276ad26340ecc5c49a4a2152167a2d6a37" +dependencies = [ + "base64", + "bytes", + "futures-core", + "futures-util", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-util", + "ipnet", + "js-sys", + "log", + "mime", + "once_cell", + "percent-encoding", + "pin-project-lite", + "quinn", + "rustls", + "rustls-pemfile", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tokio-rustls", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "webpki-roots", + "winreg", +] + +[[package]] +name = "ring" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +dependencies = [ + "cc", + "cfg-if", + "getrandom", + "libc", + "spin", 
+ "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" + +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + +[[package]] +name = "rustls" +version = "0.23.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05cff451f60db80f490f3c182b77c35260baace73209e9cdbbe526bfe3a4d402" +dependencies = [ + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-pemfile" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29993a25686778eb88d4189742cd713c9bce943bc54251a33509dc63cbacf73d" +dependencies = [ + "base64", + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "976295e77ce332211c0d24d92c0e83e50f5c5f046d11082cea19f3df13a3562d" + +[[package]] +name = "rustls-webpki" +version = "0.102.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff448f7e92e913c4b7d4c6d8e4540a1724b319b4152b8aef6d4cf8339712b33e" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + +[[package]] +name = "ryu" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" + +[[package]] +name = "secrecy" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9bd1c54ea06cfd2f6b63219704de0b9b4f72dcc2b8fdef820be6cd799780e91e" +dependencies = [ + "zeroize", +] + +[[package]] +name = "serde" +version = "1.0.203" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7253ab4de971e72fb7be983802300c30b5a7f0c2e56fab8abfc6a214307c0094" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.203" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.117" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" + +[[package]] +name = "socket2" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "spin" +version = "0.9.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "subtle" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d0208408ba0c3df17ed26eb06992cb1a1268d41b2c0e12e65203fbe3972cee5" + +[[package]] +name = "syn" +version = "2.0.67" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff8655ed1d86f3af4ee3fd3263786bc14245ad17c4c7e85ba7187fb3ae028c90" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" + +[[package]] +name = "termtree" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" + +[[package]] +name = "thiserror" +version = "1.0.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thread_local" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +dependencies = [ + "cfg-if", + "once_cell", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.38.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba4f4a02a7a80d6f274636f0aa95c7e383b912d41fe721a31f29e29698585a4a" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "num_cpus", + "pin-project-lite", + "socket2", + "tokio-macros", + "windows-sys 0.48.0", +] + +[[package]] +name = "tokio-macros" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" +dependencies = [ + "rustls", + "rustls-pki-types", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "pin-project", + "pin-project-lite", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +dependencies = [ + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-indicatif" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "069580424efe11d97c3fef4197fa98c004fa26672cc71ad8770d224e23b1951d" +dependencies = [ + "indicatif", + "tracing", + "tracing-core", + "tracing-subscriber", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + 
"sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "tracing-test" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "557b891436fe0d5e0e363427fc7f217abf9ccd510d5136549847bdcbcd011d68" +dependencies = [ + "tracing-core", + "tracing-subscriber", + "tracing-test-macro", +] + +[[package]] +name = "tracing-test-macro" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04659ddb06c87d233c566112c1c9c5b9e98256d9af50ec3bc9c8327f873a7568" +dependencies = [ + "quote", + "syn", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "unicode-bidi" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "unicode-normalization" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-width" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "urlencoding" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "vt100" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84cd863bf0db7e392ba3bd04994be3473491b31e66340672af5d11943c6274de" +dependencies = [ + "itoa", + "log", + "unicode-width", + "vte", +] + +[[package]] +name = "vte" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5022b5fbf9407086c180e9557be968742d839e68346af7792b8592489732197" +dependencies = [ + "arrayvec", + "utf8parse", + "vte_generate_state_changes", +] + +[[package]] +name = "vte_generate_state_changes" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e369bee1b05d510a7b4ed645f5faa90619e05437111783ea5848f28d97d3c2e" +dependencies = [ + "proc-macro2", + "quote", +] + +[[package]] +name = "wait-timeout" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" +dependencies = [ + "libc", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" +dependencies = [ + "cfg-if", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" + +[[package]] +name = "web-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki-roots" +version = "0.26.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd7c23921eeb1713a4e851530e9b9756e4fb0e89978582942612524cf09f01cd" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "wildmatch" +version = "2.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3928939971918220fed093266b809d1ee4ec6c1a2d72692ff6876898f3b16c19" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-core" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +dependencies = [ + "windows-targets 0.52.5", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] 
+name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.5", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" +dependencies = [ + "windows_aarch64_gnullvm 0.52.5", + "windows_aarch64_msvc 0.52.5", + "windows_i686_gnu 0.52.5", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.5", + "windows_x86_64_gnu 0.52.5", + "windows_x86_64_gnullvm 0.52.5", + "windows_x86_64_msvc 0.52.5", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" + +[[package]] +name = "windows_x86_64_msvc" 
+version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" + +[[package]] +name = "winreg" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..1fe2086 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,40 @@ +[package] +name = "container-retention-policy" +version = "3.0.0" +edition = "2021" +license = "MIT" + +[dependencies] +clap = { version = "4.5.4", features = ["derive"]} +chrono = { version="0.4.37" , features=["serde", "clock"], default-features = false} +color-eyre = { version = "0.6.3", default-features = false } +humantime = "2.1.0" +indicatif = { version = "0.17.8", default-features = false } +lazy_static = { version = "1.4.0" , default-features = false} +regex = { version = "1.10.4", default-features = false } +reqwest = {version = "0.12.2", features = ["json", "rustls-tls"], default-features = false } +secrecy = { version = "0.8.0" } +serde = { version = "1.0.197", features = ["derive"], default-features = false } +serde_json = { version = "1.0.115", default-features = false } +tokio = { version = "1.36.0", features = ["rt-multi-thread", "macros"], default-features = false } +tower = { version = "0.4.13", default-features = false, features = ["limit"] } +tracing = { version = "0.1.40", default-features = false } 
+tracing-subscriber = { version = "0.3.18", features = ["env-filter"], default-features = false } +tracing-indicatif = "0.3.6" +url = { version = "2.5.0" , default-features = false} +urlencoding = { version="2.1.3" } +wildmatch = { version = "2.3.3" } + +[dev-dependencies] +assert_cmd = "2.0.14" +tracing-test = "0.2.4" +wildmatch = { version = "2.3.3" } + +[profile.release] +# https://github.com/johnthagen/min-sized-rust +# Optimize release profile for size, as the runtime of the action +# is bottlenecked by GitHub's API response times, not the speed of our code. +lto = true +strip = true +opt-level = "z" +codegen-units = 1 diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md new file mode 100644 index 0000000..e54237d --- /dev/null +++ b/DEVELOPMENT.md @@ -0,0 +1,38 @@ +# Creating a release + +To create a release we need to: + +1. Manually trigger the [`release`](.github/workflows/release.yaml) workflow to build new images +2. Update the image tag in the [action.yaml](action.yaml) +3. Push the change and create a GitHub release post for the repo + +# Running lints and tests + +Install [pre-commit](https://pre-commit.com/) (e.g., using `pip install pre-commit`), +then run `pre-commit run --all-files` before submitting a PR. + +All the cargo components we run can be installed by calling `cargo install just && just setup`. +This will install [just](https://github.com/casey/just) and run the `setup` script +in the local [justfile](./justfile). + +If you prefer not to install any of these components, that's fine. Just submit a PR, +then fix errors as they're caught in CI. + +# Integration testing + +Since the action fundamentally depends on the GitHub container registry, +the only real way to test (that I've thought of at least) is to simply +upload real images and run the binary with dry-run on and off. + +To upload images, see the [live_test workflow](./.github/workflows/live_test.yaml) +where we do the same thing.
+ +To run the binary, see the `run` command in the [justfile](./justfile). If you run this, +you'll need an `.env` file containing the token you want to pass. + +# Pruning unused features + +You might notice that there's a lot of disabled features in the [Cargo.toml](./Cargo.toml). +This might be redundant, but is a measure for trying to minimize the binary size. We've +used [cargo-unused-features](https://crates.io/crates/cargo-unused-features) and the +`unused-features analyze` command to aid in identifying redundant features. diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..57fd57f --- /dev/null +++ b/Dockerfile @@ -0,0 +1,39 @@ +# Stage 1: Build the binary +FROM --platform=$BUILDPLATFORM clux/muslrust:stable as builder + +ARG TARGETPLATFORM + +# Set a default value for the target environment variable +ENV TARGET_ENV="x86_64-unknown-linux-musl" + +# Conditionally set the environment variable based on the platform +RUN --mount=type=cache,target=/root/.cache \ + if [ "$TARGETPLATFORM" = "linux/arm64" ]; then \ + export TARGET_ENV="aarch64-unknown-linux-musl"; \ + fi && echo "TARGET_ENV=$TARGET_ENV" + +# Create a non-root user +RUN groupadd -g 10001 -r dockergrp && useradd -r -g dockergrp -u 10001 dockeruser + +# Download dependencies ala cargo chef +WORKDIR /app +COPY Cargo.lock Cargo.toml ./ +RUN mkdir src && echo "fn main() { print!(\"Dummy main\"); }" > src/main.rs +RUN cargo build --release +RUN rm target/$TARGET_ENV/release/deps/container_retention_policy* && rm -r src + +# Build the actual binary +COPY src ./src +RUN cargo build --release +RUN strip target/$TARGET_ENV/release/container-retention-policy + +RUN mkdir /build-out && cp target/$TARGET_ENV/release/container-retention-policy /build-out/container-retention-policy + +# Stage 2: Create a minimal image +FROM scratch + +COPY --from=builder /etc/passwd /etc/passwd +COPY --from=builder /build-out/container-retention-policy /container-retention-policy + +USER dockeruser +ENTRYPOINT 
["/container-retention-policy"] diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..e6e0a7a --- /dev/null +++ b/LICENSE @@ -0,0 +1,7 @@ +Copyright 2024 Sondre Lillebø Gundersen + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/MIGRATION.md b/MIGRATION.md new file mode 100644 index 0000000..ad4b008 --- /dev/null +++ b/MIGRATION.md @@ -0,0 +1,108 @@ +# v3.0.0 + +💥 Beware, this release breaks the API of the action to a large degree. It might be wise to run the action with `dry-run: true` after upgrading. + +After a period of incrementally adopting features, the action arguments have become unnecessarily confusing and the API has become bloated. This release consolidates and streamlines the API. + +The new release also adds support for a lot of new features, and fixes most long-standing issues. + +**New features** + +- Support for multi-arch images. 
See the new section in the [README.md](https://github.com/snok/container-retention-policy/blob/main/README.md#safely-handling-multi-platform-multi-arch-packages) for details. +- Support for GitHub app tokens ([docs](https://docs.github.com/en/apps/creating-github-apps/authenticating-with-a-github-app/generating-a-user-access-token-for-a-github-app)) +- Support for GitHub temporal tokens (`secrets.GITHUB_TOKEN`) ([docs](https://docs.github.com/en/actions/security-guides/automatic-token-authentication#about-the-github_token-secret)) +- Proper handling of primary and secondary rate limits ([docs](https://docs.github.com/en/rest/using-the-rest-api/rate-limits-for-the-rest-api?apiVersion=2022-11-28)) +- The available syntax for `image-names` and `image-tags` previously allowed wildcards (using the `*` character). We now also allow the `?` character to express a single-character wildcard. For example, the pattern `ca?` will match `car` and `cat`. See [wildmatch docs](https://github.com/becheran/wildmatch) for details. +- Significant effort has been spent on improving the logging, to give better insights into what exactly is happening +- Updated license from `BSD-3` to `MIT`. + +**Breaking changes** + +- Over half of the arguments have changed. See the [migration guide](#migration-guide) below for details. +- The [`needs-assistance` output](https://github.com/snok/container-retention-policy/tree/575226aa6cf28ee190c6611e8cc20d545264f443?tab=readme-ov-file#needs-github-assistance) was deleted, since it seemed unlikely to ever be used. +- We will not maintain mutable major and minor version tags for the action going forward. In other words, there will be no `v3` target for the action, just `v3.0.0` and other exact versions. In my experience, a mutable major version tag is not much safer than using `@main`.
More precise tag tracking is safer for most, and pairs well with [dependabot](https://docs.github.com/en/code-security/dependabot/working-with-dependabot/keeping-your-actions-up-to-date-with-dependabot) if you don't want to track new versions yourself. + +**Performance improvements** + +- The action has been rewritten from a [composite action](https://docs.github.com/en/actions/creating-actions/creating-a-composite-action) to a [container action](https://docs.github.com/en/actions/creating-actions/creating-a-docker-container-action), and the total size of the new image is < 10Mi. +- The action would previously take ~30 seconds to initialize and would need a Python runtime. The action now starts in less than a second, and runs as a standalone binary. +- The runtime of the action has been reduced, and assuming we need to fetch less than 100 package versions, the action completes in, at most, a few seconds. + +## Migration guide + +- The `account-type` and `org-name` inputs have been replaced with `account`, which should be set to the literal string "user" if you previously used `account-type: personal` and to the organization name otherwise: + + ```diff + - account-type: personal + + account: user + ``` + + or + + ```diff + - account-type: organization + - org-name: acme + + account: acme + ``` + +- The `filter-tags` key was renamed to `image-tags` + + ```diff + - filter-tags: *-prod + + image-tags: *-prod + ``` + +- The `token-type` input has been removed. If you previously used `token-type: github-token`, then you should make the following change: + + ```diff + - token-type: github-token + + token: ${{ secrets.GITHUB_TOKEN }} + ``` + + In other words, we've consolidated `token-type` and `token` into a single arg. + +- The `skip-tags` input has been removed. If you previously used `skip-tags: latest`, you should now specify a negative glob pattern in `image-tags`. 
+ + ```diff + - filter-tags: l* + - skip-tags: latest + + image-tags: l*, !latest + ``` + + In other words, we've consolidated the two arguments, by adding support for the `!` operator, which means "not". + +- The `filter-include-untagged` and `untagged-only` inputs were removed. + + `filter-include-untagged` previously enabled you to opt-out of deleting untagged images, while `untagged-only` would allow you to opt-out of deleting tagged images. This was a bit confusing, even for me. + + To make things simpler, these have been collapsed into one argument, called `tag-selection` which accepts the string values `tagged`, `untagged`, or `both`. + + ```diff + - filter-include-untagged: true + - untagged-only: false + + tag-selection: both + ``` + + or + + ```diff + - filter-include-untagged: true + - untagged-only: true + + tag-selection: untagged + ``` + +- The `cut-off` input no longer accepts human-readable datetimes. Instead, it accepts the inputs listed [here](https://github.com/tailhook/humantime). For example: + + ```diff + - cut-off: two hours and 5 minutes ago UTC+2 + + cut-off: 2h 5m + ``` + + or + + ```diff + - cut-off: One week ago UTC + + cut-off: 1w + ``` + + There is no longer timezone support built-into this option. All durations are relative to the current time, UTC. diff --git a/README.md b/README.md index 73b531a..c80877f 100644 --- a/README.md +++ b/README.md @@ -1,308 +1,254 @@ [![release](https://img.shields.io/github/v/release/snok/container-retention-policy)](https://github.com/snok/container-retention-policy/releases/latest) -[![coverage](https://codecov.io/gh/snok/drf-openapi-tester/branch/master/graph/badge.svg)](https://codecov.io/gh/snok/container-retention-policy) # 📘 GHCR Container Retention Policy -A GitHub Action for deleting old image versions from the GitHub container registry. +A GitHub action for deleting old image versions from the GitHub container registry. 
Storage isn't free and registries can often get bloated with unused images. Having a retention policy to prevent clutter makes sense in most cases. -Supports both organizational and personal accounts. +- ✅ Supports organizational and personal accounts +- 👮 Supports multiple token types for authentication +- 🌱 The docker image used is sized below 10Mi, and the total runtime is a few seconds for most workloads # Content - [Usage](#usage) -- [Examples](#examples) - [Parameters](#parameters) +- [Examples](#examples) - [Nice to knows](#nice-to-knows) - [Contributing](#contributing) # Usage -To use the action, simply add it to your GitHub workflow, like this: +To use the action, create a workflow like this: ```yaml -- uses: snok/container-retention-policy@v2 - with: - image-names: dev, web, test* - cut-off: two hours ago UTC+2 - timestamp-to-use: updated_at - account-type: org - org-name: google - keep-at-least: 1 - skip-tags: latest - token: ${{ secrets.PAT }} -``` - -Notice image-names supports wildcards. 
- -You could run this as -a [scheduled event](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#schedule), or as a part -of an existing workflow, but for the sake of inspiration, it might also make sense for you to trigger it with a: - -- [workflow_dispatch](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#workflow_dispatch): - trigger it manually in the GitHub repo UI when needed -- [workflow_run](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#workflow_run): have it run - as clean-up after another key workflow completes -- or triggering it with a - webhook ([repository_dispatch](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#repository_dispatch)) - -# Examples - -For an organization, a full example might look something like this: - -```yaml -name: Delete old container images - on: + workflow_dispatch: schedule: - - cron: "0 0 * * *" # every day at midnight - + - cron: "5 * * * *" # every hour jobs: - clean-ghcr: - name: Delete old unused container images + clean: runs-on: ubuntu-latest + name: Delete old test images steps: - - name: Delete 'dev' containers older than a week - uses: snok/container-retention-policy@v2 - with: - image-names: python-dev, js-dev - cut-off: A week ago UTC - account-type: org - org-name: my-org - keep-at-least: 1 - untagged-only: true - token: ${{ secrets.PAT }} - - - name: Delete all test containers older than a month, using a wildcard - uses: snok/container-retention-policy@v2 + - uses: snok/container-retention-policy@v3.0.0 with: - image-names: python-test*, js-test* - cut-off: One month ago UTC - account-type: org - org-name: my-org - keep-at-least: 1 - skip-tags: latest + account: snok token: ${{ secrets.PAT }} + image-names: container-retention-policy + image-tags: test* dev* # target any image that has a tag starting with the word test or dev + cut-off: 2w 3d + dry-run: true ``` -While for a personal account, something like 
this might do: +For your first run, we recommend running the action with `dry-run: true`. For a personal account, just replace the `snok` org. name with the string "user". -```yaml -name: Delete old container images - -on: - schedule: - - cron: '0 0 0 * *' # the first day of the month - -jobs: - clean-ghcr: - name: Delete old unused container images - runs-on: ubuntu-latest - steps: - - name: Delete old images - uses: snok/container-retention-policy@v2 - with: - image-names: dev/* - cut-off: One month ago UTC - keep-at-least: 1 - account-type: personal - token: ${{ secrets.PAT }} -``` +See [events that trigger workflows](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows), +for other event type triggers, if cron doesn't suit your use-case. -An example showing 2 different retention policies based on image tags format: - -```yaml -name: Delete old container images +# Parameters -on: - schedule: - - cron: '0 0 0 * *' # the first day of the month +### Account -jobs: - clean-ghcr: - name: Delete old unused container images - runs-on: ubuntu-latest - steps: - - name: Delete old released images - uses: snok/container-retention-policy@v2 - with: - image-names: dev/* - cut-off: One month ago UTC - keep-at-least: 5 - filter-tags: "v*.*.*" - account-type: personal - token: ${{ secrets.PAT }} - - name: Delete old pre-release images - uses: snok/container-retention-policy@v2 - with: - image-names: dev/* - cut-off: One week ago UTC - keep-at-least: 1 - filter-tags: "rc*", "dev*" - account-type: personal - token: ${{ secrets.PAT }} -``` +* **Required**: `Yes` +* **Example**: `account: acme` for an organization named "acme" or `account: user` to signify that it's for your personal account -An example using `${{ secrets.GITHUB_TOKEN }}` in a repository with package name `my-package`: +The account field provides the action with information on whether the workflow is run by an organization +or a user (each have different API endpoints in GitHub's package 
APIs). If the action should be run by an organization, +then the input also provides us with the organization name, as this is needed for calling the org. API endpoints. -```yaml -name: Delete old container images +### Token -on: - schedule: - - cron: '0 0 0 * *' # the first day of the month +* **Required**: `Yes` +* **Example**: `token: ${{ secrets.PAT }}` or `token: ${{ secrets.GITHUB_TOKEN }}` -jobs: - clean-ghcr: - name: Delete old unused container images - runs-on: ubuntu-latest - steps: - - name: Delete old images - uses: snok/container-retention-policy@v2 - with: - image-names: my-package - cut-off: One month ago UTC - keep-at-least: 1 - account-type: personal - token: ${{ secrets.GITHUB_TOKEN }} - token-type: github-token -``` +The token is used to authenticate the action when making API calls to the package APIs. See dedicated sections +on extra information to know about each token type, below. -# Parameters +#### Classic personal access tokens -## image-names +Personal access tokens must have the `packages:write` scopes. -* **Required**: `Yes` -* **Example**: `image-names: image1,image2,image3` or just `image*` +#### Temporal tokens -The names of the container images you want to delete old versions for. Takes one or several container image names as a -comma separated list, and supports wildcards. The action will fetch all packages available, and filter -down the list of packages to handle based on the image name input. +If you're using a temporal token (`${{ secrets.GITHUB_TOKEN }}`), you should note that the filtering operators +described for `image-names` below *can not be used*. Temporal tokens are not usable for the [list-packages endpoint](https://docs.github.com/en/rest/packages/packages?apiVersion=2022-11-28#list-packages-for-an-organization), so we have +to work around it by calling the [get-package endpoint](https://docs.github.com/en/rest/packages/packages?apiVersion=2022-11-28#get-a-package-for-an-organization) instead. 
This means `image-names` needs to contain +exact names that we can use when constructing the endpoint URLs. -If `token-type` is set to `github-token`, then this **MUST** be a single image and match the repository name from where -this action is being used from. +For a temporal token to work, it is necessary for the repository running the workflow to have the `Admin` role +assigned in the package settings. -## cut-off +### GitHub app tokens -* **Required**: `Yes` -* **Example**: `cut-off: 1 week ago UTC` +GitHub app tokens must have the `packages:write` scopes. -The timezone-aware datetime you want to delete container versions that are older than. +To fetch an app token, you can structure your workflow like this: -We use [dateparser](https://dateparser.readthedocs.io/en/latest/) to parse the cut-off specified. This means you should -be able to specify your cut-off in relative human readable terms like `Two hours ago UTC`, or by using a normal -timestamp. +```yaml +- name: Generate a token + id: generate-token + uses: actions/create-github-app-token@v1 + with: + app-id: ${{ secrets.GH_APP_ID }} + private-key: ${{ secrets.GH_APP_PRIVATE_KEY }} -The parsed datetime **must** contain a timezone. +- uses: snok/container-retention-policy@v3.0.0 + with: + account: snok + token: ${{ steps.generate-token.outputs.token }} +``` -## timestamp-to-use +### Cut-off * **Required**: `Yes` -* **Example**: `timestamp-to-use: created_at` -* **Default**: `updated_at` -* **Valid choices**: `updated_at` or `created_at` +* **Example**: `cut-off: 1w` or `cut-off: 5h 2s` -Which timestamp to use when comparing the cut-off to the container version. +Specifies how old package versions need to be before being considered for deletion. -Must be `created_at` or `updated_at`. The timestamp to use determines how we filter container versions. +The cut-off value parsing is handled by the [humantime](https://crates.io/crates/humantime) Rust crate. 
+Please take a look at their documentation if you have trouble getting it to work. If that doesn't help, +feel free to open an issue. -## account-type +## image-names * **Required**: `Yes` -* **Example**: `account-type: personal` -* **Valid choices**: `org` or `personal` +* **Examples**: + * `image-names: container-retention-policy` to select the `container-retention-policy` image + * `image-names: dev* test*` to select any image starting with the string `dev` or `test` + * `image-names: !v*` to select any image *not* starting with the string `v` -The account type of the account running the action. The account type determines which API endpoints to use in the GitHub -API. +The name(s) of the container image(s) you want to delete package versions for. Supports filtering +with `*` and `!`, where `*` functions as a wildcard and `!` means to not select image names +matching the remaining expression. -## org-name +These operators are only available for personal- and GitHub app-tokens. See the `token` parameter section for more info. -* **Required**: `Only if account type is org` -* **Example**: `org-name: google` +### image-tags -The name of your organization. +* **Required**: `No` +* **Example**: `image-tags: !latest` -## token +Optionally narrows the selection of package versions based on associated tags. Works the same way as the +`image-names` parameter. See above for info on supported syntax. -* **Required**: `Yes` -* **Example**: `token: ${{ secrets.PAT }}` +Like for image-names, these operators are only available for personal- and GitHub app-tokens. See the `token` parameter section for more info. -For the token, you need to pass -a [personal access token](https://docs.github.com/en/github/authenticating-to-github/keeping-your-account-and-data-secure/creating-a-personal-access-token) -with access to the container registry. 
Specifically, you need to grant it the following scopes: +### skip-shas -- `read:packages`, and -- `delete:packages` +* **Required**: `No` +* **Example**: `skip-shas: sha256:610a8286bda2dcc713754078070341b8e696be0b02c0e36b2d48f1447c7162af,sha256:a1b6216dfcb74a02b33a2ed68b5dc9c1bd6aa1552d1e377155e8cb348525c533` -You can also pass in `${{ secrets.GITHUB_TOKEN }}`; however, please see `token-type` -for more details. +Optionally protects specific package versions by their digest. This parameter was added to support +proper handling of multi-platform images. See [safely handling multi-platform (multi-arch) packages](#safely-handling-multi-platform-multi-arch-packages) for details. -## token-type +### tag-selection * **Required**: `No` -* **Default**: `pat` -* **Example**: `token: github-token` -* **Valid choices**: `github-token` or `pat` +* **Example**: `tag-selection: both` or `tag-selection: untagged` or `tag-selection: tagged` +* **Default**: `tag-selection: both` -The type of token being passed into `token`, which is used to authenticate to Github. +Optionally lets you select only tagged or untagged images. Both are selected by default. -Setting this to `github-token` is useful for pruning images from a workflow that lives -in the same repository; however, the `image-names` parameter **MUST** be set to a single -image, and that image **MUST** match the repository's package name. - -## keep-at-least +### keep-n-most-recent * **Required**: `No` -* **Default**: `0` -* **Example**: `keep-at-least: 5` +* **Example**: `keep-n-most-recent: 5` +* **Default**: `keep-n-most-recent: 0` + +How many images to keep, out of the most recent tagged images selected, per package. -How many versions to keep no matter what. Defaults to 0, meaning all versions older than the `cut-off` date may be deleted. 
+If there are 10 tagged package versions selected, after filtering on names, tags, and cut-off, and there's a keep-n-most-recent count of 3 set, then we will retain the 3 most recently created package versions and delete 7. -Setting this to a larger value ensures that the specified number of recent versions are always retained, regardless of their age. Useful for images that are not updated very often. +This parameter will not prevent deletion of untagged images, because we do not know of a valid use-case for this behavior. -If used together with `filter-tags` parameter, `keep-at-least` number of image tags will be skipped from the resulting filtered set, which makes it possible to apply different retention policies based on image tag format. +The parameter can be useful, e.g., to protect some number of tagged images, so that rollbacks don't fail +in Kubernetes. See [making sure there are enough revisions available for rollbacks in Kubernetes](#making-sure-there-are-enough-revisions-available-for-rollbacks-in-kubernetes) for details. -## untagged-only +### timestamp-to-use * **Required**: `No` -* **Default**: `false` +* **Example**: `timestamp-to-use: created_at` +* **Default**: `timestamp-to-use: updated_at` -Restricts image deletion to images without any tags, if enabled. +Whether we should use the `created_at` or `updated_at` timestamp when filtering based on the `cut-off` parameter. +Also impacts the selection of the [keep-n-most-recent](#keep-n-most-recent) feature, if used. -## skip-tags +### dry-run * **Required**: `No` -* **Example**: `latest, v*` - -Restrict deletions to images without specific tags, if specified. +* **Example**: `dry-run: true` +* **Default**: `dry-run: false` -Supports Unix-shell style wildcards, i.e 'v*' to match all tags starting with 'v'. +When `true` the action outputs which package versions would have been deleted to stdout, without actually deleting anything. +The output gives an accurate snapshot of what would have been deleted. 
-## filter-tags +### rust-log * **Required**: `No` -* **Example**: `sha-*` +* **Examples**: + * `rust-log: container_retention_policy=error` + * `rust-log: info` + * `rust-log: container_retention_policy=debug,hyper_util=info` +* **Default**: `container_retention_policy=info` -Comma-separated list of tags to consider for deletion. +Gives users a way to opt-into more/less detailed logging. -Supports Unix-shell style wildcards, i.e 'sha-*' to match all tags starting with 'sha-'. +The action uses the [env_logger](https://docs.rs/env_logger/latest/env_logger/) Rust crate to define log-levels, +and any expression supported by `env_logger` should work. -## filter-include-untagged +If you see any weird behaviour from the action, we recommend running the action with debug or tracing logs +enabled (e.g., by specifying `container_retention_policy=debug`). Beware that if you pass a value like just `debug`, +this will enable debug logging for the action binary *and all of its dependencies*, so that could become a bit noisy. -* **Required**: `No` -* **Default**: `true` +# Examples -Whether to consider untagged images for deletion. +## Organization -## dry-run +```yaml +on: + schedule: + - cron: "0 0 * * *" # run every day at midnight, utc -* **Required**: `No` -* **Default**: `false` +jobs: + delete-package-versions: + name: Delete package versions older than 4 weeks + runs-on: ubuntu-latest + steps: + - uses: snok/container-retention-policy@v3.0.0 + with: + account: snok + token: ${{ secrets.PAT }} + image-names: foo bar baz # select package versions from these three packages + image-tags: !prod !qa # don't delete package versions tagged with 'prod' or 'qa' + tag-selection: both # select both tagged and untagged package versions + cut-off: 4w # package versions should be older than 4 weeks, to be considered + dry-run: false # consider toggling this to true on your first run +``` -Prints output showing images which would be deleted but does not actually delete any images. 
+ +## Personal account + +```yaml +on: + schedule: + - cron: "0 0 * * *" # every day at midnight, utc + +jobs: + delete-package-versions: + name: Delete untagged package versions + runs-on: ubuntu-latest + steps: + - uses: snok/container-retention-policy@v3.0.0 + with: + account: user + token: ${{ secrets.PAT }} + image-names: "*" # all packages owned by the account + tag-selection: untagged + cut-off: 1h +``` # Outputs @@ -315,28 +261,253 @@ Comma-separated list of `image-name:version-id` for each image deleted. Comma-separated list of images that we weren't able to delete. Check logs for responses. -## needs-github-assistance +# Nice to knows -When a container image version is public and reaches -5,000 downloads, the image version can no longer -be deleted via the Github API. +## Supported operating systems -If you run into this issue, you can access the names and versions -of the relevant images by calling `${{ steps..outputs.needs-github-assistance }}`. +This is a ["container" GitHub action](https://docs.github.com/en/actions/creating-actions/creating-a-docker-container-action). +GitHub actions running containers are currently only supported by ubuntu-runners. This is a GitHub action limitation. -The names and versions are output as a comma-separate list, -like `"name1:tag1,name2:tag2"`. +## Running the application outside the action -# Nice to knows +The action is a Rust application packaged as a container, so if you prefer +to run the program elsewhere you may: + +- Pull the docker image and run it with: + + ``` + docker run \ + -e RUST_LOG=container_retention_policy=info \ + ghcr.io/snok/container-retention-policy:v3.0.0-alpha2 \ + --account snok \ + --token $PAT \ + --cut-off 1d \ + --image-names "container-retention-policy*" + ``` + + See the [justfile](./justfile) `run` command for inspiration.
+ +- Clone the repo, compile it, and run the binary directly: + + ``` + git clone git@github.com:snok/container-retention-policy.git + cargo build --release + RUST_LOG=container_retention_policy=info \ + ./target/releases/container-retention-policy \ + --account snok \ + --token $PAT \ + --cut-off 1d \ + --image-names "container-retention-policy*" + ``` + + This is probably the simplest, if you're happy to install Rust on your machine (installation docs can be found [here](https://www.rust-lang.org/tools/install)!) + +## Making sure there are enough revisions available for rollbacks in Kubernetes + +If you're deploying containers to Kubernetes, one thing to beware of is to not specify retention policies that prevent you from rolling back deployments. If you roll back a deployment to a previous version, and your nodes don't have the image cached, then it will need to re-pull the image from the registry. If you're unlucky, the version might have been deleted, and you could get stuck on a bad release. + +If you're following best-practices for tagging your container images, you might be tagging images with versions, dates, or some other moving tag strategy. In this case, it can be hard to protect *some* package versions from being deleted by using the `image-tags` filters. 
Instead, you can use the `keep-n-most-recent` argument, which will retain `n` package versions per package specified: + +```yaml +name: Delete old container images + +on: + schedule: + - cron: "0 0 * * *" # every day at midnight + +jobs: + delete-package-versions: + name: Delete package versions older than 4 weeks, but keep the latest 5 in case of rollbacks + runs-on: ubuntu-latest + steps: + - uses: snok/container-retention-policy@v3.0.0 + with: + account: snok + token: ${{ secrets.PAT }} + image-names: foo bar baz # select three packages + image-tags: "*" # any image tag + tag-selection: both # select both tagged and untagged package versions + cut-off: 4w # package versions should be older than 4 weeks + keep-n-most-recent: 5 # keep up to `n` tagged package versions for each of the packages +``` + +The action will prioritize keeping newer package versions over older ones. + +## Safely handling multi-platform (multi-arch) packages + +This action (or rather, naïve deletion of package versions in GitHub's container registry, in general) can break your multi-platform packages. If you're hosting multi-platform packages, please implement the action as described below. + +### The problem + +GitHub's container registry supports uploads of multi-platform packages, with commands like: + +``` +docker buildx build \ + -t ghcr.io/snok/container-retention-policy:multi-arch \ + --platform linux/amd64,linux/arm64 . \ + --push +``` + +However, they do not provide enough metadata in the packages API to properly handle deletion for multi-platform packages. From the build above, the API will return 5 package versions.
From these five, one package version contains our `multi-arch` tag, and four are untagged, with no references to each-other: + +```json +[ + { + "id": 214880827, + "name": "sha256:e8530d7d4c44954276715032c027882a2569318bb7f79c5a4fce6c80c0c1018e", + "created_at": "2024-05-11T12:42:55Z", + "metadata": { + "package_type": "container", + "container": { + "tags": [ + "multi-arch" + ] + } + } + }, + { + "id": 214880825, + "name": "sha256:ca5bf1eaa2a393f30d079e8fa005c73318829251613a359d6972bbae90b491fe", + "created_at": "2024-05-11T12:42:54Z", + "metadata": { + "package_type": "container", + "container": { + "tags": [] + } + } + }, + { + "id": 214880822, + "name": "sha256:6cff2700a9a29ace200788b556210973bd35a541166e6c8a682421adb0b6e7bb", + "created_at": "2024-05-11T12:42:54Z", + "metadata": { + "package_type": "container", + "container": { + "tags": [] + } + } + }, + { + "id": 214880821, + "name": "sha256:f8bc799ae7b6ba95595c32e12075d21328dac783c9c0304cf80c61d41025aeb2", + "created_at": "2024-05-11T12:42:53Z", + "html_url": "https://github.com/orgs/snok/packages/container/container-retention-policy/214880821", + "metadata": { + "package_type": "container", + "container": { + "tags": [] + } + } + }, + { + "id": 214880818, + "name": "sha256:a86523225e8d21faae518a5ea117e06887963a4a9ac123683d91890af092cf03", + "created_at": "2024-05-11T12:42:53Z", + "html_url": "https://github.com/orgs/snok/packages/container/container-retention-policy/214880818", + "metadata": { + "package_type": "container", + "container": { + "tags": [] + } + } + } +] +``` + +If we delete some of these, we'll either delete some of the platform targets, or the underlying image manifests, which consequently will lead to missing-manifest-errors when trying to pull the image for any platform. In other words, deleting any one of these is bad. + +### The solution + +While GitHub's packages API does not provide enough metadata for us to adequately handle this, the docker-cli does. 
If we use `docker manifest inspect ghcr.io/snok/container-retention-policy:multi-arch`, we'll see: + +```json +{ + "schemaVersion": 2, + "mediaType": "application/vnd.oci.image.index.v1+json", + "manifests": [ + { + "mediaType": "application/vnd.oci.image.manifest.v1+json", + "size": 754, + "digest": "sha256:f8bc799ae7b6ba95595c32e12075d21328dac783c9c0304cf80c61d41025aeb2", + "platform": { + "architecture": "amd64", + "os": "linux" + } + }, + { + "mediaType": "application/vnd.oci.image.manifest.v1+json", + "size": 754, + "digest": "sha256:a86523225e8d21faae518a5ea117e06887963a4a9ac123683d91890af092cf03", + "platform": { + "architecture": "arm64", + "os": "linux" + } + }, + { + "mediaType": "application/vnd.oci.image.manifest.v1+json", + "size": 567, + "digest": "sha256:17152a70ea10de6ecd804fffed4b5ebd3abc638e8920efb6fab2993c5a77600a", + "platform": { + "architecture": "unknown", + "os": "unknown" + } + }, + { + "mediaType": "application/vnd.oci.image.manifest.v1+json", + "size": 567, + "digest": "sha256:86215617a0ea1f77e9f314b45ffd578020935996612fb497239509b151a6f1ba", + "platform": { + "architecture": "unknown", + "os": "unknown" + } + } + ] +} +``` + +Which lists all the SHAs of the images associated with this tag. 
+ +This means, you can do the following when implementing this action, to protect against partial deletion of your multi-platform images: + +```yaml +- name: Login to GitHub Container Registry + uses: docker/login-action@v3.0.0 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + +- name: Fetch multi-platform package version SHAs + id: multi-arch-shas + run: | + package1=$(docker manifest inspect ghcr.io/package1 | jq -r '.manifests.[] | .digest' | paste -s -d ' ' -) + package2=$(docker manifest inspect ghcr.io/package2 | jq -r '.manifests.[] | .digest' | paste -s -d ' ' -) + echo "multi-arch-digests=$package1,$package2" >> $GITHUB_OUTPUT + +- uses: snok/container-retention-policy + with: + skip-shas: ${{ steps.multi-arch-shas.outputs.multi-arch-digests }} +``` + +This should pass the SHAs of any multi-platform images you care about, so that we can avoid deleting them. + +## Rate limits + +The documentation for GitHub rate limits can be found [here](https://docs.github.com/en/rest/using-the-rest-api/rate-limits-for-the-rest-api?apiVersion=2022-11-28). + +The primary request limit for users authenticated with personal access tokens is 5000 requests per hour. The primary request limit for users authenticated with the built-in `GITHUB_TOKEN` is 1000 requests per repository, per hour. Limits also vary by account types, so we use response headers coming from GitHub's API to know how many requests can be sent safely. + +In addition to the primary rate limit, there are multiple secondary rate limits: -* The GitHub API restricts us to fetching 100 image versions per image name, so if your registry isn't 100% clean after - the first job, don't be alarmed. 
+- No more than 100 concurrent requests +- No more than 900 points per endpoint, per minute, where a `GET` request is 1 point and a `DELETE` request is 5 ([source](https://docs.github.com/en/rest/using-the-rest-api/rate-limits-for-the-rest-api?apiVersion=2022-11-28#calculating-points-for-the-secondary-rate-limit)). +- No more than 90 seconds of CPU time per 60 seconds of real time. No real way of knowing what time you've used is provided by GitHub - instead they suggest counting total response times. +- ~~No more than 80 content-creating requests per minute, and no more than 500 content-creating requests per hour~~ -* If you accidentally delete something you shouldn't have, GitHub apparently has a 30 day grace period before actually - deleting your image version. - See [these docs](https://docs.github.com/en/rest/reference/packages#restore-package-version-for-an-organization) - for the information you need to restore your data. +All but the last secondary limit are handled by the action. However, secondary rate limits are subject to +change without notice. If you run into problems, please open an issue. -# Contributing +## Restoring a deleted image -Please do 👏 +If you accidentally delete something you shouldn't have, GitHub has a 30-day grace period before actually +deleting your image version. See [these docs](https://docs.github.com/en/rest/reference/packages#restore-package-version-for-an-organization) for details. diff --git a/action.yaml b/action.yaml new file mode 100644 index 0000000..3df7b64 --- /dev/null +++ b/action.yaml @@ -0,0 +1,70 @@ +name: 'Container Retention Policy' +description: 'Define a retention policy for your GHCR-hosted container images' +branding: + icon: "book" + color: "blue" +inputs: + account: + description: "Should be 'user' for personal accounts and the organization name for organizations." + required: true + token: + description: "GitHub access token used to authenticate towards the GitHub packages APIs." 
+ required: true + cut-off: + description: "The cut-off for which to delete images older than. For example '2d' for 2 days." + required: true + image-names: + description: "List of package names to consider." + required: true + image-tags: + description: "List of package version names to consider." + required: false + skip-shas: + description: "Package version SHAs to not delete." + required: false + default: '' + tag-selection: + description: "Specify whether to consider tagged images, untagged images, or both." + required: false + default: 'both' + keep-n-most-recent: + description: 'How many image versions to always retain. Newer package versions are prioritized.' + required: false + default: '0' + timestamp-to-use: + description: 'Whether to use `updated_at` or `created_at` timestamps when considering the cut-off.' + required: false + default: 'updated_at' + dry-run: + description: "Do not actually delete images. Instead, print which images would have been deleted to the console." + required: false + default: 'false' + rust-log: + description: "Lets you specify a log level or a list of log levels for the different Rust crates used by the action." + required: false + default: 'container_retention_policy=INFO' + +outputs: + deleted: + description: 'Comma-separated list of image names and tags, for image versions that were deleted during the run.' + value: ${{ steps.container-retention-policy.outputs.deleted }} + failed: + description: 'Comma-separated list of image names and tags, for image versions that we failed to delete during the run, for an unknown reason.' 
+ value: ${{ steps.container-retention-policy.outputs.failed }} + +runs: + using: 'docker' + image: 'docker://ghcr.io/snok/container-retention-policy:v3.0.0-rc2' + args: + - --account=${{ inputs.account }} + - --token=${{ inputs.token }} + - --image-names=${{ inputs.image-names }} + - --image-tags=${{ inputs.image-tags }} + - --shas-to-skip=${{ inputs.skip-shas }} + - --tag-selection=${{ inputs.tag-selection }} + - --keep-n-most-recent=${{ inputs.keep-n-most-recent }} + - --timestamp-to-use=${{ inputs.timestamp-to-use }} + - --cut-off=${{ inputs.cut-off }} + - --dry-run=${{ inputs.dry-run }} + env: + RUST_LOG: ${{ inputs.rust-log }} diff --git a/action.yml b/action.yml deleted file mode 100644 index f6dc0c1..0000000 --- a/action.yml +++ /dev/null @@ -1,112 +0,0 @@ -name: 'Container Retention Policy' -description: 'Create a retention policy for your GHCR hosted container images' -branding: - icon: "book" - color: "blue" -inputs: - account-type: - description: "The type of account. Can be either 'org' or 'personal'." - required: true - org-name: - description: "The name of the organization. Only required if the account type is 'personal'." - default: '' - required: false - image-names: - description: 'Image name to delete. Supports passing several names as a comma-separated list.' - required: true - timestamp-to-use: - description: 'Whether to use updated_at or created_at timestamps. Defaults to updated_at.' - required: true - default: 'updated_at' - cut-off: - description: "The cut-off for which to delete images older than. For example '2 days ago UTC'. Timezone is required." - required: true - token: - description: 'Personal access token with read and delete scopes.' - required: true - untagged-only: - description: 'Restrict deletions to images without tags.' - required: false - default: 'false' - skip-tags: - description: "Restrict deletions to images without specific tags. 
Supports Unix-shell style wildcards" - required: false - keep-at-least: - description: 'How many images to keep no matter what. Defaults to 0 which means you might delete everything' - required: false - default: '0' - filter-tags: - description: "Comma-separated list of tags to consider for deletion. Supports Unix-shell style wildcards" - required: false - filter-include-untagged: - description: "Whether to consider untagged images for deletion." - required: false - default: 'true' - dry-run: - description: "Do not actually delete images. Print output showing what would have been deleted." - required: false - default: 'false' - token-type: - description: "The token type. Can be either 'pat' or 'github-token'. If 'github-token', then image-names must the package name of repository from where this action is invoked." - required: false - default: 'pat' - -outputs: - needs-github-assistance: - description: 'Comma-separated list of image names and tags, for image versions that are public and have more than 5000 downloads.' - value: ${{ steps.container-retention-policy.outputs.needs-github-assistance }} - deleted: - description: 'Comma-separated list of image names and tags, for image versions that were deleted during the run.' - value: ${{ steps.container-retention-policy.outputs.deleted }} - failed: - description: 'Comma-separated list of image names and tags, for image versions that we failed to delete during the run, for an unknown reason.' 
- value: ${{ steps.container-retention-policy.outputs.failed }} -runs: - using: 'composite' - steps: - - name: Setup Python - uses: actions/setup-python@v5 - with: - python-version: "3.11" - - - name: Install dependencies - shell: bash - run: | - pip --disable-pip-version-check install \ - regex==2022.3.2 \ - httpx \ - dateparser \ - pydantic - - - name: Run Container Retention Policy - shell: bash - id: container-retention-policy - run: | - python ${{ github.action_path }}/main.py \ - "$ACCOUNT_TYPE" \ - "$ORG_NAME" \ - "$IMAGE_NAMES" \ - "$TIMESTAMP_TO_USE" \ - "$CUT_OFF" \ - "$TOKEN" \ - "$UNTAGGED_ONLY" \ - "$SKIP_TAGS" \ - "$KEEP_AT_LEAST" \ - "$FILTER_TAGS" \ - "$FILTER_INCLUDE_UNTAGGED" \ - "$DRY_RUN" \ - "$TOKEN_TYPE" - env: - ACCOUNT_TYPE: "${{ inputs.account-type }}" - ORG_NAME: "${{ inputs.org-name }}" - IMAGE_NAMES: "${{ inputs.image-names }}" - TIMESTAMP_TO_USE: "${{ inputs.timestamp-to-use }}" - CUT_OFF: "${{ inputs.cut-off }}" - TOKEN: "${{ inputs.token }}" - UNTAGGED_ONLY: "${{ inputs.untagged-only }}" - SKIP_TAGS: "${{ inputs.skip-tags }}" - KEEP_AT_LEAST: "${{ inputs.keep-at-least }}" - FILTER_TAGS: "${{ inputs.filter-tags }}" - FILTER_INCLUDE_UNTAGGED: "${{ inputs.filter-include-untagged }}" - DRY_RUN: "${{ inputs.dry-run }}" - TOKEN_TYPE: "${{ inputs.token-type }}" diff --git a/deny.toml b/deny.toml new file mode 100644 index 0000000..3c974fc --- /dev/null +++ b/deny.toml @@ -0,0 +1,28 @@ +[advisories] +version = 2 +yanked = "warn" +ignore = [] + +[sources] +unknown-registry = "deny" +unknown-git = "deny" + +[licenses] +version = 2 +confidence-threshold = 0.93 +allow = [ + "Unicode-DFS-2016", + "MIT", + "Apache-2.0", + "ISC", + "BSD-3-Clause", + "OpenSSL", + "MPL-2.0" +] + +[[licenses.clarify]] +name = "ring" +expression = "MIT AND ISC AND OpenSSL" +license-files = [ + { path = "LICENSE", hash = 0xbd0eed23 } +] diff --git a/justfile b/justfile new file mode 100644 index 0000000..153cbf1 --- /dev/null +++ b/justfile @@ -0,0 +1,64 @@ +# See 
https://just.systems/man/en/ for docs + +set dotenv-load # Loads .env + +bt := '0' + +export RUST_BACKTRACE := bt + +log := "warn" + +# List available commands +default: + just --list + +# Installs any and all cargo utilities used for the development of this app +setup: + # Cargo binstall downloads pre-built binaries for cargo extensions + # This saves minutes on each `cargo binstall` invocation, relative + # to what `cargo install` would have done + @cargo install cargo-binstall + + # cargo-udeps checks for unused dependencies + @cargo binstall cargo-udeps --locked --no-confirm + @rustup toolchain install nightly + + # cargo-deny checks dependency licenses, to make sure we + # dont accidentally use any copy-left licensed packages. + # See deny.toml for configuration. + @cargo binstall cargo-deny --locked --no-confirm + + # cargo-audit checks for security vulnerabilities + @cargo binstall cargo-audit --locked --no-confirm + + # sccache does caching of Rust dependencies really well + @cargo binstall sccache --locked --no-confirm + + # coverage + cargo binstall cargo-llvm-cov --locked --no-confirm + + # helps identify unused dependency features + cargo binstall cargo-unused-features --locked --no-confirm + + # pre-commit is used to run checks on-commit + @pip install pre-commit && pre-commit install + @export RUSTC_WRAPPER=$(which sccache) + @echo "Run \`echo 'export RUSTC_WRAPPER=\$(which sccache)' >> ~/.bashrc\` to use sccache for caching" + +# Run the binary "for real" for the container-retention-policy package +run: + RUST_LOG=container_retention_policy=debug cargo r -- \ + --account snok \ + --token $DELETE_PACKAGES_CLASSIC_TOKEN \ + --tag-selection both \ + --image-names "container-retention-policy" \ + --image-tags "!latest !test-1* !v*" \ + --shas-to-skip "" \ + --keep-n-most-recent 5 \ + --timestamp-to-use "updated_at" \ + --cut-off 1d \ + --dry-run true + +fuzz time: + cargo build + cargo +nightly fuzz run --sanitizer address fuzz_cli -- 
-max_total_time={{time}} diff --git a/main.py b/main.py deleted file mode 100644 index cc990b2..0000000 --- a/main.py +++ /dev/null @@ -1,637 +0,0 @@ -from __future__ import annotations - -import asyncio -import os -import re -from asyncio import Semaphore, Task -from datetime import datetime, timedelta -from enum import Enum -from fnmatch import fnmatch -from sys import argv -from typing import TYPE_CHECKING, Literal -from urllib.parse import quote_from_bytes - -from dateparser import parse -from httpx import AsyncClient, TimeoutException -from pydantic import BaseModel, ValidationInfo, conint, field_validator - -if TYPE_CHECKING: - from httpx import Response - -BASE_URL = 'https://api.github.com' - - -def encode_image_name(name: str) -> str: - return quote_from_bytes(name.strip().encode('utf-8'), safe='') - - -class TimestampType(str, Enum): - """ - The timestamp-to-use defines how to filter down images for deletion. - """ - - UPDATED_AT = 'updated_at' - CREATED_AT = 'created_at' - - -class AccountType(str, Enum): - """ - The user's account type defines which endpoints to use. - """ - - ORG = 'org' - PERSONAL = 'personal' - - -class GithubTokenType(str, Enum): - """The type of token to use to authenticate to GitHub.""" - - GITHUB_TOKEN = 'github-token' - # Personal Access Token (PAT) - PAT = 'pat' - - -deleted: list[str] = [] -failed: list[str] = [] -needs_github_assistance: list[str] = [] - -GITHUB_ASSISTANCE_MSG = ( - 'Publicly visible package versions with more than ' - '5000 downloads cannot be deleted. ' - 'Contact GitHub support for further assistance.' -) - - -class PackageResponse(BaseModel): - id: int - name: str - created_at: datetime - updated_at: datetime | None - - -# This could be made into a setting if needed -MAX_SLEEP = 60 * 10 # 10 minutes - - -async def wait_for_rate_limit(*, response: Response, eligible_for_secondary_limit: bool = False) -> None: - """ - Sleeps or terminates the workflow if we've hit rate limits. 
- - See docs on rate limits: https://docs.github.com/en/rest/rate-limit?apiVersion=2022-11-28. - """ - if int(response.headers.get('x-ratelimit-remaining', 1)) == 0: - ratelimit_reset = datetime.fromtimestamp(int(response.headers['x-ratelimit-reset'])) - delta = ratelimit_reset - datetime.now() - - if delta > timedelta(seconds=MAX_SLEEP): - print( - f'Rate limited for {delta} seconds. ' - f'Terminating workflow, since that\'s above the maximum allowed sleep time. ' - f'Retry the job manually, when the rate limit is refreshed.' - ) - exit(1) - elif delta > timedelta(seconds=0): - print(f'Rate limit exceeded. Sleeping for {delta} seconds') - await asyncio.sleep(delta.total_seconds()) - - elif eligible_for_secondary_limit: - # https://docs.github.com/en/rest/overview/resources-in-the-rest-api?apiVersion=2022-11-28#secondary-rate-limits - # https://docs.github.com/en/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits - if int(response.headers.get('retry-after', 1)) == 0: - ratelimit_reset = datetime.fromtimestamp(int(response.headers['retry-after'])) - delta = ratelimit_reset - datetime.now() - if delta > timedelta(seconds=MAX_SLEEP): - print( - f'Rate limited for {delta} seconds. ' - f'Terminating workflow, since that\'s above the maximum allowed sleep time. ' - f'Retry the job manually, when the rate limit is refreshed.' - ) - exit(1) - elif delta > timedelta(seconds=0): - print(f'Secondary Rate limit exceeded. Sleeping for {delta} seconds') - await asyncio.sleep(delta.total_seconds()) - else: - await asyncio.sleep(1) - - -async def get_all_pages(*, url: str, http_client: AsyncClient) -> list[dict]: - """ - Accumulate all pages of a paginated API endpoint. - - :param url: The full API URL - :param http_client: HTTP client. - :return: List of objects. 
- """ - result = [] - rel_regex = re.compile(r'<([^<>]*)>; rel="(\w+)"') - rels = {'next': url} - - while 'next' in rels: - response = await http_client.get(rels['next']) - response.raise_for_status() - result.extend(response.json()) - - await wait_for_rate_limit(response=response) - - if link := response.headers.get('link'): - rels = {rel: url for url, rel in rel_regex.findall(link)} - else: - break - - return result - - -async def list_org_packages(*, org_name: str, http_client: AsyncClient) -> list[PackageResponse]: - """List all packages for an organization.""" - packages = await get_all_pages( - url=f'{BASE_URL}/orgs/{org_name}/packages?package_type=container&per_page=100', - http_client=http_client, - ) - return [PackageResponse(**i) for i in packages] - - -async def list_packages(*, http_client: AsyncClient) -> list[PackageResponse]: - """List all packages for a user.""" - packages = await get_all_pages( - url=f'{BASE_URL}/user/packages?package_type=container&per_page=100', - http_client=http_client, - ) - return [PackageResponse(**i) for i in packages] - - -async def list_org_package_versions( - *, org_name: str, image_name: str, http_client: AsyncClient -) -> list[PackageVersionResponse]: - """List image versions, for an organization.""" - packages = await get_all_pages( - url=f'{BASE_URL}/orgs/{org_name}/packages/container/{encode_image_name(image_name)}/versions?per_page=100', - http_client=http_client, - ) - return [PackageVersionResponse(**i) for i in packages] - - -async def list_package_versions(*, image_name: str, http_client: AsyncClient) -> list[PackageVersionResponse]: - """List image versions for a user.""" - packages = await get_all_pages( - url=f'{BASE_URL}/user/packages/container/{encode_image_name(image_name)}/versions?per_page=100', - http_client=http_client, - ) - return [PackageVersionResponse(**i) for i in packages] - - -class ContainerModel(BaseModel): - tags: list[str] - - -class MetadataModel(BaseModel): - package_type: 
Literal['container'] - container: ContainerModel - - -class PackageVersionResponse(BaseModel): - id: int - name: str - metadata: MetadataModel - created_at: datetime | None - updated_at: datetime | None - - -def post_deletion_output(*, response: Response, image_name: str, version_id: int) -> None: - """ - Output a little info to the user. - """ - image_name_with_tag = f'{image_name}:{version_id}' - if response.is_error: - if response.status_code == 400 and response.json()['message'] == GITHUB_ASSISTANCE_MSG: - # Output the names of these images in one block at the end - needs_github_assistance.append(image_name_with_tag) - else: - failed.append(image_name_with_tag) - print( - f'\nCouldn\'t delete {image_name_with_tag}.\n' - f'Status code: {response.status_code}\nResponse: {response.json()}\n' - ) - else: - deleted.append(image_name_with_tag) - print(f'Deleted old image: {image_name_with_tag}') - - -async def delete_package_version( - url: str, semaphore: Semaphore, http_client: AsyncClient, image_name: str, version_id: int -) -> None: - async with semaphore: - try: - response = await http_client.delete(url) - await wait_for_rate_limit(response=response, eligible_for_secondary_limit=True) - post_deletion_output(response=response, image_name=image_name, version_id=version_id) - except TimeoutException as e: - print(f'Request to delete {image_name} timed out with error `{e}`') - - -async def delete_org_package_versions( - *, - org_name: str, - image_name: str, - version_id: int, - http_client: AsyncClient, - semaphore: Semaphore, -) -> None: - """ - Delete an image version, for an organization. - - :param org_name: The name of the org. - :param image_name: The name of the container image. - :param version_id: The ID of the image version we're deleting. - :param http_client: HTTP client. - :return: Nothing - the API returns a 204. 
- """ - url = f'{BASE_URL}/orgs/{org_name}/packages/container/{encode_image_name(image_name)}/versions/{version_id}' - await delete_package_version( - url=url, - semaphore=semaphore, - http_client=http_client, - image_name=image_name, - version_id=version_id, - ) - - -async def delete_package_versions( - *, image_name: str, version_id: int, http_client: AsyncClient, semaphore: Semaphore -) -> None: - """ - Delete an image version, for a personal account. - - :param image_name: The name of the container image. - :param version_id: The ID of the image version we're deleting. - :param http_client: HTTP client. - :return: Nothing - the API returns a 204. - """ - url = f'{BASE_URL}/user/packages/container/{encode_image_name(image_name)}/versions/{version_id}' - await delete_package_version( - url=url, - semaphore=semaphore, - http_client=http_client, - image_name=image_name, - version_id=version_id, - ) - - -class GithubAPI: - """ - Provide a unified API, regardless of account type. - """ - - @staticmethod - async def list_packages( - *, account_type: AccountType, org_name: str | None, http_client: AsyncClient - ) -> list[PackageResponse]: - if account_type != AccountType.ORG: - return await list_packages(http_client=http_client) - assert isinstance(org_name, str) - return await list_org_packages(org_name=org_name, http_client=http_client) - - @staticmethod - async def list_package_versions( - *, - account_type: AccountType, - org_name: str | None, - image_name: str, - http_client: AsyncClient, - ) -> list[PackageVersionResponse]: - if account_type != AccountType.ORG: - return await list_package_versions(image_name=image_name, http_client=http_client) - assert isinstance(org_name, str) - return await list_org_package_versions(org_name=org_name, image_name=image_name, http_client=http_client) - - @staticmethod - async def delete_package( - *, - account_type: AccountType, - org_name: str | None, - image_name: str, - version_id: int, - http_client: AsyncClient, - 
semaphore: Semaphore, - ) -> None: - if account_type != AccountType.ORG: - return await delete_package_versions( - image_name=image_name, - version_id=version_id, - http_client=http_client, - semaphore=semaphore, - ) - assert isinstance(org_name, str) - return await delete_org_package_versions( - org_name=org_name, - image_name=image_name, - version_id=version_id, - http_client=http_client, - semaphore=semaphore, - ) - - -class Inputs(BaseModel): - token_type: GithubTokenType = GithubTokenType.PAT - image_names: list[str] - cut_off: datetime - timestamp_to_use: TimestampType - account_type: AccountType - org_name: str | None = None - untagged_only: bool - skip_tags: list[str] - keep_at_least: conint(ge=0) = 0 # type: ignore[valid-type] - filter_tags: list[str] - filter_include_untagged: bool = True - dry_run: bool = False - - @staticmethod - def _parse_comma_separate_string_as_list(v: str) -> list[str]: - return [i.strip() for i in v.split(',')] if v else [] - - @field_validator('skip_tags', 'filter_tags', mode='before') - def parse_comma_separate_string_as_list(cls, v: str) -> list[str]: - return cls._parse_comma_separate_string_as_list(v) - - @field_validator('image_names', mode='before') - def validate_image_names(cls, v: str, values: ValidationInfo) -> list[str]: - images = cls._parse_comma_separate_string_as_list(v) - if 'token_type' in values.data: - token_type = values.data['token_type'] - if token_type == GithubTokenType.GITHUB_TOKEN and len(images) != 1: - raise ValueError('A single image name is required if token_type is github-token') - if token_type == GithubTokenType.GITHUB_TOKEN and '*' in images[0]: - raise ValueError('Wildcards are not allowed if token_type is github-token') - return images - - @field_validator('cut_off', mode='before') - def parse_human_readable_datetime(cls, v: str) -> datetime: - parsed_cutoff = parse(v) - if not parsed_cutoff: - raise ValueError(f"Unable to parse '{v}'") - elif parsed_cutoff.tzinfo is None or 
parsed_cutoff.tzinfo.utcoffset(parsed_cutoff) is None: - raise ValueError('Timezone is required for the cut-off') - return parsed_cutoff - - @field_validator('org_name', mode='before') - def validate_org_name(cls, v: str, values: ValidationInfo) -> str | None: - if 'account_type' in values.data and values.data['account_type'] == AccountType.ORG and not v: - raise ValueError('org-name is required when account-type is org') - if v: - return v - return None - - -async def get_and_delete_old_versions(image_name: str, inputs: Inputs, http_client: AsyncClient) -> None: - """ - Delete old package versions for an image name. - - This function contains more or less all our logic. - """ - versions = await GithubAPI.list_package_versions( - account_type=inputs.account_type, - org_name=inputs.org_name, - image_name=image_name, - http_client=http_client, - ) - - # Define list of deletion-tasks to append to - tasks: list[Task] = [] - simulated_tasks = 0 - - # Iterate through dicts of image versions - sem = Semaphore(50) - - async with sem: - for idx, version in enumerate(versions): - # Parse either the update-at timestamp, or the created-at timestamp - # depending on which on the user has specified that we should use - updated_or_created_at = getattr(version, inputs.timestamp_to_use.value) - - if not updated_or_created_at: - print(f'Skipping image version {version.id}. 
Unable to parse timestamps.') - continue - - if inputs.cut_off < updated_or_created_at: - # Skipping because it's above our datetime cut-off - # we're only looking to delete containers older than some timestamp - continue - - # Load the tags for the individual image we're processing - if ( - hasattr(version, 'metadata') - and hasattr(version.metadata, 'container') - and hasattr(version.metadata.container, 'tags') - ): - image_tags = version.metadata.container.tags - else: - image_tags = [] - - if inputs.untagged_only and image_tags: - # Skipping because no tagged images should be deleted - # We could proceed if image_tags was empty, but it's not - continue - - if not image_tags and not inputs.filter_include_untagged: - # Skipping, because the filter_include_untagged setting is False - continue - - # If we got here, most probably we will delete image. - # For pseudo-branching we set delete_image to true and - # handle cases with delete image by tag filtering in separate pseudo-branch - delete_image = not inputs.filter_tags - for filter_tag in inputs.filter_tags: - # One thing to note here is that we use fnmatch to support wildcards. - # A filter-tags setting of 'some-tag-*' should match to both - # 'some-tag-1' and 'some-tag-2'. 
- if any(fnmatch(tag, filter_tag) for tag in image_tags): - delete_image = True - break - - if inputs.keep_at_least > 0: - if idx + 1 - (len(tasks) + simulated_tasks) > inputs.keep_at_least: - delete_image = True - else: - delete_image = False - - # Here we will handle exclusion case - for skip_tag in inputs.skip_tags: - if any(fnmatch(tag, skip_tag) for tag in image_tags): - # Skipping because this image version is tagged with a protected tag - delete_image = False - - if delete_image is True and inputs.dry_run: - delete_image = False - simulated_tasks += 1 - print(f'Would delete image {image_name}:{version.id}.') - - if delete_image: - tasks.append( - asyncio.create_task( - GithubAPI.delete_package( - account_type=inputs.account_type, - org_name=inputs.org_name, - image_name=image_name, - version_id=version.id, - http_client=http_client, - semaphore=sem, - ) - ) - ) - - if not tasks: - print(f'No more versions to delete for {image_name}') - - results = await asyncio.gather(*tasks, return_exceptions=True) - - for item in results: - if isinstance(item, Exception): - try: - raise item - except Exception as e: - # Unhandled errors *shouldn't* occur - print( - f'Unhandled exception raised at runtime: `{e}`. ' - f'Please report this at https://github.com/snok/container-retention-policy/issues/new' - ) - - -def filter_image_names(all_packages: list[PackageResponse], image_names: list[str]) -> set[str]: - """ - Filter package names by action input package names. - - The action input can contain wildcards and other patterns supported by fnmatch. - - The idea is that given a list: ['ab', 'ac', 'bb', 'ba'], and image names (from the action inputs): ['aa', 'b*'], - this function should return ['ba', 'bb']. 
- - :param all_packages: List of packages received from the Github API - :param image_names: List of image names the client wishes to delete from - :return: The intersection of the two lists, returned as `ImageName` instances - """ - - packages_to_delete_from = set() - - # Iterate over image names from the action inputs and fnmatch to packages - # contained in the users/orgs list of packages. - for image_name in image_names: - for package in all_packages: - if fnmatch(package.name, image_name): - packages_to_delete_from.add(package.name.strip()) - - return packages_to_delete_from - - -async def main( - account_type: str, - org_name: str, - image_names: str, - timestamp_to_use: str, - cut_off: str, - token: str, - untagged_only: str, - skip_tags: str, - keep_at_least: str, - filter_tags: str, - filter_include_untagged: str, - dry_run: str = 'false', - token_type: str = 'pat', -) -> None: - """ - Delete old image versions. - - See action.yml for additional descriptions of each parameter. - - The argument order matters. They are fed to the script from the action, in order. - - All arguments are either strings or empty strings. We properly - parse types and values in the Inputs pydantic model. - - :param account_type: Account type. must be 'org' or 'personal'. - :param org_name: The name of the org. Required if account type is 'org'. - :param image_names: The image names to delete versions for. Can be a single - image name, or multiple comma-separated image names. - :param timestamp_to_use: Which timestamp to base our cut-off on. Can be 'updated_at' or 'created_at'. - :param cut_off: Can be a human-readable relative time like '2 days ago UTC', or a timestamp. - Must contain a reference to the timezone. - :param token: The personal access token to authenticate with. - :param untagged_only: Whether to only delete untagged images. - :param skip_tags: Comma-separated list of tags to not delete. 
- Supports wildcard '*', '?', '[seq]' and '[!seq]' via Unix shell-style wildcards - :param keep_at_least: Number of images to always keep - :param filter_tags: Comma-separated list of tags to consider for deletion. - Supports wildcard '*', '?', '[seq]' and '[!seq]' via Unix shell-style wildcards - :param filter_include_untagged: Whether to consider untagged images for deletion. - :param dry_run: Do not actually delete packages but print output showing which packages would - have been deleted. - :param token_type: Token passed into 'token'. Must be 'pat' or 'github-token'. If - 'github-token' is used, then 'image_names` must be a single image, - and the image matches the package name from the repository where - this action is invoked. - """ - inputs = Inputs( - image_names=image_names, - account_type=account_type, - org_name=org_name, - timestamp_to_use=timestamp_to_use, - cut_off=cut_off, - untagged_only=untagged_only, - skip_tags=skip_tags, - keep_at_least=keep_at_least, - filter_tags=filter_tags, - filter_include_untagged=filter_include_untagged, - dry_run=dry_run, - token_type=token_type, - ) - async with AsyncClient( - headers={'accept': 'application/vnd.github.v3+json', 'Authorization': f'Bearer {token}'} - ) as client: - if inputs.token_type == GithubTokenType.GITHUB_TOKEN: - packages_to_delete_from = set(inputs.image_names) - else: - # Get all packages from the user or orgs account - all_packages = await GithubAPI.list_packages( - account_type=inputs.account_type, - org_name=inputs.org_name, - http_client=client, - ) - - # Filter existing image names by action inputs - packages_to_delete_from = filter_image_names(all_packages, inputs.image_names) - - # Create tasks to run concurrently - tasks = [ - asyncio.create_task(get_and_delete_old_versions(image_name, inputs, client)) - for image_name in packages_to_delete_from - ] - - # Execute tasks - await asyncio.gather(*tasks) - - if needs_github_assistance: - # Print a human-readable list of public images we 
couldn't handle - print('\n') - print('─' * 110) - image_list = '\n\t- ' + '\n\t- '.join(needs_github_assistance) - msg = ( - '\nThe follow images are public and have more than 5000 downloads. ' - f'These cannot be deleted via the Github API:\n{image_list}\n\n' - f'If you still want to delete these images, contact Github support.\n\n' - 'See https://docs.github.com/en/rest/reference/packages for more info.\n' - ) - print(msg) - print('─' * 110) - - # Then add it to the action outputs - for name, l in [ - ('needs-github-assistance', needs_github_assistance), - ('deleted', deleted), - ('failed', failed), - ]: - comma_separated_list = ','.join(l) - - with open(os.environ['GITHUB_OUTPUT'], 'a') as f: - f.write(f'{name}={comma_separated_list}\n') - - -if __name__ == '__main__': - asyncio.run(main(*argv[1:])) diff --git a/main_tests.py b/main_tests.py deleted file mode 100644 index 7f286b1..0000000 --- a/main_tests.py +++ /dev/null @@ -1,672 +0,0 @@ -import asyncio -import os -import tempfile -from asyncio import Semaphore -from copy import deepcopy -from datetime import datetime, timedelta, timezone -from unittest.mock import ANY, AsyncMock, Mock - -import pytest as pytest -from httpx import AsyncClient -from pydantic import ValidationError - -import main -from main import ( - MAX_SLEEP, - AccountType, - Inputs, - MetadataModel, - PackageResponse, - PackageVersionResponse, - delete_org_package_versions, - delete_package_versions, - filter_image_names, - get_and_delete_old_versions, - list_org_package_versions, - list_package_versions, -) -from main import main as main_ -from main import post_deletion_output, wait_for_rate_limit - - -@pytest.fixture -def ok_response(): - mock_ok_response = Mock() - mock_ok_response.headers = {'x-ratelimit-remaining': '1', 'link': ''} - mock_ok_response.json.return_value = [] - mock_ok_response.is_error = False - yield mock_ok_response - - -@pytest.fixture -def bad_response(): - mock_bad_response = Mock() - mock_bad_response.headers = 
{'x-ratelimit-remaining': '1', 'link': ''} - mock_bad_response.is_error = True - yield mock_bad_response - - -@pytest.fixture -def http_client(ok_response): - mock_http_client = AsyncMock() - mock_http_client.get.return_value = ok_response - mock_http_client.delete.return_value = ok_response - yield mock_http_client - - -@pytest.fixture(autouse=True) -def github_output(): - """ - Create a GITHUB_OUTPUT env value to mock the Github actions equivalent. - """ - with tempfile.NamedTemporaryFile() as temp: - os.environ['GITHUB_OUTPUT'] = temp.name - yield - - -async def test_list_org_package_version(http_client): - await list_org_package_versions(org_name='test', image_name='test', http_client=http_client) - - -async def test_wait_for_rate_limit(ok_response, capsys): - # No rate limit hit, no secondary limit - start = datetime.now() - await wait_for_rate_limit(response=ok_response, eligible_for_secondary_limit=False) - assert capsys.readouterr().out == '' # no output - assert (datetime.now() - start).seconds == 0 - - # No rate limit hit, with secondary limit - this should sleep for one second - start = datetime.now() - await wait_for_rate_limit(response=ok_response, eligible_for_secondary_limit=True) - assert capsys.readouterr().out == '' # no output - assert (datetime.now() - start).seconds == 1 # ~1 second runtime - - # Run with timeout exceeding max limit - this should exit the program - ok_response.headers = {'x-ratelimit-remaining': '0'} - ok_response.headers |= {'x-ratelimit-reset': (datetime.now() + timedelta(seconds=MAX_SLEEP + 1)).timestamp()} - with pytest.raises(SystemExit): - await wait_for_rate_limit(response=ok_response) - assert " Terminating workflow, since that's above the maximum allowed sleep time" in capsys.readouterr().out - - # Run with timeout below max limit - this should just sleep for a bit - ok_response.headers |= {'x-ratelimit-reset': (datetime.now() + timedelta(seconds=2)).timestamp()} - await wait_for_rate_limit(response=ok_response) - 
assert 'Rate limit exceeded. Sleeping for' in capsys.readouterr().out - - -async def test_list_package_version(http_client): - await list_package_versions(image_name='test', http_client=http_client) - - -async def test_delete_org_package_version(http_client): - await delete_org_package_versions( - org_name='test', - image_name='test', - http_client=http_client, - version_id=123, - semaphore=Semaphore(1), - ) - - -async def test_delete_package_version(http_client): - await delete_package_versions(image_name='test', http_client=http_client, version_id=123, semaphore=Semaphore(1)) - - -async def test_delete_package_version_semaphore(http_client): - """ - A bit of a useless test, but proves Semaphores work the way we think. - """ - # Test that we're still waiting after 1 second, when the semaphore is empty - sem = Semaphore(0) - with pytest.raises(asyncio.TimeoutError): - await asyncio.wait_for( - delete_package_versions(image_name='test', http_client=http_client, version_id=123, semaphore=sem), - 2, - ) - - # Assert that this would not be the case otherwise - sem = Semaphore(1) - await asyncio.wait_for( - delete_package_versions(image_name='test', http_client=http_client, version_id=123, semaphore=sem), - 2, - ) - - -def test_post_deletion_output(capsys, ok_response, bad_response): - # Happy path - post_deletion_output(response=ok_response, image_name='test', version_id=123) - captured = capsys.readouterr() - assert captured.out == 'Deleted old image: test:123\n' - - # Bad response - post_deletion_output(response=bad_response, image_name='test', version_id=123) - captured = capsys.readouterr() - assert captured.out != 'Deleted old image: test:123\n' - - -input_defaults = { - 'image_names': 'a,b', - 'cut_off': 'an hour ago utc', - 'timestamp_to_use': 'created_at', - 'untagged_only': 'false', - 'skip_tags': '', - 'keep_at_least': '0', - 'filter_tags': '', - 'filter_include_untagged': 'true', - 'token': 'test', - 'token_type': 'pat', - 'account_type': 'personal', - 
'dry_run': 'false', -} - - -def _create_inputs_model(**kwargs): - """ - Little helper method, to help us instantiate working Inputs models. - """ - - return Inputs(**(input_defaults | kwargs)) - - -def test_org_name_empty(): - with pytest.raises(ValidationError): - Inputs(**(input_defaults | {'account_type': 'org', 'org_name': ''})) - - -async def test_inputs_model_personal(mocker): - # Mock the personal list function - mocked_list_package_versions: AsyncMock = mocker.patch.object(main, 'list_package_versions', AsyncMock()) - mocked_delete_package_versions: AsyncMock = mocker.patch.object(main, 'delete_package_versions', AsyncMock()) - - # Create a personal inputs model - personal = _create_inputs_model(account_type='personal') - assert personal.account_type != AccountType.ORG - - # Call the GithubAPI utility function - await main.GithubAPI.list_package_versions( - account_type=personal.account_type, - org_name=personal.org_name, - image_name=personal.image_names[0], - http_client=AsyncMock(), - ) - await main.GithubAPI.delete_package( - account_type=personal.account_type, - org_name=personal.org_name, - image_name=personal.image_names[0], - http_client=AsyncMock(), - version_id=1, - semaphore=Semaphore(1), - ) - - # Make sure the right function was called - mocked_list_package_versions.assert_awaited_once() - mocked_delete_package_versions.assert_awaited_once() - - -async def test_inputs_model_org(mocker): - # Mock the org list function - mocked_list_package_versions: AsyncMock = mocker.patch.object(main, 'list_org_package_versions', AsyncMock()) - mocked_delete_package_versions: AsyncMock = mocker.patch.object(main, 'delete_org_package_versions', AsyncMock()) - - # Create a personal inputs model - org = _create_inputs_model(account_type='org', org_name='test') - assert org.account_type == AccountType.ORG - - # Call the GithubAPI utility function - await main.GithubAPI.list_package_versions( - account_type=org.account_type, org_name=org.org_name, 
image_name=org.image_names[0], http_client=AsyncMock() - ) - await main.GithubAPI.delete_package( - account_type=org.account_type, - org_name=org.org_name, - image_name=org.image_names[0], - http_client=AsyncMock(), - version_id=1, - semaphore=Semaphore(1), - ) - - # Make sure the right function was called - mocked_list_package_versions.assert_awaited_once() - mocked_delete_package_versions.assert_awaited_once() - - -class TestGetAndDeleteOldVersions: - valid_data = [ - PackageVersionResponse( - **{ - 'id': 1234567, - 'name': 'sha256:3c6891187412bd31fa04c63b4f06c47417eb599b1b659462632285531aa99c19', - 'created_at': '2021-05-26T14:03:03Z', - 'updated_at': '2021-05-26T14:03:03Z', - 'metadata': {'container': {'tags': []}, 'package_type': 'container'}, - 'html_url': 'https://github.com/orgs/org-name/packages/container/image-name/1234567', - 'package_html_url': 'https://github.com/orgs/org-name/packages/container/package/image-name', - 'url': 'https://api.github.com/orgs/org-name/packages/container/image-name/versions/1234567', - } - ) - ] - - @staticmethod - def generate_fresh_valid_data_with_id(id): - r = deepcopy(TestGetAndDeleteOldVersions.valid_data[0]) - r.id = id - r.created_at = datetime.now(timezone(timedelta())) - return r - - async def test_delete_package(self, mocker, capsys, http_client): - # Mock the list function - mocker.patch.object(main.GithubAPI, 'list_package_versions', return_value=self.valid_data) - - # Call the function - inputs = _create_inputs_model() - await get_and_delete_old_versions(image_name='a', inputs=inputs, http_client=http_client) - - # Check the output - captured = capsys.readouterr() - assert captured.out == 'Deleted old image: a:1234567\n' - - async def test_keep_at_least(self, mocker, capsys, http_client): - mocker.patch.object(main.GithubAPI, 'list_package_versions', return_value=self.valid_data) - inputs = _create_inputs_model(keep_at_least=1) - await get_and_delete_old_versions(image_name='a', inputs=inputs, 
http_client=http_client) - captured = capsys.readouterr() - assert captured.out == 'No more versions to delete for a\n' - - async def test_keep_at_least_deletes_not_only_marked(self, mocker, capsys, http_client): - data = [self.generate_fresh_valid_data_with_id(id) for id in range(3)] - data.append(self.valid_data[0]) - mocker.patch.object(main.GithubAPI, 'list_package_versions', return_value=data) - inputs = _create_inputs_model(keep_at_least=2) - await get_and_delete_old_versions(image_name='a', inputs=inputs, http_client=http_client) - captured = capsys.readouterr() - assert captured.out == 'Deleted old image: a:1234567\n' - - async def test_not_beyond_cutoff(self, mocker, capsys, http_client): - response_data = [ - PackageVersionResponse( - created_at=datetime.now(timezone(timedelta(hours=1))), - updated_at=datetime.now(timezone(timedelta(hours=1))), - id=1234567, - name='', - metadata={'container': {'tags': []}, 'package_type': 'container'}, - ) - ] - mocker.patch.object(main.GithubAPI, 'list_package_versions', return_value=response_data) - inputs = _create_inputs_model() - await get_and_delete_old_versions(image_name='a', inputs=inputs, http_client=http_client) - captured = capsys.readouterr() - assert captured.out == 'No more versions to delete for a\n' - - async def test_missing_timestamp(self, mocker, capsys, http_client): - data = [ - PackageVersionResponse( - created_at=None, - updated_at=None, - id=1234567, - name='', - metadata={'container': {'tags': []}, 'package_type': 'container'}, - ) - ] - mocker.patch.object(main.GithubAPI, 'list_package_versions', return_value=data) - inputs = _create_inputs_model() - await get_and_delete_old_versions(image_name='a', inputs=inputs, http_client=http_client) - captured = capsys.readouterr() - assert ( - captured.out - == 'Skipping image version 1234567. 
Unable to parse timestamps.\nNo more versions to delete for a\n' - ) - - async def test_empty_list(self, mocker, capsys, http_client): - data = [] - mocker.patch.object(main.GithubAPI, 'list_package_versions', return_value=data) - inputs = _create_inputs_model() - await get_and_delete_old_versions(image_name='a', inputs=inputs, http_client=http_client) - captured = capsys.readouterr() - assert captured.out == 'No more versions to delete for a\n' - - async def test_skip_tags(self, mocker, capsys, http_client): - data = deepcopy(self.valid_data) - data[0].metadata = MetadataModel(**{'container': {'tags': ['abc', 'bcd']}, 'package_type': 'container'}) - mocker.patch.object(main.GithubAPI, 'list_package_versions', return_value=data) - inputs = _create_inputs_model(skip_tags='abc') - await get_and_delete_old_versions(image_name='a', inputs=inputs, http_client=http_client) - captured = capsys.readouterr() - assert captured.out == 'No more versions to delete for a\n' - - async def test_skip_tags_wildcard(self, mocker, capsys, http_client): - data = deepcopy(self.valid_data) - data[0].metadata = MetadataModel(**{'container': {'tags': ['v1.0.0', 'abc']}, 'package_type': 'container'}) - mocker.patch.object(main.GithubAPI, 'list_package_versions', return_value=data) - inputs = _create_inputs_model(skip_tags='v*') - await get_and_delete_old_versions(image_name='a', inputs=inputs, http_client=http_client) - captured = capsys.readouterr() - assert captured.out == 'No more versions to delete for a\n' - - async def test_untagged_only(self, mocker, capsys, http_client): - data = deepcopy(self.valid_data) - data[0].metadata = MetadataModel(**{'container': {'tags': ['abc', 'bcd']}, 'package_type': 'container'}) - mocker.patch.object(main.GithubAPI, 'list_package_versions', return_value=data) - inputs = _create_inputs_model(untagged_only='true') - await get_and_delete_old_versions(image_name='a', inputs=inputs, http_client=http_client) - captured = capsys.readouterr() - assert 
captured.out == 'No more versions to delete for a\n' - - async def test_filter_tags(self, mocker, capsys, http_client): - data = deepcopy(self.valid_data) - data[0].metadata = MetadataModel( - **{'container': {'tags': ['sha-deadbeef', 'edge']}, 'package_type': 'container'} - ) - mocker.patch.object(main.GithubAPI, 'list_package_versions', return_value=data) - inputs = _create_inputs_model(filter_tags='sha-*') - await get_and_delete_old_versions(image_name='a', inputs=inputs, http_client=http_client) - captured = capsys.readouterr() - assert captured.out == 'Deleted old image: a:1234567\n' - - async def test_dry_run(self, mocker, capsys, http_client): - data = deepcopy(self.valid_data) - data[0].metadata = MetadataModel( - **{'container': {'tags': ['sha-deadbeef', 'edge']}, 'package_type': 'container'} - ) - mocker.patch.object(main.GithubAPI, 'list_package_versions', return_value=data) - mock_delete_package = mocker.patch.object(main.GithubAPI, 'delete_package') - inputs = _create_inputs_model(dry_run='true') - await get_and_delete_old_versions(image_name='a', inputs=inputs, http_client=http_client) - captured = capsys.readouterr() - assert captured.out == 'Would delete image a:1234567.\nNo more versions to delete for a\n' - mock_delete_package.assert_not_called() - - -def test_inputs_bad_token_type(): - with pytest.raises(ValidationError, match='Input should be \'github-token\' or \'pat\''): - _create_inputs_model(token_type='undefined-token-type', image_names='a,b') - - -def test_inputs_token_type_as_github_token_with_bad_image_names(): - _create_inputs_model(image_names='a', token_type='github-token') - with pytest.raises(ValidationError, match='Wildcards are not allowed if token_type is github-token'): - _create_inputs_model(image_names='a*', token_type='github-token') - with pytest.raises(ValidationError, match='A single image name is required if token_type is github-token'): - _create_inputs_model(image_names='a,b,c', token_type='github-token') - - -def 
test_inputs_bad_account_type(): - # Account type - _create_inputs_model(account_type='personal') - _create_inputs_model(account_type='org', org_name='myorg') - with pytest.raises(ValidationError, match='Input should be \'org\' or \'personal\''): - _create_inputs_model(account_type='') - - # Org name - _create_inputs_model(org_name='', account_type='personal') - with pytest.raises(ValueError, match='org-name is required when account-type is org'): - _create_inputs_model(org_name='', account_type='org') - - # Timestamp type - _create_inputs_model(timestamp_to_use='updated_at') - _create_inputs_model(timestamp_to_use='created_at') - with pytest.raises(ValueError, match='Input should be \'updated_at\' or \'created_at\''): - _create_inputs_model(timestamp_to_use='wat') - - # Cut-off - _create_inputs_model(cut_off='21 July 2013 10:15 pm +0500') - _create_inputs_model(cut_off='12/12/12 PM EST') - with pytest.raises(ValueError, match='Timezone is required for the cut-off'): - _create_inputs_model(cut_off='12/12/12') - with pytest.raises(ValueError, match="Unable to parse 'test'"): - _create_inputs_model(cut_off='test') - - # Untagged only - for i in ['true', 'True', '1']: - assert _create_inputs_model(untagged_only=i).untagged_only is True - for j in ['False', 'false', '0']: - assert _create_inputs_model(untagged_only=j).untagged_only is False - assert _create_inputs_model(untagged_only=False).untagged_only is False - - # Skip tags - assert _create_inputs_model(skip_tags='a').skip_tags == ['a'] - assert _create_inputs_model(skip_tags='a,b').skip_tags == ['a', 'b'] - assert _create_inputs_model(skip_tags='a , b ,c').skip_tags == ['a', 'b', 'c'] - - # Keep at least - with pytest.raises(ValueError, match='Input should be greater than or equal to 0'): - _create_inputs_model(keep_at_least='-1') - - # Filter tags - assert _create_inputs_model(filter_tags='a').filter_tags == ['a'] - assert _create_inputs_model(filter_tags='sha-*,latest').filter_tags == ['sha-*', 'latest'] - 
assert _create_inputs_model(filter_tags='sha-* , latest').filter_tags == ['sha-*', 'latest'] - - # Filter include untagged - for i in ['true', 'True', '1', True]: - assert _create_inputs_model(filter_include_untagged=i).filter_include_untagged is True - for j in ['False', 'false', '0', False]: - assert _create_inputs_model(filter_include_untagged=j).filter_include_untagged is False - - -def test_parse_image_names(): - assert filter_image_names( - all_packages=[ - PackageResponse(id=1, name='aaa', created_at=datetime.now(), updated_at=datetime.now()), - PackageResponse(id=1, name='bbb', created_at=datetime.now(), updated_at=datetime.now()), - PackageResponse(id=1, name='ccc', created_at=datetime.now(), updated_at=datetime.now()), - PackageResponse(id=1, name='aab', created_at=datetime.now(), updated_at=datetime.now()), - PackageResponse(id=1, name='aac', created_at=datetime.now(), updated_at=datetime.now()), - PackageResponse(id=1, name='aba', created_at=datetime.now(), updated_at=datetime.now()), - PackageResponse(id=1, name='aca', created_at=datetime.now(), updated_at=datetime.now()), - PackageResponse(id=1, name='abb', created_at=datetime.now(), updated_at=datetime.now()), - PackageResponse(id=1, name='acc', created_at=datetime.now(), updated_at=datetime.now()), - ], - image_names=['ab*', 'aa*', 'cc'], - ) == { - 'aba', - 'abb', - 'aaa', - 'aab', - 'aac', - } - - -async def test_main(mocker, ok_response): - mocker.patch.object(AsyncClient, 'get', return_value=ok_response) - mocker.patch.object(AsyncClient, 'delete', return_value=ok_response) - mocker.patch.object(main, 'get_and_delete_old_versions', AsyncMock()) - await main_( - **{ - 'account_type': 'org', - 'org_name': 'test', - 'image_names': 'a,b,c', - 'timestamp_to_use': 'updated_at', - 'cut_off': '2 hours ago UTC', - 'untagged_only': 'false', - 'skip_tags': '', - 'keep_at_least': '0', - 'filter_tags': '', - 'filter_include_untagged': 'true', - 'token': 'test', - } - ) - - -async def 
test_main_with_token_type_github_token(mocker, ok_response): - mock_list_package = mocker.patch.object(main.GithubAPI, 'list_packages') - mock_filter_image_names = mocker.patch.object(main, 'filter_image_names') - mock_get_and_delete_old_versions = mocker.patch.object(main, 'get_and_delete_old_versions') - mocker.patch.object(AsyncClient, 'get', return_value=ok_response) - mocker.patch.object(AsyncClient, 'delete', return_value=ok_response) - await main_( - **{ - 'account_type': 'org', - 'org_name': 'test', - 'image_names': 'my-package', - 'timestamp_to_use': 'updated_at', - 'cut_off': '2 hours ago UTC', - 'untagged_only': 'false', - 'skip_tags': '', - 'keep_at_least': '0', - 'filter_tags': '', - 'filter_include_untagged': 'true', - 'token': 'test', - 'token_type': 'github-token', - } - ) - - mock_list_package.assert_not_called() - mock_filter_image_names.assert_not_called() - mock_get_and_delete_old_versions.assert_called_with('my-package', ANY, ANY) - - -async def test_public_images_with_more_than_5000_downloads(mocker, capsys): - """ - The `response.is_error` block is set up to output errors when we run into them. - - One more commonly seen error is the case where an image is public and has more than 5000 downloads. - - For these cases, instead of just outputting the error, we bundle the images names and list - them once at the end, with the necessary context to act on them if wanted. 
- """ - mock_delete_response = Mock() - mock_delete_response.headers = {'x-ratelimit-remaining': '1', 'link': ''} - mock_delete_response.is_error = True - mock_delete_response.status_code = 400 - mock_delete_response.json = lambda: {'message': main.GITHUB_ASSISTANCE_MSG} - - mock_list_response = Mock() - mock_list_response.headers = {'x-ratelimit-remaining': '1', 'link': ''} - mock_list_response.is_error = True - mock_list_response.status_code = 400 - - class DualMock: - counter = 0 - - def __call__(self): - if self.counter == 0: - self.counter += 1 - return [ - { - 'id': 1, - 'updated_at': '2021-05-26T14:03:03Z', - 'name': 'a', - 'created_at': '2021-05-26T14:03:03Z', - 'metadata': {'container': {'tags': []}, 'package_type': 'container'}, - }, - { - 'id': 1, - 'updated_at': '2021-05-26T14:03:03Z', - 'name': 'b', - 'created_at': '2021-05-26T14:03:03Z', - 'metadata': {'container': {'tags': []}, 'package_type': 'container'}, - }, - { - 'id': 1, - 'updated_at': '2021-05-26T14:03:03Z', - 'name': 'c', - 'created_at': '2021-05-26T14:03:03Z', - 'metadata': {'container': {'tags': []}, 'package_type': 'container'}, - }, - ] - return [ - { - 'id': 1, - 'updated_at': '2021-05-26T14:03:03Z', - 'name': 'a', - 'created_at': '2021-05-26T14:03:03Z', - 'metadata': {'container': {'tags': []}, 'package_type': 'container'}, - }, - ] - - mock_list_response.json = DualMock() - - mocker.patch.object(AsyncClient, 'get', return_value=mock_list_response) - mocker.patch.object(AsyncClient, 'delete', return_value=mock_delete_response) - await main_( - **{ - 'account_type': 'org', - 'org_name': 'test', - 'image_names': 'a,b,c', - 'timestamp_to_use': 'updated_at', - 'cut_off': '2 hours ago UTC', - 'untagged_only': 'false', - 'skip_tags': '', - 'keep_at_least': '0', - 'filter_tags': '', - 'filter_include_untagged': 'true', - 'token': 'test', - } - ) - captured = capsys.readouterr() - - for m in [ - 'The follow images are public and have more than 5000 downloads. 
These cannot be deleted via the Github API:', - 'If you still want to delete these images, contact Github support.', - 'See https://docs.github.com/en/rest/reference/packages for more info.', - ]: - assert m in captured.out - - -class RotatingStatusCodeMock(Mock): - index = 0 - - @property - def is_error(self): - if self.index == 0: - self.index += 1 - return True - if self.index == 1: - self.index += 1 - return True - return False - - @property - def status_code(self): - return [400, 400, 200][self.index - 1] - - def json(self): - return [ - {'message': 'some random error message'}, - {'message': main.GITHUB_ASSISTANCE_MSG}, - {'message': 'success!'}, - ][self.index - 1] - - -async def test_outputs_are_set(mocker): - mock_list_response = Mock() - mock_list_response.headers = {'x-ratelimit-remaining': '1', 'link': ''} - mock_list_response.is_error = True - mock_list_response.status_code = 200 - mock_list_response.json = lambda: [ - { - 'id': 1, - 'updated_at': '2021-05-26T14:03:03Z', - 'name': 'a', - 'created_at': '2021-05-26T14:03:03Z', - 'metadata': {'container': {'tags': []}, 'package_type': 'container'}, - } - ] - - mocker.patch.object(AsyncClient, 'get', return_value=mock_list_response) - mocker.patch.object(AsyncClient, 'delete', return_value=RotatingStatusCodeMock()) - - await main_( - **{ - 'account_type': 'org', - 'org_name': 'test', - 'image_names': 'a,b,c', - 'timestamp_to_use': 'updated_at', - 'cut_off': '2 hours ago UTC', - 'untagged_only': 'false', - 'skip_tags': '', - 'keep_at_least': '0', - 'filter_tags': '', - 'filter_include_untagged': 'true', - 'token': 'test', - } - ) - with open(os.environ['GITHUB_OUTPUT']) as f: - out_vars = f.read() - - for i in [ - 'needs-github-assistance=', - 'deleted=', - 'failed=', - ]: - assert i in out_vars diff --git a/poetry.lock b/poetry.lock deleted file mode 100644 index b8d0590..0000000 --- a/poetry.lock +++ /dev/null @@ -1,924 +0,0 @@ -# This file is automatically @generated by Poetry and should not be changed 
by hand. - -[[package]] -name = "annotated-types" -version = "0.6.0" -description = "Reusable constraint types to use with typing.Annotated" -category = "main" -optional = false -python-versions = ">=3.8" -files = [ - {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, - {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, -] - -[[package]] -name = "anyio" -version = "3.7.1" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, - {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, -] - -[package.dependencies] -idna = ">=2.8" -sniffio = ">=1.1" - -[package.extras] -doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-jquery"] -test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (<0.22)"] - -[[package]] -name = "certifi" -version = "2023.7.22" -description = "Python package for providing Mozilla's CA Bundle." -category = "main" -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, - {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, -] - -[[package]] -name = "cfgv" -version = "3.3.1" -description = "Validate configuration and produce human readable error messages." 
-category = "dev" -optional = false -python-versions = ">=3.6.1" -files = [ - {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, - {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, -] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -category = "dev" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "coverage" -version = "7.2.7" -description = "Code coverage measurement for Python" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, - {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, - {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, - {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, - {file = 
"coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, - {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, - {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, - {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, - {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, - {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, - {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, - {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, - {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, - {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", 
hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, - {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, - {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, - {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, - {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, -] - -[package.extras] -toml = ["tomli"] - -[[package]] -name = "dateparser" -version = "1.1.8" -description = "Date parsing library designed to parse dates from HTML pages" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "dateparser-1.1.8-py2.py3-none-any.whl", hash = "sha256:070b29b5bbf4b1ec2cd51c96ea040dc68a614de703910a91ad1abba18f9f379f"}, - {file = "dateparser-1.1.8.tar.gz", hash = "sha256:86b8b7517efcc558f085a142cdb7620f0921543fcabdb538c8a4c4001d8178e3"}, -] - -[package.dependencies] -python-dateutil = "*" -pytz = "*" -regex = "<2019.02.19 || >2019.02.19,<2021.8.27 || >2021.8.27" -tzlocal = "*" - -[package.extras] -calendars = ["convertdate", "hijri-converter"] -fasttext = ["fasttext"] -langdetect = ["langdetect"] - -[[package]] -name = "distlib" -version = "0.3.7" -description = "Distribution utilities" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, - {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, -] - -[[package]] -name = "filelock" -version = "3.12.2" -description = "A platform independent file lock." 
-category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, - {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, -] - -[package.extras] -docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] - -[[package]] -name = "h11" -version = "0.14.0" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] - -[[package]] -name = "httpcore" -version = "0.16.3" -description = "A minimal low-level HTTP client." -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"}, - {file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"}, -] - -[package.dependencies] -anyio = ">=3.0,<5.0" -certifi = "*" -h11 = ">=0.13,<0.15" -sniffio = ">=1.0.0,<2.0.0" - -[package.extras] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] - -[[package]] -name = "httpx" -version = "0.23.3" -description = "The next generation HTTP client." 
-category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "httpx-0.23.3-py3-none-any.whl", hash = "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6"}, - {file = "httpx-0.23.3.tar.gz", hash = "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9"}, -] - -[package.dependencies] -certifi = "*" -httpcore = ">=0.15.0,<0.17.0" -rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} -sniffio = "*" - -[package.extras] -brotli = ["brotli", "brotlicffi"] -cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] - -[[package]] -name = "identify" -version = "2.5.26" -description = "File identification library for Python" -category = "dev" -optional = false -python-versions = ">=3.8" -files = [ - {file = "identify-2.5.26-py2.py3-none-any.whl", hash = "sha256:c22a8ead0d4ca11f1edd6c9418c3220669b3b7533ada0a0ffa6cc0ef85cf9b54"}, - {file = "identify-2.5.26.tar.gz", hash = "sha256:7243800bce2f58404ed41b7c002e53d4d22bcf3ae1b7900c2d7aefd95394bf7f"}, -] - -[package.extras] -license = ["ukkonen"] - -[[package]] -name = "idna" -version = "3.4" -description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = 
"sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "nodeenv" -version = "1.8.0" -description = "Node.js virtual environment builder" -category = "dev" -optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" -files = [ - {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, - {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, -] - -[package.dependencies] -setuptools = "*" - -[[package]] -name = "packaging" -version = "23.1" -description = "Core utilities for Python packages" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, -] - -[[package]] -name = "platformdirs" -version = "3.10.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, - {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, -] - -[package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] - -[[package]] -name = "pluggy" -version = "1.2.0" -description = "plugin and hook calling mechanisms for python" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, - {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "pre-commit" -version = "3.3.3" -description = "A framework for managing and maintaining multi-language pre-commit hooks." 
-category = "dev" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pre_commit-3.3.3-py2.py3-none-any.whl", hash = "sha256:10badb65d6a38caff29703362271d7dca483d01da88f9d7e05d0b97171c136cb"}, - {file = "pre_commit-3.3.3.tar.gz", hash = "sha256:a2256f489cd913d575c145132ae196fe335da32d91a8294b7afe6622335dd023"}, -] - -[package.dependencies] -cfgv = ">=2.0.0" -identify = ">=1.0.0" -nodeenv = ">=0.11.1" -pyyaml = ">=5.1" -virtualenv = ">=20.10.0" - -[[package]] -name = "pydantic" -version = "2.4.2" -description = "Data validation using Python type hints" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pydantic-2.4.2-py3-none-any.whl", hash = "sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1"}, - {file = "pydantic-2.4.2.tar.gz", hash = "sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7"}, -] - -[package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.10.1" -typing-extensions = ">=4.6.1" - -[package.extras] -email = ["email-validator (>=2.0.0)"] - -[[package]] -name = "pydantic-core" -version = "2.10.1" -description = "" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pydantic_core-2.10.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:d64728ee14e667ba27c66314b7d880b8eeb050e58ffc5fec3b7a109f8cddbd63"}, - {file = "pydantic_core-2.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:48525933fea744a3e7464c19bfede85df4aba79ce90c60b94d8b6e1eddd67096"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef337945bbd76cce390d1b2496ccf9f90b1c1242a3a7bc242ca4a9fc5993427a"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1392e0638af203cee360495fd2cfdd6054711f2db5175b6e9c3c461b76f5175"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:0675ba5d22de54d07bccde38997e780044dcfa9a71aac9fd7d4d7a1d2e3e65f7"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:128552af70a64660f21cb0eb4876cbdadf1a1f9d5de820fed6421fa8de07c893"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f6e6aed5818c264412ac0598b581a002a9f050cb2637a84979859e70197aa9e"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ecaac27da855b8d73f92123e5f03612b04c5632fd0a476e469dfc47cd37d6b2e"}, - {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b3c01c2fb081fced3bbb3da78510693dc7121bb893a1f0f5f4b48013201f362e"}, - {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:92f675fefa977625105708492850bcbc1182bfc3e997f8eecb866d1927c98ae6"}, - {file = "pydantic_core-2.10.1-cp310-none-win32.whl", hash = "sha256:420a692b547736a8d8703c39ea935ab5d8f0d2573f8f123b0a294e49a73f214b"}, - {file = "pydantic_core-2.10.1-cp310-none-win_amd64.whl", hash = "sha256:0880e239827b4b5b3e2ce05e6b766a7414e5f5aedc4523be6b68cfbc7f61c5d0"}, - {file = "pydantic_core-2.10.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:073d4a470b195d2b2245d0343569aac7e979d3a0dcce6c7d2af6d8a920ad0bea"}, - {file = "pydantic_core-2.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:600d04a7b342363058b9190d4e929a8e2e715c5682a70cc37d5ded1e0dd370b4"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39215d809470f4c8d1881758575b2abfb80174a9e8daf8f33b1d4379357e417c"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eeb3d3d6b399ffe55f9a04e09e635554012f1980696d6b0aca3e6cf42a17a03b"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:a7a7902bf75779bc12ccfc508bfb7a4c47063f748ea3de87135d433a4cca7a2f"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3625578b6010c65964d177626fde80cf60d7f2e297d56b925cb5cdeda6e9925a"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:caa48fc31fc7243e50188197b5f0c4228956f97b954f76da157aae7f67269ae8"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:07ec6d7d929ae9c68f716195ce15e745b3e8fa122fc67698ac6498d802ed0fa4"}, - {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6f31a17acede6a8cd1ae2d123ce04d8cca74056c9d456075f4f6f85de055607"}, - {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d8f1ebca515a03e5654f88411420fea6380fc841d1bea08effb28184e3d4899f"}, - {file = "pydantic_core-2.10.1-cp311-none-win32.whl", hash = "sha256:6db2eb9654a85ada248afa5a6db5ff1cf0f7b16043a6b070adc4a5be68c716d6"}, - {file = "pydantic_core-2.10.1-cp311-none-win_amd64.whl", hash = "sha256:4a5be350f922430997f240d25f8219f93b0c81e15f7b30b868b2fddfc2d05f27"}, - {file = "pydantic_core-2.10.1-cp311-none-win_arm64.whl", hash = "sha256:5fdb39f67c779b183b0c853cd6b45f7db84b84e0571b3ef1c89cdb1dfc367325"}, - {file = "pydantic_core-2.10.1-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:b1f22a9ab44de5f082216270552aa54259db20189e68fc12484873d926426921"}, - {file = "pydantic_core-2.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8572cadbf4cfa95fb4187775b5ade2eaa93511f07947b38f4cd67cf10783b118"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db9a28c063c7c00844ae42a80203eb6d2d6bbb97070cfa00194dff40e6f545ab"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e2a35baa428181cb2270a15864ec6286822d3576f2ed0f4cd7f0c1708472aff"}, - {file = 
"pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05560ab976012bf40f25d5225a58bfa649bb897b87192a36c6fef1ab132540d7"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6495008733c7521a89422d7a68efa0a0122c99a5861f06020ef5b1f51f9ba7c"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ac492c686defc8e6133e3a2d9eaf5261b3df26b8ae97450c1647286750b901"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8282bab177a9a3081fd3d0a0175a07a1e2bfb7fcbbd949519ea0980f8a07144d"}, - {file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:aafdb89fdeb5fe165043896817eccd6434aee124d5ee9b354f92cd574ba5e78f"}, - {file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f6defd966ca3b187ec6c366604e9296f585021d922e666b99c47e78738b5666c"}, - {file = "pydantic_core-2.10.1-cp312-none-win32.whl", hash = "sha256:7c4d1894fe112b0864c1fa75dffa045720a194b227bed12f4be7f6045b25209f"}, - {file = "pydantic_core-2.10.1-cp312-none-win_amd64.whl", hash = "sha256:5994985da903d0b8a08e4935c46ed8daf5be1cf217489e673910951dc533d430"}, - {file = "pydantic_core-2.10.1-cp312-none-win_arm64.whl", hash = "sha256:0d8a8adef23d86d8eceed3e32e9cca8879c7481c183f84ed1a8edc7df073af94"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9badf8d45171d92387410b04639d73811b785b5161ecadabf056ea14d62d4ede"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:ebedb45b9feb7258fac0a268a3f6bec0a2ea4d9558f3d6f813f02ff3a6dc6698"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfe1090245c078720d250d19cb05d67e21a9cd7c257698ef139bc41cf6c27b4f"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:e357571bb0efd65fd55f18db0a2fb0ed89d0bb1d41d906b138f088933ae618bb"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b3dcd587b69bbf54fc04ca157c2323b8911033e827fffaecf0cafa5a892a0904"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c120c9ce3b163b985a3b966bb701114beb1da4b0468b9b236fc754783d85aa3"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15d6bca84ffc966cc9976b09a18cf9543ed4d4ecbd97e7086f9ce9327ea48891"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cabb9710f09d5d2e9e2748c3e3e20d991a4c5f96ed8f1132518f54ab2967221"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:82f55187a5bebae7d81d35b1e9aaea5e169d44819789837cdd4720d768c55d15"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1d40f55222b233e98e3921df7811c27567f0e1a4411b93d4c5c0f4ce131bc42f"}, - {file = "pydantic_core-2.10.1-cp37-none-win32.whl", hash = "sha256:14e09ff0b8fe6e46b93d36a878f6e4a3a98ba5303c76bb8e716f4878a3bee92c"}, - {file = "pydantic_core-2.10.1-cp37-none-win_amd64.whl", hash = "sha256:1396e81b83516b9d5c9e26a924fa69164156c148c717131f54f586485ac3c15e"}, - {file = "pydantic_core-2.10.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6835451b57c1b467b95ffb03a38bb75b52fb4dc2762bb1d9dbed8de31ea7d0fc"}, - {file = "pydantic_core-2.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b00bc4619f60c853556b35f83731bd817f989cba3e97dc792bb8c97941b8053a"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fa467fd300a6f046bdb248d40cd015b21b7576c168a6bb20aa22e595c8ffcdd"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:d99277877daf2efe074eae6338453a4ed54a2d93fb4678ddfe1209a0c93a2468"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa7db7558607afeccb33c0e4bf1c9a9a835e26599e76af6fe2fcea45904083a6"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aad7bd686363d1ce4ee930ad39f14e1673248373f4a9d74d2b9554f06199fb58"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:443fed67d33aa85357464f297e3d26e570267d1af6fef1c21ca50921d2976302"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:042462d8d6ba707fd3ce9649e7bf268633a41018d6a998fb5fbacb7e928a183e"}, - {file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ecdbde46235f3d560b18be0cb706c8e8ad1b965e5c13bbba7450c86064e96561"}, - {file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ed550ed05540c03f0e69e6d74ad58d026de61b9eaebebbaaf8873e585cbb18de"}, - {file = "pydantic_core-2.10.1-cp38-none-win32.whl", hash = "sha256:8cdbbd92154db2fec4ec973d45c565e767ddc20aa6dbaf50142676484cbff8ee"}, - {file = "pydantic_core-2.10.1-cp38-none-win_amd64.whl", hash = "sha256:9f6f3e2598604956480f6c8aa24a3384dbf6509fe995d97f6ca6103bb8c2534e"}, - {file = "pydantic_core-2.10.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:655f8f4c8d6a5963c9a0687793da37b9b681d9ad06f29438a3b2326d4e6b7970"}, - {file = "pydantic_core-2.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e570ffeb2170e116a5b17e83f19911020ac79d19c96f320cbfa1fa96b470185b"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64322bfa13e44c6c30c518729ef08fda6026b96d5c0be724b3c4ae4da939f875"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:485a91abe3a07c3a8d1e082ba29254eea3e2bb13cbbd4351ea4e5a21912cc9b0"}, - 
{file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7c2b8eb9fc872e68b46eeaf835e86bccc3a58ba57d0eedc109cbb14177be531"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5cb87bdc2e5f620693148b5f8f842d293cae46c5f15a1b1bf7ceeed324a740c"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25bd966103890ccfa028841a8f30cebcf5875eeac8c4bde4fe221364c92f0c9a"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f323306d0556351735b54acbf82904fe30a27b6a7147153cbe6e19aaaa2aa429"}, - {file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0c27f38dc4fbf07b358b2bc90edf35e82d1703e22ff2efa4af4ad5de1b3833e7"}, - {file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f1365e032a477c1430cfe0cf2856679529a2331426f8081172c4a74186f1d595"}, - {file = "pydantic_core-2.10.1-cp39-none-win32.whl", hash = "sha256:a1c311fd06ab3b10805abb72109f01a134019739bd3286b8ae1bc2fc4e50c07a"}, - {file = "pydantic_core-2.10.1-cp39-none-win_amd64.whl", hash = "sha256:ae8a8843b11dc0b03b57b52793e391f0122e740de3df1474814c700d2622950a"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d43002441932f9a9ea5d6f9efaa2e21458221a3a4b417a14027a1d530201ef1b"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fcb83175cc4936a5425dde3356f079ae03c0802bbdf8ff82c035f8a54b333521"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:962ed72424bf1f72334e2f1e61b68f16c0e596f024ca7ac5daf229f7c26e4208"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cf5bb4dd67f20f3bbc1209ef572a259027c49e5ff694fa56bed62959b41e1f9"}, - {file = 
"pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e544246b859f17373bed915182ab841b80849ed9cf23f1f07b73b7c58baee5fb"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c0877239307b7e69d025b73774e88e86ce82f6ba6adf98f41069d5b0b78bd1bf"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:53df009d1e1ba40f696f8995683e067e3967101d4bb4ea6f667931b7d4a01357"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a1254357f7e4c82e77c348dabf2d55f1d14d19d91ff025004775e70a6ef40ada"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:524ff0ca3baea164d6d93a32c58ac79eca9f6cf713586fdc0adb66a8cdeab96a"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f0ac9fb8608dbc6eaf17956bf623c9119b4db7dbb511650910a82e261e6600f"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:320f14bd4542a04ab23747ff2c8a778bde727158b606e2661349557f0770711e"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63974d168b6233b4ed6a0046296803cb13c56637a7b8106564ab575926572a55"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:417243bf599ba1f1fef2bb8c543ceb918676954734e2dcb82bf162ae9d7bd514"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dda81e5ec82485155a19d9624cfcca9be88a405e2857354e5b089c2a982144b2"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:14cfbb00959259e15d684505263d5a21732b31248a5dd4941f73a3be233865b9"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:631cb7415225954fdcc2a024119101946793e5923f6c4d73a5914d27eb3d3a05"}, - {file = 
"pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:bec7dd208a4182e99c5b6c501ce0b1f49de2802448d4056091f8e630b28e9a52"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:149b8a07712f45b332faee1a2258d8ef1fb4a36f88c0c17cb687f205c5dc6e7d"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d966c47f9dd73c2d32a809d2be529112d509321c5310ebf54076812e6ecd884"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7eb037106f5c6b3b0b864ad226b0b7ab58157124161d48e4b30c4a43fef8bc4b"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:154ea7c52e32dce13065dbb20a4a6f0cc012b4f667ac90d648d36b12007fa9f7"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e562617a45b5a9da5be4abe72b971d4f00bf8555eb29bb91ec2ef2be348cd132"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f23b55eb5464468f9e0e9a9935ce3ed2a870608d5f534025cd5536bca25b1402"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:e9121b4009339b0f751955baf4543a0bfd6bc3f8188f8056b1a25a2d45099934"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:0523aeb76e03f753b58be33b26540880bac5aa54422e4462404c432230543f33"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e0e2959ef5d5b8dc9ef21e1a305a21a36e254e6a34432d00c72a92fdc5ecda5"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da01bec0a26befab4898ed83b362993c844b9a607a86add78604186297eb047e"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2e9072d71c1f6cfc79a36d4484c82823c560e6f5599c43c1ca6b5cdbd54f881"}, 
- {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f36a3489d9e28fe4b67be9992a23029c3cec0babc3bd9afb39f49844a8c721c5"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f64f82cc3443149292b32387086d02a6c7fb39b8781563e0ca7b8d7d9cf72bd7"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b4a6db486ac8e99ae696e09efc8b2b9fea67b63c8f88ba7a1a16c24a057a0776"}, - {file = "pydantic_core-2.10.1.tar.gz", hash = "sha256:0f8682dbdd2f67f8e1edddcbffcc29f60a6182b4901c367fc8c1c40d30bb0a82"}, -] - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" - -[[package]] -name = "pytest" -version = "7.4.0" -description = "pytest: simple powerful testing with Python" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, - {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" - -[package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "pytest-asyncio" -version = "0.21.1" -description = "Pytest support for asyncio" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"}, - {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"}, -] - -[package.dependencies] -pytest = ">=7.0.0" - -[package.extras] -docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] -testing = 
["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] - -[[package]] -name = "pytest-cov" -version = "4.1.0" -description = "Pytest plugin for measuring coverage." -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, - {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, -] - -[package.dependencies] -coverage = {version = ">=5.2.1", extras = ["toml"]} -pytest = ">=4.6" - -[package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] - -[[package]] -name = "pytest-mock" -version = "3.11.1" -description = "Thin-wrapper around the mock package for easier use with pytest" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-mock-3.11.1.tar.gz", hash = "sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f"}, - {file = "pytest_mock-3.11.1-py3-none-any.whl", hash = "sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39"}, -] - -[package.dependencies] -pytest = ">=5.0" - -[package.extras] -dev = ["pre-commit", "pytest-asyncio", "tox"] - -[[package]] -name = "pytest-socket" -version = "0.6.0" -description = "Pytest Plugin to disable socket calls during tests" -category = "dev" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "pytest_socket-0.6.0-py3-none-any.whl", hash = "sha256:cca72f134ff01e0023c402e78d31b32e68da3efdf3493bf7788f8eba86a6824c"}, - {file = "pytest_socket-0.6.0.tar.gz", hash = "sha256:363c1d67228315d4fc7912f1aabfd570de29d0e3db6217d61db5728adacd7138"}, -] - -[package.dependencies] -pytest = ">=3.6.3" - -[[package]] -name = "python-dateutil" -version = "2.8.2" -description = "Extensions to the standard Python datetime module" -category = "main" -optional = 
false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "pytz" -version = "2023.3" -description = "World timezone definitions, modern and historical" -category = "main" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, - {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.1" -description = "YAML parser and emitter for Python" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = 
"sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = 
"sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "regex" -version = "2023.6.3" -description = "Alternative regular expression module, to replace re." -category = "main" -optional = false -python-versions = ">=3.6" -files = [ - {file = "regex-2023.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:824bf3ac11001849aec3fa1d69abcb67aac3e150a933963fb12bda5151fe1bfd"}, - {file = "regex-2023.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05ed27acdf4465c95826962528f9e8d41dbf9b1aa8531a387dee6ed215a3e9ef"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b49c764f88a79160fa64f9a7b425620e87c9f46095ef9c9920542ab2495c8bc"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8e3f1316c2293e5469f8f09dc2d76efb6c3982d3da91ba95061a7e69489a14ef"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43e1dd9d12df9004246bacb79a0e5886b3b6071b32e41f83b0acbf293f820ee8"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4959e8bcbfda5146477d21c3a8ad81b185cd252f3d0d6e4724a5ef11c012fb06"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af4dd387354dc83a3bff67127a124c21116feb0d2ef536805c454721c5d7993d"}, - {file = 
"regex-2023.6.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2239d95d8e243658b8dbb36b12bd10c33ad6e6933a54d36ff053713f129aa536"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:890e5a11c97cf0d0c550eb661b937a1e45431ffa79803b942a057c4fb12a2da2"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a8105e9af3b029f243ab11ad47c19b566482c150c754e4c717900a798806b222"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:25be746a8ec7bc7b082783216de8e9473803706723b3f6bef34b3d0ed03d57e2"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3676f1dd082be28b1266c93f618ee07741b704ab7b68501a173ce7d8d0d0ca18"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:10cb847aeb1728412c666ab2e2000ba6f174f25b2bdc7292e7dd71b16db07568"}, - {file = "regex-2023.6.3-cp310-cp310-win32.whl", hash = "sha256:dbbbfce33cd98f97f6bffb17801b0576e653f4fdb1d399b2ea89638bc8d08ae1"}, - {file = "regex-2023.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:c5f8037000eb21e4823aa485149f2299eb589f8d1fe4b448036d230c3f4e68e0"}, - {file = "regex-2023.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c123f662be8ec5ab4ea72ea300359023a5d1df095b7ead76fedcd8babbedf969"}, - {file = "regex-2023.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9edcbad1f8a407e450fbac88d89e04e0b99a08473f666a3f3de0fd292badb6aa"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcba6dae7de533c876255317c11f3abe4907ba7d9aa15d13e3d9710d4315ec0e"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29cdd471ebf9e0f2fb3cac165efedc3c58db841d83a518b082077e612d3ee5df"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:12b74fbbf6cbbf9dbce20eb9b5879469e97aeeaa874145517563cca4029db65c"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c29ca1bd61b16b67be247be87390ef1d1ef702800f91fbd1991f5c4421ebae8"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77f09bc4b55d4bf7cc5eba785d87001d6757b7c9eec237fe2af57aba1a071d9"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ea353ecb6ab5f7e7d2f4372b1e779796ebd7b37352d290096978fea83c4dba0c"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:10590510780b7541969287512d1b43f19f965c2ece6c9b1c00fc367b29d8dce7"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e2fbd6236aae3b7f9d514312cdb58e6494ee1c76a9948adde6eba33eb1c4264f"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:6b2675068c8b56f6bfd5a2bda55b8accbb96c02fd563704732fd1c95e2083461"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74419d2b50ecb98360cfaa2974da8689cb3b45b9deff0dcf489c0d333bcc1477"}, - {file = "regex-2023.6.3-cp311-cp311-win32.whl", hash = "sha256:fb5ec16523dc573a4b277663a2b5a364e2099902d3944c9419a40ebd56a118f9"}, - {file = "regex-2023.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:09e4a1a6acc39294a36b7338819b10baceb227f7f7dbbea0506d419b5a1dd8af"}, - {file = "regex-2023.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0654bca0cdf28a5956c83839162692725159f4cda8d63e0911a2c0dc76166525"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:463b6a3ceb5ca952e66550a4532cef94c9a0c80dc156c4cc343041951aec1697"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87b2a5bb5e78ee0ad1de71c664d6eb536dc3947a46a69182a90f4410f5e3f7dd"}, - {file = 
"regex-2023.6.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6343c6928282c1f6a9db41f5fd551662310e8774c0e5ebccb767002fcf663ca9"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6192d5af2ccd2a38877bfef086d35e6659566a335b1492786ff254c168b1693"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74390d18c75054947e4194019077e243c06fbb62e541d8817a0fa822ea310c14"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:742e19a90d9bb2f4a6cf2862b8b06dea5e09b96c9f2df1779e53432d7275331f"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8abbc5d54ea0ee80e37fef009e3cec5dafd722ed3c829126253d3e22f3846f1e"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c2b867c17a7a7ae44c43ebbeb1b5ff406b3e8d5b3e14662683e5e66e6cc868d3"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d831c2f8ff278179705ca59f7e8524069c1a989e716a1874d6d1aab6119d91d1"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ee2d1a9a253b1729bb2de27d41f696ae893507c7db224436abe83ee25356f5c1"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:61474f0b41fe1a80e8dfa70f70ea1e047387b7cd01c85ec88fa44f5d7561d787"}, - {file = "regex-2023.6.3-cp36-cp36m-win32.whl", hash = "sha256:0b71e63226e393b534105fcbdd8740410dc6b0854c2bfa39bbda6b0d40e59a54"}, - {file = "regex-2023.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bbb02fd4462f37060122e5acacec78e49c0fbb303c30dd49c7f493cf21fc5b27"}, - {file = "regex-2023.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b862c2b9d5ae38a68b92e215b93f98d4c5e9454fa36aae4450f61dd33ff48487"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:976d7a304b59ede34ca2921305b57356694f9e6879db323fd90a80f865d355a3"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:83320a09188e0e6c39088355d423aa9d056ad57a0b6c6381b300ec1a04ec3d16"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9427a399501818a7564f8c90eced1e9e20709ece36be701f394ada99890ea4b3"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178bbc1b2ec40eaca599d13c092079bf529679bf0371c602edaa555e10b41c3"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:837328d14cde912af625d5f303ec29f7e28cdab588674897baafaf505341f2fc"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2d44dc13229905ae96dd2ae2dd7cebf824ee92bc52e8cf03dcead37d926da019"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d54af539295392611e7efbe94e827311eb8b29668e2b3f4cadcfe6f46df9c777"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7117d10690c38a622e54c432dfbbd3cbd92f09401d622902c32f6d377e2300ee"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bb60b503ec8a6e4e3e03a681072fa3a5adcbfa5479fa2d898ae2b4a8e24c4591"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:65ba8603753cec91c71de423a943ba506363b0e5c3fdb913ef8f9caa14b2c7e0"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:271f0bdba3c70b58e6f500b205d10a36fb4b58bd06ac61381b68de66442efddb"}, - {file = "regex-2023.6.3-cp37-cp37m-win32.whl", hash = "sha256:9beb322958aaca059f34975b0df135181f2e5d7a13b84d3e0e45434749cb20f7"}, - {file = "regex-2023.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fea75c3710d4f31389eed3c02f62d0b66a9da282521075061ce875eb5300cf23"}, - 
{file = "regex-2023.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8f56fcb7ff7bf7404becdfc60b1e81a6d0561807051fd2f1860b0d0348156a07"}, - {file = "regex-2023.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d2da3abc88711bce7557412310dfa50327d5769a31d1c894b58eb256459dc289"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a99b50300df5add73d307cf66abea093304a07eb017bce94f01e795090dea87c"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5708089ed5b40a7b2dc561e0c8baa9535b77771b64a8330b684823cfd5116036"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:687ea9d78a4b1cf82f8479cab23678aff723108df3edeac098e5b2498879f4a7"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d3850beab9f527f06ccc94b446c864059c57651b3f911fddb8d9d3ec1d1b25d"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8915cc96abeb8983cea1df3c939e3c6e1ac778340c17732eb63bb96247b91d2"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:841d6e0e5663d4c7b4c8099c9997be748677d46cbf43f9f471150e560791f7ff"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9edce5281f965cf135e19840f4d93d55b3835122aa76ccacfd389e880ba4cf82"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b956231ebdc45f5b7a2e1f90f66a12be9610ce775fe1b1d50414aac1e9206c06"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:36efeba71c6539d23c4643be88295ce8c82c88bbd7c65e8a24081d2ca123da3f"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:cf67ca618b4fd34aee78740bea954d7c69fdda419eb208c2c0c7060bb822d747"}, - {file = 
"regex-2023.6.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b4598b1897837067a57b08147a68ac026c1e73b31ef6e36deeeb1fa60b2933c9"}, - {file = "regex-2023.6.3-cp38-cp38-win32.whl", hash = "sha256:f415f802fbcafed5dcc694c13b1292f07fe0befdb94aa8a52905bd115ff41e88"}, - {file = "regex-2023.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:d4f03bb71d482f979bda92e1427f3ec9b220e62a7dd337af0aa6b47bf4498f72"}, - {file = "regex-2023.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccf91346b7bd20c790310c4147eee6ed495a54ddb6737162a36ce9dbef3e4751"}, - {file = "regex-2023.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b28f5024a3a041009eb4c333863d7894d191215b39576535c6734cd88b0fcb68"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0bb18053dfcfed432cc3ac632b5e5e5c5b7e55fb3f8090e867bfd9b054dbcbf"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a5bfb3004f2144a084a16ce19ca56b8ac46e6fd0651f54269fc9e230edb5e4a"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c6b48d0fa50d8f4df3daf451be7f9689c2bde1a52b1225c5926e3f54b6a9ed1"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051da80e6eeb6e239e394ae60704d2b566aa6a7aed6f2890a7967307267a5dc6"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4c3b7fa4cdaa69268748665a1a6ff70c014d39bb69c50fda64b396c9116cf77"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:457b6cce21bee41ac292d6753d5e94dcbc5c9e3e3a834da285b0bde7aa4a11e9"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aad51907d74fc183033ad796dd4c2e080d1adcc4fd3c0fd4fd499f30c03011cd"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:0385e73da22363778ef2324950e08b689abdf0b108a7d8decb403ad7f5191938"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c6a57b742133830eec44d9b2290daf5cbe0a2f1d6acee1b3c7b1c7b2f3606df7"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3e5219bf9e75993d73ab3d25985c857c77e614525fac9ae02b1bebd92f7cecac"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e5087a3c59eef624a4591ef9eaa6e9a8d8a94c779dade95d27c0bc24650261cd"}, - {file = "regex-2023.6.3-cp39-cp39-win32.whl", hash = "sha256:20326216cc2afe69b6e98528160b225d72f85ab080cbdf0b11528cbbaba2248f"}, - {file = "regex-2023.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:bdff5eab10e59cf26bc479f565e25ed71a7d041d1ded04ccf9aee1d9f208487a"}, - {file = "regex-2023.6.3.tar.gz", hash = "sha256:72d1a25bf36d2050ceb35b517afe13864865268dfb45910e2e17a84be6cbfeb0"}, -] - -[[package]] -name = "rfc3986" -version = "1.5.0" -description = "Validating URI References per RFC 3986" -category = "main" -optional = false -python-versions = "*" -files = [ - {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, - {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, -] - -[package.dependencies] -idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} - -[package.extras] -idna2008 = ["idna"] - -[[package]] -name = "setuptools" -version = "68.0.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, - {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, -] - -[package.extras] -docs = ["furo", 
"jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "sniffio" -version = "1.3.0" -description = "Sniff out which async library your code is running under" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, -] - -[[package]] -name = "types-dateparser" -version = "1.1.4.10" -description = "Typing stubs for dateparser" -category = "dev" -optional = false -python-versions = "*" 
-files = [ - {file = "types-dateparser-1.1.4.10.tar.gz", hash = "sha256:f9f147147a897ecb99491f59772ab1be1b7a0842fbc22e85ca37c6995b563b52"}, - {file = "types_dateparser-1.1.4.10-py3-none-any.whl", hash = "sha256:b85c664b349412ef0e09afd56c3c554a1d2fc206e45f1222c1001d23cb2fb66d"}, -] - -[[package]] -name = "typing-extensions" -version = "4.7.1" -description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, - {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, -] - -[[package]] -name = "tzdata" -version = "2023.3" -description = "Provider of IANA time zone data" -category = "main" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, - {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, -] - -[[package]] -name = "tzlocal" -version = "5.0.1" -description = "tzinfo object for the local timezone" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tzlocal-5.0.1-py3-none-any.whl", hash = "sha256:f3596e180296aaf2dbd97d124fe76ae3a0e3d32b258447de7b939b3fd4be992f"}, - {file = "tzlocal-5.0.1.tar.gz", hash = "sha256:46eb99ad4bdb71f3f72b7d24f4267753e240944ecfc16f25d2719ba89827a803"}, -] - -[package.dependencies] -tzdata = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -devenv = ["black", "check-manifest", "flake8", "pyroma", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] - -[[package]] -name = "virtualenv" -version = "20.24.2" -description = "Virtual Python Environment builder" -category = "dev" -optional = 
false -python-versions = ">=3.7" -files = [ - {file = "virtualenv-20.24.2-py3-none-any.whl", hash = "sha256:43a3052be36080548bdee0b42919c88072037d50d56c28bd3f853cbe92b953ff"}, - {file = "virtualenv-20.24.2.tar.gz", hash = "sha256:fd8a78f46f6b99a67b7ec5cf73f92357891a7b3a40fd97637c27f854aae3b9e0"}, -] - -[package.dependencies] -distlib = ">=0.3.7,<1" -filelock = ">=3.12.2,<4" -platformdirs = ">=3.9.1,<4" - -[package.extras] -docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] - -[metadata] -lock-version = "2.0" -python-versions = "^3.11" -content-hash = "7e89b57dfca5b17aaf2ddedf761be53f2528ac8f0df321ef8f9557348b576f06" diff --git a/pyproject.toml b/pyproject.toml deleted file mode 100644 index 66b3deb..0000000 --- a/pyproject.toml +++ /dev/null @@ -1,46 +0,0 @@ -[tool.poetry] -name = "container-retention-policy" -version = "1.0.0" # This version doesn't matter - only using Poetry for dependencies -description = "Lets you create a retention policy for GHCR hosted container images" -authors = ["Sondre Lillebø Gundersen "] -license = "BSD-3" - -[tool.poetry.dependencies] -python = "^3.11" -httpx = "^0.23" -dateparser = "^1.0.0" -pydantic = "^2.4.2" - -[tool.poetry.group.dev.dependencies] -pre-commit = "^3.0.4" -types-dateparser = "*" -pytest = "*" -pytest-mock = "*" -pytest-asyncio = "*" -pytest-cov = "*" -coverage = {extras = ["toml"], version = "*"} -pytest-socket = "*" - -[build-system] -requires = ["poetry-core>=1.0.0"] -build-backend = "poetry.core.masonry.api" - -[tool.black] -line-length = 120 -skip-string-normalization = true - 
-[tool.isort] -profile = "black" -line_length = 120 - -[tool.coverage.run] -omit = [] -branch = true - -[tool.coverage.report] -show_missing = true -skip_covered = true -exclude_lines = [ - 'if TYPE_CHECKING:', - "if __name__ == '__main__':", -] diff --git a/rustfmt.toml b/rustfmt.toml new file mode 100644 index 0000000..7530651 --- /dev/null +++ b/rustfmt.toml @@ -0,0 +1 @@ +max_width = 120 diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 30af2ea..0000000 --- a/setup.cfg +++ /dev/null @@ -1,34 +0,0 @@ -[tool:pytest] -testpaths = main_tests.py -addopts = - --cov=main - --cov-report term-missing - # forbid external i/o in tests - --allow-hosts=127.0.0.1 -asyncio_mode = auto - -[flake8] -exclude = main_tests.py -max-line-length = 140 -ignore = E203, D100, D101, D200, W503 -enable-extensions = TC, TC1 -pytest-mark-no-parentheses=true -pytest-fixture-no-parentheses=true -pytest-parametrize-names-type=csv -type-checking-pydantic-enabled=true - -[mypy] -python_version = 3.10 -show_error_codes = True -warn_unused_ignores = True -strict_optional = True -incremental = True -ignore_missing_imports = True -warn_redundant_casts = True -warn_unused_configs = True -warn_no_return = False -disallow_untyped_defs = True -local_partial_types = True - -[mypy-main_tests.*] -ignore_errors = True diff --git a/src/cli/args.rs b/src/cli/args.rs new file mode 100644 index 0000000..276641a --- /dev/null +++ b/src/cli/args.rs @@ -0,0 +1,247 @@ +use std::convert::Infallible; + +use crate::cli::models::{Account, TagSelection, Timestamp, Token}; +use clap::ArgAction; +use clap::Parser; +use humantime::Duration; +use regex::Regex; +use tracing::Level; + +pub fn vec_of_string_from_str(value: &str) -> Result, Infallible> { + let trimmed = value.trim_matches('"').trim_matches('\''); // Remove surrounding quotes + if trimmed.is_empty() { + return Ok(Vec::new()); + } + Ok(trimmed + .split(|c: char| c == ',' || c.is_whitespace()) + .filter_map(|t| { + let s = t.trim().to_string(); + 
if s.is_empty() { + None + } else { + Some(s) + } + }) + .collect::>()) +} + +pub fn try_parse_shas_as_list(s: &str) -> Result, String> { + let shas = vec_of_string_from_str(s).unwrap(); + let re = Regex::new(r"^sha256:[0-9a-fA-F]{64}$").unwrap(); + for sha in &shas { + if !re.is_match(sha) { + return Err(format!("Invalid image SHA received: {sha}")); + } + } + Ok(shas) +} + +#[derive(Parser)] +#[clap(version, about, long_about = None)] +#[clap(propagate_version = true)] +pub struct Input { + /// The account to delete package versions for + #[arg(long, value_parser = Account::try_from_str)] + pub account: Account, + + /// The token to use for authentication + #[arg(long, value_parser = Token::try_from_str)] + pub token: Token, + + /// The package names to target + #[arg(long, value_parser = vec_of_string_from_str)] + pub image_names: std::vec::Vec, + + /// The container image tags to target + #[arg(long, value_parser = vec_of_string_from_str)] + pub image_tags: std::vec::Vec, + + /// Package version SHAs to avoid deleting + #[arg(long, value_parser = try_parse_shas_as_list)] + pub shas_to_skip: std::vec::Vec, + + /// Whether to delete tagged or untagged package versions, or both + #[arg(long, value_enum, default_value = "both")] + pub tag_selection: TagSelection, + + /// How many tagged packages to keep, after filtering + #[arg(long, long, default_value = "0")] + pub keep_n_most_recent: u32, + + /// Whether to delete package versions or not + #[arg(long, action(ArgAction::Set), default_value = "false")] + pub dry_run: bool, + + /// Which timestamp to use when considering the cut-off filtering + #[arg(long, value_enum, default_value = "updated_at")] + pub timestamp_to_use: Timestamp, + + /// How old package versions should be before being considered + #[arg(long)] + pub cut_off: Duration, + + /// The log level to use for the tracing subscriber + #[arg(long, global = true, default_value = "info")] + pub(crate) log_level: Level, +} + +#[cfg(test)] +mod tests { + use 
assert_cmd::Command; + use clap::ValueEnum; + use secrecy::Secret; + + use super::*; + + #[test] + fn test_vec_of_string_from_str() { + assert_eq!( + vec_of_string_from_str("foo,bar").unwrap(), + vec!["foo".to_string(), "bar".to_string()] + ); + assert_eq!( + vec_of_string_from_str("foo , bar").unwrap(), + vec!["foo".to_string(), "bar".to_string()] + ); + assert_eq!( + vec_of_string_from_str("foo , bar,baz").unwrap(), + vec!["foo".to_string(), "bar".to_string(), "baz".to_string()] + ); + assert_eq!( + vec_of_string_from_str("foo bar").unwrap(), + vec!["foo".to_string(), "bar".to_string()] + ); + assert_eq!( + vec_of_string_from_str("foo bar baz").unwrap(), + vec!["foo".to_string(), "bar".to_string(), "baz".to_string()] + ); + } + + #[test] + fn test_try_parse_shas_as_list() { + assert_eq!( + try_parse_shas_as_list( + "\ + sha256:86215617a0ea1f77e9f314b45ffd578020935996612fb497239509b151a6f1ba, \ + sha256:17152a70ea10de6ecd804fffed4b5ebd3abc638e8920efb6fab2993c5a77600a \ + sha256:a86523225e8d21faae518a5ea117e06887963a4a9ac123683d91890af092cf03" + ) + .unwrap(), + vec![ + "sha256:86215617a0ea1f77e9f314b45ffd578020935996612fb497239509b151a6f1ba".to_string(), + "sha256:17152a70ea10de6ecd804fffed4b5ebd3abc638e8920efb6fab2993c5a77600a".to_string(), + "sha256:a86523225e8d21faae518a5ea117e06887963a4a9ac123683d91890af092cf03".to_string(), + ] + ); + assert!(try_parse_shas_as_list("a86523225e8d21faae518a5ea117e06887963a4a9ac123683d91890af092cf03").is_err()); + assert!(try_parse_shas_as_list("foo").is_err()); + } + + #[test] + fn parse_timestamp() { + assert_eq!(Timestamp::from_str("updated_at", true).unwrap(), Timestamp::UpdatedAt); + assert_eq!(Timestamp::from_str("created_at", true).unwrap(), Timestamp::CreatedAt); + assert!(Timestamp::from_str("createdAt", true).is_err()); + assert!(Timestamp::from_str("updatedAt", true).is_err()); + assert!(Timestamp::from_str("updated-At", true).is_err()); + assert!(Timestamp::from_str("Created-At", true).is_err()); + } + + #[test] + fn 
parse_tag_selection() { + assert_eq!(TagSelection::from_str("tagged", true).unwrap(), TagSelection::Tagged); + assert_eq!( + TagSelection::from_str("untagged", true).unwrap(), + TagSelection::Untagged + ); + assert_eq!(TagSelection::from_str("both", true).unwrap(), TagSelection::Both); + assert!(TagSelection::from_str("foo", true).is_err()); + } + + #[test] + fn parse_token() { + assert_eq!( + Token::try_from_str("ghs_U4fUiyjT4gUZKJeUEI3AX501oTqIvV0loS62").unwrap(), + Token::Temporal(Secret::new("ghs_U4fUiyjT4gUZKJeUEI3AX501oTqIvV0loS62".to_string())) + ); + assert_eq!( + Token::try_from_str("ghp_sSIL4kMdtzfbfDdm1MC1OU2q5DbRqA3eSszT").unwrap(), + Token::ClassicPersonalAccess(Secret::new("ghp_sSIL4kMdtzfbfDdm1MC1OU2q5DbRqA3eSszT".to_string())) + ); + assert_eq!( + Token::try_from_str("gho_sSIL4kMdtzfbfDdm1MC1OU2q5DbRqA3eSszT").unwrap(), + Token::Oauth(Secret::new("gho_sSIL4kMdtzfbfDdm1MC1OU2q5DbRqA3eSszT".to_string())) + ); + } + + #[test] + fn parse_account() { + assert_eq!(Account::try_from_str("user").unwrap(), Account::User); + assert_eq!( + Account::try_from_str("foo").unwrap(), + Account::Organization("foo".to_string()) + ); + assert!(Account::try_from_str("").is_err()); + assert!(Account::try_from_str(" ").is_err()); + } + + #[test] + fn parse_input() { + let args_permutations = vec![ + vec![ + "--account=user", + "--token=ghs_sSIL4kMdtzfbfDdm1MC1OU2q5DbRqA3eSszT", + "--image-names=foo", + "--image-tags=one", + "--shas-to-skip=", + "--keep-n-most-recent=0", + "--tag-selection=tagged", + "--timestamp-to-use=updated_at", + "--cut-off=1w", + "--dry-run=true", + ], + vec![ + "--account=acme", + "--token=ghp_sSIL4kMdtzfbfDdm1MC1OU2q5DbRqA3eSszT", + "--image-names=\"foo bar\"", + "--image-tags=\"one two\"", + "--shas-to-skip=", + "--keep-n-most-recent=10", + "--tag-selection=untagged", + "--timestamp-to-use=created_at", + "--cut-off=1d", + "--dry-run=true", + ], + vec![ + "--account=foo", + "--token=ghp_sSIL4kMdtzfbfDdm1MC1OU2q5DbRqA3eSszT", + "--image-names=\"foo, 
bar\"", + "--image-tags=\"one, two\"", + "--shas-to-skip=''", + "--keep-n-most-recent=999", + "--tag-selection=both", + "--timestamp-to-use=updated_at", + "--cut-off=1h", + "--dry-run=true", + ], + vec![ + "--account=$;\u{b}\n₭↭", + "--token=ghp_sSIL4kMdtzfbfDdm1MC1OU2q5DbRqA3eSszT", + "--image-names=\"foo, bar\"", + "--image-tags=\"one, two\"", + "--shas-to-skip=''", + "--keep-n-most-recent=2", + "--tag-selection=both", + "--timestamp-to-use=updated_at", + "--cut-off=1h", + "--dry-run=true", + ], + ]; + + for args in args_permutations { + let mut cmd = Command::cargo_bin("container-retention-policy").expect("Failed to load binary"); + + cmd.env("CRP_TEST", "true").args(args).assert().success(); + } + } +} diff --git a/src/cli/mod.rs b/src/cli/mod.rs new file mode 100644 index 0000000..54ef5d5 --- /dev/null +++ b/src/cli/mod.rs @@ -0,0 +1,2 @@ +pub mod args; +pub mod models; diff --git a/src/cli/models.rs b/src/cli/models.rs new file mode 100644 index 0000000..f2a0f01 --- /dev/null +++ b/src/cli/models.rs @@ -0,0 +1,108 @@ +use clap::ValueEnum; +use regex::Regex; +use secrecy::{ExposeSecret, Secret}; +use tracing::debug; + +#[derive(Debug, Clone, ValueEnum, PartialEq)] +#[clap(rename_all = "snake-case")] +pub enum Timestamp { + UpdatedAt, + CreatedAt, +} + +#[derive(Debug, Clone, ValueEnum, PartialEq)] +pub enum TagSelection { + Tagged, + Untagged, + Both, +} + +/// Represents the different tokens the action can use to authenticate towards the GitHub API. +/// +/// See +/// for a list of existing token types. 
+#[derive(Debug, Clone)]
+pub enum Token {
+    ClassicPersonalAccess(Secret<String>),
+    Oauth(Secret<String>),
+    Temporal(Secret<String>),
+}
+
+impl PartialEq for Token {
+    fn eq(&self, other: &Self) -> bool {
+        match self {
+            Self::Temporal(a) => {
+                if let Self::Temporal(b) = other {
+                    a.expose_secret() == b.expose_secret()
+                } else {
+                    false
+                }
+            }
+            Self::ClassicPersonalAccess(a) => {
+                if let Self::ClassicPersonalAccess(b) = other {
+                    a.expose_secret() == b.expose_secret()
+                } else {
+                    false
+                }
+            }
+            Self::Oauth(a) => {
+                if let Self::Oauth(b) = other {
+                    a.expose_secret() == b.expose_secret()
+                } else {
+                    false
+                }
+            }
+        }
+    }
+}
+
+impl Token {
+    pub fn try_from_str(value: &str) -> Result<Self, String> {
+        let trimmed_value = value.trim_matches('"'); // Remove surrounding quotes
+        let secret = Secret::new(trimmed_value.to_string());
+
+        // Classic PAT
+        if Regex::new(r"ghp_[a-zA-Z0-9]{36}$").unwrap().is_match(trimmed_value) {
+            debug!("Recognized token as personal access token");
+            return Ok(Self::ClassicPersonalAccess(secret));
+        };
+
+        // Temporal token - i.e., $GITHUB_TOKEN
+        if Regex::new(r"ghs_[a-zA-Z0-9]{36}$").unwrap().is_match(trimmed_value) {
+            debug!("Recognized token as temporal token");
+            return Ok(Self::Temporal(secret));
+        };
+
+        // GitHub oauth token
+        // TODO: Verify whether a Github app token is an oauth token or not.
+        if Regex::new(r"gho_[a-zA-Z0-9]{36}$").unwrap().is_match(trimmed_value) {
+            debug!("Recognized token as oauth token");
+            return Ok(Self::Oauth(secret));
+        };
+        Err(
+            "The `token` value is not valid. Must be $GITHUB_TOKEN, a classic personal access token (prefixed by 'ghp') or oauth token (prefixed by 'gho').".to_string()
+        )
+    }
+}
+
+#[derive(Debug, Clone, PartialEq)]
+pub enum Account {
+    Organization(String),
+    User,
+}
+
+impl Account {
+    pub fn try_from_str(value: &str) -> Result<Self, String> {
+        let value = value.trim();
+        if value == "user" {
+            Ok(Self::User)
+        } else if value.is_empty() {
+            Err(
+                "`account` must be set to 'user' for personal accounts, or to the name of your organization"
+                    .to_string(),
+            )
+        } else {
+            Ok(Self::Organization(value.to_string()))
+        }
+    }
+}
diff --git a/src/client/builder.rs b/src/client/builder.rs
new file mode 100644
index 0000000..7f59972
--- /dev/null
+++ b/src/client/builder.rs
@@ -0,0 +1,245 @@
+use std::sync::Arc;
+use std::time::Duration;
+
+use color_eyre::eyre::Result;
+use reqwest::header::HeaderMap;
+use reqwest::Client;
+use secrecy::ExposeSecret;
+use tokio::sync::Mutex;
+use tower::limit::{ConcurrencyLimit, RateLimit};
+use tower::ServiceBuilder;
+use tracing::debug;
+
+use crate::cli::models::{Account, Token};
+use crate::client::client::PackagesClient;
+use crate::client::urls::Urls;
+
+pub type RateLimitedService = Arc<Mutex<RateLimit<ConcurrencyLimit<Client>>>>;
+
+#[derive(Debug)]
+pub struct PackagesClientBuilder {
+    pub headers: Option<HeaderMap>,
+    pub urls: Option<Urls>,
+    pub token: Option<Token>,
+    pub fetch_package_service: Option<RateLimitedService>,
+    pub list_packages_service: Option<RateLimitedService>,
+    pub list_package_versions_service: Option<RateLimitedService>,
+    pub delete_package_versions_service: Option<RateLimitedService>,
+}
+
+impl PackagesClientBuilder {
+    #[must_use]
+    pub fn new() -> Self {
+        Self {
+            headers: None,
+            urls: None,
+            fetch_package_service: None,
+            list_packages_service: None,
+            list_package_versions_service: None,
+            delete_package_versions_service: None,
+            token: None,
+        }
+    }
+
+    /// Add default HTTP headers for the client to use in all requests.
+ pub fn set_http_headers(mut self, token: Token) -> Result { + debug!("Constructing HTTP headers"); + let auth_header_value = format!( + "Bearer {}", + match &token { + Token::Temporal(token) | Token::Oauth(token) | Token::ClassicPersonalAccess(token) => + token.expose_secret(), + } + ); + let mut headers = HeaderMap::new(); + headers.insert("Authorization", auth_header_value.as_str().parse()?); + headers.insert("X-GitHub-Api-Version", "2022-11-28".parse()?); + headers.insert("Accept", "application/vnd.github+json".parse()?); + headers.insert("User-Agent", "snok/container-retention-policy".parse()?); + self.headers = Some(headers); + self.token = Some(token); + Ok(self) + } + + /// Attach a urls utility struct. + pub fn generate_urls(mut self, account: &Account) -> Self { + debug!("Constructing base urls"); + self.urls = Some(Urls::from_account(account)); + self + } + + /// Creates services which respect some of the secondary rate limits + /// enforced by the GitHub API. + /// + /// Read more about secondary rate limits here: + /// + /// + /// The first limit we handle is the max 100 concurrent requests one. Since we don't send + /// requests to multiple endpoints at the same time, we don't have to maintain a global + /// semaphore for all the clients to respect. All requests to the list-packages endpoints + /// will resolve before we try to list any package versions. + /// + /// The second limit we handle is that there should be no more than 900 points per endpoint, + /// per minute, for REST endpoints (which is what we use). At the time of writing, reads are + /// counted as 1 point, while mutating requests (PUT, PATCH, POST, DELETE) count as 5. + /// + /// We *don't* yet handle the "No more than 90 seconds of CPU time per 60 seconds of real + /// time is allowed" rate limit, though we could probably capture response times to do this. 
+ /// + /// We also don't (and won't) handle the "Create too much content on GitHub in a short + /// amount of time" rate limit, since we don't create any content. + pub fn create_rate_limited_services(mut self) -> Self { + const MAX_CONCURRENCY: usize = 100; + const MAX_POINTS_PER_ENDPOINT_PER_MINUTE: u64 = 900; + const GET_REQUEST_POINTS: u64 = 1; + const DELETE_REQUEST_POINTS: u64 = 5; + const ONE_MINUTE: Duration = Duration::from_secs(60); + + debug!("Creating rate-limited services"); + + self.fetch_package_service = Some(Arc::new(Mutex::new( + ServiceBuilder::new() + .concurrency_limit(MAX_CONCURRENCY) + .rate_limit(MAX_POINTS_PER_ENDPOINT_PER_MINUTE / GET_REQUEST_POINTS, ONE_MINUTE) + .service(Client::new()), + ))); + + self.list_packages_service = Some(Arc::new(Mutex::new( + ServiceBuilder::new() + .concurrency_limit(MAX_CONCURRENCY) + .rate_limit(MAX_POINTS_PER_ENDPOINT_PER_MINUTE / GET_REQUEST_POINTS, ONE_MINUTE) + .service(Client::new()), + ))); + + self.list_package_versions_service = Some(Arc::new(Mutex::new( + ServiceBuilder::new() + .concurrency_limit(MAX_CONCURRENCY) + .rate_limit(MAX_POINTS_PER_ENDPOINT_PER_MINUTE / GET_REQUEST_POINTS, ONE_MINUTE) + .service(Client::new()), + ))); + + self.delete_package_versions_service = Some(Arc::new(Mutex::new( + ServiceBuilder::new() + .concurrency_limit(MAX_CONCURRENCY) + .rate_limit(MAX_POINTS_PER_ENDPOINT_PER_MINUTE / DELETE_REQUEST_POINTS, ONE_MINUTE) + .service(Client::new()), + ))); + + self + } + + pub fn build(self) -> Result> { + // Check if all required fields are set + if self.headers.is_none() + || self.urls.is_none() + || self.list_packages_service.is_none() + || self.list_package_versions_service.is_none() + || self.delete_package_versions_service.is_none() + || self.token.is_none() + { + return Err("All required fields are not set".into()); + } + + // Create PackageVersionsClient instance + let client = PackagesClient { + headers: self.headers.unwrap(), + urls: self.urls.unwrap(), + 
fetch_package_service: self.fetch_package_service.unwrap(), + list_packages_service: self.list_packages_service.unwrap(), + list_package_versions_service: self.list_package_versions_service.unwrap(), + delete_package_versions_service: self.delete_package_versions_service.unwrap(), + token: self.token.unwrap(), + }; + + Ok(client) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use secrecy::Secret; + + #[test] + fn test_builder_init() { + let builder = PackagesClientBuilder::new(); + assert!(builder.headers.is_none()); + assert!(builder.token.is_none()); + assert!(builder.urls.is_none()); + assert!(builder.fetch_package_service.is_none()); + assert!(builder.list_package_versions_service.is_none()); + assert!(builder.delete_package_versions_service.is_none()); + assert!(builder.list_packages_service.is_none()); + } + + #[test] + fn test_builder_set_http_headers() { + let builder = PackagesClientBuilder::new(); + let builder = builder + .set_http_headers(Token::Temporal(Secret::new("test".to_string()))) + .unwrap(); + assert!(builder.headers.is_some()); + assert!(builder.token.is_some()); + if let Token::Temporal(inner) = builder.token.unwrap() { + assert_eq!(inner.expose_secret(), "test"); + } else { + panic!("this is unexpected") + } + // Remaining attrs should still be none + assert!(builder.urls.is_none()); + assert!(builder.fetch_package_service.is_none()); + assert!(builder.list_package_versions_service.is_none()); + assert!(builder.delete_package_versions_service.is_none()); + assert!(builder.list_packages_service.is_none()); + } + + #[test] + fn test_builder_generate_urls() { + for account in [&Account::User, &Account::Organization("test".to_string())] { + let builder = PackagesClientBuilder::new().generate_urls(account); + assert!(builder.urls.is_some()); + // Remaining attrs should still be none + assert!(builder.headers.is_none()); + assert!(builder.token.is_none()); + assert!(builder.fetch_package_service.is_none()); + 
assert!(builder.list_package_versions_service.is_none()); + assert!(builder.delete_package_versions_service.is_none()); + assert!(builder.list_packages_service.is_none()); + } + } + + #[tokio::test] + async fn test_builder_create_rate_limited_services() { + let builder = PackagesClientBuilder::new().create_rate_limited_services(); + assert!(builder.fetch_package_service.is_some()); + assert!(builder.list_package_versions_service.is_some()); + assert!(builder.delete_package_versions_service.is_some()); + assert!(builder.list_packages_service.is_some()); + // Remaining attrs should still be none + assert!(builder.urls.is_none()); + assert!(builder.headers.is_none()); + assert!(builder.token.is_none()); + } + + #[tokio::test] + async fn test_builder_build_naked() { + assert!(PackagesClientBuilder::new().build().is_err()); + assert!(PackagesClientBuilder::new() + .generate_urls(&Account::User) + .build() + .is_err()); + assert!(PackagesClientBuilder::new() + .generate_urls(&Account::User) + .set_http_headers(Token::Temporal(Secret::new("test".to_string()))) + .unwrap() + .build() + .is_err()); + assert!(PackagesClientBuilder::new() + .generate_urls(&Account::User) + .set_http_headers(Token::Temporal(Secret::new("test".to_string()))) + .unwrap() + .create_rate_limited_services() + .build() + .is_ok()); + } +} diff --git a/src/client/client.rs b/src/client/client.rs new file mode 100644 index 0000000..004a872 --- /dev/null +++ b/src/client/client.rs @@ -0,0 +1,662 @@ +use std::process::exit; +use std::sync::Arc; +use std::time::Duration; + +use chrono::{DateTime, Utc}; +use color_eyre::eyre::{eyre, Result}; +use reqwest::header::HeaderMap; +use reqwest::{Client, Method, Request, StatusCode}; +use tokio::time::sleep; +use tower::{Service, ServiceExt}; +use tracing::{debug, error, info, Span}; +use tracing_indicatif::span_ext::IndicatifSpanExt; +use url::Url; + +use crate::cli::models::Token; +use crate::client::builder::RateLimitedService; +use 
crate::client::headers::GithubHeaders; +use crate::client::models::{Package, PackageVersion}; +use crate::client::urls::Urls; +use crate::{Counts, PackageVersions}; + +#[derive(Debug)] +pub struct PackagesClient { + pub headers: HeaderMap, + pub urls: Urls, + pub fetch_package_service: RateLimitedService, + pub list_packages_service: RateLimitedService, + pub list_package_versions_service: RateLimitedService, + pub delete_package_versions_service: RateLimitedService, + pub token: Token, +} + +impl PackagesClient { + pub async fn fetch_packages( + &mut self, + token: &Token, + image_names: &Vec, + counts: Arc, + ) -> Vec { + if let Token::Temporal(_) = *token { + // If a repo is assigned the admin role under Package Settings > Manage Actions Access, + // then it can fetch a package's versions directly by name, and delete them. It cannot, + // however, list packages, so for this token type we are limited to fetching packages + // individually, by name + for image_name in image_names { + assert!(!(image_name.contains('!') || image_name.contains('*')), "Restrictions in the Github API prevent us from listing packages when using a $GITHUB_TOKEN token. Because of this, filtering with '!' and '*' are not supported for this token type. 
Image name {image_name} is therefore not valid."); + } + self.fetch_individual_packages(image_names, counts) + .await + .expect("Failed to fetch packages") + } else { + self.list_packages(self.urls.list_packages_url.clone(), counts) + .await + .expect("Failed to fetch packages") + } + } + + async fn fetch_packages_with_pagination( + url: Url, + service: RateLimitedService, + headers: HeaderMap, + counts: Arc, + ) -> Result> { + let mut result = Vec::new(); + let mut next_url = Some(url); + + while let Some(current_url) = next_url { + debug!("Fetching data from {}", current_url); + + // Construct these early, so we do as little work, holding a lock, as possible + let mut request = Request::new(Method::GET, current_url); + *request.headers_mut() = headers.clone(); + + // Get a lock on the rate limited tower service + // This has mechanisms for keeping us honest wrt. primary and secondary rate limits + let mut handle = service.lock().await; + + if (*counts.package_versions.read().await) > (*counts.remaining_requests.read().await) { + error!("Returning without fetching all packages, since the remaining requests are less or equal to the number of package versions already selected"); + return Ok(result); + } + + // Wait for a green light from the service. 
This can wait upwards of a minute + // if we've just exceeded the per-minute max requests + let r = handle.ready().await; + + // Handle possible error case + let response = match r { + Ok(t) => { + // Initiate the request and drop the handle before awaiting the result + // If we don't drop the handle, our request flow becomes synchronous + let fut = t.call(request); + drop(handle); + match fut.await { + Ok(t) => t, + Err(e) => return Err(eyre!("Request failed: {}", e)), + } + } + Err(e) => { + return Err(eyre!("Service failed to become ready: {}", e)); + } + }; + + let response_headers = GithubHeaders::try_from(response.headers())?; + + // Get the string value of the response first, so we can return it in + // a possible error. This will happen if one of our response structs + // are misconfigured, and is pretty helpful + let raw_json = response.text().await?; + + let mut items: Vec = match serde_json::from_str(&raw_json) { + Ok(t) => t, + Err(e) => { + return Err(eyre!( + "Failed to deserialize paginated response: {raw_json}. The error was {e}." 
+ )); + } + }; + + result.append(&mut items); + + next_url = if response_headers.x_ratelimit_remaining > 1 { + response_headers.next_link() + } else { + None + }; + } + Ok(result) + } + + async fn fetch_package_versions_with_pagination( + url: Url, + service: RateLimitedService, + headers: HeaderMap, + counts: Arc, + filter_fn: F, + rate_limit_offset: usize, + ) -> Result + where + F: Fn(Vec) -> Result, + { + let mut result = PackageVersions::new(); + let mut next_url = Some(url); + + while let Some(current_url) = next_url { + if (*counts.package_versions.read().await) > (*counts.remaining_requests.read().await) + rate_limit_offset { + info!("Returning without fetching all package versions, since the remaining requests are less or equal to the number of package versions already selected"); + return Ok(result); + } + + debug!("Fetching data from {}", current_url); + + // Construct these early, so we do as little work, holding a lock, as possible + let mut request = Request::new(Method::GET, current_url); + *request.headers_mut() = headers.clone(); + + // Get a lock on the rate limited tower service + // This has mechanisms for keeping us honest wrt. primary and secondary rate limits + let mut handle = service.lock().await; + + // Wait for a green light from the service. 
This can wait upwards of a minute + // if we've just exceeded the per-minute max requests + let r = handle.ready().await; + + // Handle possible error case + let response = match r { + Ok(t) => { + // Initiate the request and drop the handle before awaiting the result + // If we don't drop the handle, our request flow becomes synchronous + let fut = t.call(request); + drop(handle); + match fut.await { + Ok(t) => t, + Err(e) => return Err(eyre!("Request failed: {}", e)), + } + } + Err(e) => { + return Err(eyre!("Service failed to become ready: {}", e)); + } + }; + + let response_headers = GithubHeaders::try_from(response.headers())?; + + // Get the string value of the response first, so we can return it in + // a possible error. This will happen if one of our response structs + // are misconfigured, and is pretty helpful + let raw_json = response.text().await?; + + let items: Vec = match serde_json::from_str(&raw_json) { + Ok(t) => t, + Err(e) => { + return Err(eyre!( + "Failed to deserialize paginated response: {raw_json}. The error was {e}." 
+ )); + } + }; + + let package_versions = filter_fn(items.clone())?; + + debug!( + "Filtered out {}/{} package versions", + items.len() - package_versions.len(), + items.len() + ); + + // Decrement the rate limiter count + *counts.remaining_requests.write().await -= 1; + *counts.package_versions.write().await += package_versions.len(); + + result.extend(package_versions); + + next_url = if response_headers.x_ratelimit_remaining > 1 { + response_headers.next_link() + } else { + None + }; + + Span::current().pb_set_message(&format!( + "fetched \x1b[33m{}\x1b[0m package versions (\x1b[33m{}\x1b[0m requests remaining in the rate limit)", + result.len(), + *counts.remaining_requests.read().await + )); + } + Ok(result) + } + + async fn list_packages(&mut self, url: Url, counts: Arc) -> Result> { + Self::fetch_packages_with_pagination(url, self.list_packages_service.clone(), self.headers.clone(), counts) + .await + } + + pub async fn list_package_versions( + &self, + package_name: String, + counts: Arc, + filter_fn: F, + rate_limit_offset: usize, + ) -> Result<(String, PackageVersions)> + where + F: Fn(Vec) -> Result, + { + let url = self.urls.list_package_versions_url(&package_name)?; + let package_versions = Self::fetch_package_versions_with_pagination( + url, + self.list_package_versions_service.clone(), + self.headers.clone(), + counts, + filter_fn, + rate_limit_offset, + ) + .await?; + info!( + package_name = package_name, + "Selected {} package versions", + package_versions.len() + ); + Ok((package_name, package_versions)) + } + + async fn fetch_individual_package(&self, url: Url, counts: Arc) -> Result { + debug!("Fetching package from {url}"); + + let mut request = Request::new(Method::GET, url); + *request.headers_mut() = self.headers.clone(); + + // Get a lock on the tower service which regulates our traffic + let mut handle = self.fetch_package_service.lock().await; + + let response = { + // Wait until the service says we're OK to proceed + let r = 
handle.ready().await; + + match r { + Ok(t) => { + // Initiate the request and drop the handle before awaiting the result + // If we don't drop the handle, our request flow becomes synchronous + let fut = t.call(request); + drop(handle); + match fut.await { + Ok(t) => t, + Err(e) => return Err(eyre!("Request failed: {}", e)), + } + } + Err(e) => { + return Err(eyre!("Service failed to become ready: {}", e)); + } + } + }; + *counts.remaining_requests.write().await -= 1; + + GithubHeaders::try_from(response.headers())?; + + let raw_json = response.text().await?; + Ok(serde_json::from_str(&raw_json)?) + } + + async fn fetch_individual_packages(&self, package_names: &[String], counts: Arc) -> Result> { + let mut futures = Vec::new(); + + for package_name in package_names { + let url = self.urls.fetch_package_url(package_name)?; + let fut = self.fetch_individual_package(url, counts.clone()); + futures.push(fut); + } + + let mut packages = Vec::new(); + + for fut in futures { + match fut.await { + Ok(package) => { + packages.push(package); + } + Err(e) => return Err(e), + } + } + + Ok(packages) + } + + /// Delete a package version. + /// Docs for organizations: + /// Docs for users: + pub async fn delete_package_version( + &self, + package_name: String, + package_version: PackageVersion, + dry_run: bool, + ) -> std::result::Result, Vec> { + // Create a vec of all the permutations of package tags stored in this package version + // The vec will look something like ["foo:latest", "foo:production", "foo:2024-10-10T08:00:00"] given + // it had these three tags, and ["foo:untagged"] if it had no tags. This isn't really how things + // work, but is what users will expect to see output. 
+ let names = if package_version.metadata.container.tags.is_empty() { + vec![format!("\x1b[34m{package_name}\x1b[0m:\x1b[33m\x1b[0m")] + } else { + package_version + .metadata + .container + .tags + .iter() + .map(|tag| format!("\x1b[34m{package_name}\x1b[0m:\x1b[32m{tag}\x1b[0m")) + .collect() + }; + + // Output information to the user + if dry_run { + // Sleep a few ms to make logs appear "in order" + // These dry-run logs tend to appear before rate limiting warnings, + // and other logs if they're output right away. + sleep(Duration::from_millis(10)).await; + for name in &names { + info!( + package_version = package_version.id, + "dry-run: Would have deleted {name}" + ); + } + return Ok(Vec::new()); + } + + // Construct URL for this package version + let url = match self.urls.delete_package_version_url(&package_name, &package_version.id) { + Ok(t) => t, + Err(e) => { + error!( + "Failed to create deletion URL for package {} and version {}: {}", + package_name, package_version.id, e + ); + return Err(names); + } + }; + + // Construct initial request + let mut request = Request::new(Method::DELETE, url); + *request.headers_mut() = self.headers.clone(); + + // Get a lock on the tower service which regulates our traffic + let mut handle = self.delete_package_versions_service.lock().await; + + let response = { + // Wait until the service says we're OK to proceed + let r = handle.ready().await; + + match r { + Ok(t) => { + // Initiate the request and drop the handle before awaiting the result + // If we don't drop the handle, our request flow becomes synchronous + let fut = t.call(request); + drop(handle); + match fut.await { + Ok(t) => t, + Err(e) => { + error!( + "Failed to delete package version {} with error: {}", + package_version.id, e + ); + return Err(names); + } + } + } + Err(e) => { + error!("Service failed to become ready: {}", e); + return Err(names); + } + } + }; + + match response.status() { + StatusCode::NO_CONTENT => { + for name in &names { + 
info!(package_version_id = package_version.id, "Deleted {name}"); + } + Ok(names) + } + StatusCode::UNPROCESSABLE_ENTITY | StatusCode::BAD_REQUEST => { + error!( + "Failed to delete package version {}: {}", + package_version.id, + response.text().await.unwrap() + ); + Err(names) + } + _ => { + error!( + "Failed to delete package version {} with status {}: {}", + package_version.id, + response.status(), + response.text().await.expect("Failed to read text from response") + ); + Err(names) + } + } + } + + pub async fn fetch_rate_limit(&self) -> Result<(usize, DateTime)> { + debug!("Retrieving Github API rate limit"); + + // Construct initial request + let response = Client::new() + .get("https://api.github.com/rate_limit") + .headers(self.headers.clone()) + .send() + .await?; + + // Since this is the first call made to the GitHub API, we perform a few extra auth checks here: + + // auth check: Make sure we're authorized correctly + if response.status() == StatusCode::UNAUTHORIZED { + eprintln!("Received a 401 response from the GitHub API. Make sure the token is valid, and that it has the correct permissions."); + exit(1); + } + + let response_headers = GithubHeaders::try_from(response.headers())?; + + // auth check: Make sure we have the correct scopes + match self.token { + Token::Temporal(_) => (), + Token::Oauth(_) | Token::ClassicPersonalAccess(_) => { + if response_headers.x_oauth_scopes.is_none() + || !response_headers + .x_oauth_scopes + .clone() + .unwrap() + .contains("write:packages") + { + /// Check that the headers of a GitHub request indicate that the token used has the correct scopes for deleting packages. + /// See documentation at: https://docs.github.com/en/rest/packages/packages?apiVersion=2022-11-28#delete-a-package-for-an-organization + eprintln!("The token does not have the scopes needed. Tokens need `write:packages`. 
The scopes found were {}.", response_headers.x_oauth_scopes.unwrap_or("none".to_string())); + exit(1); + } + } + } + + debug!( + "There are {} requests remaining in the rate limit", + response_headers.x_ratelimit_remaining + ); + + Ok(( + response_headers.x_ratelimit_remaining, + response_headers.x_ratelimit_reset, + )) + } +} + +#[cfg(test)] +mod tests { + use crate::cli::models::Account; + use crate::client::builder::PackagesClientBuilder; + use reqwest::header::HeaderValue; + use secrecy::Secret; + + use super::*; + + #[test] + fn github_headers() { + let mut headers = HeaderMap::new(); + headers.insert("x-ratelimit-limit", "60".parse().unwrap()); + headers.insert("x-ratelimit-remaining", "60".parse().unwrap()); + headers.insert("x-ratelimit-reset", "1714483761".parse().unwrap()); + headers.insert("x-ratelimit-used", "0".parse().unwrap()); + headers.insert("x-oauth-scopes", "read:packages,delete:packages,repo".parse().unwrap()); + + let parsed_headers = GithubHeaders::try_from(&headers).unwrap(); + + assert_eq!(parsed_headers.x_ratelimit_reset.timezone(), Utc); + assert_eq!(parsed_headers.x_ratelimit_remaining, 60); + assert!(parsed_headers.x_oauth_scopes.is_some()); + } + + #[test] + fn link_header() { + let link_headers = [ + ( + "; rel=\"next\", ; rel=\"last\"", + Some(Url::parse("https://api.github.com/user/packages?package_type=container&per_page=2&page=2").unwrap()) + ), + ( + "; rel=\"next\", ; rel=\"last\"", + Some(Url::parse("https://api.github.com/user/packages?package_type=container&per_page=2&page=3").unwrap()) + ), + ( + "<; rel=\"last\"", + None + ), + ]; + + for (input, expected) in link_headers { + let parsed_links = GithubHeaders::parse_link_header(input); + assert_eq!(parsed_links, expected) + } + } + + #[tokio::test] + async fn test_http_headers() { + let test_string = "test".to_string(); + + let client_builder = PackagesClientBuilder::new() + .set_http_headers(Token::ClassicPersonalAccess(Secret::new(test_string.clone()))) + .unwrap(); + + 
let set_headers = client_builder.headers.clone().unwrap(); + + for (header_key, header_value) in [ + ("x-github-api-version", "2022-11-28"), + ("authorization", &format!("Bearer {test_string}")), + ("user-agent", "snok/container-retention-policy"), + ("accept", "application/vnd.github+json"), + ] { + assert_eq!( + set_headers.get(header_key), + Some(&HeaderValue::from_str(header_value).unwrap()) + ); + } + + let client = client_builder + .create_rate_limited_services() + .generate_urls(&Account::User) + .build() + .unwrap(); + + for (header_key, header_value) in [ + ("x-github-api-version", "2022-11-28"), + ("authorization", &format!("Bearer {test_string}")), + ("user-agent", "snok/container-retention-policy"), + ("accept", "application/vnd.github+json"), + ] { + assert_eq!( + client.headers.get(header_key), + Some(&HeaderValue::from_str(header_value).unwrap()) + ); + } + } + + #[test] + fn personal_urls() { + let urls = Urls::from_account(&Account::User); + assert_eq!( + urls.list_packages_url.as_str(), + "https://api.github.com/user/packages?package_type=container&per_page=100" + ); + assert_eq!( + urls.list_package_versions_url("foo").unwrap().as_str(), + "https://api.github.com/user/packages/container/foo/versions?per_page=100" + ); + assert_eq!( + urls.delete_package_version_url("foo", &123).unwrap().as_str(), + "https://api.github.com/user/packages/container/foo/versions/123" + ); + assert_eq!( + urls.package_version_url("foo", &123).unwrap().as_str(), + "https://github.com/user/packages/container/foo/123" + ); + } + + #[test] + fn organization_urls() { + let urls = Urls::from_account(&Account::Organization("acme".to_string())); + assert_eq!( + urls.list_packages_url.as_str(), + "https://api.github.com/orgs/acme/packages?package_type=container&per_page=100" + ); + assert_eq!( + urls.list_package_versions_url("foo").unwrap().as_str(), + "https://api.github.com/orgs/acme/packages/container/foo/versions?per_page=100" + ); + assert_eq!( + 
urls.delete_package_version_url("foo", &123).unwrap().as_str(), + "https://api.github.com/orgs/acme/packages/container/foo/versions/123" + ); + assert_eq!( + urls.package_version_url("foo", &123).unwrap().as_str(), + "https://github.com/orgs/acme/packages/container/foo/123" + ); + } + + #[test] + fn test_percent_encoding() { + // No special chars + assert_eq!(Urls::percent_encode("example"), "example"); + + // Special chars + assert_eq!(Urls::percent_encode("a/b"), "a%2Fb".to_string()); + assert_eq!(Urls::percent_encode("my_package@1.0"), "my_package%401.0"); + + // Simple space + assert_eq!(Urls::percent_encode("test test"), "test%20test"); + + // Other unicode chars + assert_eq!( + Urls::percent_encode("こんにちは"), + "%E3%81%93%E3%82%93%E3%81%AB%E3%81%A1%E3%81%AF" + ); + } + #[test] + fn test_generate_urls() { + let urls = { + let mut builder = PackagesClientBuilder::new(); + assert!(builder.urls.is_none()); + builder = builder.generate_urls(&Account::User); + builder.urls.unwrap() + }; + assert!(urls.list_packages_url.as_str().contains("per_page=100")); + assert!(urls.list_packages_url.as_str().contains("package_type=container")); + assert!(urls.list_packages_url.as_str().contains("api.github.com")); + assert!(urls.packages_api_base.as_str().contains("api.github.com")); + assert!(urls.packages_frontend_base.as_str().contains("https://github.com")); + + let urls = { + let mut builder = PackagesClientBuilder::new(); + assert!(builder.urls.is_none()); + builder = builder.generate_urls(&Account::Organization("foo".to_string())); + builder.urls.unwrap() + }; + assert!(urls.list_packages_url.as_str().contains("per_page=100")); + assert!(urls.list_packages_url.as_str().contains("package_type=container")); + assert!(urls.list_packages_url.as_str().contains("api.github.com")); + assert!(urls.packages_api_base.as_str().contains("api.github.com")); + assert!(urls.list_packages_url.as_str().contains("/foo/")); + assert!(urls.packages_api_base.as_str().contains("/foo/")); + 
assert!(urls.packages_frontend_base.as_str().contains("https://github.com")); + } +} diff --git a/src/client/headers.rs b/src/client/headers.rs new file mode 100644 index 0000000..b271c28 --- /dev/null +++ b/src/client/headers.rs @@ -0,0 +1,87 @@ +use crate::cli::models::Token; +use chrono::{DateTime, Utc}; +use color_eyre::Result; +use reqwest::header::HeaderMap; +use std::str::FromStr; +use tracing::debug; +use url::Url; + +#[derive(Debug)] +pub struct GithubHeaders { + pub x_ratelimit_remaining: usize, + pub x_ratelimit_reset: DateTime, + pub x_oauth_scopes: Option, + pub link: Option, +} + +impl GithubHeaders { + pub fn try_from(value: &HeaderMap) -> Result { + let mut x_rate_limit_remaining = None; + let mut x_rate_limit_reset = None; + let mut x_oauth_scopes = None; + let mut link = None; + + for (k, v) in value { + match k.as_str() { + "x-ratelimit-remaining" => { + x_rate_limit_remaining = Some(usize::from_str(v.to_str().unwrap()).unwrap()); + } + "x-ratelimit-reset" => { + x_rate_limit_reset = + Some(DateTime::from_timestamp(i64::from_str(v.to_str().unwrap()).unwrap(), 0).unwrap()); + } + "x-oauth-scopes" => x_oauth_scopes = Some(v.to_str().unwrap().to_string()), + "link" => link = Some(v.to_str().unwrap().to_string()), + _ => (), + } + } + + let headers = Self { + link, + // It seems that these are none for temporal token requests, so + // we set temporal token value defaults. 
+ x_ratelimit_remaining: x_rate_limit_remaining.unwrap_or(1000), + x_ratelimit_reset: x_rate_limit_reset.unwrap_or(Utc::now()), + x_oauth_scopes, + }; + + Ok(headers) + } + + pub fn parse_link_header(link_header: &str) -> Option { + if link_header.is_empty() { + return None; + } + for part in link_header.split(',') { + if part.contains("prev") { + debug!("Skipping parsing of prev link: {part}"); + continue; + } else if part.contains("first") { + debug!("Skipping parsing of first link: {part}"); + continue; + } else if part.contains("last") { + debug!("Skipping parsing of last link: {part}"); + continue; + } else if part.contains("next") { + debug!("Parsing next link: {part}"); + } else { + panic!("Found unrecognized rel type: {part}") + } + let sections: Vec<&str> = part.trim().split(';').collect(); + assert_eq!(sections.len(), 2, "Sections length was {}", sections.len()); + + let url = sections[0].trim().trim_matches('<').trim_matches('>').to_string(); + + return Some(Url::parse(&url).expect("Failed to parse link header URL")); + } + None + } + + pub(crate) fn next_link(&self) -> Option { + if let Some(l) = &self.link { + GithubHeaders::parse_link_header(l) + } else { + None + } + } +} diff --git a/src/client/mod.rs b/src/client/mod.rs new file mode 100644 index 0000000..5d58306 --- /dev/null +++ b/src/client/mod.rs @@ -0,0 +1,5 @@ +pub mod builder; +pub mod client; +pub mod headers; +pub mod models; +pub mod urls; diff --git a/src/client/models.rs b/src/client/models.rs new file mode 100644 index 0000000..1341484 --- /dev/null +++ b/src/client/models.rs @@ -0,0 +1,39 @@ +use crate::cli::models::Timestamp; +use chrono::{DateTime, Utc}; +use serde::Deserialize; + +#[derive(Debug, Clone, Deserialize, PartialEq)] +pub struct ContainerMetadata { + pub tags: Vec, +} + +#[derive(Debug, Clone, Deserialize, PartialEq)] +pub struct Metadata { + pub container: ContainerMetadata, +} + +#[derive(Debug, Clone, Deserialize, PartialEq)] +pub struct PackageVersion { + pub id: 
u32, + pub name: String, + pub metadata: Metadata, + pub created_at: DateTime, + pub updated_at: Option>, +} + +impl PackageVersion { + pub fn get_relevant_timestamp(&self, timestamp: &Timestamp) -> DateTime { + match *timestamp { + Timestamp::CreatedAt => self.created_at, + Timestamp::UpdatedAt => self.updated_at.unwrap_or(self.created_at), + } + } +} + +#[derive(Debug, Clone, Deserialize)] +pub struct Package { + pub id: u32, + pub name: String, + pub created_at: DateTime, + pub updated_at: Option>, +} diff --git a/src/client/urls.rs b/src/client/urls.rs new file mode 100644 index 0000000..5e02024 --- /dev/null +++ b/src/client/urls.rs @@ -0,0 +1,77 @@ +use crate::cli::models::Account; +use color_eyre::Result; +use url::Url; + +#[derive(Debug)] +pub struct Urls { + pub packages_frontend_base: Url, + pub packages_api_base: Url, + pub list_packages_url: Url, +} + +impl Urls { + pub fn from_account(account: &Account) -> Self { + let mut github_base_url = String::from("https://github.com"); + let mut api_base_url = String::from("https://api.github.com"); + + match account { + Account::User => { + api_base_url += "/user/packages"; + github_base_url += "/user/packages"; + } + Account::Organization(org_name) => { + api_base_url += &format!("/orgs/{org_name}/packages"); + github_base_url += &format!("/orgs/{org_name}/packages"); + } + }; + + let list_packages_url = + Url::parse(&(api_base_url.clone() + "?package_type=container&per_page=100")).expect("Failed to parse URL"); + + api_base_url += "/container"; + github_base_url += "/container"; + + Self { + list_packages_url, + packages_api_base: Url::parse(&api_base_url).expect("Failed to parse URL"), + packages_frontend_base: Url::parse(&github_base_url).expect("Failed to parse URL"), + } + } + + pub fn list_package_versions_url(&self, package_name: &str) -> Result { + let encoded_package_name = Self::percent_encode(package_name); + Ok(Url::parse( + &(self.packages_api_base.to_string() + 
&format!("/{encoded_package_name}/versions?per_page=100")), + )?) + } + + pub fn delete_package_version_url(&self, package_name: &str, package_version_name: &u32) -> Result { + let encoded_package_name = Self::percent_encode(package_name); + let encoded_package_version_name = Self::percent_encode(&package_version_name.to_string()); + Ok(Url::parse( + &(self.packages_api_base.to_string() + + &format!("/{encoded_package_name}/versions/{encoded_package_version_name}")), + )?) + } + + pub fn package_version_url(&self, package_name: &str, package_id: &u32) -> Result { + let encoded_package_name = Self::percent_encode(package_name); + let encoded_package_version_name = Self::percent_encode(&package_id.to_string()); + Ok(Url::parse( + &(self.packages_frontend_base.to_string() + + &format!("/{encoded_package_name}/{encoded_package_version_name}")), + )?) + } + + pub fn fetch_package_url(&self, package_name: &str) -> Result { + let encoded_package_name = Self::percent_encode(package_name); + Ok(Url::parse( + &(self.packages_api_base.to_string() + &format!("/{encoded_package_name}")), + )?) + } + + /// Percent-encodes string, as is necessary for URLs containing images (version) names. 
+ pub fn percent_encode(n: &str) -> String { + urlencoding::encode(n).to_string() + } +} diff --git a/src/core/delete_package_versions.rs b/src/core/delete_package_versions.rs new file mode 100644 index 0000000..633564b --- /dev/null +++ b/src/core/delete_package_versions.rs @@ -0,0 +1,95 @@ +use crate::client::client::PackagesClient; +use crate::{Counts, PackageVersions}; +use chrono::Utc; +use humantime::format_duration; +use std::collections::HashMap; +use std::sync::Arc; +use tokio::task::JoinSet; +use tracing::{debug, error, info, warn}; + +async fn select_package_versions_to_delete( + package_version_map: HashMap, + client: &'static PackagesClient, + counts: Arc, + dry_run: bool, +) -> JoinSet, Vec>> { + let initial_allocatable_requests = *counts.remaining_requests.read().await; + let mut allocatable_requests = initial_allocatable_requests; + let mut set = JoinSet::new(); + + // Make a first-pass of all packages, adding untagged package versions + package_version_map.iter().for_each(|(package_name, package_versions)| { + if allocatable_requests == 0 { + info!("Skipping package \"{}\"'s untagged package versions, since there are no more requests available in the rate limit", package_name); + return; + } + + let mut package_version_count = 0; + + for version in &package_versions.untagged { + if allocatable_requests > 0 { + set.spawn(client.delete_package_version(package_name.clone(), version.clone(), dry_run)); + package_version_count += 1; + allocatable_requests -= 1; + } else { + break; + } + } + debug!("Trimmed the selection to {} untagged package versions to delete for package \"{}\"", package_version_count, package_name); + }); + + let duration = (counts.rate_limit_reset - Utc::now()).to_std().unwrap(); + let formatted_duration = format_duration(duration); + if allocatable_requests == 0 { + warn!( + "There aren't enough requests remaining in the rate limit to delete all package versions. Prioritizing deleting the first {} untagged package versions. 
The rate limit resets in {} (at {}).", + set.len(), + formatted_duration, + counts.rate_limit_reset.to_string() + ); + } else { + // Do a second pass over the map to add tagged versions + package_version_map.iter().for_each(|(package_name, package_versions)| { + if allocatable_requests == 0 { + info!("Skipping package \"{}\"'s tagged package versions, since there are no more requests available in the rate limit", package_name); + return; + } + + let mut package_version_count = 0; + + for version in &package_versions.tagged { + if allocatable_requests > 0 { + set.spawn(client.delete_package_version(package_name.clone(), version.clone(), dry_run)); + package_version_count += 1; + allocatable_requests -= 1; + } else { + break; + } + } + debug!("Selected {} tagged package versions to delete for package \"{}\"", package_version_count, package_name); + }); + } + set +} + +pub async fn delete_package_versions( + package_version_map: HashMap, + client: &'static PackagesClient, + counts: Arc, + dry_run: bool, +) -> (Vec, Vec) { + let mut set = select_package_versions_to_delete(package_version_map, client, counts, dry_run).await; + + let mut deleted_packages = Vec::new(); + let mut failed_packages = Vec::new(); + + while let Some(result) = set.join_next().await { + match result { + Ok(Ok(names)) => deleted_packages.extend(names), + Ok(Err(names)) => failed_packages.extend(names), + Err(e) => error!("Failed to join task: {e}"), + } + } + + (deleted_packages, failed_packages) +} diff --git a/src/core/mod.rs b/src/core/mod.rs new file mode 100644 index 0000000..56c0750 --- /dev/null +++ b/src/core/mod.rs @@ -0,0 +1,3 @@ +pub mod delete_package_versions; +pub mod select_package_versions; +pub mod select_packages; diff --git a/src/core/select_package_versions.rs b/src/core/select_package_versions.rs new file mode 100644 index 0000000..638d2ae --- /dev/null +++ b/src/core/select_package_versions.rs @@ -0,0 +1,670 @@ +use crate::cli::models::{TagSelection, Timestamp}; +use 
crate::client::client::PackagesClient; +use crate::client::models::PackageVersion; +use crate::client::urls::Urls; +use crate::matchers::Matchers; +use crate::{Counts, PackageVersions}; +use chrono::Utc; +use color_eyre::Result; +use humantime::Duration as HumantimeDuration; +use indicatif::ProgressStyle; +use std::collections::HashMap; +use std::sync::Arc; +use std::time::Duration; +use tokio::task::JoinSet; +use tracing::{debug, info, info_span, trace, warn, Instrument}; +use tracing_indicatif::span_ext::IndicatifSpanExt; + +/// Keep the `n` most recent package versions, per package name. +/// +/// Newer package versions are kept over older. +fn handle_keep_n_most_recent( + mut package_versions: Vec, + keep_n_most_recent: u32, + timestamp_to_use: &Timestamp, +) -> Vec { + // Sort package versions by `updated_at` or `created_at` + package_versions.sort_by_key(|p| p.get_relevant_timestamp(timestamp_to_use)); + + let mut kept = 0; + while !package_versions.is_empty() && kept < keep_n_most_recent { + package_versions.pop(); + kept += 1; + } + + info!( + remaining_tagged_image_count = package_versions.len(), + "Kept {kept} of the {keep_n_most_recent} package versions requested by the `keep-n-most-recent` setting" + ); + package_versions +} + +/// Exclude any package version with specified SHAs from deletion. +fn contains_shas_to_skip(shas_to_skip: &[String], package_version: &PackageVersion) -> bool { + if shas_to_skip.contains(&package_version.name) { + debug!( + "Skipping package version with SHA {}, as specified in the `shas-to-skip` setting", + package_version.name + ); + true + } else { + false + } +} + +/// Check whether a package version is old enough to be deleted. +/// +/// A [`PackageVersion`] contains both a `created_at` and `updated_at` +/// timestamp. We check the specified [`Timestamp`] to determine which +/// to consider. 
+fn older_than_cutoff( + package_version: &PackageVersion, + cut_off: &HumantimeDuration, + timestamp_to_use: &Timestamp, +) -> bool { + let cut_off_duration: Duration = (*cut_off).into(); + let cut_off_time = Utc::now() - cut_off_duration; + if package_version.get_relevant_timestamp(timestamp_to_use) < cut_off_time { + true + } else { + trace!( + cut_off = cut_off_time.to_string(), + "Skipping package version, since it's newer than the cut-off" + ); + false + } +} + +/// Filters package versions by tag-matchers (see the [`Matchers`] definition for details on what matchers are). +/// +/// The user might have specified positive and/or negative expressions to filter down +/// package versions by tags. +/// +/// Because package versions don't correspond to a container image, but rather to a collection +/// of layers (one package version might have multiple tags), this function should ensure that: +/// +/// - If *any* negative matcher (e.g., `!latest`) matches *any* tag for a +/// given package version, then we will not delete it. +/// +/// - If we have a partial match (2/3 tags match), then we also cannot delete; +/// but it might be a bit unexpected to do nothing, so we log a warning to the +/// user. +/// +/// - If *all* tags match, then we will delete the package version. 
+fn filter_by_matchers( + matchers: &Matchers, + package_version: PackageVersion, + package_name: &str, + urls: &Urls, +) -> Result> { + let tags = &package_version.metadata.container.tags; + + // Check if there are filters to apply - no filters implicitly means "match everything" + if matchers.is_empty() { + trace!("Including package version, since no filters were specified"); + return Ok(Some(package_version)); + } + + // Check for negative matches on any tag + let any_negative_match = tags.iter().any(|tag| matchers.negative_match(tag)); + + // Count positive matches across all tags + let mut positive_matches = 0; + for tag in tags { + if matchers.positive.is_empty() && !any_negative_match { + trace!("Including package version, since no positive filters were specified"); + positive_matches += 1; + } else if matchers.positive_match(tag) { + positive_matches += 1; + } + } + + // Note: the ordering of the match statement matters + match (any_negative_match, positive_matches) { + // Both negative and positive matches + (true, positive_matches) if positive_matches > 0 => { + let package_url = urls.package_version_url(package_name, &package_version.id)?; + warn!(tags=?tags, "✕ package version matched a negative `image-tags` filter, but it also matched a positive filter. If you want this package version to be deleted, make sure to review your `image-tags` filters to remove the conflict. The package version can be found at {package_url}. 
Enable debug logging for more info."); + Ok(None) + } + // Plain negative match + (true, _) => { + debug!(tags=?tags, "✕ package version matched a negative `image-tags` filter"); + Ok(None) + } + // 100% positive matches + (false, positive_matches) if positive_matches == tags.len() => { + debug!(tags=?tags, "✓ package version matched all `image-tags` filters"); + Ok(Some(package_version)) + } + // 0% positive matches + (false, 0) => { + debug!(tags=?tags, "✕ package version didn't match any `image-tags` filters"); + Ok(None) + } + // Partial positive matches + (false, 1..) => { + let package_url = urls.package_version_url(package_name, &package_version.id)?; + warn!(tags=?tags, "✕ package version matched some, but not all tags. If you want this package version to be deleted, make sure to review your `image-tags` filters to remove the conflict. The package version can be found at {package_url}. Enable debug logging for more info."); + Ok(None) + } + } +} + +#[derive(Debug, PartialEq)] +enum PackageVersionType { + Tagged(PackageVersion), + Untagged(PackageVersion), +} + +/// Filter out package versions according to the [`TagSelection`] specified +/// by the user. +/// +/// If the user has specified `TagSelection::Untagged`, then we should discard all +/// package versions contaning tags, and vice versa. 
+fn filter_by_tag_selection( + matchers: &Matchers, + tag_selection: &TagSelection, + urls: &Urls, + package_version: PackageVersion, + package_name: &str, +) -> Result> { + let has_no_tags = package_version.metadata.container.tags.is_empty(); + match (tag_selection, has_no_tags) { + // Handle untagged images + (&TagSelection::Untagged | &TagSelection::Both, true) => { + debug!("Selecting package version since it no longer has any associated tags"); + Ok(Some(PackageVersionType::Untagged(package_version))) + } + // Handle tagged images + (&TagSelection::Tagged | &TagSelection::Both, false) => { + if let Some(t) = filter_by_matchers(matchers, package_version, package_name, urls)? { + Ok(Some(PackageVersionType::Tagged(t))) + } else { + Ok(None) + } + } + // Do nothing + (&TagSelection::Untagged, false) | (&TagSelection::Tagged, true) => { + debug!("Skipping package version because of the tag selection"); + Ok(None) + } + } +} + +pub fn filter_package_versions( + package_versions: Vec, + package_name: &str, + shas_to_skip: Vec, + tag_selection: TagSelection, + cut_off: &HumantimeDuration, + timestamp_to_use: &Timestamp, + matchers: Matchers, + client: &'static PackagesClient, +) -> Result { + let mut tagged = Vec::new(); + let mut untagged = Vec::new(); + + debug!("Found {} package versions for package", package_versions.len()); + + for package_version in package_versions { + let span = info_span!("select package versions", package_version_id = package_version.id).entered(); + // Filter out any package versions specified in the shas-to-skip input + if contains_shas_to_skip(&shas_to_skip, &package_version) { + continue; + } + // Filter out any package version that isn't old enough + if !older_than_cutoff(&package_version, cut_off, timestamp_to_use) { + continue; + } + // Filter the remaining package versions by image-tag matchers and tag-selection, if specified + match filter_by_tag_selection(&matchers, &tag_selection, &client.urls, package_version, package_name)? 
{ + Some(PackageVersionType::Tagged(package_version)) => { + tagged.push(package_version); + } + Some(PackageVersionType::Untagged(package_version)) => { + untagged.push(package_version); + } + None => (), + } + span.exit(); + } + + Ok(PackageVersions { untagged, tagged }) +} + +/// Fetches and filters package versions by account type, image-tag filters, cut-off, +/// tag-selection, and a bunch of other things. Fetches versions concurrently. +pub async fn select_package_versions( + package_names: Vec, + client: &'static PackagesClient, + image_tags: Vec, + shas_to_skip: Vec, + keep_n_most_recent: u32, + tag_selection: TagSelection, + cut_off: &HumantimeDuration, + timestamp_to_use: &Timestamp, + counts: Arc, +) -> Result> { + // Create matchers for the image tags + let matchers = Matchers::from(&image_tags); + + // Create async tasks to fetch everything concurrently + let mut set = JoinSet::new(); + for package_name in package_names { + let span = info_span!("fetch package versions", package_name = %package_name); + span.pb_set_style( + &ProgressStyle::default_spinner() + .template(&format!("{{spinner}} \x1b[34m{package_name}\x1b[0m: {{msg}}")) + .unwrap(), + ); + span.pb_set_message(&format!( + "fetched \x1b[33m0\x1b[0m package versions (\x1b[33m{}\x1b[0m requests remaining in the rate limit)", + *counts.remaining_requests.read().await + keep_n_most_recent as usize + )); + + let a = package_name.clone(); + let b = shas_to_skip.clone(); + let c = tag_selection.clone(); + let d = cut_off.clone(); + let e = timestamp_to_use.clone(); + let f = matchers.clone(); + + set.spawn( + client + .list_package_versions( + package_name, + counts.clone(), + move |package_versions| { + let b = b.clone(); + let c = c.clone(); + let f = f.clone(); + filter_package_versions(package_versions, &a, b, c, &d, &e, f, client) + }, + keep_n_most_recent as usize, + ) + .instrument(span), + ); + } + + let mut package_version_map = HashMap::new(); + + debug!("Fetching package versions"); + 
while let Some(r) = set.join_next().await { + // Get all the package versions for a package + let (package_name, mut package_versions) = r??; + + // Keep n package versions per package, if specified + package_versions.tagged = + handle_keep_n_most_recent(package_versions.tagged, keep_n_most_recent, timestamp_to_use); + + info!( + package_name = package_name, + "Selected {} tagged and {} untagged package versions for deletion", + package_versions.tagged.len(), + package_versions.untagged.len() + ); + package_version_map.insert(package_name, package_versions); + } + + Ok(package_version_map) +} + +#[cfg(test)] +mod tests { + use crate::client::models::{ContainerMetadata, Metadata, PackageVersion}; + use chrono::DateTime; + use humantime::Duration as HumantimeDuration; + use std::str::FromStr; + use tracing_test::traced_test; + use url::Url; + use wildmatch::WildMatchPattern; + + use super::*; + + #[traced_test] + #[test] + fn test_filter_by_tag_selection() { + let urls = Urls { + packages_frontend_base: Url::parse("https://foo.com").unwrap(), + packages_api_base: Url::parse("https://foo.com").unwrap(), + list_packages_url: Url::parse("https://foo.com").unwrap(), + }; + let matchers = &Matchers { + positive: vec![WildMatchPattern::<'*', '?'>::new("foo")], + negative: vec![], + }; + + let tagged_package_version = PackageVersion { + id: 1, + name: "".to_string(), + metadata: Metadata { + container: ContainerMetadata { + tags: vec!["foo".to_string()], + }, + }, + created_at: Default::default(), + updated_at: None, + }; + + // Tagged package version with untagged strategy + assert_eq!( + filter_by_tag_selection( + matchers, + &TagSelection::Untagged, + &urls, + tagged_package_version.clone(), + "", + ) + .unwrap(), + None + ); + // Tagged package version with tagged and both strategies + assert_eq!( + filter_by_tag_selection( + matchers, + &TagSelection::Tagged, + &urls, + tagged_package_version.clone(), + "", + ) + .unwrap(), + 
Some(PackageVersionType::Tagged(tagged_package_version.clone())) + ); + assert_eq!( + filter_by_tag_selection(matchers, &TagSelection::Both, &urls, tagged_package_version.clone(), "").unwrap(), + Some(PackageVersionType::Tagged(tagged_package_version.clone())) + ); + + let mut untagged_package_version = tagged_package_version.clone(); + untagged_package_version.metadata.container.tags = vec![]; + + // Untagged package version with untagged and both strategies + assert_eq!( + filter_by_tag_selection( + matchers, + &TagSelection::Untagged, + &urls, + untagged_package_version.clone(), + "", + ) + .unwrap(), + Some(PackageVersionType::Untagged(untagged_package_version.clone())) + ); + assert_eq!( + filter_by_tag_selection( + matchers, + &TagSelection::Both, + &urls, + untagged_package_version.clone(), + "", + ) + .unwrap(), + Some(PackageVersionType::Untagged(untagged_package_version.clone())) + ); + // Untagged package version with tagged strategy + assert_eq!( + filter_by_tag_selection( + matchers, + &TagSelection::Tagged, + &urls, + untagged_package_version.clone(), + "", + ) + .unwrap(), + None + ); + } + + fn create_pv(id: u32, name: &str, tags: Vec<&str>) -> PackageVersion { + PackageVersion { + id, + name: name.to_string(), + metadata: Metadata { + container: ContainerMetadata { + tags: tags.into_iter().map(|i| i.to_string()).collect(), + }, + }, + created_at: Default::default(), + updated_at: None, + } + } + + #[traced_test] + #[test] + fn test_filter_by_matchers_early_return() { + filter_by_matchers( + &Matchers { + positive: vec![], + negative: vec![], + }, + create_pv(0, "sha256:foobar", vec!["foo", "bar"]), + "package", + &Urls { + packages_frontend_base: Url::parse("https://foo.com").unwrap(), + packages_api_base: Url::parse("https://foo.com").unwrap(), + list_packages_url: Url::parse("https://foo.com").unwrap(), + }, + ) + .unwrap(); + assert!(logs_contain( + "Including package version, since no filters were specified" + )); + } + + #[traced_test] + 
#[test] + fn test_filter_by_matchers_permutations() { + fn call_f(matchers: Matchers) { + let urls = Urls { + packages_frontend_base: Url::parse("https://foo.com").unwrap(), + packages_api_base: Url::parse("https://foo.com").unwrap(), + list_packages_url: Url::parse("https://foo.com").unwrap(), + }; + let package_version = create_pv(0, "sha256:foobar", vec!["foo", "bar"]); + filter_by_matchers(&matchers, package_version, "package", &urls).unwrap(); + } + + // Plain negative match + call_f(Matchers { + positive: vec![], + negative: vec![WildMatchPattern::<'*', '?'>::new("foo")], + }); + assert!(logs_contain("✕ package version matched a negative `image-tags` filter")); + + // Negative and positive match + call_f(Matchers { + positive: vec![WildMatchPattern::<'*', '?'>::new("*")], + negative: vec![WildMatchPattern::<'*', '?'>::new("*")], + }); + assert!(logs_contain( + "✕ package version matched a negative `image-tags` filter, but it also matched a positive filter" + )); + + // 100% positive match + call_f(Matchers { + positive: vec![ + WildMatchPattern::<'*', '?'>::new("foo"), + WildMatchPattern::<'*', '?'>::new("bar"), + ], + negative: vec![], + }); + assert!(logs_contain("✓ package version matched all `image-tags` filters")); + + // No positive match + call_f(Matchers { + positive: vec![WildMatchPattern::<'*', '?'>::new("random")], + negative: vec![], + }); + assert!(logs_contain("✕ package version didn't match any `image-tags` filters")); + + // Partial positive match + call_f(Matchers { + positive: vec![WildMatchPattern::<'*', '?'>::new("foo")], + negative: vec![], + }); + assert!(logs_contain("✕ package version matched some, but not all tags")); + } + + #[test] + fn test_handle_keep_n_most_recent() { + let metadata = Metadata { + container: ContainerMetadata { tags: Vec::new() }, + }; + let now = Utc::now(); + + let tagged = vec![ + PackageVersion { + updated_at: None, + created_at: now - Duration::from_secs(2), + name: "".to_string(), + id: 1, + metadata: 
metadata.clone(), + }, + PackageVersion { + updated_at: Some(now - Duration::from_secs(1)), + created_at: now - Duration::from_secs(3), + name: "".to_string(), + id: 1, + metadata: metadata.clone(), + }, + PackageVersion { + updated_at: Some(now), + created_at: now - Duration::from_secs(4), + name: "".to_string(), + id: 1, + metadata: metadata.clone(), + }, + ]; + + // Test case 1: more items than keep at least + let keep_n_most_recent = 2; + let remaining_tagged = handle_keep_n_most_recent(tagged.clone(), keep_n_most_recent, &Timestamp::CreatedAt); + assert_eq!(remaining_tagged.len(), 1); + + // Test case 2: same items as keep_n_most_recent + let keep_n_most_recent = 6; + let remaining_tagged = handle_keep_n_most_recent(tagged.clone(), keep_n_most_recent, &Timestamp::CreatedAt); + assert_eq!(remaining_tagged.len(), 0); + + // Test case 3: fewer items than keep_n_most_recent + let keep_n_most_recent = 10; + let remaining_tagged = handle_keep_n_most_recent(tagged.clone(), keep_n_most_recent, &Timestamp::CreatedAt); + assert_eq!(remaining_tagged.len(), 0); + } + + #[test] + fn test_handle_keep_n_most_recent_ordering() { + let now: DateTime = Utc::now(); + let five_minutes_ago: DateTime = now - chrono::Duration::minutes(5); + let ten_minutes_ago: DateTime = now - chrono::Duration::minutes(10); + + fn pv(dt: DateTime) -> PackageVersion { + PackageVersion { + id: 0, + name: "".to_string(), + metadata: Metadata { + container: ContainerMetadata { tags: Vec::new() }, + }, + created_at: dt, + updated_at: None, + } + } + + // Newest is removed (to be kept) + let kept = handle_keep_n_most_recent( + vec![pv(five_minutes_ago), pv(now), pv(ten_minutes_ago)], + 1, + &Timestamp::CreatedAt, + ); + assert_eq!(kept.len(), 2); + assert_eq!(kept, vec![pv(ten_minutes_ago), pv(five_minutes_ago)]); + + let kept = handle_keep_n_most_recent( + vec![pv(five_minutes_ago), pv(ten_minutes_ago), pv(now)], + 1, + &Timestamp::CreatedAt, + ); + assert_eq!(kept.len(), 2); + assert_eq!(kept, 
vec![pv(ten_minutes_ago), pv(five_minutes_ago)]); + + let kept = handle_keep_n_most_recent( + vec![pv(now), pv(ten_minutes_ago), pv(five_minutes_ago)], + 1, + &Timestamp::CreatedAt, + ); + assert_eq!(kept.len(), 2); + assert_eq!(kept, vec![pv(ten_minutes_ago), pv(five_minutes_ago)]); + } + + #[test] + fn test_older_than_cutoff() { + let mut p = PackageVersion { + id: 0, + name: "".to_string(), + metadata: Metadata { + container: ContainerMetadata { tags: vec![] }, + }, + created_at: Default::default(), + updated_at: None, + }; + + let now = Utc::now(); + + { + let timestamp = Timestamp::CreatedAt; + // when timestamp is earlier than cut-off + p.created_at = now - Duration::from_secs(10); + assert!(older_than_cutoff( + &p, + &HumantimeDuration::from_str("1s").unwrap(), + &timestamp, + )); + + // when the timestamp is newer than the cut-off + p.created_at = now - Duration::from_secs(10); + assert!(!older_than_cutoff( + &p, + &HumantimeDuration::from_str("11s").unwrap(), + &timestamp, + )); + } + + { + let timestamp = Timestamp::UpdatedAt; + p.created_at = Utc::now(); + + // when timestamp is earlier than cut-off + p.updated_at = Some(now - Duration::from_secs(10)); + assert!(older_than_cutoff( + &p, + &HumantimeDuration::from_str("1s").unwrap(), + &timestamp, + )); + + // when the timestamp is newer than the cut-off + p.updated_at = Some(now - Duration::from_secs(10)); + assert!(!older_than_cutoff( + &p, + &HumantimeDuration::from_str("11s").unwrap(), + &timestamp, + )); + } + } + + #[test] + fn test_contains_shas_to_skip() { + let p = PackageVersion { + id: 0, + name: "foo".to_string(), + metadata: Metadata { + container: ContainerMetadata { tags: vec![] }, + }, + created_at: Default::default(), + updated_at: None, + }; + assert!(contains_shas_to_skip(&["foo".to_string()], &p)); + assert!(!contains_shas_to_skip(&["foos".to_string()], &p)); + assert!(!contains_shas_to_skip(&["fo".to_string()], &p)); + } +} diff --git a/src/core/select_packages.rs b/src/core/select_packages.rs new file mode 100644
index 0000000..17951c4 --- /dev/null +++ b/src/core/select_packages.rs @@ -0,0 +1,128 @@ +use std::sync::Arc; + +use crate::cli::models::{Account, Token}; +use crate::client::client::PackagesClient; +use crate::client::models::Package; +use crate::matchers::Matchers; +use crate::Counts; +use tracing::{debug, info}; + +/// Filter packages by package name-matchers. +/// +/// See the [`Matchers`] definition for details on what matchers are. +fn filter_by_matchers(packages: &[Package], matchers: &Matchers) -> Vec { + packages + .iter() + .filter_map(|p| { + if matchers.negative_match(&p.name) { + return None; + }; + if matchers.positive.is_empty() { + return Some(p.name.to_string()); + }; + if matchers.positive_match(&p.name) { + return Some(p.name.to_string()); + }; + debug!("No match for package {} in {:?}", p.name, matchers.positive); + None + }) + .collect() +} + +/// Fetch and filters packages based on token type, account type, and image name filters. +pub async fn select_packages( + client: &mut PackagesClient, + image_names: &Vec, + token: &Token, + account: &Account, + counts: Arc, +) -> Vec { + // Fetch all packages that the account owns + let packages = client.fetch_packages(token, image_names, counts.clone()).await; + + match account { + Account::User => info!("Found {} package(s) for the user", packages.len()), + Account::Organization(name) => info!("Found {} package(s) for the \"{name}\" organization", packages.len()), + } + debug!( + "There are {} requests remaining in the rate limit", + counts.remaining_requests.read().await + ); + + // Filter image names + let image_name_matchers = Matchers::from(image_names); + let selected_package_names = filter_by_matchers(&packages, &image_name_matchers); + info!( + "{}/{} package names matched the `package-name` filters", + selected_package_names.len(), + packages.len() + ); + + selected_package_names +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::client::models::Package; + + #[test] + fn 
test_filter_by_matchers() { + let packages = vec![Package { + id: 0, + name: "foo".to_string(), + created_at: Default::default(), + updated_at: None, + }]; + // Negative matches + let empty_vec: Vec = vec![]; + assert_eq!( + filter_by_matchers(&packages, &Matchers::from(&vec![String::from("!foo")])), + empty_vec + ); + assert_eq!( + filter_by_matchers(&packages, &Matchers::from(&vec![String::from("!f*")])), + empty_vec + ); + assert_eq!( + filter_by_matchers(&packages, &Matchers::from(&vec![String::from("!*")])), + empty_vec + ); + + // No positive filters and no negative match + assert_eq!( + filter_by_matchers( + &packages, + &Matchers::from(&vec![String::from("!bar"), String::from("!baz")]) + ), + vec!["foo".to_string()] + ); + assert_eq!( + filter_by_matchers(&packages, &Matchers::from(&vec![String::from("!")])), + vec!["foo".to_string()] + ); + + // No positive matches + assert_eq!( + filter_by_matchers( + &packages, + &Matchers::from(&vec![String::from("bar"), String::from("baz")]) + ), + empty_vec + ); + + // Positive matches + assert_eq!( + filter_by_matchers(&packages, &Matchers::from(&vec![String::from("foo")])), + vec!["foo".to_string()] + ); + assert_eq!( + filter_by_matchers(&packages, &Matchers::from(&vec![String::from("*")])), + vec!["foo".to_string()] + ); + assert_eq!( + filter_by_matchers(&packages, &Matchers::from(&vec![String::from("f*")])), + vec!["foo".to_string()] + ); + } +} diff --git a/src/main.rs b/src/main.rs new file mode 100644 index 0000000..7e51a4f --- /dev/null +++ b/src/main.rs @@ -0,0 +1,158 @@ +use std::env; +use std::process::exit; +use std::sync::Arc; + +use color_eyre::eyre::Result; +use tokio::sync::RwLock; +use tracing::{debug, error, info_span, trace, Instrument}; +use tracing_indicatif::IndicatifLayer; +use tracing_subscriber::layer::SubscriberExt; +use tracing_subscriber::util::SubscriberInitExt; +use tracing_subscriber::{fmt, EnvFilter}; + +use crate::cli::args::Input; +use crate::client::builder::PackagesClientBuilder; 
+use crate::client::client::PackagesClient; +use crate::client::models::PackageVersion; +use crate::core::delete_package_versions::delete_package_versions; +use crate::core::select_package_versions::select_package_versions; +use crate::core::select_packages::select_packages; +use chrono::{DateTime, Utc}; +use clap::Parser; + +mod cli; +pub mod client; +mod core; +mod matchers; + +pub struct Counts { + pub remaining_requests: RwLock, + pub rate_limit_reset: DateTime, + pub package_versions: RwLock, +} + +pub struct PackageVersions { + pub untagged: Vec, + pub tagged: Vec, +} + +impl PackageVersions { + /// Create a new, empty, struct + pub fn new() -> Self { + Self { + untagged: vec![], + tagged: vec![], + } + } + + /// Compute the total number of package versions contained in the struct + pub fn len(&self) -> usize { + self.untagged.len() + self.tagged.len() + } + + /// Add another PackageVersions struct to this one + pub fn extend(&mut self, other: PackageVersions) { + self.untagged.extend(other.untagged); + self.tagged.extend(other.tagged); + } +} + +#[tokio::main()] +async fn main() -> Result<()> { + let indicatif_layer = IndicatifLayer::new(); + + // Set up logging + tracing_subscriber::registry() + .with( + fmt::layer() + .with_ansi(true) + .with_writer(indicatif_layer.get_stderr_writer()), + ) + .with(EnvFilter::from_default_env()) + .with(indicatif_layer) + .init(); + debug!("Logging initialized"); + + // Load and validate inputs + let init_span = info_span!("parse input").entered(); + let input = Input::parse(); + + // TODO: Is there a better way? 
+ if env::var("CRP_TEST").is_ok() { + return Ok(()); + } + + // Create rate-limited and authorized HTTP client + let boxed_client = Box::new( + PackagesClientBuilder::new() + .generate_urls(&input.account) + .set_http_headers(input.token.clone()) + .expect("Failed to set HTTP headers") + .create_rate_limited_services() + .build() + .expect("Failed to build client"), + ); + let client: &'static mut PackagesClient = Box::leak(boxed_client); + init_span.exit(); + + // Check how many remaining requests there are in the rate limit + let (remaining, rate_limit_reset) = client + .fetch_rate_limit() + .instrument(info_span!("fetch rate limit")) + .await + .expect("Failed to fetch rate limit"); + let counts = Arc::new(Counts { + rate_limit_reset, + remaining_requests: RwLock::new(remaining), + package_versions: RwLock::new(0), + }); + + // Fetch the packages we should delete package versions from + let selected_package_names = + select_packages(client, &input.image_names, &input.token, &input.account, counts.clone()) + .instrument(info_span!("select packages")) + .await; + debug!("Selected {} package name(s)", selected_package_names.len()); + trace!( + "There are now {} requests remaining in the rate limit", + *counts.remaining_requests.read().await + ); + + // Fetch package versions to delete + let package_version_map = match select_package_versions( + selected_package_names, + client, + input.image_tags, + input.shas_to_skip, + input.keep_n_most_recent, + input.tag_selection, + &input.cut_off, + &input.timestamp_to_use, + counts.clone(), + ) + .await + { + Ok(t) => t, + Err(e) => { + error!("Failed to fetch package versions: {e}"); + exit(1); + } + }; + trace!( + "There are now {} requests remaining in the rate limit", + *counts.remaining_requests.read().await + ); + + let (deleted_packages, failed_packages) = + delete_package_versions(package_version_map, client, counts.clone(), input.dry_run) + .instrument(info_span!("deleting package versions")) + .await; + + let mut 
github_output = env::var("GITHUB_OUTPUT").unwrap_or_default(); + + github_output.push_str(&format!("deleted={}", deleted_packages.join(","))); + github_output.push_str(&format!("failed={}", failed_packages.join(","))); + env::set_var("GITHUB_OUTPUT", github_output); + + Ok(()) +} diff --git a/src/matchers.rs b/src/matchers.rs new file mode 100644 index 0000000..a07aae1 --- /dev/null +++ b/src/matchers.rs @@ -0,0 +1,128 @@ +use tracing::trace; +use wildmatch::WildMatchPattern; + +/// Container for negative and positive "matcher"-expressions. +/// +/// Matchers, in this context, are expressions like: +/// +/// "foo" -> select package "foo" +/// "foo*" -> select packages starting with "foo" +/// "!foo" -> select packages not called "foo" +/// "!foo*" -> select packages not starting with "foo" +/// +/// i.e., glob-like patterns to include or exclude packages by. +/// +/// We use matchers to select packages *and* to filter package versions. +/// When selecting packages, we filter by the package name, while for package versions, +/// we match by image tags. +/// +/// Both our positive and negative matchers are vecs of [`WildMatchPattern`] from the [wildmatch] +/// crate. +/// +/// When parsing matchers from strings, any string prefixed by `!` are considered +/// negative matchers, and anything else is considered positive. +#[derive(Debug, Clone)] +pub struct Matchers { + pub positive: Vec>, + pub negative: Vec>, +} + +impl Matchers { + /// Creates a new `Matchers` instance from a slice of filter strings. 
+ pub fn from(filters: &[String]) -> Self { + trace!(filters=?filters, "Creating matchers from filters"); + Self { + positive: filters + .iter() + .filter_map(|pattern| { + if pattern.starts_with('!') { + None + } else { + Some(WildMatchPattern::<'*', '?'>::new(pattern)) + } + }) + .collect(), + negative: filters + .iter() + .filter_map(|pattern| { + if let Some(without_prefix) = pattern.strip_prefix('!') { + Some(WildMatchPattern::<'*', '?'>::new(without_prefix)) + } else { + None + } + }) + .collect(), + } + } + + /// Check whether there are any negative matches. + pub fn negative_match(&self, value: &str) -> bool { + self.negative.iter().any(|matcher| { + if matcher.matches(value) { + trace!("Negative filter `{matcher}` matched \"{value}\""); + return true; + }; + false + }) + } + + /// Check whether there are any positive matches. + pub fn positive_match(&self, value: &str) -> bool { + self.positive.iter().any(|matcher| { + if matcher.matches(value) { + trace!("Positive filter `{matcher}` matched \"{value}\""); + return true; + } + false + }) + } + + pub fn is_empty(&self) -> bool { + self.positive.is_empty() && self.negative.is_empty() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_filter_matchers() { + // Exact filters should only match the exact string + let matchers = Matchers::from(&vec![String::from("foo")]); + + assert!(!matchers.positive_match("fo")); + assert!(matchers.positive_match("foo")); + assert!(!matchers.positive_match("foos")); + assert!(!matchers.positive_match("foosssss $xas")); + let matchers = Matchers::from(&vec![String::from("!foo")]); + assert!(!matchers.negative_match("fo")); + assert!(matchers.negative_match("foo")); + assert!(!matchers.negative_match("foos")); + assert!(!matchers.negative_match("foosssss $xas")); + + // Wildcard filters should match the string without the wildcard, and with any postfix + let matchers = Matchers::from(&vec![String::from("foo*")]); + assert!(!matchers.positive_match("fo")); + 
assert!(matchers.positive_match("foo")); + assert!(matchers.positive_match("foos")); + assert!(matchers.positive_match("foosssss $xas")); + let matchers = Matchers::from(&vec![String::from("!foo*")]); + assert!(!matchers.negative_match("fo")); + assert!(matchers.negative_match("foo")); + assert!(matchers.negative_match("foos")); + assert!(matchers.negative_match("foosssss $xas")); + + // Filters with ? should match the string + one wildcard character + let matchers = Matchers::from(&vec![String::from("foo?")]); + assert!(!matchers.positive_match("fo")); + assert!(!matchers.positive_match("foo")); + assert!(matchers.positive_match("foos")); + assert!(!matchers.positive_match("foosssss $xas")); + let matchers = Matchers::from(&vec![String::from("!foo?")]); + assert!(!matchers.negative_match("fo")); + assert!(!matchers.negative_match("foo")); + assert!(matchers.negative_match("foos")); + assert!(!matchers.negative_match("foosssss $xas")); + } +}