diff --git a/.codecov.yml b/.codecov.yml
new file mode 100644
index 0000000000..740941e17c
--- /dev/null
+++ b/.codecov.yml
@@ -0,0 +1,45 @@
+---
+
+codecov:
+  notify:
+    # after_n_builds: 24  # Number of test matrix+lint jobs uploading coverage
+    wait_for_ci: false
+
+  require_ci_to_pass: false
+
+  token: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx  # repo-scoped
+
+comment:
+  require_changes: true
+
+coverage:
+  range: 100..100
+  status:
+    project:
+      default:
+        target: 100%
+      lib:
+        flags:
+        - pytest
+        paths:
+        - src/awx/plugins/credentials/**/*.py
+        target: 100%
+      tests:
+        flags:
+        - pytest
+        paths:
+        - tests/
+        target: 100%
+      typing:
+        flags:
+        - MyPy
+        target: 100%
+      typing-stubs:
+        flags:
+        - MyPy
+        paths:
+        - >-
+          **/*.pyi
+        target: 100%
+
+...
diff --git a/.codespellrc b/.codespellrc
new file mode 100644
index 0000000000..a0aa12d4de
--- /dev/null
+++ b/.codespellrc
@@ -0,0 +1,3 @@
+[codespell]
+ignore-words = docs/spelling_wordlist.txt
+ignore-words-list = THIRDPARTY
diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 0000000000..6d5c35a0e6
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,34 @@
+[html]
+directory = .test-results/pytest/cov/
+show_contexts = true
+skip_covered = false
+
+[paths]
+source =
+    src
+    */src
+    *\src
+    */lib/python*/site-packages
+    */pypy*/site-packages
+    *\Lib\site-packages
+
+[report]
+skip_covered = true
+skip_empty = true
+show_missing = true
+exclude_also =
+    ^\s*@pytest\.mark\.xfail
+
+[run]
+branch = true
+cover_pylib = false
+# https://coverage.rtfd.io/en/latest/contexts.html#dynamic-contexts
+# dynamic_context = test_function  # conflicts with `pytest-cov` if set here
+parallel = true
+plugins =
+    covdefaults
+relative_files = true
+source =
+    tests
+source_pkgs =
+    awx_plugins.credentials.x.api
diff --git a/.darglint b/.darglint
new file mode 100644
index 0000000000..1991e0f718
--- /dev/null
+++ b/.darglint
@@ -0,0 +1,9 @@
+[darglint]
+# NOTE: All `darglint` styles except for `sphinx` hit ridiculously low
+# NOTE: performance on some of the in-project Python modules.
+# Refs:
+# * https://github.com/terrencepreilly/darglint/issues/186
+# * https://github.com/wemake-services/wemake-python-styleguide/issues/2287
+docstring_style = sphinx
+enable = DAR104
+strictness = full
diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000000..a20fd0efd2
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,18 @@
+root = True
+
+[*]
+charset = utf-8
+end_of_line = lf
+indent_size = 4
+indent_style = space
+insert_final_newline = True
+trim_trailing_whitespace = True
+
+[*.{bat,cmd,ps1}]
+end_of_line = crlf
+
+[*.{js,json,json5,yml,yaml,md,rb}]
+indent_size = 2
+
+[Makefile]
+indent_style = tab
diff --git a/.flake8 b/.flake8
new file mode 100644
index 0000000000..c8080af7c5
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,134 @@
+[flake8]
+
+# Print the total number of errors:
+count = true
+
+# Don't even try to analyze these:
+extend-exclude =
+    # Circle CI configs
+    .circleci,
+    # No need to traverse egg info dir
+    *.egg-info,
+    # GitHub configs
+    .github,
+    # Cache files of MyPy
+    .mypy_cache,
+    # Cache files of pytest
+    .pytest_cache,
+    # Temp dir of pytest-testmon
+    .tmontmp,
+    # Countless third-party libs in venvs
+    .tox,
+    # Occasional virtualenv dir
+    .venv,
+    # VS Code
+    .vscode,
+    # Temporary build dir
+    build,
+    # This contains sdists and wheels that we don't want to check
+    dist,
+    # Metadata of `pip wheel` cmd is autogenerated
+    pip-wheel-metadata,
+
+# IMPORTANT: avoid using ignore option, always use extend-ignore instead
+# Completely and unconditionally ignore the following errors:
+extend-ignore =
+    # Legitimate cases, no need to "fix" these violations:
+    # E501: "line too long", its function is replaced by `flake8-length`
+    E501,
+    # W505: "doc line too long", its function is replaced by `flake8-length`
+    W505,
+    # S101: MyPy requires `asserts`, plus they're not bad if cooked well
+    S101,
+    # WPS300: "Found local folder import" -- nothing bad about this
+    WPS300,
+    # WPS305: "Found f string" -- nothing bad about this
+    WPS305,
+    # An opposite consistency expectation is currently enforced
+    # by pylint via: useless-object-inheritance (R0205):
+    # WPS306: "Found class without a base class: *" -- nothing bad about this
+    WPS306,
+    # WPS317 enforces weird indents
+    WPS317,
+    # WPS318 enforces weird indents too
+    WPS318,
+    # WPS326: "Found implicit string concatenation" -- nothing bad about this
+    WPS326,
+    # WPS422: "Found future import: *" -- we need these for multipython
+    WPS422,
+
+# IMPORTANT: avoid using select option, always use extend-select instead
+# Enable the following errors:
+extend-select =
+    # B950: "line too long", longer than `max-line-length` + 10%
+    B950,
+
+# https://wemake-python-stylegui.de/en/latest/pages/usage/formatter.html
+format = wemake
+
+# Let's not overcomplicate the code:
+max-complexity = 10
+
+# Accessibility/large fonts and PEP8 friendly.
+# This is being flexibly extended through the `flake8-length`:
+max-line-length = 79
+
+# Allow certain violations in certain files:
+# Please keep both sections of this list sorted, as it will be easier for others to find and add entries in the future
+per-file-ignores =
+    # The following ignores have been researched and should be considered permanent;
+    # each should be preceded with an explanation of each of the error codes.
+    # If other ignores are added for a specific file in the section following this,
+    # these will need to be added to that line as well.
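+    #
+    # As an illustration, the kind of test the relaxations below exist for
+    # looks roughly like this (a hypothetical snippet -- the helper name is
+    # made up; only the package path appears elsewhere in this change set):
+    #
+    #     from awx_plugins.credentials.x.api import _lookup  # WPS450
+    #
+    #     def test_lookup_roundtrip() -> None:
+    #         assert _lookup('token') == 'token'  # S101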
+
+    # There are multiple `assert`s (S101)
+    # and subprocesses (import – S404; call – S603) in tests;
+    # also, using fixtures looks like shadowing the outer scope (WPS442);
+    # furthermore, we should be able to import and test private attributes
+    # (WPS450) and modules (WPS436), and finally it's impossible to avoid
+    # many module members in tests (WPS202), including many local vars (WPS210);
+    # additionally, test docstrings don't need param lists (DAR, DCO020):
+    tests/**.py: DAR, DCO020, S101, S404, S603, WPS202, WPS210, WPS436, WPS442, WPS450
+
+# Count the number of occurrences of each error/warning code and print a report:
+statistics = true
+
+# ## Plugin-provided settings: ##
+
+# flake8-eradicate
+# E800:
+eradicate-whitelist-extend = isort:\s+\w+|Ref:\s+https?:\/\/
+
+# flake8-pytest-style
+# PT001:
+pytest-fixture-no-parentheses = true
+# PT006:
+pytest-parametrize-names-type = tuple
+# PT007:
+pytest-parametrize-values-type = tuple
+pytest-parametrize-values-row-type = tuple
+# PT023:
+pytest-mark-no-parentheses = true
+
+# flake8-rst-docstrings
+rst-directives =
+    spelling
+rst-roles =
+    # Built-in Sphinx roles:
+    class,
+    data,
+    file,
+    exc,
+    meth,
+    mod,
+    term,
+    py:class,
+    py:data,
+    py:exc,
+    py:meth,
+    py:term,
+    # Sphinx's internal role:
+    event,
+
+# wemake-python-styleguide
+show-source = true
diff --git a/.git_archival.txt b/.git_archival.txt
new file mode 100644
index 0000000000..6acb3b415a
--- /dev/null
+++ b/.git_archival.txt
@@ -0,0 +1,3 @@
+node: $Format:%H$
+node-date: $Format:%cI$
+describe-name: $Format:%(describe:tags=true,match=v[0-9]*)$
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000000..b588b04bbd
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,8 @@
+# Force LF line endings for text files
+* text=auto eol=lf
+
+# Needed for setuptools-scm to work with `git archive`-produced sources from GitHub's tar.gz URLs
+.git_archival.txt export-subst
+
+# Blame ignore list entries are expected to always be appended, never edited
+.git-blame-ignore-revs merge=union
diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml
new file mode 100644
index 0000000000..842184a305
--- /dev/null
+++ b/.github/workflows/ci-cd.yml
@@ -0,0 +1,777 @@
+---
+
+name: 🧪
+
+on:
+  merge_group:
+  push:  # publishes to TestPyPI on pushes to the main branch
+    branches-ignore:
+    - dependabot/**  # Dependabot always creates PRs
+    - gh-readonly-queue/**  # Temporary merge queue-related GH-made branches
+    - maintenance/pip-tools-constraint-lockfiles  # Lock files through PRs
+    - maintenance/pip-tools-constraint-lockfiles-**  # Lock files through PRs
+    - patchback/backports/**  # Patchback always creates PRs
+    - pre-commit-ci-update-config  # pre-commit.ci always creates a PR
+  pull_request:
+    types:
+    - opened  # default
+    - synchronize  # default
+    - reopened  # default
+    - ready_for_review  # used in PRs created from the release workflow
+  schedule:
+  - cron: 1 0 * * *  # Run daily at 0:01 UTC
+  workflow_dispatch:
+    inputs:
+      release-version:
+        # github.event_name == 'workflow_dispatch'
+        # && github.event.inputs.release-version
+        description: >-
+          Target PEP440-compliant version to release.
+          Please, don't prepend `v`.
+        required: true
+        type: string
+      release-committish:
+        # github.event_name == 'workflow_dispatch'
+        # && github.event.inputs.release-committish
+        default: ''
+        description: >-
+          The commit to be released to PyPI and tagged
+          in Git as `release-version`. Normally, you
+          should keep this empty.
+ type: string + YOLO: + default: false + description: >- + Set this flag to disregard the outcome of the + test stage. The test results will block the + release otherwise. Only use this under + extraordinary circumstances to ignore the test + failures and cut the release regardless. + type: boolean + +concurrency: + group: >- + ${{ + github.workflow + }}-${{ + github.ref_type + }}-${{ + github.event.pull_request.number || github.sha + }} + cancel-in-progress: true + +env: + FORCE_COLOR: 1 # Request colored output from CLI tools supporting it + MYPY_FORCE_COLOR: 1 # MyPy's color enforcement + PIP_DISABLE_PIP_VERSION_CHECK: 1 # Hide "there's a newer pip" message + PIP_NO_PYTHON_VERSION_WARNING: 1 # Hide "this Python is deprecated" message + PIP_NO_WARN_SCRIPT_LOCATION: 1 # Hide "script dir is not in $PATH" message + PRE_COMMIT_COLOR: always + PROJECT_NAME: awx-plugins-core + PUBLISHING_TO_TESTPYPI_ENABLED: false + PY_COLORS: 1 # Recognized by the `py` package, dependency of `pytest` + PYTHONIOENCODING: utf-8 + PYTHONUTF8: 1 + TOX_PARALLEL_NO_SPINNER: 1 # Disable tox's parallel run spinner animation + TOX_TESTENV_PASSENV: >- # Make tox-wrapped tools see color requests + FORCE_COLOR + MYPY_FORCE_COLOR + NO_COLOR + PIP_DISABLE_PIP_VERSION_CHECK + PIP_NO_PYTHON_VERSION_WARNING + PIP_NO_WARN_SCRIPT_LOCATION + PRE_COMMIT_COLOR + PY_COLORS + PYTEST_THEME + PYTEST_THEME_MODE + PYTHONIOENCODING + PYTHONLEGACYWINDOWSSTDIO + PYTHONUTF8 + UPSTREAM_REPOSITORY_ID: >- + 836873755 + +run-name: >- + ${{ + github.event_name == 'workflow_dispatch' + && format('๐Ÿ“ฆ Releasing v{0}...', github.event.inputs.release-version) + || '' + }} + ${{ + github.event.pull_request.number && 'PR' || '' + }}${{ + !github.event.pull_request.number && 'Commit' || '' + }} + ${{ github.event.pull_request.number || github.sha }} + triggered by: ${{ github.event_name }} of ${{ + github.ref + }} ${{ + github.ref_type + }} + (workflow run ID: ${{ + github.run_id + }}; number: ${{ + github.run_number + }}; attempt: ${{ + github.run_attempt + }}) + +jobs: + pre-setup: + name: โš™๏ธ Pre-set global build settings + + runs-on: ubuntu-latest + + timeout-minutes: 1 + + defaults: + run: + shell: python + outputs: + # NOTE: These aren't env vars because the `${{ env }}` context is + # NOTE: inaccessible when passing inputs to reusable workflows. 
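+      # NOTE: Every value below is emitted by one of the steps further down
+      # NOTE: appending a `key=value` record to the file behind
+      # NOTE: `$GITHUB_OUTPUT`. A minimal sketch of that protocol (the
+      # NOTE: output name is illustrative only):
+      # NOTE:
+      # NOTE:     from os import environ
+      # NOTE:     from pathlib import Path
+      # NOTE:
+      # NOTE:     with Path(environ['GITHUB_OUTPUT']).open(mode='a') as f:
+      # NOTE:         print('example-output=true', file=f)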
+ dists-artifact-name: python-package-distributions + dist-version: >- + ${{ + steps.request-check.outputs.release-requested == 'true' + && github.event.inputs.release-version + || steps.scm-version.outputs.dist-version + }} + is-untagged-devel: >- + ${{ steps.untagged-check.outputs.is-untagged-devel || false }} + release-requested: >- + ${{ + steps.request-check.outputs.release-requested || false + }} + is-yolo-mode: >- + ${{ + ( + steps.request-check.outputs.release-requested == 'true' + && github.event.inputs.YOLO + ) + && true || false + }} + cache-key-files: >- + ${{ steps.calc-cache-key-files.outputs.files-hash-key }} + git-tag: ${{ steps.git-tag.outputs.tag }} + sdist-artifact-name: ${{ steps.artifact-name.outputs.sdist }} + wheel-artifact-name: ${{ steps.artifact-name.outputs.wheel }} + upstream-repository-id: ${{ env.UPSTREAM_REPOSITORY_ID }} + publishing-to-testpypi-enabled: ${{ env.PUBLISHING_TO_TESTPYPI_ENABLED }} + steps: + - name: Switch to using Python 3.11 by default + uses: actions/setup-python@v5 + with: + python-version: 3.11 + - name: >- + Mark the build as untagged '${{ + github.event.repository.default_branch + }}' branch build + id: untagged-check + if: >- + github.event_name == 'push' && + github.ref == format( + 'refs/heads/{0}', github.event.repository.default_branch + ) + run: | + from os import environ + from pathlib import Path + + FILE_APPEND_MODE = 'a' + + with Path(environ['GITHUB_OUTPUT']).open( + mode=FILE_APPEND_MODE, + ) as outputs_file: + print('is-untagged-devel=true', file=outputs_file) + - name: Mark the build as "release request" + id: request-check + if: github.event_name == 'workflow_dispatch' + run: | + from os import environ + from pathlib import Path + + FILE_APPEND_MODE = 'a' + + with Path(environ['GITHUB_OUTPUT']).open( + mode=FILE_APPEND_MODE, + ) as outputs_file: + print('release-requested=true', file=outputs_file) + - name: Check out src from Git + if: >- + steps.request-check.outputs.release-requested != 'true' + uses: actions/checkout@v4 + with: + fetch-depth: >- + ${{ + steps.request-check.outputs.release-requested == 'true' + && 1 || 0 + }} + ref: ${{ github.event.inputs.release-committish }} + - name: >- + Calculate Python interpreter version hash value + for use in the cache key + if: >- + steps.request-check.outputs.release-requested != 'true' + id: calc-cache-key-py + run: | + from hashlib import sha512 + from os import environ + from pathlib import Path + from sys import version + + FILE_APPEND_MODE = 'a' + + hash = sha512(version.encode()).hexdigest() + + with Path(environ['GITHUB_OUTPUT']).open( + mode=FILE_APPEND_MODE, + ) as outputs_file: + print(f'py-hash-key={hash}', file=outputs_file) + - name: >- + Calculate dependency files' combined hash value + for use in the cache key + if: >- + steps.request-check.outputs.release-requested != 'true' + id: calc-cache-key-files + run: | + from os import environ + from pathlib import Path + + FILE_APPEND_MODE = 'a' + + with Path(environ['GITHUB_OUTPUT']).open( + mode=FILE_APPEND_MODE, + ) as outputs_file: + print( + "files-hash-key=${{ + hashFiles( + 'tox.ini', 'pyproject.toml', + '.pre-commit-config.yaml', 'pytest.ini', + 'docs/requirements.*', + 'requirements/.*', + 'requirements-build.*' + ) + }}", + file=outputs_file, + ) + - name: Get pip cache dir + id: pip-cache-dir + if: >- + steps.request-check.outputs.release-requested != 'true' + run: >- + echo "dir=$(python -m pip cache dir)" >> "${GITHUB_OUTPUT}" + shell: bash + - name: Set up pip cache + if: >- + 
steps.request-check.outputs.release-requested != 'true' + uses: actions/cache@v4 + with: + path: ${{ steps.pip-cache-dir.outputs.dir }} + key: >- + ${{ runner.os }}-pip-${{ + steps.calc-cache-key-py.outputs.py-hash-key }}-${{ + steps.calc-cache-key-files.outputs.files-hash-key }} + restore-keys: | + ${{ runner.os }}-pip-${{ + steps.calc-cache-key-py.outputs.py-hash-key + }}- + ${{ runner.os }}-pip- + ${{ runner.os }}- + - name: Drop Git tags from HEAD for non-release requests + if: >- + steps.request-check.outputs.release-requested != 'true' + run: >- + git tag --points-at HEAD + | + xargs git tag --delete + shell: bash + - name: Set up versioning prerequisites + if: >- + steps.request-check.outputs.release-requested != 'true' + run: >- + python -m + pip install + --user + setuptools-scm + shell: bash + - name: Set the current dist version from Git + if: steps.request-check.outputs.release-requested != 'true' + id: scm-version + run: | + from os import environ + from pathlib import Path + + import setuptools_scm + + FILE_APPEND_MODE = 'a' + + ver = setuptools_scm.get_version( + ${{ + steps.untagged-check.outputs.is-untagged-devel == 'true' + && 'local_scheme="no-local-version"' || '' + }} + ) + with Path(environ['GITHUB_OUTPUT']).open( + mode=FILE_APPEND_MODE, + ) as outputs_file: + print(f'dist-version={ver}', file=outputs_file) + print( + f'dist-version-for-filenames={ver.replace("+", "-")}', + file=outputs_file, + ) + - name: Set the target Git tag + id: git-tag + run: | + from os import environ + from pathlib import Path + + FILE_APPEND_MODE = 'a' + + with Path(environ['GITHUB_OUTPUT']).open( + mode=FILE_APPEND_MODE, + ) as outputs_file: + print( + "tag=v${{ + steps.request-check.outputs.release-requested == 'true' + && github.event.inputs.release-version + || steps.scm-version.outputs.dist-version + }}", + file=outputs_file, + ) + - name: Set the expected dist artifact names + id: artifact-name + run: | + from os import environ + from pathlib import Path + + FILE_APPEND_MODE = 'a' + + dist_file_prj_base_name = '${{ env.PROJECT_NAME }}'.replace('-', '_') + with Path(environ['GITHUB_OUTPUT']).open( + mode=FILE_APPEND_MODE, + ) as outputs_file: + print( + f"sdist={dist_file_prj_base_name !s}-${{ + steps.request-check.outputs.release-requested == 'true' + && github.event.inputs.release-version + || steps.scm-version.outputs.dist-version + }}.tar.gz", + file=outputs_file, + ) + print( + f"wheel={dist_file_prj_base_name !s}-${{ + steps.request-check.outputs.release-requested == 'true' + && github.event.inputs.release-version + || steps.scm-version.outputs.dist-version + }}-py3-none-any.whl", + file=outputs_file, + ) + + build: + name: >- + ๐Ÿ“ฆ ${{ needs.pre-setup.outputs.git-tag }} + [mode: ${{ + fromJSON(needs.pre-setup.outputs.is-untagged-devel) + && 'test' || '' + }}${{ + fromJSON(needs.pre-setup.outputs.release-requested) + && 'release' || '' + }}${{ + ( + !fromJSON(needs.pre-setup.outputs.is-untagged-devel) + && !fromJSON(needs.pre-setup.outputs.release-requested) + ) && 'nightly' || '' + }}] + needs: + - pre-setup + + runs-on: ubuntu-latest + + timeout-minutes: 2 + + env: + TOXENV: cleanup-dists,build-dists + + steps: + - name: Switch to using Python 3.11 + uses: actions/setup-python@v5 + with: + python-version: 3.11 + + - name: Grab the source from Git + uses: actions/checkout@v4 + with: + fetch-depth: >- + ${{ + fromJSON(needs.pre-setup.outputs.release-requested) + && 1 || 0 + }} + ref: ${{ github.event.inputs.release-committish }} + + - name: >- + Calculate Python interpreter 
version hash value + for use in the cache key + id: calc-cache-key-py + run: | + from hashlib import sha512 + from os import environ + from pathlib import Path + from sys import version + + FILE_APPEND_MODE = 'a' + + hash = sha512(version.encode()).hexdigest() + + with Path(environ['GITHUB_OUTPUT']).open( + mode=FILE_APPEND_MODE, + ) as outputs_file: + print(f'py-hash-key={hash}', file=outputs_file) + shell: python + - name: Get pip cache dir + id: pip-cache-dir + run: >- + echo "dir=$(python -m pip cache dir)" >> "${GITHUB_OUTPUT}" + - name: Set up pip cache + uses: actions/cache@v4 + with: + path: ${{ steps.pip-cache-dir.outputs.dir }} + key: >- + ${{ runner.os }}-pip-${{ + steps.calc-cache-key-py.outputs.py-hash-key }}-${{ + needs.pre-setup.outputs.cache-key-files }} + restore-keys: | + ${{ runner.os }}-pip-${{ + steps.calc-cache-key-py.outputs.py-hash-key + }}- + ${{ runner.os }}-pip- + + - name: Identify tox's own lock file + id: tox-deps + run: > + LOCK_FILE_PATH="requirements/$( + python bin/print_lockfile_base_name.py tox + ).txt" + + + echo lock-file="$( + ls -1 "${LOCK_FILE_PATH}" + || >&2 echo "${LOCK_FILE_PATH}" not found, not injecting... + )" + >> "${GITHUB_OUTPUT}" + shell: bash # windows compat + + - name: Install tox + run: >- + python -Im pip install -r requirements/tox-tox.in + ${{ + steps.tox-deps.outputs.lock-file + && format('--constraint={0}', steps.tox-deps.outputs.lock-file) + || '' + }} + shell: bash # windows compat + + - name: Pre-populate the tox env + run: >- + python -m + tox + --parallel auto + --parallel-live + --skip-missing-interpreters false + --notest + + - name: Drop Git tags from HEAD for non-tag-create events + if: >- + !fromJSON(needs.pre-setup.outputs.release-requested) + run: >- + git tag --points-at HEAD + | + xargs git tag --delete + shell: bash + + - name: Setup git user as [bot] + if: >- + fromJSON(needs.pre-setup.outputs.release-requested) + || fromJSON(needs.pre-setup.outputs.is-untagged-devel) + uses: fregante/setup-git-user@v2 + - name: >- + Tag the release in the local Git repo + as ${{ needs.pre-setup.outputs.git-tag }} + for setuptools-scm to set the desired version + if: >- + fromJSON(needs.pre-setup.outputs.release-requested) + run: >- + git tag + -m '${{ needs.pre-setup.outputs.git-tag }}' + '${{ needs.pre-setup.outputs.git-tag }}' + -- + ${{ + fromJSON(needs.pre-setup.outputs.release-requested) + && github.event.inputs.release-committish || '' + }} + + - name: Install tomlkit Python distribution package + if: >- + fromJSON(needs.pre-setup.outputs.is-untagged-devel) + run: >- + python -m pip install --user tomlkit + - name: Instruct setuptools-scm not to add a local version part + if: >- + fromJSON(needs.pre-setup.outputs.is-untagged-devel) + run: | + from pathlib import Path + + import tomlkit + + pyproject_toml_path = Path.cwd() / 'pyproject.toml' + pyproject_toml_txt = pyproject_toml_path.read_text() + pyproject_toml = tomlkit.loads(pyproject_toml_txt) + setuptools_scm_section = pyproject_toml['tool']['setuptools_scm'] + setuptools_scm_section['local_scheme'] = 'no-local-version' + patched_pyproject_toml_txt = tomlkit.dumps(pyproject_toml) + pyproject_toml_path.write_text(patched_pyproject_toml_txt) + shell: python + - name: Pretend that pyproject.toml is unchanged + if: >- + fromJSON(needs.pre-setup.outputs.is-untagged-devel) + run: | + git diff --color=always + git update-index --assume-unchanged pyproject.toml + + - name: Build dists + run: >- + python -m + tox + --parallel auto + --parallel-live + --skip-missing-interpreters 
false + --skip-pkg-install + - name: Verify that the artifacts with expected names got created + run: >- + ls -1 + 'dist/${{ needs.pre-setup.outputs.sdist-artifact-name }}' + 'dist/${{ needs.pre-setup.outputs.wheel-artifact-name }}' + - name: Store the distribution packages + uses: actions/upload-artifact@v4 + with: + name: >- + ${{ needs.pre-setup.outputs.dists-artifact-name }} + # NOTE: Exact expected file names are specified here + # NOTE: as a safety measure โ€” if anything weird ends + # NOTE: up being in this dir or not all dists will be + # NOTE: produced, this will fail the workflow. + path: | + dist/${{ needs.pre-setup.outputs.sdist-artifact-name }} + dist/${{ needs.pre-setup.outputs.wheel-artifact-name }} + retention-days: >- + ${{ + ( + fromJSON(needs.pre-setup.outputs.release-requested) + ) && 90 || 30 + }} + + lint: + name: ๐Ÿงน Linters${{ '' }} # nest jobs under the same sidebar category + needs: + - build + - pre-setup # transitive, for accessing settings + strategy: + matrix: + runner-vm-os: + - ubuntu-latest + python-version: + - 3.11 + toxenv: + - pre-commit + - metadata-validation + - build-docs + - coverage-docs + - doctest-docs + - linkcheck-docs + - spellcheck-docs + fail-fast: false + uses: ./.github/workflows/reusable-tox.yml + with: + cache-key-files: >- + ${{ needs.pre-setup.outputs.cache-key-files }} + dists-artifact-name: >- + ${{ needs.pre-setup.outputs.dists-artifact-name }} + python-version: >- + ${{ matrix.python-version }} + release-requested: >- + ${{ needs.pre-setup.outputs.release-requested }} + runner-vm-os: >- + ${{ matrix.runner-vm-os }} + source-tarball-name: >- + ${{ needs.pre-setup.outputs.sdist-artifact-name }} + timeout-minutes: 3 + toxenv: >- + ${{ matrix.toxenv }} + upstream-repository-id: >- + ${{ needs.pre-setup.outputs.upstream-repository-id }} + yolo: >- + ${{ fromJSON(needs.pre-setup.outputs.is-yolo-mode) }} + secrets: + codecov-token: ${{ secrets.CODECOV_TOKEN }} + + tests: + name: ๐Ÿงช Tests${{ '' }} # nest jobs under the same sidebar category + needs: + - build + - pre-setup # transitive, for accessing settings + strategy: + matrix: + python-version: + # NOTE: The latest and the lowest supported Pythons are prioritized + # NOTE: to improve the responsiveness. It's nice to see the most + # NOTE: important results first. 
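+        # NOTE: `~3.13.0-0` below relies on the semver-range syntax that
+        # NOTE: `actions/setup-python` understands; the `-0` pre-release
+        # NOTE: floor is what lets it resolve 3.13 alphas/betas/RCs before
+        # NOTE: the final release is out.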
+ - 3.12 + - 3.11 + - ~3.13.0-0 + runner-vm-os: + - ubuntu-24.04 + - macos-14 + - macos-13 + toxenv: + - py + + uses: ./.github/workflows/reusable-tox.yml + with: + built-wheel-names: >- + ${{ needs.pre-setup.outputs.wheel-artifact-name }} + cache-key-files: >- + ${{ needs.pre-setup.outputs.cache-key-files }} + dists-artifact-name: >- + ${{ needs.pre-setup.outputs.dists-artifact-name }} + python-version: >- + ${{ matrix.python-version }} + release-requested: >- + ${{ needs.pre-setup.outputs.release-requested }} + runner-vm-os: >- + ${{ matrix.runner-vm-os }} + source-tarball-name: >- + ${{ needs.pre-setup.outputs.sdist-artifact-name }} + timeout-minutes: 5 + toxenv: >- + ${{ matrix.toxenv }} + tox-run-posargs: >- + --cov-report=xml:.tox/.tmp/.test-results/pytest-${{ + matrix.python-version + }}/cobertura.xml + --junitxml=.tox/.tmp/.test-results/pytest-${{ + matrix.python-version + }}/test.xml + tox-rerun-posargs: >- + --no-cov + -vvvvv + --lf + upstream-repository-id: >- + ${{ needs.pre-setup.outputs.upstream-repository-id }} + yolo: >- + ${{ fromJSON(needs.pre-setup.outputs.is-yolo-mode) }} + secrets: + codecov-token: ${{ secrets.CODECOV_TOKEN }} + + check: # This job does nothing and is only used for the branch protection + if: always() + + needs: + - lint + - pre-setup # transitive, for accessing settings + - tests + + runs-on: ubuntu-latest + + timeout-minutes: 1 + + steps: + - name: Decide whether the needed jobs succeeded or failed + uses: re-actors/alls-green@release/v1 + with: + allowed-failures: >- + ${{ + fromJSON(needs.pre-setup.outputs.is-yolo-mode) + && 'lint, tests' + || '' + }} + jobs: ${{ toJSON(needs) }} + + publish-pypi: + name: Publish ๐Ÿ๐Ÿ“ฆ ${{ needs.pre-setup.outputs.git-tag }} to PyPI + needs: + - check + - pre-setup # transitive, for accessing settings + if: >- + always() + && needs.check.result == 'success' + && fromJSON(needs.pre-setup.outputs.release-requested) + && needs.pre-setup.outputs.upstream-repository-id == github.repository_id + + runs-on: ubuntu-latest + + timeout-minutes: 1 + + environment: + name: pypi + url: >- + https://pypi.org/project/${{ env.PROJECT_NAME }}/${{ + needs.pre-setup.outputs.dist-version + }} + + permissions: + contents: read # This job doesn't need to `git push` anything + id-token: write # PyPI Trusted Publishing (OIDC) + + steps: + - name: Download all the dists + uses: actions/download-artifact@v4 + with: + name: >- + ${{ needs.pre-setup.outputs.dists-artifact-name }} + path: dist/ + - name: >- + Publish ๐Ÿ๐Ÿ“ฆ ${{ needs.pre-setup.outputs.git-tag }} to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + + publish-testpypi: + name: Publish ๐Ÿ๐Ÿ“ฆ ${{ needs.pre-setup.outputs.git-tag }} to TestPyPI + needs: + - check + - pre-setup # transitive, for accessing settings + if: >- + always() + && needs.check.result == 'success' + && ( + fromJSON(needs.pre-setup.outputs.is-untagged-devel) + || fromJSON(needs.pre-setup.outputs.release-requested) + ) + && needs.pre-setup.outputs.upstream-repository-id == github.repository_id + && fromJSON(needs.pre-setup.outputs.publishing-to-testpypi-enabled) + + runs-on: ubuntu-latest + + timeout-minutes: 1 + + environment: + name: testpypi + url: >- + https://test.pypi.org/project/${{ env.PROJECT_NAME }}/${{ + needs.pre-setup.outputs.dist-version + }} + + permissions: + contents: read # This job doesn't need to `git push` anything + id-token: write # PyPI Trusted Publishing (OIDC) + + steps: + - name: Download all the dists + uses: actions/download-artifact@v4 + with: + name: >- + ${{ 
needs.pre-setup.outputs.dists-artifact-name }} + path: dist/ + - name: >- + Publish ๐Ÿ๐Ÿ“ฆ ${{ needs.pre-setup.outputs.git-tag }} to TestPyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + repository-url: https://test.pypi.org/legacy/ + +... diff --git a/.github/workflows/pip-tools.yml b/.github/workflows/pip-tools.yml new file mode 100644 index 0000000000..93cb6a5a1f --- /dev/null +++ b/.github/workflows/pip-tools.yml @@ -0,0 +1,398 @@ +--- + +name: ๐Ÿ”’ pip-tools + +on: + workflow_dispatch: + inputs: + package-distribuition: + # github.event_name == 'workflow_dispatch' + # && github.event.inputs.package-distribuition + description: >- + A target Python package distribution to upgrade + required: false + pull_request: + paths: + - .github/workflows/pip-tools.yml + schedule: + - cron: 1 0 * * * # Run daily at 0:01 UTC + +env: + GIT_BRANCH: >- + maintenance/pip-tools-constraint-lockfiles${{ + ( + github.event_name == 'workflow_dispatch' + && github.event.inputs.package-distribuition + ) + && format('-updating-{0}', github.event.inputs.package-distribuition) + || '' + }} + PIP_DISABLE_PIP_VERSION_CHECK: 1 + PIP_NO_PYTHON_VERSION_WARNING: 1 + PIP_NO_WARN_SCRIPT_LOCATION: 1 + PY_COLORS: 1 + +concurrency: + group: >- + ${{ + github.workflow + }}-${{ + github.event.inputs.package-distribuition + || github.event.pull_request.number + || github.sha + }} + cancel-in-progress: true + +run-name: >- + โฎธ + Bumping + ${{ + ( + github.event_name == 'workflow_dispatch' + && github.event.inputs.package-distribuition + ) + && format('`{0}`', github.event.inputs.package-distribuition) + || 'everything' + }} + in all the lock files + ๐Ÿ”’ + +jobs: + + deps: + name: >- + โ›“${{ matrix.lock-file-env }}: + ๐Ÿ${{ + matrix.python-version + }}@${{ + matrix.os + }} + runs-on: ${{ matrix.os }} + + timeout-minutes: 3 + + strategy: + matrix: + python-version: + # NOTE: The latest and the lowest supported Pythons are prioritized + # NOTE: to improve the responsiveness. It's nice to see the most + # NOTE: important results first. 
+ - 3.12 + - 3.11 + - ~3.13.0-0 + os: + - ubuntu-24.04 + - macos-14 + - macos-13 + lock-file-env: + - build-dists + - build-docs + - linkcheck-docs + - metadata-validation + - pip-compile + - pip-compile-build-lock + - pip-compile-tox-env-lock + - pre-commit + - py + - tox + - spellcheck-docs + lock-file-extra-input: + - pyproject.toml + - '' + exclude: + - lock-file-env: build-dists + lock-file-extra-input: pyproject.toml + - lock-file-env: build-docs + lock-file-extra-input: '' + - lock-file-env: linkcheck-docs + lock-file-extra-input: '' + - lock-file-env: metadata-validation + lock-file-extra-input: pyproject.toml + - lock-file-env: pip-compile + lock-file-extra-input: pyproject.toml + - lock-file-env: pip-compile-build-lock + lock-file-extra-input: pyproject.toml + - lock-file-env: pip-compile-tox-env-lock + lock-file-extra-input: pyproject.toml + - lock-file-env: pre-commit + lock-file-extra-input: pyproject.toml + - lock-file-env: py + lock-file-extra-input: '' + - lock-file-env: tox + lock-file-extra-input: pyproject.toml + - lock-file-env: spellcheck-docs + lock-file-extra-input: '' + + env: + TOXENV: pip-compile-tox-env-lock + + steps: + - name: Grab the source from Git + uses: actions/checkout@v4 # Keep before `setup-python` for cache to work + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + cache: pip + cache-dependency-path: requirements/** + python-version: ${{ matrix.python-version }} + + - name: Identify tox's own lock file + id: tox-deps + run: > + LOCK_FILE_PATH="requirements/$( + python bin/print_lockfile_base_name.py tox + ).txt" + + + echo lock-file="$( + ls -1 "${LOCK_FILE_PATH}" + || >&2 echo "${LOCK_FILE_PATH}" not found, not injecting... + )" + >> "${GITHUB_OUTPUT}" + shell: bash # windows compat + + - name: Install tox + run: >- + python -Im pip install -r requirements/tox-tox.in + ${{ + steps.tox-deps.outputs.lock-file + && format('--constraint={0}', steps.tox-deps.outputs.lock-file) + || '' + }} + shell: bash # windows compat + + - name: Pre-populate the tox env + run: python -Im tox --skip-missing-interpreters false --notest + + - name: Setup git user as [bot] + # Refs: + # * https://github.community/t/github-actions-bot-email-address/17204/6 + # * https://github.com/actions/checkout/issues/13#issuecomment-724415212 + uses: fregante/setup-git-user@v2.0.1 + + - name: Generate constraints files + run: >- + python -Im tox r + -- + ${{ matrix.lock-file-env }} ${{ matrix.lock-file-extra-input }} + ${{ + ( + github.event_name == 'workflow_dispatch' + && github.event.inputs.package-distribuition + ) + && format( + '--upgrade-package="{0}"', + github.event.inputs.package-distribuition + ) + || '--upgrade' + }} + + - name: Commit version bumps to Git + id: constraints + run: | + LOCK_BASE_NAME=$(python bin/print_lockfile_base_name.py ${{ + matrix.lock-file-env + }}) + + git add "requirements/${LOCK_BASE_NAME}.txt" + + git commit "requirements/${LOCK_BASE_NAME}.txt" \ + -m "Update ${LOCK_BASE_NAME} constraints${{ + ( + github.event_name == 'workflow_dispatch' + && github.event.inputs.package-distribuition + ) + && format(' for {0}', github.event.inputs.package-distribuition) + || '' + }}" \ + && { + echo "patch=${{ + runner.temp + }}/patches/0001-Update-${LOCK_BASE_NAME}-constraints.patch" \ + >> "${GITHUB_OUTPUT}" + } \ + || : + shell: bash # windows compat + + - name: Log the patch + if: steps.constraints.outputs.patch + run: git show --color + - name: Create a temporary patch directory + if: 
steps.constraints.outputs.patch + run: mkdir -pv '${{ runner.temp }}/patches' + shell: bash # windows compat + - name: Create a patch from the last Git commit + if: steps.constraints.outputs.patch + run: >- + git format-patch + --output='${{ steps.constraints.outputs.patch }}' + -1 + HEAD + - name: Make a GHA artifact suffix + if: steps.constraints.outputs.patch + id: random + run: >- + echo uuid=$(python -c 'import uuid; print(uuid.uuid4())') + >> "${GITHUB_OUTPUT}" + shell: bash # windows compat + - name: Save the package bump patch as a GHA artifact + if: steps.constraints.outputs.patch + uses: actions/upload-artifact@v4 + with: + name: pip-constraints-git-patches--${{ steps.random.outputs.uuid }} + path: ${{ steps.constraints.outputs.patch }} + + check: # This job does nothing and is only used for the branch protection + if: always() + + needs: + - deps + + runs-on: ubuntu-latest + + timeout-minutes: 1 + + steps: + - name: Decide whether the needed jobs succeeded or failed + uses: re-actors/alls-green@release/v1 + with: + jobs: ${{ toJSON(needs) }} + + publish-pr: + name: Open/refresh a PR + if: github.event_name != 'pull_request' + + needs: + - check + + runs-on: Ubuntu-latest + + timeout-minutes: 1 + + environment: + name: pip-tools + url: ${{ steps.pr.outputs.pull_request_url }} + + permissions: + contents: write + pull-requests: write + + steps: + - name: Download all the dists + id: artifacts-download + continue-on-error: true # and judge whether there's updates later + uses: actions/download-artifact@v4 + with: + merge-multiple: true + path: ${{ runner.temp }}/patches/ + pattern: pip-constraints-git-patches--* + - name: >- + Determine whether any change suggestions to lockfiles + have been produced + if: steps.artifacts-download.outcome == 'success' + id: artifacts + run: >- + echo "lockfile-updates-needed=true" >> "${GITHUB_OUTPUT}" + - name: Grab the source from Git + if: steps.artifacts.outputs.lockfile-updates-needed + uses: actions/checkout@v4 + - name: Setup git user as [bot] + if: steps.artifacts.outputs.lockfile-updates-needed + # Refs: + # * https://github.community/t/github-actions-bot-email-address/17204/6 + # * https://github.com/actions/checkout/issues/13#issuecomment-724415212 + uses: fregante/setup-git-user@v2.0.1 + + - name: Figure out if the pre-existing remote branch exists + if: steps.artifacts.outputs.lockfile-updates-needed + id: pre-existing-remote-branch + run: >- + echo "info=$( + git ls-remote origin "${GIT_BRANCH}" + )" >> "${GITHUB_OUTPUT}" + - name: Fetch the existing remote PR branch + if: steps.pre-existing-remote-branch.outputs.info + run: git fetch origin "${GIT_BRANCH}" + - name: Switch to the PR branch + if: steps.artifacts.outputs.lockfile-updates-needed + run: git checkout -B "${GIT_BRANCH}" + + - name: List Git patches + if: steps.artifacts.outputs.lockfile-updates-needed + run: ls -alh '${{ runner.temp }}/patches/' + - name: Apply patches to the Git repo + if: steps.artifacts.outputs.lockfile-updates-needed + run: git am '${{ runner.temp }}/patches'/*.patch + - name: Force-push the PR branch to remote + if: steps.artifacts.outputs.lockfile-updates-needed + run: git push origin "HEAD:${GIT_BRANCH}" --force-with-lease + + - name: Create a PR + if: >- + !steps.pre-existing-remote-branch.outputs.info + && steps.artifacts.outputs.lockfile-updates-needed + id: new-pr + uses: vsoch/pull-request-action@1.1.1 + env: + BRANCH_PREFIX: '' + GITHUB_TOKEN: ${{ github.token }} + PULL_REQUEST_BODY: >- + Automated pip-tools-managed pip constraint lockfiles 
update. + PULL_REQUEST_BRANCH: ${{ github.event.repository.default_branch }} + PULL_REQUEST_DRAFT: true + PULL_REQUEST_FROM_BRANCH: ${{ env.GIT_BRANCH }} + PULL_REQUEST_TITLE: >- + โ›“๐Ÿ”’ Bump transitive deps in pip-tools-managed lockfiles${{ + ( + github.event_name == 'workflow_dispatch' + && github.event.inputs.package-distribuition + ) + && format(' for {0}', github.event.inputs.package-distribuition) + || '' + }} + - name: Retrieve the existing PR URL + if: steps.pre-existing-remote-branch.outputs.info + id: existing-pr + env: + GITHUB_TOKEN: ${{ github.token }} + run: > + echo -n pull_request_url= + >> "${GITHUB_OUTPUT}" + + + gh pr view + --json 'url' + --jq '.url' + --repo '${{ github.repository }}' + '${{ env.GIT_BRANCH }}' + >> "${GITHUB_OUTPUT}" + - name: Select the actual PR URL + id: pr + env: + GITHUB_TOKEN: ${{ github.token }} + run: > + echo -n pull_request_url= + >> "${GITHUB_OUTPUT}" + + + echo '${{ + steps.new-pr.outputs.pull_request_url + || steps.existing-pr.outputs.pull_request_url + }}' + >> "${GITHUB_OUTPUT}" + - name: Log the pull request details + run: >- + echo 'PR URL: ${{ steps.pr.outputs.pull_request_url }}' + | tee -a "${GITHUB_STEP_SUMMARY}" + + - name: Instruct the maintainers to trigger CI by undrafting the PR + env: + GITHUB_TOKEN: ${{ github.token }} + run: >- + gh pr comment + --body 'Please mark the PR as ready for review to trigger PR checks.' + --repo '${{ github.repository }}' + '${{ steps.pr.outputs.pull_request_url }}' + +... diff --git a/.github/workflows/reusable-tox.yml b/.github/workflows/reusable-tox.yml new file mode 100644 index 0000000000..d82783495e --- /dev/null +++ b/.github/workflows/reusable-tox.yml @@ -0,0 +1,388 @@ +--- + +name: >- + โŒ + [DO NOT CLICK] + Reusable Tox + +on: + workflow_call: + inputs: + cache-key-files: + description: Dependency files cache + required: true + type: string + built-wheel-names: + description: >- + A glob for the built distributions in the artifact + to test (is installed into tox env if passed) + required: false + type: string + dists-artifact-name: + description: Workflow artifact name containing dists + required: true + type: string + python-version: + description: Python version to provision in the VM + required: true + type: string + release-requested: + description: Flag whether this is CI run is a release request + default: 'false' + required: false + type: string + runner-vm-os: + description: VM OS to use + default: ubuntu + required: false + type: string + source-tarball-name: + description: Sdist filename wildcard + required: true + type: string + timeout-minutes: + description: Deadline for the job to complete + required: true + type: string + toxenv: + description: Name of the tox environment to use + required: true + type: string + tox-run-posargs: + description: Positional arguments to pass to the regular tox run + required: false + type: string + tox-rerun-posargs: + description: Positional arguments to pass to the re-attempted tox run + required: false + type: string + upstream-repository-id: + description: ID of the upstream GitHub Repository + required: true + type: string + yolo: + required: true + type: string + secrets: + codecov-token: + description: Mandatory token for uploading to Codecov + required: true + +env: + COLOR: >- # Supposedly, pytest or coveragepy use this + yes + FORCE_COLOR: 1 # Request colored output from CLI tools supporting it + MYPY_FORCE_COLOR: 1 # MyPy's color enforcement + PIP_DISABLE_PIP_VERSION_CHECK: 1 + PIP_NO_PYTHON_VERSION_WARNING: 1 + 
PIP_NO_WARN_SCRIPT_LOCATION: 1 + PRE_COMMIT_COLOR: always + PY_COLORS: 1 # Recognized by the `py` package, dependency of `pytest` + PYTHONIOENCODING: utf-8 + PYTHONUTF8: 1 + TOX_PARALLEL_NO_SPINNER: 1 + TOX_TESTENV_PASSENV: >- # Make tox-wrapped tools see color requests + COLOR + FORCE_COLOR + MYPY_FORCE_COLOR + NO_COLOR + PIP_DISABLE_PIP_VERSION_CHECK + PIP_NO_PYTHON_VERSION_WARNING + PIP_NO_WARN_SCRIPT_LOCATION + PRE_COMMIT_COLOR + PY_COLORS + PYTEST_THEME + PYTEST_THEME_MODE + PYTHONIOENCODING + PYTHONLEGACYWINDOWSSTDIO + PYTHONUTF8 + +jobs: + tox: + name: >- + ${{ + inputs.toxenv + }}@๐Ÿ${{ + inputs.python-version + }}@${{ + inputs.runner-vm-os + }} + + runs-on: ${{ inputs.runner-vm-os }} + + timeout-minutes: ${{ fromJSON(inputs.timeout-minutes) }} + + continue-on-error: >- + ${{ + ( + fromJSON(inputs.yolo) || + ( + startsWith(inputs.python-version, '~') + ) || + contains(inputs.python-version, 'alpha') + ) && true || false + }} + + env: + TOXENV: ${{ inputs.toxenv }} + + steps: + - name: >- + Switch to using Python v${{ inputs.python-version }} + by default + id: python-install + uses: actions/setup-python@v5 + with: + python-version: ${{ inputs.python-version }} + + # NOTE: `pre-commit --show-diff-on-failure` and `sphinxcontrib-spellcheck` + # NOTE: with Git authors allowlist enabled both depend on the presence of a + # NOTE: Git repository. + - name: Grab the source from Git + if: >- + contains(fromJSON('["pre-commit", "spellcheck-docs"]'), inputs.toxenv) + uses: actions/checkout@v4 + with: + ref: ${{ github.event.inputs.release-committish }} + - name: Retrieve the project source from an sdist inside the GHA artifact + if: >- + !contains(fromJSON('["pre-commit", "spellcheck-docs"]'), inputs.toxenv) + uses: re-actors/checkout-python-sdist@release/v2 + with: + source-tarball-name: ${{ inputs.source-tarball-name }} + workflow-artifact-name: ${{ inputs.dists-artifact-name }} + + - name: Cache pre-commit.com virtualenvs + if: inputs.toxenv == 'pre-commit' + uses: actions/cache@v4 + with: + path: ~/.cache/pre-commit + key: >- + ${{ + runner.os + }}-pre-commit-${{ + hashFiles('.pre-commit-config.yaml') + }} + + - name: Figure out if the interpreter ABI is stable + id: py-abi + run: | + from os import environ + from pathlib import Path + from sys import version_info + + FILE_APPEND_MODE = 'a' + + is_stable_abi = version_info.releaselevel == 'final' + + with Path(environ['GITHUB_OUTPUT']).open( + mode=FILE_APPEND_MODE, + ) as outputs_file: + print( + 'is-stable-abi={is_stable_abi}'. 
+ format(is_stable_abi=str(is_stable_abi).lower()), + file=outputs_file, + ) + shell: python + - name: >- + Calculate Python interpreter version hash value + for use in the cache key + if: fromJSON(steps.py-abi.outputs.is-stable-abi) + id: calc-cache-key-py + run: | + from hashlib import sha512 + from os import environ + from pathlib import Path + from sys import version + + FILE_APPEND_MODE = 'a' + + hash = sha512(version.encode()).hexdigest() + + with Path(environ['GITHUB_OUTPUT']).open( + mode=FILE_APPEND_MODE, + ) as outputs_file: + print(f'py-hash-key={hash}', file=outputs_file) + shell: python + - name: Get pip cache dir + if: fromJSON(steps.py-abi.outputs.is-stable-abi) + id: pip-cache-dir + run: >- + echo "dir=$(python -Im pip cache dir)" >> "${GITHUB_OUTPUT}" + shell: bash + - name: Set up pip cache + if: fromJSON(steps.py-abi.outputs.is-stable-abi) + uses: actions/cache@v4 + with: + path: ${{ steps.pip-cache-dir.outputs.dir }} + key: >- + ${{ runner.os }}-pip-${{ + steps.calc-cache-key-py.outputs.py-hash-key }}-${{ + inputs.cache-key-files }} + restore-keys: | + ${{ runner.os }}-pip-${{ + steps.calc-cache-key-py.outputs.py-hash-key + }}- + ${{ runner.os }}-pip- + + - name: Identify tox's own lock file + id: tox-deps + run: > + LOCK_FILE_PATH="requirements/$( + python bin/print_lockfile_base_name.py tox + ).txt" + + + echo lock-file="$( + ls -1 "${LOCK_FILE_PATH}" + || >&2 echo "${LOCK_FILE_PATH}" not found, not injecting... + )" + >> "${GITHUB_OUTPUT}" + shell: bash # windows compat + + - name: Install tox + run: >- + python -Im pip install -r requirements/tox-tox.in + ${{ + steps.tox-deps.outputs.lock-file + && format('--constraint={0}', steps.tox-deps.outputs.lock-file) + || '' + }} + shell: bash # windows compat + + - name: Make the env clean of non-test files + if: inputs.toxenv == 'metadata-validation' + run: | + shopt -s extglob + rm -rf !tox.ini + shell: bash + - name: Download all the dists + if: >- + contains(fromJSON('["metadata-validation", "py"]'), inputs.toxenv) + uses: actions/download-artifact@v4 + with: + name: ${{ inputs.dists-artifact-name }} + path: dist/ + + - name: >- + Pre-populate tox envs: `${{ env.TOXENV }}` + run: >- + python -Im + tox + --parallel auto + --parallel-live + --skip-missing-interpreters false + ${{ + inputs.built-wheel-names != '' + && format('--installpkg dist/{0}', inputs.built-wheel-names) + || '' + }} + --notest + - name: Initialize pre-commit envs if needed + if: inputs.toxenv == 'pre-commit' + run: >- + .tox/${{ inputs.toxenv }}/bin/python -Im pre_commit install-hooks + - name: >- + Run tox envs: `${{ env.TOXENV }}` + id: tox-run + run: >- + python -Im + tox + --parallel auto + --parallel-live + --skip-missing-interpreters false + --skip-pkg-install + ${{ + inputs.tox-run-posargs != '' + && format('-- {0}', inputs.tox-run-posargs) + || '' + }} + - name: Produce markdown test summary from JUnit + if: >- + !cancelled() + && steps.tox-run.outputs.test-result-files != '' + uses: test-summary/action@v2.3 + with: + paths: >- + ${{ steps.tox-run.outputs.test-result-files }} + - name: Produce markdown test summary from Cobertura XML + # NOTE: MyPy is temporarily excluded because it produces incomplete XML + # NOTE: files that `irongut/CodeCoverageSummary` can't stomach. + # Refs: + # * https://github.com/irongut/CodeCoverageSummary/issues/324 + # * https://github.com/python/mypy/issues/17689 + # FIXME: Revert the exclusion once upstream fixes the bug. 
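+        # NOTE: (The `codecov-flags != 'MyPy'` check in the `if:` condition
+        # NOTE: right below is what implements this temporary exclusion.)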
+ if: >- + !cancelled() + && runner.os == 'Linux' + && steps.tox-run.outputs.cov-report-files != '' + && steps.tox-run.outputs.test-result-files == '' + && steps.tox-run.outputs.codecov-flags != 'MyPy' + uses: irongut/CodeCoverageSummary@v1.3.0 + with: + badge: true + filename: >- + ${{ steps.tox-run.outputs.cov-report-files }} + format: markdown + output: both + # Ref: https://github.com/irongut/CodeCoverageSummary/issues/66 + - name: Append coverage results to Job Summary + if: >- + !cancelled() + && runner.os == 'Linux' + && steps.tox-run.outputs.cov-report-files != '' + && steps.tox-run.outputs.test-result-files == '' + && steps.tox-run.outputs.codecov-flags != 'MyPy' + run: >- + cat code-coverage-results.md >> "${GITHUB_STEP_SUMMARY}" + - name: Re-run the failing tests with maximum verbosity + if: >- + !cancelled() + && failure() + && inputs.tox-rerun-posargs != '' + run: >- # `exit 1` makes sure that the job remains red with flaky runs + python -Im + tox + --parallel auto + --parallel-live + --skip-missing-interpreters false + -vvvvv + --skip-pkg-install + -- + ${{ inputs.tox-rerun-posargs }} + && exit 1 + shell: bash + - name: Send coverage data to Codecov + if: >- + !cancelled() + && steps.tox-run.outputs.cov-report-files != '' + uses: codecov/codecov-action@v4 + with: + token: ${{ secrets.codecov-token }} + files: >- + ${{ steps.tox-run.outputs.cov-report-files }}, + flags: >- + CI-GHA, + ${{ steps.tox-run.outputs.codecov-flags }}, + OS-${{ + runner.os + }}, + VM-${{ + inputs.runner-vm-os + }}, + Py-${{ + steps.python-install.outputs.python-version + }} + fail_ci_if_error: >- + ${{ toJSON(inputs.upstream-repository-id == github.repository_id) }} + - name: Upload test results to Codecov + if: >- + !cancelled() + && steps.tox-run.outputs.test-result-files != '' + uses: codecov/test-results-action@v1 + with: + token: ${{ secrets.codecov-token }} + files: >- + ${{ steps.tox-run.outputs.test-result-files }} + +... diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000..407d68c0aa --- /dev/null +++ b/.gitignore @@ -0,0 +1,330 @@ +# Created by https://www.toptal.com/developers/gitignore/api/python,ansibletower,dotenv,direnv,sonar,django,virtualenv,ansible,venv,pydev +# Edit at https://www.toptal.com/developers/gitignore?templates=python,ansibletower,dotenv,direnv,sonar,django,virtualenv,ansible,venv,pydev + +### Ansible ### +*.retry + +### AnsibleTower ### +# Ansible runtime and backups +*.original +*.tmp +*.bkp +*.*~ + +# Tower runtime roles +roles/** +!roles/requirements.yml + +# Avoid plain-text passwords +*pwd* +*pass* +*password* +*.txt + +# Exclude all binaries +*.bin +*.jar +*.tar +*.zip +*.gzip +*.tgz + + +### direnv ### +.direnv +.envrc + +### Django ### +*.log +*.pot +*.pyc +__pycache__/ +local_settings.py +db.sqlite3 +db.sqlite3-journal +media + +# If your build process includes running collectstatic, then you probably don't need or want to include staticfiles/ +# in your Git repository. Update and uncomment the following line accordingly. +# /staticfiles/ + +### Django.Python Stack ### +# Byte-compiled / optimized / DLL files +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo + +# Django stuff: + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + +### dotenv ### + +### pydev ### +.pydevproject + +### Python ### +# Byte-compiled / optimized / DLL files + +# C extensions + +# Distribution / packaging + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. + +# Installer logs + +# Unit test / coverage reports + +# Translations + +# Django stuff: + +# Flask stuff: + +# Scrapy stuff: + +# Sphinx documentation + +# PyBuilder + +# Jupyter Notebook + +# IPython + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+
+# Celery stuff
+
+# SageMath parsed files
+
+# Environments
+
+# Spyder project settings
+
+# Rope project settings
+
+# mkdocs documentation
+
+# mypy
+
+# Pyre type checker
+
+# pytype static type analyzer
+
+# Cython debug symbols
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+
+### Python Patch ###
+# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
+poetry.toml
+
+# ruff
+.ruff_cache/
+
+# LSP config files
+pyrightconfig.json
+
+### Sonar ###
+#Sonar generated dir
+/.sonar/
+
+### venv ###
+# Virtualenv
+# http://iamzed.com/2009/05/07/a-primer-on-virtualenv/
+[Bb]in
+[Ii]nclude
+[Ll]ib
+[Ll]ib64
+[Ll]ocal
+[Ss]cripts
+pyvenv.cfg
+pip-selfcheck.json
+
+### VirtualEnv ###
+# Virtualenv
+# http://iamzed.com/2009/05/07/a-primer-on-virtualenv/
+
+# End of https://www.toptal.com/developers/gitignore/api/python,ansibletower,dotenv,direnv,sonar,django,virtualenv,ansible,venv,pydev
+
+# setuptools-scm
+!.git_archival.txt
+
+# lockfile scripts
+!/bin/
diff --git a/.isort.cfg b/.isort.cfg
new file mode 100644
index 0000000000..7c86d1e9e4
--- /dev/null
+++ b/.isort.cfg
@@ -0,0 +1,17 @@
+# https://pycqa.github.io/isort/docs/configuration/config_files/
+[settings]
+combine_as_imports = true
+default_section = THIRDPARTY
+honor_noqa = true
+include_trailing_comma = true
+indent = 4
+known_frameworks = awx, django
+known_testing = pytest
+line_length = 79
+lines_after_imports = 2
+# https://pycqa.github.io/isort/#multi-line-output-modes
+multi_line_output = 3
+no_lines_before = LOCALFOLDER
+sections = FUTURE, STDLIB, TESTING, FRAMEWORKS, THIRDPARTY, FIRSTPARTY, LOCALFOLDER
+use_parentheses = true
+verbose = true
diff --git a/.mypy.ini b/.mypy.ini
new file mode 100644
index 0000000000..6823f62d94
--- /dev/null
+++ b/.mypy.ini
@@ -0,0 +1,32 @@
+[mypy]
+python_version = 3.11
+color_output = true
+error_summary = true
+files =
+    src/,
+    tests/
+
+check_untyped_defs = true
+
+disallow_untyped_calls = true
+disallow_untyped_defs = true
+disallow_any_generics = true
+
+enable_error_code =
+    ignore-without-code
+
+follow_imports = normal
+
+ignore_missing_imports = false
+
+namespace_packages = true
+
+pretty = true
+
+show_column_numbers = true
+show_error_codes = true
+strict_optional = true
+
+warn_no_return = true
+warn_redundant_casts = true
true +warn_unused_ignores = true diff --git a/.pep8 b/.pep8 new file mode 100644 index 0000000000..3995969e42 --- /dev/null +++ b/.pep8 @@ -0,0 +1,3 @@ +[pep8] +aggressive = 3 +in-place = true diff --git a/.pip-tools.toml b/.pip-tools.toml new file mode 100644 index 0000000000..97afb0483b --- /dev/null +++ b/.pip-tools.toml @@ -0,0 +1,5 @@ +[tool.pip-tools] +allow-unsafe = true # weird outdated default +generate-hashes = false # pip bug https://github.com/pypa/pip/issues/9243 +resolver = "backtracking" # modern depresolver +strip-extras = true # so that output files are true pip constraints diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000000..f7aaf9e246 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,234 @@ +--- + +ci: + autoupdate_schedule: quarterly # low frequency to reduce maintenance noise + +repos: +- repo: local + hooks: + - id: forbidden-files + name: Verify that forbidden files are not present in the repo + entry: >- + `setup.py` shouldn't be in the root because we fully rely on PEP 621. + `__init__.py` shouldn't be present to avoid clashes in namespace packages. + The `tests/__init__.py` module must not exist so `pytest` doesn't add the + project root to `sys.path` / `$PYTHONPATH` + files: >- + (?x) + ^ + (?: + setup + |src(?: + /awx(?: + /plugins(?: + /[^/]+ + )? + )? + )?/__init__ + |tests/__init__ + )\.py + $ + language: fail + types: [] + types_or: + - file + - symlink + +- repo: https://github.com/asottile/add-trailing-comma.git + rev: v3.1.0 + hooks: + - id: add-trailing-comma + +- repo: https://github.com/asottile/pyupgrade.git + rev: v3.17.0 + hooks: + - id: pyupgrade + args: + - --py311-plus + +- repo: https://github.com/PyCQA/isort.git + rev: 5.13.2 + hooks: + - id: isort + args: + - --honor-noqa + +- repo: https://github.com/PyCQA/docformatter.git + rev: v1.7.5 + hooks: + - id: docformatter + args: + - --in-place + +- repo: https://github.com/hhatto/autopep8.git + rev: v2.3.1 + hooks: + - id: autopep8 + +- repo: https://github.com/Lucas-C/pre-commit-hooks.git + rev: v1.5.5 + hooks: + - id: remove-tabs + +- repo: https://github.com/python-jsonschema/check-jsonschema.git + rev: 0.29.1 + hooks: + - id: check-github-workflows + files: ^\.github/workflows/[^/]+$ + types: + - yaml + - id: check-jsonschema + name: Check GitHub Workflows set timeout-minutes + args: + - --builtin-schema + - github-workflows-require-timeout + files: ^\.github/workflows/[^/]+$ + types: + - yaml + - id: check-readthedocs + +- repo: https://github.com/andreoliwa/nitpick.git + rev: v0.35.0 + hooks: + - id: nitpick-check + pass_filenames: false + +- repo: https://github.com/pre-commit/pygrep-hooks.git + rev: v1.10.0 + hooks: + - id: python-check-blanket-noqa + - id: python-check-mock-methods + - id: python-no-eval + - id: python-no-log-warn + - id: rst-backticks + +- repo: https://github.com/pre-commit/pre-commit-hooks.git + rev: v4.6.0 + hooks: + # Side-effects: + - id: trailing-whitespace + - id: check-merge-conflict + - id: double-quote-string-fixer + - id: end-of-file-fixer + # Non-modifying checks: + - id: name-tests-test + files: >- + ^tests/[^_].*\.py$ + - id: check-added-large-files + - id: check-byte-order-marker + - id: check-case-conflict + # disabled due to pre-commit/pre-commit-hooks#159 + # - id: check-docstring-first + - id: check-json + - id: check-symlinks + - id: check-yaml + # args: + # - --unsafe + - id: detect-private-key + + # Heavy checks: + - id: check-ast + - id: debug-statements + +- repo: 
https://github.com/Lucas-C/pre-commit-hooks-markup.git + rev: v1.0.1 + hooks: + - id: rst-linter + files: >- + ^README\.rst$ + +- repo: https://github.com/codespell-project/codespell.git + rev: v2.3.0 + hooks: + - id: codespell + exclude: >- + ^\.github/\.json-schemas/.*\.json$ + +- repo: https://github.com/adrienverge/yamllint.git + rev: v1.35.1 + hooks: + - id: yamllint + types: + - file + - yaml + args: + - --strict + +- repo: https://github.com/openstack/bashate.git + rev: 2.1.1 + hooks: + - id: bashate + +- repo: https://github.com/shellcheck-py/shellcheck-py.git + rev: v0.10.0.1 + hooks: + - id: shellcheck + +- repo: https://github.com/PyCQA/flake8.git + rev: 7.1.1 + hooks: + - id: flake8 + additional_dependencies: + - flake8-annotations ~= 3.1.1 + - flake8-comprehensions ~= 3.15.0 + - flake8-cognitive-complexity ~= 0.1.0 + - flake8-docstrings ~= 1.7.0 + - flake8-length ~= 0.3.1 + - flake8-logging ~= 1.6.0 + - flake8-logging-format ~= 2024.24.12 + - flake8-pytest-style ~= 2.0.0 + - wemake-python-styleguide ~= 0.19.2 + language_version: python3 + +- repo: https://github.com/pre-commit/mirrors-mypy.git + rev: v1.11.2 + hooks: + - id: mypy + alias: mypy-py313 + name: MyPy, for Python 3.13 + additional_dependencies: + - lxml # dep of `--txt-report`, `--cobertura-xml-report` & `--html-report` + - pytest + args: + - --python-version=3.13 + - --txt-report=.tox/.tmp/.test-results/mypy--py-3.13 + - --cobertura-xml-report=.tox/.tmp/.test-results/mypy--py-3.13 + - --html-report=.tox/.tmp/.test-results/mypy--py-3.13 + pass_filenames: false + - id: mypy + alias: mypy-py312 + name: MyPy, for Python 3.12 + additional_dependencies: + - lxml # dep of `--txt-report`, `--cobertura-xml-report` & `--html-report` + - pytest + args: + - --python-version=3.12 + - --txt-report=.tox/.tmp/.test-results/mypy--py-3.12 + - --cobertura-xml-report=.tox/.tmp/.test-results/mypy--py-3.12 + - --html-report=.tox/.tmp/.test-results/mypy--py-3.12 + pass_filenames: false + - id: mypy + alias: mypy-py311 + name: MyPy, for Python 3.11 + additional_dependencies: + - lxml # dep of `--txt-report`, `--cobertura-xml-report` & `--html-report` + - pytest + args: + - --python-version=3.11 + - --txt-report=.tox/.tmp/.test-results/mypy--py-3.11 + - --cobertura-xml-report=.tox/.tmp/.test-results/mypy--py-3.11 + - --html-report=.tox/.tmp/.test-results/mypy--py-3.11 + pass_filenames: false + +- repo: https://github.com/PyCQA/pylint.git + rev: v3.2.6 + hooks: + - id: pylint + additional_dependencies: + - covdefaults # needed by pylint-pytest due to pytest-cov loading coverage + - pylint-pytest ~= 2.0.0a0 + - pytest-cov # needed by pylint-pytest since it picks up pytest's args + - pytest-xdist # needed by pylint-pytest since it picks up pytest's args + - Sphinx # needed by the Sphinx extension stub + +... diff --git a/.pylintrc.toml b/.pylintrc.toml new file mode 100644 index 0000000000..56303ae252 --- /dev/null +++ b/.pylintrc.toml @@ -0,0 +1,622 @@ +[tool.pylint.main] +# Analyse import fallback blocks. This can be used to support both Python 2 and 3 +# compatible code, which means that the block might have code that exists only in +# one or another interpreter, leading to false positives when analysed. +# analyse-fallback-blocks = + +# Clear in-memory caches upon conclusion of linting. Useful if running pylint in +# a server-like mode. +# clear-cache-post-run = + +# Always return a 0 (non-error) status code, even if lint errors are found. This +# is primarily useful in continuous integration scripts. 
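+# (Left commented out below, so the default of failing the run applies; a CI
+# pipeline that only wants advisory linting could set `exit-zero = true`.)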
+# exit-zero = + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. +# extension-pkg-allow-list = + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. (This is an alternative name to extension-pkg-allow-list +# for backward compatibility.) +# extension-pkg-whitelist = + +# Return non-zero exit code if any of these messages/categories are detected, +# even if score is above --fail-under value. Syntax same as enable. Messages +# specified are enabled, while categories only check already-enabled messages. +# fail-on = + +# Specify a score threshold under which the program will exit with error. +fail-under = 10 + +# Interpret the stdin as a python script, whose filename needs to be passed as +# the module_or_package argument. +# from-stdin = + +# Files or directories to be skipped. They should be base names, not paths. +ignore = ["CVS"] + +# Add files or directories matching the regular expressions patterns to the +# ignore-list. The regex matches against paths and can be in Posix or Windows +# format. Because '\\' represents the directory delimiter on Windows systems, it +# can't be used as an escape character. +# ignore-paths = + +# Files or directories matching the regular expression patterns are skipped. The +# regex matches against base names, not paths. The default value ignores Emacs +# file locks +ignore-patterns = ["^\\.#"] + +# List of module names for which member attributes should not be checked and will +# not be imported (useful for modules/projects where namespaces are manipulated +# during runtime and thus existing member attributes cannot be deduced by static +# analysis). It supports qualified module names, as well as Unix pattern +# matching. +# ignored-modules = + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +# This patch injects the project directory into the import path so that the +# local `pytest` plugin can be imported when `pylint-pytest` invokes it when +# exploring the fixtures available: +init-hook = "import os, sys; sys.path[:0] = [os.getcwd()]; os.environ['PYTHONPATH'] = sys.path[0]" + +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use, and will cap the count on Windows to +# avoid hangs. +jobs = 0 + +# Control the amount of potential inferred values when inferring a single object. +# This can help the performance when dealing with large functions or complex, +# nested conditions. +limit-inference-results = 100 + +# List of plugins (as comma separated values of python module names) to load, +# usually to register additional checkers. +load-plugins = [ + # "stdlib": + "pylint.extensions.no_self_use", + # third-party: + "pylint_pytest", +] + +# Pickle collected data for later comparisons. +persistent = true + +# Resolve imports to .pyi stubs if available. May reduce no-member messages and +# increase not-an-iterable messages. +# prefer-stubs = + +# Minimum Python version to use for version dependent checks. Will default to the +# version used to run pylint. +py-version = "3.11" + +# Discover python modules and packages in the file system subtree. +# recursive = + +# Add paths to the list of the source roots. Supports globbing patterns. 
The +# source root is an absolute path or a path relative to the current working +# directory used to determine a package namespace for modules located under the +# source root. +# source-roots = + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages. +suggestion-mode = true + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +# unsafe-load-any-extension = + +[tool.pylint.basic] +# Naming style matching correct argument names. +argument-naming-style = "snake_case" + +# Regular expression matching correct argument names. Overrides argument-naming- +# style. If left empty, argument names will be checked with the set naming style. +# argument-rgx = + +# Naming style matching correct attribute names. +attr-naming-style = "snake_case" + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. If left empty, attribute names will be checked with the set naming +# style. +# attr-rgx = + +# Bad variable names which should always be refused, separated by a comma. +bad-names = [ + "foo", + "bar", + "baz", + "toto", + "tutu", + "tata", + "a", + "b", + "c", + "i", + "j", + "k", + "x", + "y", + "z", + "_", +] + +# Bad variable names regexes, separated by a comma. If names match any regex, +# they will always be refused +# bad-names-rgxs = + +# Naming style matching correct class attribute names. +class-attribute-naming-style = "any" + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. If left empty, class attribute names will be checked +# with the set naming style. +# class-attribute-rgx = + +# Naming style matching correct class constant names. +class-const-naming-style = "UPPER_CASE" + +# Regular expression matching correct class constant names. Overrides class- +# const-naming-style. If left empty, class constant names will be checked with +# the set naming style. +# class-const-rgx = + +# Naming style matching correct class names. +class-naming-style = "PascalCase" + +# Regular expression matching correct class names. Overrides class-naming-style. +# If left empty, class names will be checked with the set naming style. +# class-rgx = + +# Naming style matching correct constant names. +const-naming-style = "UPPER_CASE" + +# Regular expression matching correct constant names. Overrides const-naming- +# style. If left empty, constant names will be checked with the set naming style. +# const-rgx = + +# Minimum line length for functions/classes that require docstrings, shorter ones +# are exempt. +docstring-min-length = -1 + +# Naming style matching correct function names. +function-naming-style = "snake_case" + +# Regular expression matching correct function names. Overrides function-naming- +# style. If left empty, function names will be checked with the set naming style. +# function-rgx = + +# Good variable names which should always be accepted, separated by a comma. +good-names = [] + +# Good variable names regexes, separated by a comma. If names match any regex, +# they will always be accepted +# good-names-rgxs = + +# Include a hint for the correct naming format with invalid-name. +# include-naming-hint = + +# Naming style matching correct inline iteration names. +inlinevar-naming-style = "any" + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. 
If left empty, inline iteration names will be checked +# with the set naming style. +# inlinevar-rgx = + +# Naming style matching correct method names. +method-naming-style = "snake_case" + +# Regular expression matching correct method names. Overrides method-naming- +# style. If left empty, method names will be checked with the set naming style. +# method-rgx = + +# Naming style matching correct module names. +module-naming-style = "snake_case" + +# Regular expression matching correct module names. Overrides module-naming- +# style. If left empty, module names will be checked with the set naming style. +# module-rgx = + +# Colon-delimited sets of names that determine each other's naming style when the +# name regexes allow several styles. +# name-group = + +# Regular expression which should only match function or class names that do not +# require a docstring. +no-docstring-rgx = "^_" + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. These +# decorators are taken in consideration only for invalid-name. +property-classes = ["abc.abstractproperty"] + +# Regular expression matching correct type alias names. If left empty, type alias +# names will be checked with the set naming style. +# typealias-rgx = + +# Regular expression matching correct type variable names. If left empty, type +# variable names will be checked with the set naming style. +# typevar-rgx = + +# Naming style matching correct variable names. +variable-naming-style = "snake_case" + +# Regular expression matching correct variable names. Overrides variable-naming- +# style. If left empty, variable names will be checked with the set naming style. +# variable-rgx = + +[tool.pylint.classes] +# Warn about protected attribute access inside special methods +# check-protected-access-in-special-methods = + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods = ["__init__", "__new__", "setUp", "asyncSetUp", "__post_init__"] + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected = ["_asdict", "_fields", "_replace", "_source", "_make", "os._exit"] + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg = ["cls"] + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg = ["mcs"] + +[tool.pylint.design] +# List of regular expressions of class ancestor names to ignore when counting +# public methods (see R0903) +# exclude-too-few-public-methods = + +# List of qualified class names to ignore when counting class parents (see R0901) +# ignored-parents = + +# Maximum number of arguments for function / method. +max-args = 5 + +# Maximum number of attributes for a class (see R0902). +max-attributes = 7 + +# Maximum number of boolean expressions in an if statement (see R0916). +max-bool-expr = 5 + +# Maximum number of branch for function / method body. +max-branches = 12 + +# Maximum number of locals for function / method body. +max-locals = 15 + +# Maximum number of parents for a class (see R0901). +max-parents = 7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods = 20 + +# Maximum number of return / yield for function / method body. +max-returns = 6 + +# Maximum number of statements in function / method body. +max-statements = 50 + +# Minimum number of public methods for a class (see R0903). 
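+# For example, with the value 2 below, a class exposing only a single public
+# method gets flagged as `too-few-public-methods` (R0903).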
+min-public-methods = 2
+
+[tool.pylint.exceptions]
+# Exceptions that will emit a warning when caught.
+overgeneral-exceptions = [
+    "builtins.BaseException",
+    "builtins.Exception",
+]
+
+[tool.pylint.format]
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+# expected-line-ending-format =
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines = "^\\s*(# )?<?https?://\\S+>?$"
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren = 4
+
+# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
+# tab).
+indent-string = "    "
+
+# Maximum number of characters on a single line.
+max-line-length = 79
+
+# Maximum number of lines in a module.
+max-module-lines = 1000
+
+# Allow the body of a class to be on the same line as the declaration if body
+# contains single statement.
+single-line-class-stmt = false
+
+# Allow the body of an if to be on the same line as the test if there is no else.
+single-line-if-stmt = false
+
+[tool.pylint.imports]
+# List of modules that can be imported at any level, not just the top level one.
+# allow-any-import-level =
+
+# Allow explicit reexports by alias from a package __init__.
+# allow-reexport-from-package =
+
+# Allow wildcard imports from modules that define __all__.
+# allow-wildcard-with-all =
+
+# Deprecated modules which should not be used, separated by a comma.
+# deprecated-modules =
+
+# Output a graph (.gv or any supported image format) of external dependencies to
+# the given file (report RP0402 must not be disabled).
+# ext-import-graph =
+
+# Output a graph (.gv or any supported image format) of all (i.e. internal and
+# external) dependencies to the given file (report RP0402 must not be disabled).
+# import-graph =
+
+# Output a graph (.gv or any supported image format) of internal dependencies to
+# the given file (report RP0402 must not be disabled).
+# int-import-graph =
+
+# Force import order to recognize a module as part of the standard compatibility
+# libraries.
+# known-standard-library =
+
+# Force import order to recognize a module as part of a third party library.
+known-third-party = [
+    "enchant",
+]
+
+# Couples of modules and preferred modules, separated by a comma.
+preferred-modules = [
+    "unittest:pytest",
+]
+
+[tool.pylint.logging]
+# The type of string formatting that logging methods do. `old` means using %
+# formatting, `new` is for `{}` formatting.
+logging-format-style = "old"
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format.
+logging-modules = [
+    "logging",
+]
+
+[tool.pylint."messages control"]
+# Only show warnings with the listed confidence levels. Leave empty to show all.
+# Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
+confidence = [
+    "HIGH",
+    "CONTROL_FLOW",
+    "INFERENCE",
+    "INFERENCE_FAILURE",
+    "UNDEFINED",
+]
+
+# Disable the message, report, category or checker with the given id(s). You can
+# either give multiple identifiers separated by comma (,) or put this option
+# multiple times (only on the command line, not in the configuration file where
+# it should appear only once). You can also use "--disable=all" to disable
+# everything first and then re-enable specific checks. For example, if you want
+# to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". 
If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". +disable = [] + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where it +# should appear only once). See also the "--disable" option for examples. +# enable = + +[tool.pylint.method_args] +# List of qualified names (i.e., library.method) which require a timeout +# parameter e.g. 'requests.api.get,requests.api.post' +timeout-methods = [ + "requests.api.delete", + "requests.api.get", + "requests.api.head", + "requests.api.options", + "requests.api.patch", + "requests.api.post", + "requests.api.put", + "requests.api.request", +] + +[tool.pylint.miscellaneous] +# List of note tags to take in consideration, separated by a comma. +notes = [ + "FIXME", + "XXX", + "TODO", +] + +# Regular expression of note tags to take in consideration. +# notes-rgx = + +[tool.pylint.refactoring] +# Maximum number of nested blocks for function / method body +max-nested-blocks = 5 + +# Complete name of functions that never returns. When checking for inconsistent- +# return-statements if a never returning function is called then it will be +# considered as an explicit return statement and no message will be printed. +never-returning-functions = [ + "sys.exit", + "argparse.parse_error", +] + +# Let 'consider-using-join' be raised when the separator to join on would be non- +# empty (resulting in expected fixes of the type: ``"- " + " - ".join(items)``) +suggest-join-with-non-empty-separator = true + +[tool.pylint.reports] +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'fatal', 'error', 'warning', 'refactor', +# 'convention', and 'info' which contain the number of messages in each category, +# as well as 'statement' which is the total number of statements analyzed. This +# score is used by the global evaluation report (RP0004). +evaluation = "max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))" + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +# msg-template = + +# Set the output format. Available formats are: text, parseable, colorized, json2 +# (improved json format), json (old json format) and msvs (visual studio). You +# can also give a reporter class, e.g. mypackage.mymodule.MyReporterClass. +output-format = "colorized" + +# Tells whether to display a full report or only the messages. +# reports = + +# Activate the evaluation score. +score = true + +[tool.pylint.similarities] +# Comments are removed from the similarity computation +ignore-comments = true + +# Docstrings are removed from the similarity computation +ignore-docstrings = true + +# Imports are removed from the similarity computation +ignore-imports = true + +# Signatures are removed from the similarity computation +ignore-signatures = true + +# Minimum lines number of a similarity. +min-similarity-lines = 4 + +[tool.pylint.spelling] +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions = 4 + +# Spelling dictionary name. No available dictionaries : You need to install both +# the python package and the system dependency for enchant to work. 
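+# For example, `spelling-dict = "en_US"` would enable the checks, assuming
+# both `pyenchant` and the system `enchant` library are installed.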
+# spelling-dict = + +# List of comma separated words that should be considered directives if they +# appear at the beginning of a comment and should not be checked. +spelling-ignore-comment-directives = "fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:" + +# List of comma separated words that should not be checked. +# spelling-ignore-words = + +# A path to a file that contains the private dictionary; one word per line. +# spelling-private-dict-file = + +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +# spelling-store-unknown-words = + +[tool.pylint.typecheck] +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators = [ + "contextlib.contextmanager", +] + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +# generated-members = + +# Tells whether missing members accessed in mixin class should be ignored. A +# class is considered mixin if its name matches the mixin-class-rgx option. +# Tells whether to warn about missing members when the owner of the attribute is +# inferred to be None. +ignore-none = true + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference can +# return multiple potential results while evaluating a Python object, but some +# branches might not be evaluated, which results in partial inference. In that +# case, it might be useful to still emit no-member and other checks for the rest +# of the inferred objects. +ignore-on-opaque-inference = true + +# List of symbolic message names to ignore for Mixin members. +ignored-checks-for-mixins = [ + "no-member", + "not-async-context-manager", + "not-context-manager", + "attribute-defined-outside-init", +] + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes = [ + "optparse.Values", + "thread._local", + "_thread._local", + "argparse.Namespace", +] + +# Show a hint with possible names when a member name was not found. The aspect of +# finding the hint is based on edit distance. +missing-member-hint = true + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance = 1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices = 1 + +# Regex pattern to define which classes are considered mixins. +mixin-class-rgx = ".*[Mm]ixin" + +# List of decorators that change the signature of a decorated function. +# signature-mutators = + +[tool.pylint.variables] +# List of additional names supposed to be defined in builtins. Remember that you +# should avoid defining new builtins when possible. +# additional-builtins = + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables = true + +# List of names allowed to shadow builtins +# allowed-redefined-builtins = + +# List of strings which can identify a callback function by name. A callback name +# must start or end with one of those strings. 
+callbacks = [ + "cb_", + "_cb", +] + +# A regular expression matching the name of dummy variables (i.e. expected to not +# be used). +dummy-variables-rgx = "_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_" + +# Argument names that match this expression will be ignored. +ignored-argument-names = "_.*|^ignored_|^unused_" + +# Tells whether we should check for unused import in __init__ files. +# init-import = + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules = [ + "six.moves", + "past.builtins", + "future.builtins", + "builtins", + "io", +] diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 0000000000..21052bc2c8 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,31 @@ +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details +--- + +version: 2 + +build: + os: ubuntu-24.04 + tools: + python: >- # has to be parsed as a YAML string + 3.11 + commands: + - >- + PYTHONWARNINGS=error + python3 -Im venv "${READTHEDOCS_VIRTUALENV_PATH}" + - >- + PYTHONWARNINGS=error + "${READTHEDOCS_VIRTUALENV_PATH}"/bin/python -Im + pip install tox + - >- + PYTHONWARNINGS=error + "${READTHEDOCS_VIRTUALENV_PATH}"/bin/python -Im + tox -e build-docs --notest -vvvvv + - >- + PYTHONWARNINGS=error + "${READTHEDOCS_VIRTUALENV_PATH}"/bin/python -Im + tox -e build-docs --skip-pkg-install -q + -- + "${READTHEDOCS_OUTPUT}"/html -b dirhtml + +... diff --git a/.yamllint b/.yamllint new file mode 100644 index 0000000000..82cb77f057 --- /dev/null +++ b/.yamllint @@ -0,0 +1,18 @@ +--- + +extends: default + +rules: + indentation: + level: error + indent-sequences: false + truthy: + allowed-values: + - >- + false + - >- + true + - >- # Allow "on" key name in GHA CI/CD workflow definitions + on + +... diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000000..53110e5b7a --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,3 @@ +# Change log + +*A change log stub.* diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 61cc56ee60..90a1770678 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,34 +1,42 @@ # How to Contribute -This project is [GPL-3.0 licensed](COPYING) and accepts contributions through +This project is [Apache-2.0 licensed] and accepts contributions through GitHub pull requests. +[Apache-2.0 licensed]: +https://github.com/ansible/awx-plugins/blob/devel/LICENSE + ## Certificate of Origin By contributing to this project you agree to the Developer Certificate of Origin (DCO). This document was created by the Linux Kernel community and is a simple statement that you, as a contributor, have the legal right to make the -contribution. See the [DCO](DCO) file for details. +contribution. See the [DCO] file for details. + +[DCO]: https://github.com/ansible/awx-plugins/blob/devel/DCO ## Principles This repository adheres to the following principles: - Open: Contribution is always welcome. -- Respectful: See the [Code of Conduct](CODE_OF_CONDUCT.md). +- Respectful: See the [Code of Conduct]. - Transparent and accessible: Work and collaboration should be done in public. See [Governance](#governance) section for details. - Merit: Ideas and contributions are accepted according to their merit and alignment with the project objectives principles. +[Code of Conduct]: +https://github.com/ansible/awx-plugins/blob/devel/CODE-OF-CONDUCT.md + ## How to contribute We are very happy to receive contributions from the community in any form! 
Please use a GitHub pull request to submit your contributions. If you have a question or are unsure if a contribution is wanted, please join us in -[TBD](#channel-name-here) on Matrix to discuss your change or on the Ansible forum -using the TBD tag if you prefer async discussion. +#channel-name-here on Matrix to discuss your change or on the Ansible +forum using the TBD tag if you prefer async discussion. Open a GitHub issue to report bugs or request features. ## Governance diff --git a/README.md b/README.md index 5155b51103..5379273893 100644 --- a/README.md +++ b/README.md @@ -26,7 +26,7 @@ https://github.com/ansible/awx-plugins/actions/workflows/ci-cd.yml/badge.svg?bra [Code of Conduct]: https://docs.ansible.com/ansible/latest/community/code_of_conduct.html [Apache v2 License Badge]: https://img.shields.io/badge/license-Apache%202.0-brightgreen.svg -[Apache v2 License]: https://github.com/ansible/awx-plugins/blob/devel/LICENSE.md +[Apache v2 License]: https://github.com/ansible/awx-plugins/blob/devel/LICENSE [Ansible Matrix Badge]: https://img.shields.io/badge/matrix-Ansible%20Community-blueviolet.svg?logo=matrix diff --git a/SECURITY.md b/SECURITY.md index 21d434a1c8..2f3ebb7967 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -1 +1,3 @@ +# Security + Please report any security concerns on this repository content to security@ansible.com. diff --git a/bin/.gitignore b/bin/.gitignore new file mode 100644 index 0000000000..6364b13169 --- /dev/null +++ b/bin/.gitignore @@ -0,0 +1,3 @@ +* +!.gitignore +!*.py diff --git a/bin/pip_constraint_helpers.py b/bin/pip_constraint_helpers.py new file mode 100644 index 0000000000..9831605274 --- /dev/null +++ b/bin/pip_constraint_helpers.py @@ -0,0 +1,105 @@ +"""Shared functions for platform detection.""" + +from __future__ import annotations + +import pathlib +import platform +import shlex +import subprocess # noqa: S404 -- pip/pip-tools don't have importable APIs +import sys + + +PYTHON_IMPLEMENTATION_MAP = { # noqa: WPS407 + 'cpython': 'cp', + 'ironpython': 'ip', + 'jython': 'jy', + 'python': 'py', + 'pypy': 'pp', +} +PYTHON_IMPLEMENTATION = platform.python_implementation() + + +def get_runtime_python_tag() -> str: + """Identify the Python tag of the current runtime. + + :returns: Python tag. + """ + python_minor_ver = sys.version_info[:2] + + try: + sys_impl = sys.implementation.name + except AttributeError: + sys_impl = PYTHON_IMPLEMENTATION.lower() + + python_tag_prefix = PYTHON_IMPLEMENTATION_MAP.get(sys_impl, sys_impl) + + python_minor_ver_tag = ''.join(map(str, python_minor_ver)) + + return f'{python_tag_prefix !s}{python_minor_ver_tag !s}' + + +def get_constraint_file_path( # noqa: WPS210 -- no way to drop vars + req_dir: pathlib.Path | str, + toxenv: str, + python_tag: str, +) -> pathlib.Path: + """Identify the constraints filename for the current environment. + + :param req_dir: Requirements directory. + :type req_dir: pathlib.Path | str + :param toxenv: tox testenv. + :type toxenv: str + :param python_tag: Python tag. + :type python_tag: str + :returns: Constraints filename for the current environment. 
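+
+    A hypothetical example run (the platform-dependent tail is elided,
+    relying on the ``ELLIPSIS`` doctest option enabled in ``pytest.ini``):
+
+    >>> get_constraint_file_path('requirements', 'pin', 'cp311').name
+    'tox-pin-cp311-....txt'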
+ """ + sys_platform = sys.platform + platform_machine = platform.machine().lower() + + if toxenv in {'py', 'python'}: + env_prefix = 'pypy' if PYTHON_IMPLEMENTATION == 'PyPy' else 'py' + python_ver_num = python_tag[2:] + toxenv = f'{env_prefix}{python_ver_num}' + + if sys_platform == 'linux2': + sys_platform = 'linux' + + constraint_name = ( + f'tox-{toxenv}-{python_tag}-{sys_platform}-{platform_machine}' + ) + return (pathlib.Path(req_dir) / constraint_name).with_suffix('.txt') + + +def make_pip_cmd( + pip_args: list[str], + constraint_file_path: pathlib.Path, +) -> list[str]: + """Inject a lockfile constraint into the pip command if present. + + :param pip_args: pip arguments. + :type pip_args: list[str] + :param constraint_file_path: Path to a ``constraints.txt``-compatible file. + :type constraint_file_path: pathlib.Path + + :returns: pip command. + """ + pip_cmd = [sys.executable, '-Im', 'pip'] + pip_args + if constraint_file_path.is_file(): + pip_cmd += ['--constraint', str(constraint_file_path)] + else: + print( # noqa: WPS421 + 'WARNING: The expected pinned constraints file for the current ' + f'env does not exist (should be "{constraint_file_path !s}").', + ) + return pip_cmd + + +def run_cmd(cmd: list[str] | tuple[str, ...]) -> None: + """Invoke a shell command after logging it. + + :param cmd: The command to invoke. + :type cmd: list[str] | tuple[str, ...] + """ + escaped_cmd = shlex.join(cmd) + print(f'Invoking the following command: {escaped_cmd !s}') # noqa: WPS421 + subprocess.check_call(cmd) # noqa: S603 diff --git a/bin/pip_wrapper.py b/bin/pip_wrapper.py new file mode 100644 index 0000000000..c5467a3dd7 --- /dev/null +++ b/bin/pip_wrapper.py @@ -0,0 +1,38 @@ +"""A pip-wrapper that injects platform-specific constraints into pip.""" + +from __future__ import annotations + +import sys + +from pip_constraint_helpers import ( + get_constraint_file_path, + get_runtime_python_tag, + make_pip_cmd, + run_cmd, +) + + +def main(req_dir: str, toxenv: str, *pip_args: tuple[str, ...]) -> None: + """Invoke pip with the matching constraints file, if present. + + :param req_dir: Requirements directory path. + :type req_dir: str + :param toxenv: Tox env name. + :type toxenv: str + :param pip_args: Iterable of args to bypass to pip. + :type pip_args: tuple[str, ...] + """ + constraint_file_path = get_constraint_file_path( + req_dir=req_dir, + toxenv=toxenv, + python_tag=get_runtime_python_tag(), + ) + pip_cmd = make_pip_cmd( + pip_args=list(pip_args), + constraint_file_path=constraint_file_path, + ) + run_cmd(pip_cmd) + + +if __name__ == '__main__': + main(*sys.argv[1:]) diff --git a/bin/print_lockfile_base_name.py b/bin/print_lockfile_base_name.py new file mode 100755 index 0000000000..272b43b0a7 --- /dev/null +++ b/bin/print_lockfile_base_name.py @@ -0,0 +1,29 @@ +#! /usr/bin/env python +"""A script that prints platform-specific constraints file name base.""" + +from __future__ import annotations + +import sys + +from pip_constraint_helpers import ( + get_constraint_file_path, + get_runtime_python_tag, +) + + +def compute_constraint_base_name(toxenv: str) -> str: + """Get the lock file name stem. + + :param toxenv: Name of the tox env. + :type toxenv: str + :returns: A platform-specific lock file base name for tox env. 
+ """ + return get_constraint_file_path( + req_dir='', + toxenv=toxenv, + python_tag=get_runtime_python_tag(), + ).stem + + +if __name__ == '__main__': + print(compute_constraint_base_name(sys.argv[1])) # noqa: WPS421 diff --git a/bin/resolve_platform_lock_file.py b/bin/resolve_platform_lock_file.py new file mode 100644 index 0000000000..d7f6a97805 --- /dev/null +++ b/bin/resolve_platform_lock_file.py @@ -0,0 +1,40 @@ +"""A script for making a lock file for the current platform and tox env.""" + +from __future__ import annotations + +import sys + +from pip_constraint_helpers import ( + get_constraint_file_path, + get_runtime_python_tag, + run_cmd, +) + + +def generate_lock_for( + req_dir: str, toxenv: str, *pip_compile_extra_args: tuple[str, ...], +) -> None: + """Generate a patform-specific lock file for given tox env. + + :param req_dir: Requirements directory path. + :type req_dir: str + :param toxenv: Tox env name. + :type toxenv: str + :param pip_compile_extra_args: Iterable of args to bypass to pip-compile. + :type pip_compile_extra_args: tuple[str, ...] + """ + lock_file_name = get_constraint_file_path( + req_dir, toxenv, get_runtime_python_tag(), + ) + direct_deps_file_name = lock_file_name.with_name(f'tox-{toxenv}.in') + pip_compile_cmd = ( + sys.executable, '-Im', 'piptools', 'compile', + f'--output-file={lock_file_name !s}', + str(direct_deps_file_name), + *pip_compile_extra_args, + ) + run_cmd(pip_compile_cmd) + + +if __name__ == '__main__': + generate_lock_for(*sys.argv[1:]) diff --git a/docs/.gitignore b/docs/.gitignore new file mode 100644 index 0000000000..50bc906562 --- /dev/null +++ b/docs/.gitignore @@ -0,0 +1 @@ +!spelling_wordlist.txt diff --git a/docs/_ext/spelling_stub_ext.py b/docs/_ext/spelling_stub_ext.py new file mode 100644 index 0000000000..2835fdac9c --- /dev/null +++ b/docs/_ext/spelling_stub_ext.py @@ -0,0 +1,86 @@ +"""Sphinx extension for making the spelling directive noop.""" + +from sphinx.application import Sphinx +from sphinx.config import Config as _SphinxConfig +from sphinx.util import logging +from sphinx.util.docutils import SphinxDirective +from sphinx.util.nodes import nodes + + +try: + from enchant.tokenize import ( # noqa: WPS433 + Filter as _EnchantTokenizeFilterBase, + ) +except ImportError: + _EnchantTokenizeFilterBase = object # noqa: WPS440 + + +logger = logging.getLogger(__name__) + + +def _configure_spelling_ext(app: Sphinx, config: _SphinxConfig) -> None: + # pylint: disable-next=too-few-public-methods + class VersionFilter(_EnchantTokenizeFilterBase): # noqa: WPS431 + # NOTE: It's nested because we need to reference the config by closure. + """Filter for treating version words as known.""" + + def _skip( # pylint: disable=no-self-use + self: 'VersionFilter', + word: str, + ) -> bool: + # NOTE: Only accessing the config values in the method since they + # NOTE: aren't yet populated when the config-inited event happens. 
+ known_version_words = { + config.release, + config.version, + } + if word not in known_version_words: + return False + + logger.debug( + 'Known version words: %r', # noqa: WPS323 + known_version_words, + ) + logger.debug( + 'Ignoring %r because it is a known version', # noqa: WPS323 + word, + ) + + return True + + app.config.spelling_filters = [VersionFilter] + app.setup_extension('sphinxcontrib.spelling') + # suppress unpicklable value warnings: + del app.config.spelling_filters # noqa: WPS420 + + +class SpellingNoOpDirective(SphinxDirective): + """Definition of the stub spelling directive.""" + + has_content = True + + def run(self: 'SpellingNoOpDirective') -> list[nodes.Node]: + """Generate nothing in place of the directive. + + :returns: An empty list of nodes. + """ + return [] + + +def setup(app: Sphinx) -> dict[str, bool | str]: + """Initialize the extension. + + :param app: A Sphinx application object. + :type app: Sphinx + :returns: Extension metadata as a dict. + """ + if _EnchantTokenizeFilterBase is object: + app.add_directive('spelling', SpellingNoOpDirective) + else: + app.connect('config-inited', _configure_spelling_ext) + + return { + 'parallel_read_safe': True, + 'parallel_write_safe': True, + 'version': app.config.release, + } diff --git a/docs/changelog.md b/docs/changelog.md new file mode 100644 index 0000000000..66efc0fecd --- /dev/null +++ b/docs/changelog.md @@ -0,0 +1,2 @@ +```{include} ../CHANGELOG.md +``` diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000000..4b26f39f95 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,192 @@ +# pylint: disable=invalid-name # <-- demands all settings to be uppercase +"""Configuration of Sphinx documentation generator.""" + +import os +import sys +from importlib.metadata import version as _retrieve_metadata_version_for +from pathlib import Path + + +# -- Path setup -------------------------------------------------------------- + +DOCS_ROOT_DIR = Path(__file__).parent.resolve() +PROJECT_ROOT_DIR = DOCS_ROOT_DIR.parent.resolve() +PROJECT_SRC_DIR = PROJECT_ROOT_DIR / 'src' +IS_RTD_ENV = os.getenv('READTHEDOCS', 'False') == 'True' +IS_RELEASE_ON_RTD = ( + IS_RTD_ENV + and os.environ['READTHEDOCS_VERSION_TYPE'] == 'tag' +) +tags: set[str] +if IS_RELEASE_ON_RTD: + # pylint: disable-next=used-before-assignment + tags.add('is_release') # noqa: F821 +elif IS_RTD_ENV: + # pylint: disable-next=used-before-assignment + tags.add('is_unversioned') # noqa: F821 + + +# Make in-tree extension importable in non-tox setups/envs, like RTD. +# Refs: +# https://github.com/readthedocs/readthedocs.org/issues/6311 +# https://github.com/readthedocs/readthedocs.org/issues/7182 +sys.path.insert(0, str(DOCS_ROOT_DIR / '_ext')) + + +project = 'awx_plugins.credentials' +author = 'Ansible maintainers and contributors' +copyright = author # pylint: disable=redefined-builtin + +# NOTE: Using the "unversioned" static string improves rebuild +# NOTE: performance by keeping the doctree cache valid for longer. + +# The full version, including alpha/beta/rc tags +release = ( + # pylint: disable-next=used-before-assignment + 'unversioned' if tags.has('is_unversioned') # noqa: F821 + else _retrieve_metadata_version_for('awx-plugins-core') +) + +# The short X.Y version +version = ( + # pylint: disable-next=used-before-assignment + 'unversioned' if tags.has('is_unversioned') # noqa: F821 + else '.'.join(release.split('.')[:2]) +) + +rst_epilog = f""" +.. |project| replace:: {project} +.. 
|release_l| replace:: ``v{release}``
+"""
+
+
+extensions = [
+    # Stdlib extensions:
+    'sphinx.ext.autodoc',
+    'sphinx.ext.autosectionlabel',  # autocreate section targets for refs
+    'sphinx.ext.coverage',  # for invoking with `-b coverage`
+    'sphinx.ext.doctest',  # for invoking with `-b doctest`
+    'sphinx.ext.intersphinx',
+
+    # Third-party extensions:
+    'myst_parser',  # extended markdown; https://pypi.org/project/myst-parser/
+    'sphinx_issues',  # implements `:issue:`, `:pr:` and other GH-related roles
+    'sphinx_tabs.tabs',
+    'sphinxcontrib.apidoc',
+
+    # In-tree extensions:
+    'spelling_stub_ext',  # auto-loads `sphinxcontrib.spelling` if installed
+]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This pattern also affects html_static_path and html_extra_path.
+exclude_patterns = []
+
+html_theme = 'furo'
+
+master_doc = 'index'
+
+# -- Options for myst_parser extension ---------------------------------------
+
+myst_enable_extensions = [
+    'colon_fence',  # allow to optionally use ::: instead of ```
+    'deflist',
+    'html_admonition',  # allow having HTML admonitions
+    'html_image',  # allow HTML in Markdown
+    'linkify',  # auto-detect URLs @ plain text, needs myst-parser[linkify]
+    'replacements',  # replace common ASCII shortcuts into their symbols
+    'smartquotes',  # use "cursive" quotes
+    'substitution',  # allows Jinja2-style replacements
+]
+myst_substitutions = {
+    'project': project,
+    'release': release,
+    'release_l': f'`v{release}`',
+    'version': version,
+}
+myst_heading_anchors = 3
+
+# -- Options for sphinxcontrib.apidoc extension ------------------------------
+
+apidoc_excluded_paths = []
+apidoc_extra_args = [
+    '--implicit-namespaces',
+    '--private',  # include "_private" modules
+]
+apidoc_module_dir = str(PROJECT_SRC_DIR / 'awx_plugins')
+apidoc_module_first = False
+apidoc_output_dir = 'pkg'
+apidoc_separate_modules = True
+apidoc_template_dir = str(DOCS_ROOT_DIR / 'pkg' / '_templates/')
+apidoc_toc_file = None
+
+# -- Options for sphinxcontrib.spelling extension ----------------------------
+
+spelling_ignore_acronyms = True
+spelling_ignore_importable_modules = True
+spelling_ignore_pypi_package_names = True
+spelling_ignore_python_builtins = True
+spelling_ignore_wiki_words = True
+spelling_show_suggestions = True
+spelling_word_list_filename = [
+    'spelling_wordlist.txt',
+]
+
+# -- Options for intersphinx extension ---------------------------------------
+
+intersphinx_mapping = {
+    'python': ('https://docs.python.org/3', None),
+}
+
+# -- Options for linkcheck builder -------------------------------------------
+
+linkcheck_ignore = [
+    r'https?://localhost:\d+/',  # local URLs
+    r'https://codecov\.io/gh(/[^/]+){2}/branch/master/graph/badge\.svg',
+    r'https://github\.com(/[^/]+){2}/actions',  # 404 if no auth
+    r'^https://chat\.ansible\.im/#',  # these render fully on front-end
+    r'^https://matrix\.to/#',  # these render fully on front-end from anchors
+
+    # temporary ignores:
+    'https://pypi.org/p/awx-plugins-core',
+    'https://github.com/ansible/awx-plugins/blob/devel/DCO',
+    'https://github.com/ansible/awx-plugins/blob/devel/CODE-OF-CONDUCT.md',
+    'https://github.com/ansible/awx-plugins/blob/devel/LICENSE',
+]
+linkcheck_workers = 25
+
+# -- Options for sphinx.ext.autosectionlabel extension -----------------------
+
+# Ref:
+# https://www.sphinx-doc.org/en/master/usage/extensions/autosectionlabel.html
+autosectionlabel_maxdepth = 1  # mitigate Towncrier nested subtitles collision
+
+# -- Options for sphinx_issues extension -------------------------------------
+
+# https://github.com/sloria/sphinx-issues#installation-and-configuration
+
+issues_github_path = 'ansible/awx-plugins'
+
+# -- Options for sphinx_tabs extension ---------------------------------------
+
+# Ref:
+# * https://github.com/djungelorm/sphinx-tabs/issues/26#issuecomment-422160463
+sphinx_tabs_valid_builders = ['linkcheck']  # prevent linkcheck warning
+
+# -- Options enforcing strict mode -------------------------------------------
+
+# Ref: https://github.com/python-attrs/attrs/pull/571/files\
+# #diff-85987f48f1258d9ee486e3191495582dR82
+default_role = 'any'
+
+nitpicky = True
+
+# NOTE: consider having a separate ignore file
+# Ref: https://stackoverflow.com/a/30624034/595220
+nitpick_ignore = [
+    # temporarily listed ('role', 'reference') pairs that Sphinx cannot resolve
+]
diff --git a/docs/contributing/code_of_conduct.md b/docs/contributing/code_of_conduct.md
new file mode 100644
index 0000000000..bc194b74e4
--- /dev/null
+++ b/docs/contributing/code_of_conduct.md
@@ -0,0 +1,2 @@
+```{include} ../../CODE-OF-CONDUCT.md
+```
diff --git a/docs/contributing/guidelines.md b/docs/contributing/guidelines.md
new file mode 100644
index 0000000000..ef6daa82aa
--- /dev/null
+++ b/docs/contributing/guidelines.md
@@ -0,0 +1,2 @@
+```{include} ../../CONTRIBUTING.md
+```
diff --git a/docs/contributing/release_guide.md b/docs/contributing/release_guide.md
new file mode 100644
index 0000000000..ef8d12bcfc
--- /dev/null
+++ b/docs/contributing/release_guide.md
@@ -0,0 +1,63 @@
+# Release guide
+
+Welcome to the {{ project }} Release Guide!
+
+This page contains information on how to release a new version
+of {{ project }} using the automated Continuous Delivery pipeline.
+
+:::{tip}
+The intended audience for this document is maintainers and core contributors.
+:::
+
+
+## Pre-release activities
+
+1. Check if there are any open Pull Requests that could be
+   desired in the upcoming release. If there are any, merge
+   them. If some are incomplete, try to get them ready.
+2. Visually inspect the draft section of the {ref}`Change log`
+   page. Make sure the content looks consistent, uses the same
+   writing style, targets the end-users and adheres to our
+   documented guidelines.
+   Most of the changelog sections will typically use the past
+   tense or another way to relay the effect of the changes on
+   the users since the previous release.
+   It should not target core contributors, as the information
+   they are normally interested in is already present in the
+   Git history.
+   Update the changelog if you see any problems with
+   this section.
+3. If you are satisfied with the above, inspect the changelog
+   section categories in the draft. The presence of breaking
+   changes or features will hint at which version number
+   segment to bump for the release.
+
+## The release stage
+
+1. Open the [GitHub Actions CI/CD workflow page][GitHub Actions
+   CI/CD workflow] in your web browser.
+2. Click the gray button {guilabel}`Run workflow` in the blue
+   banner at the top of the workflow runs list.
+3. In the form that appears, enter the version you decided on
+   in the preparation steps into the mandatory field. Do not
+   prepend a leading `v`. Just use the raw version number as
+   per {pep}`440`.
+4. Now, click the green button {guilabel}`Run workflow`.
+5. 
At some point, the workflow gets to the job for publishing + to the "production" PyPI and pauses there. You will see a + banner informing you that a deployment approval is needed. + You will also get an email notification with the same + information and a link to the deployment approval view. +6. While the normal PyPI upload hasn't happened yet, the + TestPyPI one proceeds. This gives you a chance to optionally + verify what got published there and decide if you want to + abort the process. +7. Approve the deployment and wait for the workflow to complete. +8. Verify that the following things got created: + - a PyPI release +9. Tag the released commit. +10. Tell everyone you released a new version of {{ project }} :) + + +[GitHub Actions CI/CD workflow]: +https://github.com/ansible/awx-plugins/actions/workflows/ci-cd.yml diff --git a/docs/contributing/security.md b/docs/contributing/security.md new file mode 100644 index 0000000000..ff2e94493e --- /dev/null +++ b/docs/contributing/security.md @@ -0,0 +1,2 @@ +```{include} ../../SECURITY.md +``` diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 0000000000..c749055313 --- /dev/null +++ b/docs/index.md @@ -0,0 +1,48 @@ +```{spelling} +``` + + + +(\_awx_plugins_core_index)= +# {{ project }} Documentation + + +```{include} ../README.md +:start-after: +:end-before: +``` + +```{include} ../README.md +:start-after: +:end-before: +``` + +```{toctree} +:caption: Contents +:hidden: true + +changelog +``` + +```{toctree} +:caption: Contributing +:hidden: true + +Code Of Conduct +contributing/guidelines +contributing/security +Private unsupported (dev) API autodoc +``` + +```{toctree} +:caption: Maintenance +:hidden: true + +contributing/release_guide +``` + +## Indices and tables + +- {ref}`genindex` +- {ref}`modindex` +- {ref}`search` diff --git a/docs/pkg/.gitignore b/docs/pkg/.gitignore new file mode 100644 index 0000000000..fec1651928 --- /dev/null +++ b/docs/pkg/.gitignore @@ -0,0 +1,3 @@ +* +!.gitignore +!_templates/ diff --git a/docs/pkg/_templates/.gitignore b/docs/pkg/_templates/.gitignore new file mode 100644 index 0000000000..c286575791 --- /dev/null +++ b/docs/pkg/_templates/.gitignore @@ -0,0 +1,2 @@ +!*.rst.jinja +!.gitignore diff --git a/docs/pkg/_templates/toc.rst.jinja b/docs/pkg/_templates/toc.rst.jinja new file mode 100644 index 0000000000..fed0a03ce2 --- /dev/null +++ b/docs/pkg/_templates/toc.rst.jinja @@ -0,0 +1,9 @@ +:orphan: + +{{ header | heading }} + +.. 
toctree:: + :maxdepth: {{ maxdepth }} +{% for docname in docnames %} + {{ docname }} +{%- endfor %} diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt new file mode 100644 index 0000000000..c9804ee29b --- /dev/null +++ b/docs/spelling_wordlist.txt @@ -0,0 +1,5 @@ +Ansible +hasn +Pre +Submodules +Subpackages diff --git a/nitpick-style.toml b/nitpick-style.toml new file mode 100644 index 0000000000..cf5ab34550 --- /dev/null +++ b/nitpick-style.toml @@ -0,0 +1,127 @@ +[nitpick.styles] +include = [ + 'py://nitpick/resources/any/pre-commit-hooks', +] + +[".editorconfig"] +root = true + +[".editorconfig"."*"] +charset = 'utf-8' +end_of_line = 'lf' +indent_size = 4 +indent_style = 'space' +insert_final_newline = true +trim_trailing_whitespace = true + +[".editorconfig"."*.{bat,cmd,ps1}"] +end_of_line = 'crlf' + +[".editorconfig"."*.{js,json,json5,yml,yaml,md,rb}"] +indent_size = 2 + +[".editorconfig".Makefile] +indent_style = 'tab' + +[".flake8".flake8] +# Print the total number of errors: +count = true + +# https://wemake-python-stylegui.de/en/latest/pages/usage/formatter.html +format = 'wemake' + +# Let's not overcomplicate the code: +max-complexity = 10 + +# Accessibility/large fonts and PEP8 friendly. +# This is being flexibly extended through the `flake8-length`: +max-line-length = 79 + +# Count the number of occurrences of each error/warning code and print a report: +statistics = true + +# ## Plugin-provided settings: ## + +# flake8-eradicate +# E800: +eradicate-whitelist-extend = 'isort:\s+\w+|Ref:\s+https?:\/\/' + +# flake8-pytest-style +# PT001: +pytest-fixture-no-parentheses = true +# PT006: +pytest-parametrize-names-type = 'tuple' +# PT007: +pytest-parametrize-values-type = 'tuple' +pytest-parametrize-values-row-type = 'tuple' +# PT023: +pytest-mark-no-parentheses = true + +# wemake-python-styleguide +show-source = true + +[".isort.cfg".settings] +combine_as_imports = true +default_section = 'THIRDPARTY' +honor_noqa = true +include_trailing_comma = true +indent = 4 +known_frameworks = 'awx, django' +known_testing = 'pytest' +line_length = 79 +lines_after_imports = 2 +# https://pycqa.github.io/isort/#multi-line-output-modes +multi_line_output = 3 +no_lines_before = 'LOCALFOLDER' +sections = 'FUTURE, STDLIB, TESTING, FRAMEWORKS, THIRDPARTY, FIRSTPARTY, LOCALFOLDER' +use_parentheses = true +verbose = true + +[".mypy.ini".mypy] +color_output = true +error_summary = true + +check_untyped_defs = true + +disallow_untyped_calls = true +disallow_untyped_defs = true +disallow_any_generics = true + +enable_error_code = "\nignore-without-code" + +follow_imports = 'normal' + +pretty = true + +warn_no_return = true +warn_redundant_casts = true +warn_unused_ignores = true + +# [nitpick.files.".mypy.ini"] +# comma_separated_values = [ +# 'mypy.enable_error_code', +# ] + +[".pylintrc.toml".tool.pylint.format] +max-line-length = 79 + +[".codecov.yml".codecov] +require_ci_to_pass = false +[".codecov.yml".codecov.notify] +wait_for_ci = false +[".codecov.yml".codecov.notify.after_n_builds] +[".codecov.yml".codecov.token] +[".codecov.yml".comment] +require_changes = true +[".codecov.yml".coverage] +range = '100..100' +[".codecov.yml".coverage.status.project.default] +target = '100%' +[".codecov.yml".coverage.status.project.lib] +target = '100%' +[".codecov.yml".coverage.status.project.tests] +target = '100%' +[".codecov.yml".coverage.status.project.typing] +target = '100%' +[".codecov.yml".coverage.status.project.typing-stubs] +target = '100%' diff --git a/pyproject.toml b/pyproject.toml 
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000000..f98ee5a791
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,44 @@
+[build-system]
+requires = [
+ "setuptools >= 64",
+ "setuptools-scm >= 8",
+]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "awx-plugins-core" # import awx_plugins.credentials.x, awx_plugins.credentials.y
+# name = "awx_plugins.credentials.x" # import awx_plugins.credentials.x
+# name = "awx_plugins.credentials.y" # import awx_plugins.credentials.y
+# version =
+dependencies = [ # runtime deps # https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#dependencies-optional-dependencies
+ # GUIDANCE: only add things that this project imports directly
+ # GUIDANCE: only set lower version bounds
+ # "awx_plugins.base_interface.api", # keep `__init__.py` empty
+]
+dynamic = [
+ "version", # let `setuptools-scm` populate this
+]
+
+# Example entry point declarations:
+# * https://github.com/ansible/awx/blob/cb2ad41/setup.cfg#L16-L25
+# * https://github.com/pytest-dev/pytest-xdist/blob/649cca5/pyproject.toml#L48-L50
+# * https://github.com/ansible-community/ansible-pygments/blob/2ffb109/pyproject.toml#L29-L35
+# * https://github.com/ansible-community/sphinx_ansible_theme/blob/645f414/pyproject.toml#L53-L54
+# awx calls `importlib.metadata.entry_points(group='awx_plugins.credentials')` to discover and later enable any plugins present in the same env
+# TODO: consider using https://pluggy.rtfd.io
+#
+# PLUGIN ACTIVATION GUIDANCE (UX):
+# `pip install awx_plugins.credentials.x` would auto-activate any plugins the packaged project ships
+[project.entry-points."awx_plugins.credentials"] # new entry points group name
+x = "awx_plugins.credentials.x.api:XPlugin"
+
+# awx calls `importlib.metadata.entry_points(group='awx.credential_plugins')` to discover and later enable any plugins present in the same env
+[project.entry-points."awx.credential_plugins"] # pre-existing entry points group name
+x = "awx_plugins.credentials.x.api:XPlugin"
+# conjur = awx.main.credential_plugins.conjur:conjur_plugin
+
+[project.readme]
+file = "README.md"
+content-type = "text/markdown"
+
+[tool.setuptools_scm] # this section's presence is a feature flag that activates `setuptools-scm`
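The entry-point comments above describe the discovery contract: awx scans a group name and enables whatever plugins it finds in the environment. A minimal sketch of that lookup, assuming this distribution is installed so the `x` entry point resolves:

```python
"""Sketch: runtime plugin discovery via the entry points declared above."""

from importlib.metadata import entry_points

# Look up every plugin registered under the new group name.
for entry_point in entry_points(group='awx_plugins.credentials'):
    # `.load()` imports `awx_plugins.credentials.x.api` and returns `XPlugin`.
    plugin_class = entry_point.load()
    print(f'{entry_point.name}: {plugin_class!r}')
```

The `tests/importable_test.py` module later in this diff exercises the same mechanism for both group names.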
diff --git a/pytest.ini b/pytest.ini
new file mode 100644
index 0000000000..e685a1b022
--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,84 @@
+[pytest]
+addopts =
+ # `pytest-xdist`:
+ --numprocesses=auto
+ # NOTE: the plugin is disabled because it's slower with so few tests
+ --numprocesses=0
+
+ # Show 10 slowest invocations:
+ --durations=10
+
+ # Report all the things == -rxXs:
+ -ra
+
+ # Show values of the local vars in errors/tracebacks:
+ --showlocals
+
+ # Autocollect and invoke the doctests from all modules:
+ # https://docs.pytest.org/en/stable/doctest.html
+ --doctest-modules
+
+ # Pre-load the `pytest-cov` plugin early:
+ -p pytest_cov
+
+ # `pytest-cov`:
+ --cov
+ --cov-config=.coveragerc
+ --cov-context=test
+ --no-cov-on-fail
+
+ # Fail on config parsing warnings:
+ # --strict-config
+
+ # Fail on non-existing markers:
+ # * Deprecated since v6.2.0 but may be reintroduced later covering a
+ # broader scope:
+ # --strict
+ # * Exists since v4.5.0 (advised to be used instead of `--strict`):
+ --strict-markers
+
+doctest_optionflags = ALLOW_UNICODE ELLIPSIS
+
+# Marks tests with an empty parameterset as xfail(run=False)
+empty_parameter_set_mark = xfail
+
+faulthandler_timeout = 30
+
+filterwarnings =
+ error
+
+# https://docs.pytest.org/en/stable/usage.html#creating-junitxml-format-files
+junit_duration_report = call
+# xunit1 contains more metadata than xunit2 so it's better for CI UIs:
+junit_family = xunit1
+junit_logging = all
+junit_log_passing_tests = true
+junit_suite_name = awx_plugins_test_suite
+
+# A mapping of markers to their descriptions allowed in strict mode:
+markers =
+
+minversion = 6.1.0
+
+# Optimize pytest's lookup by restricting potentially deep dir tree scan:
+norecursedirs =
+ build
+ dist
+ docs
+ requirements
+ .cache
+ .eggs
+ .git
+ .github
+ .tox
+ *.egg
+ *.egg-info
+ */*.egg-info
+ */**/*.egg-info
+ *.dist-info
+ */*.dist-info
+ */**/*.dist-info
+
+testpaths = tests/
+
+xfail_strict = true
diff --git a/requirements/.gitignore b/requirements/.gitignore
new file mode 100644
index 0000000000..ed6a55fb24
--- /dev/null
+++ b/requirements/.gitignore
@@ -0,0 +1 @@
+!*.txt
diff --git a/requirements/dist-build-constraints.txt b/requirements/dist-build-constraints.txt
new file mode 100644
index 0000000000..6ef52ad7c2
--- /dev/null
+++ b/requirements/dist-build-constraints.txt
@@ -0,0 +1,16 @@
+#
+# This file is autogenerated by pip-compile with Python 3.12
+# by the following command:
+#
+# tox r -e pip-compile-build-lock --
+#
+packaging==24.1
+ # via setuptools-scm
+setuptools-scm==8.1.0
+ # via awx-plugins-core (pyproject.toml::build-system.requires)
+
+# The following packages are considered to be unsafe in a requirements file:
+setuptools==73.0.0
+ # via
+ # awx-plugins-core (pyproject.toml::build-system.requires)
+ # setuptools-scm
diff --git a/requirements/tox-build-dists.in b/requirements/tox-build-dists.in
new file mode 100644
index 0000000000..a70eb13f40
--- /dev/null
+++ b/requirements/tox-build-dists.in
@@ -0,0 +1 @@
+build # the "go-to" build frontend
diff --git a/requirements/tox-build-docs.in b/requirements/tox-build-docs.in
new file mode 100644
index 0000000000..89749a13d7
--- /dev/null
+++ b/requirements/tox-build-docs.in
@@ -0,0 +1,8 @@
+# -r tests.in # `sphinxcontrib-autodoc` will import all the files
+
+furo # modern Sphinx theme
+myst-parser[linkify] # Markdown document support w/ an in-text link detector
+Sphinx # main docs framework
+sphinx-issues # Sphinx roles providing support for linking GitHub issues
+sphinx-tabs # Sphinx directives providing support for HTML tabs
+sphinxcontrib-apidoc # automatic API pages generator
diff --git a/requirements/tox-linkcheck-docs.in b/requirements/tox-linkcheck-docs.in
new file mode 100644
index 0000000000..3e5335049b
--- /dev/null
+++ b/requirements/tox-linkcheck-docs.in
@@ -0,0 +1,3 @@
+-r tox-build-docs.in
+
+sphinxcontrib-spelling >= 5.2.0
diff --git a/requirements/tox-metadata-validation.in b/requirements/tox-metadata-validation.in
new file mode 100644
index 0000000000..f61f701979
--- /dev/null
+++ b/requirements/tox-metadata-validation.in
@@ -0,0 +1,2 @@
+setuptools-scm
+twine
diff --git a/requirements/tox-pip-compile-build-lock.in b/requirements/tox-pip-compile-build-lock.in
new file mode 120000
index 0000000000..03e5653ad6
--- /dev/null
+++ b/requirements/tox-pip-compile-build-lock.in
@@ -0,0 +1 @@
+tox-pip-compile.in
\ No newline at end of file
diff --git a/requirements/tox-pip-compile-cp311-linux-x86_64.txt b/requirements/tox-pip-compile-cp311-linux-x86_64.txt
new file mode 100644
index 0000000000..f40509f5a0
--- /dev/null
+++ b/requirements/tox-pip-compile-cp311-linux-x86_64.txt
@@ -0,0 +1,26 @@
+#
+# This file is autogenerated by pip-compile with Python 3.11
+# by the following command:
+#
+# tox r -e pip-compile-tox-env-lock -- pip-compile
+#
+build==1.2.1
+ # via pip-tools
+click==8.1.7
+ # via pip-tools
+packaging==24.1 + # via build +pip-tools==7.4.1 + # via -r requirements/tox-pip-compile.in +pyproject-hooks==1.1.0 + # via + # build + # pip-tools +wheel==0.44.0 + # via pip-tools + +# The following packages are considered to be unsafe in a requirements file: +pip==24.2 + # via pip-tools +setuptools==73.0.0 + # via pip-tools diff --git a/requirements/tox-pip-compile-cp312-linux-x86_64.txt b/requirements/tox-pip-compile-cp312-linux-x86_64.txt new file mode 100644 index 0000000000..b1e202527b --- /dev/null +++ b/requirements/tox-pip-compile-cp312-linux-x86_64.txt @@ -0,0 +1,26 @@ +# +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: +# +# tox r -e pip-compile-tox-env-lock -- pip-compile +# +build==1.2.1 + # via pip-tools +click==8.1.7 + # via pip-tools +packaging==24.1 + # via build +pip-tools==7.4.1 + # via -r requirements/tox-pip-compile.in +pyproject-hooks==1.1.0 + # via + # build + # pip-tools +wheel==0.44.0 + # via pip-tools + +# The following packages are considered to be unsafe in a requirements file: +pip==24.2 + # via pip-tools +setuptools==73.0.0 + # via pip-tools diff --git a/requirements/tox-pip-compile-cp313-linux-x86_64.txt b/requirements/tox-pip-compile-cp313-linux-x86_64.txt new file mode 100644 index 0000000000..24ea1de671 --- /dev/null +++ b/requirements/tox-pip-compile-cp313-linux-x86_64.txt @@ -0,0 +1,26 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# tox r -e pip-compile-tox-env-lock -- pip-compile +# +build==1.2.1 + # via pip-tools +click==8.1.7 + # via pip-tools +packaging==24.1 + # via build +pip-tools==7.4.1 + # via -r requirements/tox-pip-compile.in +pyproject-hooks==1.1.0 + # via + # build + # pip-tools +wheel==0.44.0 + # via pip-tools + +# The following packages are considered to be unsafe in a requirements file: +pip==24.2 + # via pip-tools +setuptools==73.0.0 + # via pip-tools diff --git a/requirements/tox-pip-compile-tox-env-lock.in b/requirements/tox-pip-compile-tox-env-lock.in new file mode 120000 index 0000000000..03e5653ad6 --- /dev/null +++ b/requirements/tox-pip-compile-tox-env-lock.in @@ -0,0 +1 @@ +tox-pip-compile.in \ No newline at end of file diff --git a/requirements/tox-pip-compile.in b/requirements/tox-pip-compile.in new file mode 100644 index 0000000000..5eb24fa5df --- /dev/null +++ b/requirements/tox-pip-compile.in @@ -0,0 +1 @@ +pip-tools >= 7.4.0 # first to support build env dep extraction diff --git a/requirements/tox-pre-commit.in b/requirements/tox-pre-commit.in new file mode 100644 index 0000000000..8ff712a026 --- /dev/null +++ b/requirements/tox-pre-commit.in @@ -0,0 +1 @@ +pre-commit >= 2.6.0 diff --git a/requirements/tox-py-constraints.in b/requirements/tox-py-constraints.in new file mode 100644 index 0000000000..37811703cb --- /dev/null +++ b/requirements/tox-py-constraints.in @@ -0,0 +1,15 @@ +############################################################################### +# # +# This file is only meant to exclude broken dependency versions, not feature # +# dependencies. # +# # +# GUIDELINES: # +# 1. Only list PyPI project versions that need to be excluded using `!=` # +# and `<`. # +# 2. It is allowed to have transitive dependency limitations in this file. # +# 3. Apply bare minimum constraints under narrow conditions, use # +# environment markers if possible. E.g. `; python_version < "3.12"`. # +# 4. Whenever there are no constraints, let the file and this header # +# remain in Git. 
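Guideline 3 above leans on PEP 508 environment markers. A tiny sketch of how such a marker evaluates, using the `packaging` library that already appears in the lock files here:

```python
"""Sketch: evaluating the environment marker suggested in guideline 3."""

from packaging.markers import Marker

marker = Marker('python_version < "3.12"')
# True on Python 3.11, False on 3.12+, so a constraint carrying this
# marker is simply skipped on interpreters it does not apply to.
print(marker.evaluate())
```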
# +# # +############################################################################### diff --git a/requirements/tox-py.in b/requirements/tox-py.in new file mode 100644 index 0000000000..28e3578e6b --- /dev/null +++ b/requirements/tox-py.in @@ -0,0 +1,8 @@ +-c tox-py-constraints.in # limits known broken versions + +covdefaults +coverage # accessed directly from tox +coverage-enable-subprocess +pytest +pytest-cov +pytest-xdist diff --git a/requirements/tox-py311-cp311-linux-x86_64.txt b/requirements/tox-py311-cp311-linux-x86_64.txt new file mode 100644 index 0000000000..fba7e28480 --- /dev/null +++ b/requirements/tox-py311-cp311-linux-x86_64.txt @@ -0,0 +1,33 @@ +# +# This file is autogenerated by pip-compile with Python 3.11 +# by the following command: +# +# tox r -e pip-compile-tox-env-lock -- py +# +covdefaults==2.3.0 + # via -r requirements/tox-py.in +coverage==7.6.1 + # via + # -r requirements/tox-py.in + # covdefaults + # coverage-enable-subprocess + # pytest-cov +coverage-enable-subprocess==1.0 + # via -r requirements/tox-py.in +execnet==2.1.1 + # via pytest-xdist +iniconfig==2.0.0 + # via pytest +packaging==24.1 + # via pytest +pluggy==1.5.0 + # via pytest +pytest==8.3.2 + # via + # -r requirements/tox-py.in + # pytest-cov + # pytest-xdist +pytest-cov==5.0.0 + # via -r requirements/tox-py.in +pytest-xdist==3.6.1 + # via -r requirements/tox-py.in diff --git a/requirements/tox-py312-cp312-linux-x86_64.txt b/requirements/tox-py312-cp312-linux-x86_64.txt new file mode 100644 index 0000000000..d76fa6f178 --- /dev/null +++ b/requirements/tox-py312-cp312-linux-x86_64.txt @@ -0,0 +1,33 @@ +# +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: +# +# tox r -e pip-compile-tox-env-lock -- py pyproject.toml +# +covdefaults==2.3.0 + # via -r requirements/tox-py.in +coverage==7.6.1 + # via + # -r requirements/tox-py.in + # covdefaults + # coverage-enable-subprocess + # pytest-cov +coverage-enable-subprocess==1.0 + # via -r requirements/tox-py.in +execnet==2.1.1 + # via pytest-xdist +iniconfig==2.0.0 + # via pytest +packaging==24.1 + # via pytest +pluggy==1.5.0 + # via pytest +pytest==8.3.2 + # via + # -r requirements/tox-py.in + # pytest-cov + # pytest-xdist +pytest-cov==5.0.0 + # via -r requirements/tox-py.in +pytest-xdist==3.6.1 + # via -r requirements/tox-py.in diff --git a/requirements/tox-py313-cp313-linux-x86_64.txt b/requirements/tox-py313-cp313-linux-x86_64.txt new file mode 100644 index 0000000000..364cf3107d --- /dev/null +++ b/requirements/tox-py313-cp313-linux-x86_64.txt @@ -0,0 +1,33 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# tox r -e pip-compile-tox-env-lock -- py +# +covdefaults==2.3.0 + # via -r requirements/tox-py.in +coverage==7.6.1 + # via + # -r requirements/tox-py.in + # covdefaults + # coverage-enable-subprocess + # pytest-cov +coverage-enable-subprocess==1.0 + # via -r requirements/tox-py.in +execnet==2.1.1 + # via pytest-xdist +iniconfig==2.0.0 + # via pytest +packaging==24.1 + # via pytest +pluggy==1.5.0 + # via pytest +pytest==8.3.2 + # via + # -r requirements/tox-py.in + # pytest-cov + # pytest-xdist +pytest-cov==5.0.0 + # via -r requirements/tox-py.in +pytest-xdist==3.6.1 + # via -r requirements/tox-py.in diff --git a/requirements/tox-spellcheck-docs.in b/requirements/tox-spellcheck-docs.in new file mode 120000 index 0000000000..8e263a9376 --- /dev/null +++ b/requirements/tox-spellcheck-docs.in @@ -0,0 +1 @@ +tox-linkcheck-docs.in \ No newline at end of file diff --git 
a/requirements/tox-tox.in b/requirements/tox-tox.in new file mode 100644 index 0000000000..053148f848 --- /dev/null +++ b/requirements/tox-tox.in @@ -0,0 +1 @@ +tox diff --git a/src/awx_plugins/credentials/x/api.py b/src/awx_plugins/credentials/x/api.py new file mode 100644 index 0000000000..8139e413fb --- /dev/null +++ b/src/awx_plugins/credentials/x/api.py @@ -0,0 +1,5 @@ +"""Plugin entry point module.""" + + +class XPlugin: # pylint: disable=too-few-public-methods + """Plugin entry point.""" diff --git a/tests/importable_test.py b/tests/importable_test.py new file mode 100644 index 0000000000..9caa9b56c1 --- /dev/null +++ b/tests/importable_test.py @@ -0,0 +1,26 @@ +"""Smoke tests related to loading entry points.""" + +from importlib.metadata import entry_points as _discover_entry_points + +import pytest + + +@pytest.mark.parametrize( + 'entry_points_group', + ( + 'awx.credential_plugins', + 'awx_plugins.credentials', + ), +) +def test_entry_points_exposed(entry_points_group: str) -> None: + """Verify the plugin entry point is discoverable. + + This check relies on the plugin-declaring distribution package to be + pre-installed. + """ + entry_points = _discover_entry_points(group=entry_points_group) + assert 'x' in entry_points.names + + assert entry_points['x'].value == 'awx_plugins.credentials.x.api:XPlugin' + + assert callable(entry_points['x'].load()) diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000000..96c16e9b5b --- /dev/null +++ b/tox.ini @@ -0,0 +1,592 @@ +[tox] +isolated_build = true + + +[python-cli-options] +byte-warnings = -b +byte-errors = -bb +max-isolation = -E -s -I +some-isolation = -E -s +warnings-to-errors = -Werror + + +[testenv] +description = Run pytest under {envpython} +deps = -rrequirements{/}tox-py.in +install_command = + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]some-isolation} \ + {[python-cli-options]warnings-to-errors} \ + {toxinidir}{/}bin{/}pip_wrapper.py \ + '{toxinidir}{/}requirements{/}' \ + '{envname}' \ + install {opts} {packages} +commands = + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -m pytest \ + {tty:--color=yes} {posargs:} +commands_post = + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -c \ + 'import os, sys; \ + os.getenv("GITHUB_ACTIONS") == "true" or sys.exit(); \ + import coverage; \ + gh_summary_fd = open(\ + os.environ["GITHUB_STEP_SUMMARY"], encoding="utf-8", mode="a",\ + ); \ + cov = coverage.Coverage(); \ + cov.load(); \ + cov.report(file=gh_summary_fd, output_format="markdown"); \ + gh_summary_fd.close()' + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -c \ + 'import os, pathlib, sys; \ + os.getenv("GITHUB_ACTIONS") == "true" or sys.exit(); \ + cov_report_arg_prefix = "--cov-report=xml:"; \ + test_report_arg_prefix = "--junitxml="; \ + cov_report_file = [\ + arg[len(cov_report_arg_prefix):] for arg in sys.argv \ + if arg.startswith(cov_report_arg_prefix)\ + ][-1]; \ + test_report_file = [\ + arg[len(test_report_arg_prefix):] for arg in sys.argv \ + if arg.startswith(test_report_arg_prefix)\ + ][-1]; \ + gh_output_fd = open(\ + os.environ["GITHUB_OUTPUT"], encoding="utf-8", mode="a",\ + ); \ + print(f"cov-report-files={cov_report_file !s}", file=gh_output_fd); \ + print(f"test-result-files={test_report_file 
!s}", file=gh_output_fd); \ + print("codecov-flags=pytest", file=gh_output_fd); \ + gh_output_fd.close()' \ + {posargs} +package = editable +pass_env = + CI + GITHUB_* + SSH_AUTH_SOCK + TERM +set_env = + COVERAGE_PROCESS_START = {toxinidir}{/}.coveragerc +wheel_build_env = .pkg + + +[pkgenv] +# NOTE: `[testenv:.pkg]` does not work due to a regression in tox v4.14.1 +# NOTE: so `[pkgenv]` is being used in place of it. +# Refs: +# * https://github.com/tox-dev/tox/pull/3237 +# * https://github.com/tox-dev/tox/issues/3238 +# * https://github.com/tox-dev/tox/issues/3292 +# * https://hynek.me/articles/turbo-charge-tox/ +set_env = + PIP_CONSTRAINT = requirements{/}dist-build-constraints.txt + + +[testenv:cleanup-dists] +description = + Wipe the the dist{/} folder +deps = +commands_pre = +commands = + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -c \ + 'import os, shutil, sys; \ + dists_dir = "{toxinidir}{/}dist{/}"; \ + shutil.rmtree(dists_dir, ignore_errors=True); \ + sys.exit(os.path.exists(dists_dir))' +commands_post = +package = skip + + +[testenv:build-dists] +allowlist_externals = + env +description = + Build dists with {basepython} and put them into the dist{/} folder +depends = + cleanup-dists +deps = -rrequirements{/}tox-{envname}.in +commands = + env PIP_CONSTRAINT=requirements{/}dist-build-constraints.txt \ + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -m build \ + {posargs:} +commands_post = +package = skip + + +[testenv:metadata-validation] +description = + Verify that dists under the `dist{/}` dir + have valid metadata +depends = + build-dists +deps = -rrequirements{/}tox-{envname}.in +commands_pre = + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + '-Wdefault{:}git archive did not support describe output\ + {:}UserWarning{:}setuptools_scm.git' \ + '-Wdefault{:}unprocessed git archival found\ + {:}UserWarning{:}setuptools_scm.git' \ + -m setuptools_scm \ + ls +commands = + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -m twine \ + check \ + --strict \ + dist{/}* +commands_post = +package = skip + + +[testenv:pre-commit] +description = + Run the quality checks under {basepython}; run as + `SKIP=check-id1,check-id2 tox r -e pre-commit` to instruct the underlying + `pre-commit` invocation avoid running said checks; Use + `tox r -e pre-commit -- check-id1 --all-files` to select checks matching IDs + aliases{:} `tox r -e pre-commit -- mypy --all-files` will run 3 MyPy + invocations, but `tox r -e pre-commit -- mypy-py313 --all-files` runs one. 
+commands = + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -m pre_commit \ + run \ + --color=always \ + --show-diff-on-failure \ + {posargs:--all-files} + + # Print out the advice on how to install pre-commit from this env into Git: + -{envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -c \ + 'cmd = "{envpython} -m pre_commit install"; \ + scr_width = len(cmd) + 10; \ + sep = "=" * scr_width; \ + cmd_str = " $ \{cmd\}";' \ + 'print(f"\n\{sep\}\nTo install pre-commit hooks into the Git repo, run:\ + \n\n\{cmd_str\}\n\n\{sep\}\n")' +commands_post = + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -c \ + 'import os, pathlib, sys; \ + os.getenv("GITHUB_ACTIONS") == "true" or sys.exit(); \ + project_root_path = pathlib.Path(r"{toxinidir}"); \ + test_results_dir = pathlib.Path(r"{temp_dir}") / ".test-results"; \ + coverage_result_files = ",".join(\ + str(xml_path.relative_to(project_root_path)) \ + for xml_path in test_results_dir.glob("mypy--py-*{/}cobertura.xml")\ + ); \ + gh_output_fd = open(\ + os.environ["GITHUB_OUTPUT"], encoding="utf-8", mode="a",\ + ); \ + print(\ + f"cov-report-files={coverage_result_files !s}", file=gh_output_fd\ + ); \ + print("codecov-flags=MyPy", file=gh_output_fd); \ + gh_output_fd.close()' +deps = -rrequirements{/}tox-{envname}.in +isolated_build = true +package = skip +pass_env = + {[testenv]pass_env} + SKIP # set this variable + + +[testenv:build-docs] +# NOTE: Passing the `is_unversioned` tag speeds up rebuilds in dev env +allowlist_externals = + git +description = Build The Docs +changedir = docs{/} +commands_pre = + # Retrieve possibly missing commits: + -git fetch --unshallow + -git fetch --tags + + # Clean up sphinxcontrib-apidoc generated RST files: + -git clean -x -f -- 'pkg{/}*.rst' +commands = + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -m sphinx \ + -j auto \ + -b html \ + {tty:--color} \ + -a \ + -n \ + -W --keep-going \ + -d '{temp_dir}{/}.doctrees' \ + . 
\ + {posargs:{envtmpdir}{/}html -t is_unversioned} +commands_post = + # Print out the output docs dir and a way to serve html: + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -c\ + 'import os, pathlib;\ + IS_RTD_ENV = os.getenv("READTHEDOCS", "False") == "True";\ + docs_dir = pathlib.Path(r"{envdir}") / r"{envtmpdir}" / "html";\ + index_file = docs_dir / "index.html";\ + docs_url = os.environ["READTHEDOCS_CANONICAL_URL"] if IS_RTD_ENV \ + else f"file://\{index_file\}";\ + print(f"\nTo open the documentation, run\n\n\ + \tpython3 -Im webbrowser \ + \N\{QUOTATION MARK\}\{docs_url !s\}\N\{QUOTATION MARK\}\n");\ + not IS_RTD_ENV and \ + print(f"To serve \ + the docs with a local web server, use\n\n\ + \tpython3 -Im http.server --directory \ + \N\{QUOTATION MARK\}\{docs_dir\}\N\{QUOTATION MARK\} 0\n")' +deps = + -r{toxinidir}{/}requirements{/}tox-{envname}.in +pass_env = + {[testenv]pass_env} + READTHEDOCS* # Present @ RTD + + +[testenv:coverage-docs] +allowlist_externals = + {[testenv:build-docs]allowlist_externals} +description = Measure coverage in docs +changedir = {[testenv:build-docs]changedir} +commands_pre = + # Retrieve possibly missing commits: + -git fetch --unshallow + -git fetch --tags + + # Clean up sphinxcontrib-apidoc generated RST files: + -git clean -x -f -- 'pkg{/}*.rst' +commands = + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -m sphinx \ + -j auto \ + {tty:--color} \ + -a \ + -n \ + -W --keep-going \ + -b coverage \ + -d '{temp_dir}{/}.doctrees' \ + . \ + {posargs:{envtmpdir}{/}coverage} +commands_post = + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -c \ + 'import os, pathlib, shlex, sys; \ + os.getenv("GITHUB_ACTIONS") == "true" or sys.exit(); \ + output_dir = pathlib.Path(\ + shlex.split(r"{posargs:{envtmpdir}{/}coverage}")[0]\ + ); \ + output_txt_file = output_dir / "python.txt"; \ + gh_summary_fd = open(\ + os.environ["GITHUB_STEP_SUMMARY"], encoding="utf-8", mode="a",\ + ); \ + print(output_txt_file.read_text(), file=gh_summary_fd); \ + gh_summary_fd.close()' +deps = + {[testenv:build-docs]deps} +pass_env = + {[testenv:build-docs]pass_env} + + +[testenv:doctest-docs] +allowlist_externals = + {[testenv:build-docs]allowlist_externals} +description = Doctest The Docs +changedir = {[testenv:build-docs]changedir} +commands_pre = + # Retrieve possibly missing commits: + -git fetch --unshallow + -git fetch --tags + + # Clean up sphinxcontrib-apidoc generated RST files: + -git clean -x -f -- 'pkg{/}*.rst' +commands = + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -m sphinx \ + -j auto \ + {tty:--color} \ + -a \ + -n \ + -W --keep-going \ + -b doctest \ + -d '{temp_dir}{/}.doctrees' \ + . 
\
+ {posargs:{envtmpdir}{/}doctest}
+commands_post =
+ {envpython} \
+ {[python-cli-options]byte-errors} \
+ {[python-cli-options]max-isolation} \
+ {[python-cli-options]warnings-to-errors} \
+ -c \
+ 'import os, pathlib, shlex, sys; \
+ os.getenv("GITHUB_ACTIONS") == "true" or sys.exit(); \
+ output_dir = pathlib.Path(\
+ shlex.split(r"{posargs:{envtmpdir}{/}doctest}")[0]\
+ ); \
+ output_txt_file = output_dir / "output.txt"; \
+ gh_summary_fd = open(\
+ os.environ["GITHUB_STEP_SUMMARY"], encoding="utf-8", mode="a",\
+ ); \
+ print(output_txt_file.read_text(), file=gh_summary_fd); \
+ gh_summary_fd.close()'
+deps =
+ {[testenv:build-docs]deps}
+pass_env =
+ {[testenv:build-docs]pass_env}
+
+
+[testenv:linkcheck-docs]
+allowlist_externals =
+ {[testenv:build-docs]allowlist_externals}
+description = Linkcheck The Docs
+changedir = {[testenv:build-docs]changedir}
+commands_pre =
+ # Retrieve possibly missing commits:
+ -git fetch --unshallow
+ -git fetch --tags
+
+ # Clean up sphinxcontrib-apidoc generated RST files:
+ -git clean -x -f -- 'pkg{/}*.rst'
+commands =
+ {envpython} \
+ {[python-cli-options]byte-errors} \
+ {[python-cli-options]max-isolation} \
+ {[python-cli-options]warnings-to-errors} \
+ -m sphinx \
+ -j auto \
+ {tty:--color} \
+ -a \
+ -n \
+ -W --keep-going \
+ -b linkcheck \
+ -d "{temp_dir}{/}.doctrees" \
+ . \
+ {posargs:{envtmpdir}{/}linkcheck}
+commands_post =
+ {envpython} \
+ {[python-cli-options]byte-errors} \
+ {[python-cli-options]max-isolation} \
+ {[python-cli-options]warnings-to-errors} \
+ -c \
+ 'import os, pathlib, shlex, sys; \
+ os.getenv("GITHUB_ACTIONS") == "true" or sys.exit(); \
+ output_dir = pathlib.Path(\
+ shlex.split(r"{posargs:{envtmpdir}{/}linkcheck}")[0]\
+ ); \
+ output_json_file = output_dir / "output.json"; \
+ output_txt_file = output_dir / "output.txt"; \
+ gh_summary_fd = open(\
+ os.environ["GITHUB_STEP_SUMMARY"], encoding="utf-8", mode="a",\
+ ); \
+ print(output_json_file.read_text(), file=gh_summary_fd); \
+ print(output_txt_file.read_text(), file=gh_summary_fd); \
+ gh_summary_fd.close()'
+deps =
+ -rrequirements{/}tox-{envname}.in
+pass_env =
+ {[testenv:build-docs]pass_env}
+
+
+[testenv:spellcheck-docs]
+allowlist_externals =
+ {[testenv:build-docs]allowlist_externals}
+description = Spellcheck The Docs
+changedir = {[testenv:build-docs]changedir}
+commands_pre =
+ # Retrieve possibly missing commits:
+ -git fetch --unshallow
+ -git fetch --tags
+
+ # Clean up sphinxcontrib-apidoc generated RST files:
+ -git clean -x -f -- 'pkg{/}*.rst'
+# FIXME: The `sphinxcontrib-spelling` builder emits a resource warning.
+# Ref: https://github.com/sphinx-contrib/spelling/pull/226
+commands =
+ {envpython} \
+ {[python-cli-options]byte-errors} \
+ {[python-cli-options]max-isolation} \
+ {[python-cli-options]warnings-to-errors} \
+ -Wdefault::ResourceWarning \
+ -m sphinx \
+ -j auto \
+ {tty:--color} \
+ -a \
+ -n \
+ -W --keep-going \
+ -b spelling --color \
+ -d "{temp_dir}{/}.doctrees" \
"{toxworkdir}{/}spelling" +commands_post = +deps = + -rrequirements{/}tox-{envname}.in +pass_env = + {[testenv:build-docs]pass_env} + + +[testenv:pip-compile] +description = Run `pip-compile {posargs:}` under {envpython} +deps = -rrequirements{/}tox-pip-compile.in +commands = + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + '-Wdefault{:}pkg_resources is deprecated as an API{:}DeprecationWarning' \ + -m piptools \ + compile \ + {posargs:} +commands_post = + # NOTE: Invocations without posargs result in trailing spaces in the + # NOTE: `pip-tools` generated file headers. This snippet cleans them up. + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -c \ + 'import os, pathlib, re; \ + project_root_path = pathlib.Path(r"{toxinidir}"); \ + requirements_dir = project_root_path / "requirements"; \ + [\ + lock_file.write_text(\ + re.sub(\ + r"\s*?(?P(?:\r\n|\r|\n))+?", \ + r"\g", \ + lock_file.read_text(encoding="utf-8"), \ + flags=re.MULTILINE,\ + ), \ + encoding="utf-8",\ + ) \ + for lock_file in requirements_dir.glob("*.txt")\ + ]' +package = skip +set_env = + CUSTOM_COMPILE_COMMAND = tox r -e {envname} -- {posargs:} + + +[testenv:pip-compile-build-lock] +description = Produce a PEP 517/660 build deps lock using {envpython} +deps = {[testenv:pip-compile]deps} +commands = + {envpython} \ + {[python-cli-options]byte-warnings} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + '-Wdefault{:}pkg_resources is deprecated as an API{:}DeprecationWarning' \ + '-Wdefault{:}Unimplemented abstract methods{:}DeprecationWarning' \ + -m piptools \ + compile \ + --only-build-deps \ + --all-build-deps \ + --output-file=requirements{/}dist-build-constraints.txt \ + {posargs:} +commands_post = + # NOTE: Invocations without posargs result in trailing spaces in the + # NOTE: `pip-tools` generated file headers. This snippet cleans them up. + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -c \ + 'import os, pathlib, re; \ + project_root_path = pathlib.Path(r"{toxinidir}"); \ + requirements_dir = project_root_path / "requirements"; \ + [\ + lock_file.write_text(\ + re.sub(\ + r"\s*?(?P(?:\r\n|\r|\n))+?", \ + r"\g", \ + lock_file.read_text(encoding="utf-8"), \ + flags=re.MULTILINE,\ + ), \ + encoding="utf-8",\ + ) \ + for lock_file in requirements_dir.glob("*.txt")\ + ]' +set_env = + CUSTOM_COMPILE_COMMAND = tox r -e {envname} -- {posargs:} +package = {[testenv:pip-compile]package} + + +[testenv:pip-compile-tox-env-lock] +description = Produce {posargs} lock file using {envpython} +deps = {[testenv:pip-compile]deps} +commands = + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]some-isolation} \ + {[python-cli-options]warnings-to-errors} \ + bin{/}resolve_platform_lock_file.py \ + 'requirements{/}' \ + {posargs} +commands_post = + # NOTE: Invocations without posargs result in trailing spaces in the + # NOTE: `pip-tools` generated file headers. This snippet cleans them up. 
+ {envpython} \
+ {[python-cli-options]byte-errors} \
+ {[python-cli-options]max-isolation} \
+ {[python-cli-options]warnings-to-errors} \
+ -c \
+ 'import os, pathlib, re; \
+ project_root_path = pathlib.Path(r"{toxinidir}"); \
+ requirements_dir = project_root_path / "requirements"; \
+ [\
+ lock_file.write_text(\
+ re.sub(\
+ r"\s*?(?P<eol>(?:\r\n|\r|\n))+?", \
+ r"\g<eol>", \
+ lock_file.read_text(encoding="utf-8"), \
+ flags=re.MULTILINE,\
+ ), \
+ encoding="utf-8",\
+ ) \
+ for lock_file in requirements_dir.glob("*.txt")\
+ ]'
+set_env =
+ CUSTOM_COMPILE_COMMAND = tox r -e {envname} -- {posargs:}
+package = {[testenv:pip-compile]package}
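The `pip-compile-tox-env-lock` environment delegates to `bin{/}resolve_platform_lock_file.py`, which is not part of this diff, to select per-platform lock files such as `requirements/tox-py311-cp311-linux-x86_64.txt`. A hypothetical sketch of how such a file name could be derived; the real helper's logic may differ:

```python
"""Sketch: deriving a per-platform lock file name (hypothetical helper)."""

import platform
import sys


def lock_file_name(tox_env_name: str) -> str:
    """Mirror the `tox-<env>-<impl><ver>-<os>-<arch>.txt` convention."""
    impl = 'cp' if platform.python_implementation() == 'CPython' else 'py'
    version_tag = f'{impl}{sys.version_info.major}{sys.version_info.minor}'
    return (
        f'tox-{tox_env_name}-{version_tag}'
        f'-{platform.system().lower()}-{platform.machine().lower()}.txt'
    )


# On CPython 3.11 under Linux on x86_64 this prints:
# tox-py311-cp311-linux-x86_64.txt
print(lock_file_name('py311'))
```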