diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index fdd540f99..5d8b340ad 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -8,12 +8,18 @@ RUN \ software-properties-common git default-jre && \ apt-get clean &&\ rm -rf /var/lib/apt/lists/* +# Install allure report RUN \ wget https://repo.maven.apache.org/maven2/io/qameta/allure/allure-commandline/2.20.1/allure-commandline-2.20.1.zip && \ unzip allure-commandline-2.20.1.zip -d /allure && \ rm allure-commandline-2.20.1.zip - -ENV PATH "/allure/allure-2.20.1/bin:${PATH}" + RUN mkdir /config /icloud && chown -R vscode:vscode /config /icloud +USER vscode +# Install uv (pip replacement) +RUN \ + curl -LsSf https://astral.sh/uv/install.sh | sh + +ENV PATH="/allure/allure-2.20.1/bin:/home/vscode/.cargo/bin:${PATH}" WORKDIR /workspaces diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index b9e010161..82f69a2f5 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -5,46 +5,47 @@ "containerEnv": { "DEVCONTAINER": "1" }, - "extensions": [ - "ms-python.vscode-pylance", - "visualstudioexptteam.vscodeintellicode", - "redhat.vscode-yaml", - "esbenp.prettier-vscode", - "GitHub.vscode-pull-request-github" - ], - "settings": { - "python.pythonPath": "/usr/local/bin/python", - "python.linting.enabled": true, - "python.linting.pylintEnabled": true, - "python.formatting.blackPath": "/usr/local/py-utils/bin/black", - "python.linting.flake8Path": "/usr/local/bin/flake8", - "python.linting.pycodestylePath": "/usr/local/bin/pycodestyle", - "python.linting.pydocstylePath": "/usr/local/bin/pydocstyle", - "python.linting.mypyPath": "/usr/local/bin/mypy", - "python.linting.pylintPath": "/usr/local/bin/pylint", - "python.formatting.provider": "black", - "python.testing.pytestArgs": ["--no-cov"], - "editor.formatOnPaste": false, - "editor.formatOnSave": true, - "editor.formatOnType": true, - "files.trimTrailingWhitespace": true, - "terminal.integrated.profiles.linux": { - "zsh": { - "path": "/usr/bin/zsh" + "customizations": { + "vscode": { + "extensions": [ + "ms-python.vscode-pylance", + "visualstudioexptteam.vscodeintellicode", + "redhat.vscode-yaml", + "esbenp.prettier-vscode", + "GitHub.vscode-pull-request-github", + "github.vscode-github-actions", + "charliermarsh.ruff" + ], + "settings": { + "[python]": { + "diffEditor.ignoreTrimWhitespace": false, + "editor.formatOnType": true, + "editor.formatOnSave": true, + "editor.wordBasedSuggestions": "off", + "editor.defaultFormatter": "charliermarsh.ruff", + "editor.codeActionsOnSave": { + "source.fixAll": "explicit", + "source.organizeImports": "explicit" + } + }, + "python.pythonPath": "./venv/bin/python", + "python.testing.pytestArgs": ["--no-cov"], + "files.trimTrailingWhitespace": true, + "terminal.integrated.defaultProfile.linux": "bash", + "yaml.customTags": [ + "!input scalar", + "!secret scalar", + "!include_dir_named scalar", + "!include_dir_list scalar", + "!include_dir_merge_list scalar", + "!include_dir_merge_named scalar" + ] } - }, - "terminal.integrated.defaultProfile.linux": "zsh", - "yaml.customTags": [ - "!input scalar", - "!secret scalar", - "!include_dir_named scalar", - "!include_dir_list scalar", - "!include_dir_merge_list scalar", - "!include_dir_merge_named scalar" - ] + } }, - "postCreateCommand": "pip install --upgrade pip && pip install -r requirements-test.txt", + + "postCreateCommand": "uv venv && . 
.venv/bin/activate && uv pip install -r requirements-test.txt && git config commit.gpgsign true", "mounts": [ "source=${localEnv:HOME}${localEnv:USERPROFILE}/.gnupg,target=/home/vscode/.gnupg,type=bind,consistency=cached" ] -} +} \ No newline at end of file diff --git a/.flake8 b/.flake8 deleted file mode 100644 index 79a16af7e..000000000 --- a/.flake8 +++ /dev/null @@ -1,2 +0,0 @@ -[flake8] -max-line-length = 120 \ No newline at end of file diff --git a/.github/workflows/ci-main-test-coverage-deploy.yml b/.github/workflows/ci-main-test-coverage-deploy.yml index 5e7eca835..a4a7504a1 100644 --- a/.github/workflows/ci-main-test-coverage-deploy.yml +++ b/.github/workflows/ci-main-test-coverage-deploy.yml @@ -21,13 +21,13 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout source code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Python 3.10 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.10" - name: Cache pip dependencies - uses: actions/cache@v3 + uses: actions/cache@v4 id: cache-dependencies with: path: ~/.cache/pip @@ -43,13 +43,13 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout source code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Python 3.10 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.10" - name: Restore pip cache dependencies - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/.cache/pip key: ${{ runner.os }}-pip-${{ hashFiles('requirements*.txt') }} @@ -60,16 +60,18 @@ jobs: pip install -r requirements-test.txt - name: Test with pytest run: | - sudo mkdir /config /icloud && sudo chown -R $(id -u) /config /icloud && pytest + sudo mkdir /config /icloud && + sudo chown -R $(id -u):$(id -g) /config /icloud && + ENV_CONFIG_FILE_PATH=./tests/data/test_config.yaml pytest - name: Upload coverage artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: ${{ success() }} with: name: coverage-output path: htmlcov retention-days: 1 - name: Checkout gh-pages - uses: actions/checkout@v3 + uses: actions/checkout@v4 if: always() continue-on-error: true with: @@ -90,19 +92,19 @@ jobs: run: | python generate_badges.py - name: Upload tests artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: test-output.zip path: test-output.zip retention-days: 1 - name: Upload coverage artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: coverage-output path: htmlcov retention-days: 1 - name: Upload badges artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: badges-output path: badges @@ -112,13 +114,13 @@ jobs: runs-on: ubuntu-latest steps: - name: Download test artifacts - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: test-output.zip # - name: Setup tmate session # uses: mxschmitt/action-tmate@v3 - name: Checkout gh-pages - uses: actions/checkout@v3 + uses: actions/checkout@v4 if: always() continue-on-error: true with: @@ -129,7 +131,7 @@ jobs: unzip test-output.zip && rm test-output.zip - name: Publish test report to gh-pages if: always() - uses: peaceiris/actions-gh-pages@v3 + uses: peaceiris/actions-gh-pages@v4 with: deploy_key: ${{ secrets.DEPLOY_PRIVATE_KEY }} publish_branch: gh-pages @@ -140,12 +142,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Download coverage artifacts - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: 
coverage-output path: coverage - name: Checkout gh-pages - uses: actions/checkout@v3 + uses: actions/checkout@v4 if: always() continue-on-error: true with: @@ -153,7 +155,7 @@ jobs: path: gh-pages - name: Publish test coverage to gh-pages if: always() - uses: peaceiris/actions-gh-pages@v3 + uses: peaceiris/actions-gh-pages@v4 with: deploy_key: ${{ secrets.DEPLOY_PRIVATE_KEY }} publish_branch: gh-pages @@ -164,12 +166,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Download badges artifacts - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: badges-output path: badges - name: Checkout gh-pages - uses: actions/checkout@v3 + uses: actions/checkout@v4 if: always() continue-on-error: true with: @@ -177,7 +179,7 @@ jobs: path: gh-pages - name: Publish badges to gh-pages if: always() - uses: peaceiris/actions-gh-pages@v3 + uses: peaceiris/actions-gh-pages@v4 with: deploy_key: ${{ secrets.DEPLOY_PRIVATE_KEY }} publish_branch: gh-pages @@ -188,24 +190,24 @@ jobs: runs-on: ubuntu-latest steps: - name: Check Out Repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Cache Docker layers - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: /tmp/.buildx-cache key: ${{ runner.os }}-buildx-${{ github.sha }} restore-keys: | ${{ runner.os }}-buildx- - name: Login to Docker Hub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKER_HUB_USERNAME }} password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} - name: Set up Docker QEMU - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx id: buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Build and push id: docker_build uses: docker/build-push-action@v5 diff --git a/.github/workflows/ci-pr-test.yml b/.github/workflows/ci-pr-test.yml index b1fd947bc..2f721e514 100644 --- a/.github/workflows/ci-pr-test.yml +++ b/.github/workflows/ci-pr-test.yml @@ -58,7 +58,10 @@ jobs: pip install -r requirements-test.txt - name: Test with pytest run: | - sudo mkdir /config /icloud && sudo chown -R $(id -u):$(id -g) /config /icloud && pylint src/ tests/ && ENV_CONFIG_FILE_PATH=./tests/data/test_config.yaml pytest + sudo mkdir /config /icloud && + sudo chown -R $(id -u):$(id -g) /config /icloud && + ruff check && + ENV_CONFIG_FILE_PATH=./tests/data/test_config.yaml pytest # - name: Setup tmate session # uses: mxschmitt/action-tmate@v3 - name: Generate Allure Report diff --git a/.github/workflows/close-stale.yml b/.github/workflows/close-stale.yml index 5d61e2378..c25a5967c 100644 --- a/.github/workflows/close-stale.yml +++ b/.github/workflows/close-stale.yml @@ -7,7 +7,7 @@ jobs: stale: runs-on: ubuntu-latest steps: - - uses: actions/stale@v4 + - uses: actions/stale@v9 with: stale-issue-message: "This issue is stale because it has been open 1 year with no activity. Remove stale label or comment or this will be closed in 5 days." 
days-before-stale: 365 diff --git a/.github/workflows/official-release-to-docker-hub.yml b/.github/workflows/official-release-to-docker-hub.yml index 8e90adbc3..7cf8772b4 100644 --- a/.github/workflows/official-release-to-docker-hub.yml +++ b/.github/workflows/official-release-to-docker-hub.yml @@ -22,7 +22,7 @@ jobs: # Steps represent a sequence of tasks that will be executed as part of the job steps: - name: Check Out Repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Prepare id: prep run: | @@ -32,25 +32,25 @@ fi TAGS="${DOCKER_IMAGE}:${VERSION},${DOCKER_IMAGE}:latest" echo "${TAGS}" - echo ::set-output name=tags::${TAGS} - echo ::set-output name=version::${VERSION} + echo "tags=${TAGS}" >> $GITHUB_OUTPUT + echo "version=${VERSION}" >> $GITHUB_OUTPUT - name: Cache Docker layers - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: /tmp/.buildx-cache key: ${{ runner.os }}-buildx-${{ github.sha }} restore-keys: | ${{ runner.os }}-buildx- - name: Login to Docker Hub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKER_HUB_USERNAME }} password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} - name: Set up Docker QEMU - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx id: buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Build and push id: docker_build uses: docker/build-push-action@v5 @@ -72,6 +72,6 @@ jobs: name: Move cache run: | rm -rf /tmp/.buildx-cache - mv /tmp/.buildx-cache-new /tmp/.buildx-cache + mv /tmp/.buildx-cache-new /tmp/.buildx-cache - name: Image digest run: echo ${{ steps.docker_build.outputs.digest }} diff --git a/.gitignore b/.gitignore index 1133a643c..3e6177485 100644 --- a/.gitignore +++ b/.gitignore @@ -136,4 +136,5 @@ allure-report ignore-config.yaml session session_data -icloud/ \ No newline at end of file +icloud/ +.ruff_cache/ \ No newline at end of file diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1235be6ba..84155312c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,27 +1,12 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.3 + rev: v0.5.7 hooks: - id: ruff args: - --fix - - repo: https://github.com/psf/black-pre-commit-mirror - rev: 23.10.1 - hooks: - - id: black - args: - - --quiet - files: ^((src|tests)/.+)?[^/]+\.py$ - - repo: https://github.com/codespell-project/codespell - rev: v2.2.6 - hooks: - - id: codespell - args: - - --skip="./.*,*.csv,*.json" - - --quiet-level=2 - exclude: ^tests/data/ - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v4.6.0 hooks: - id: check-executables-have-shebangs stages: [manual] @@ -31,62 +16,11 @@ repos: args: - --branch=main - repo: https://github.com/adrienverge/yamllint.git - rev: v1.32.0 + rev: v1.35.1 hooks: - id: yamllint - - repo: https://github.com/pre-commit/mirrors-prettier - rev: v3.0.3 - hooks: - - id: prettier - - repo: https://github.com/cdce8p/python-typing-update - rev: v0.6.0 - hooks: - # Run `python-typing-update` hook manually from time to time - # to update python typing syntax. - # Will require manual work, before submitting changes!
- # pre-commit run --hook-stage manual python-typing-update --all-files - - id: python-typing-update - stages: [manual] - args: - - --py311-plus - - --force - - --keep-updates - files: ^(src|tests)/.+\.py$ - - - repo: https://github.com/asottile/pyupgrade - rev: v3.15.0 - hooks: - - id: pyupgrade - args: [--py39-plus] - - repo: https://github.com/PyCQA/autoflake - rev: v2.2.1 - hooks: - - id: autoflake - args: - - --in-place - - --remove-all-unused-imports - - repo: https://github.com/psf/black - rev: 23.10.1 - hooks: - - id: black - args: - - --quiet - files: ^((src|tests)/.+)?[^/]+\.py$ - - repo: https://github.com/PyCQA/flake8 - rev: 6.1.0 - hooks: - - id: flake8 - additional_dependencies: - - pycodestyle - - pyflakes - - flake8-docstrings - - pydocstyle - - flake8-comprehensions - - flake8-noqa - - mccabe - files: ^(src|tests)/.+\.py$ - repo: https://github.com/PyCQA/bandit - rev: 1.7.5 + rev: 1.7.9 hooks: - id: bandit args: @@ -94,24 +28,3 @@ repos: - --format=custom - --configfile=tests/bandit.yaml files: ^(src|tests)/.+\.py$ - - repo: https://github.com/PyCQA/isort - rev: 5.12.0 - hooks: - - id: isort - args: - - --profile=black - - repo: local - hooks: - # - id: mypy - # name: mypy - # entry: mypy - # language: script - # types: [python] - # require_serial: true - # files: ^(src|tests)/.+\.py$ - - id: pylint - name: pylint - entry: run-in-env.sh pylint - language: script - types: [python] - files: ^(src|tests)/.+\.py$ diff --git a/.ruff.toml b/.ruff.toml new file mode 100644 index 000000000..a87e745d7 --- /dev/null +++ b/.ruff.toml @@ -0,0 +1,125 @@ +exclude = [ + ".bzr", + ".direnv", + ".eggs", + ".git", + ".git-rewrite", + ".hg", + ".ipynb_checkpoints", + ".mypy_cache", + ".nox", + ".pants.d", + ".pyenv", + ".pytest_cache", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + ".vscode", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "node_modules", + "site-packages", + "venv", +] + +# Same as Black. +line-length = 120 +indent-width = 4 + +# Assume Python 3.8 +# target-version = "py38" + +[lint] +# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default. +# Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or +# McCabe complexity (`C901`) by default. +select = ["F", "E", "W", +# "C90", + "I", +# "N", +# "D", +"UP", "YTT", +# "ANN", +"ASYNC", +# "S", +# "BLE", +# "FBT", +# "B", +# "A", +"COM", "C4", +# "DTZ", +# "T10", +"DJ", "EM", "EXE", +#"FA", +# "ISC", +"ICN", +# "LOG", +# "G", +"INP", "PIE", +# "T20", +"PYI", +# "PT", +"Q", "RSE", +#"RET", +"SLF", "SLOT", +# "SIM", +"TID", "TCH", "INT", +# "ARG", +# "PTH", +"TD", +"FIX", +# "ERA", +"PD", "PGH", +# "PL", +# "TRY", +# "FLY", +"NPY", "AIR", "PERF", +# "FURB", +# "RUF" +] +ignore = ["E501"] + +# Allow fix for all enabled rules (when `--fix`) is provided. +fixable = ["ALL"] +unfixable = ["B"] + +# Allow unused variables when underscore-prefixed. +dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" +# 4. Ignore `E402` (import violations) in all `__init__.py` files, and in select subdirectories. +[lint.per-file-ignores] +"__init__.py" = ["E402"] +"**/{tests,docs,tools}/*" = ["E402"] + + +[format] +# Like Black, use double quotes for strings. +quote-style = "double" + +# Like Black, indent with spaces, rather than tabs. +indent-style = "space" + +# Like Black, respect magic trailing commas. +skip-magic-trailing-comma = false + +# Like Black, automatically detect the appropriate line ending. 
+line-ending = "auto" + +# Enable auto-formatting of code examples in docstrings. Markdown, +# reStructuredText code/literal blocks and doctests are all supported. +# +# This is currently disabled by default, but it is planned for this +# to be opt-out in the future. +# docstring-code-format = true +# docstring-code-line-length = 120 + +# Set the line length limit used when formatting code snippets in +# docstrings. +# +# This only has an effect when the `docstring-code-format` setting is +# enabled. +# docstring-code-line-length = "dynamic" \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json index 3d1db8f85..1f9c48b55 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,4 +1,5 @@ { + "python.defaultInterpreterPath": "${workspaceFolder}/.venv", "python.testing.pytestArgs": ["tests", "--no-cov"], "python.testing.unittestEnabled": false, "python.testing.pytestEnabled": true, diff --git a/generate_badges.py b/generate_badges.py index eb469cde0..c8ceed23c 100644 --- a/generate_badges.py +++ b/generate_badges.py @@ -11,9 +11,7 @@ test_data = json.load(f) test_result = test_data["statistic"]["total"] == test_data["statistic"]["passed"] -coverage_result = ( - float(ET.parse("./coverage.xml").getroot().attrib["line-rate"]) * 100.0 -) +coverage_result = float(ET.parse("./coverage.xml").getroot().attrib["line-rate"]) * 100.0 if os.path.exists(badges_directory) and os.path.isdir(badges_directory): shutil.rmtree(badges_directory) @@ -22,14 +20,10 @@ os.mkdir(badges_directory) url_data = "passing&color=brightgreen" if test_result else "failing&color=critical" -response = requests.get( - "https://img.shields.io/static/v1?label=Tests&message=" + url_data -) +response = requests.get("https://img.shields.io/static/v1?label=Tests&message=" + url_data) with open(badges_directory + "/tests.svg", "w") as f: f.write(response.text) url_data = "brightgreen" if coverage_result == 100.0 else "critical" -response = requests.get( - f"https://img.shields.io/static/v1?label=Coverage&message={coverage_result}%&color={url_data}" -) +response = requests.get(f"https://img.shields.io/static/v1?label=Coverage&message={coverage_result}%&color={url_data}") with open(badges_directory + "/coverage.svg", "w") as f: f.write(response.text) diff --git a/gpg-init.sh b/gpg-init.sh new file mode 100755 index 000000000..c152833c2 --- /dev/null +++ b/gpg-init.sh @@ -0,0 +1,2 @@ +export GPG_TTY=$(tty) +echo "" | gpg --clearsign \ No newline at end of file diff --git a/pylintrc b/pylintrc deleted file mode 100644 index 8ec667a14..000000000 --- a/pylintrc +++ /dev/null @@ -1,446 +0,0 @@ -# This Pylint rcfile contains a best-effort configuration to uphold the -# best-practices and style described in the Google Python style guide: -# https://google.github.io/styleguide/pyguide.html -# -# Its canonical open-source location is: -# https://google.github.io/styleguide/pylintrc - -[MASTER] - -# Files or directories to be skipped. They should be base names, not paths. -ignore=third_party - -# Files or directories matching the regex patterns are skipped. The regex -# matches against base names, not paths. -ignore-patterns=photos_data.py - -# Pickle collected data for later comparisons. -persistent=no - -# List of plugins (as comma separated values of python modules names) to load, -# usually to register additional checkers. -load-plugins= - -# Use multiple processes to speed up Pylint. -jobs=4 - -# Allow loading of arbitrary C extensions. 
Extensions are imported into the -# active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=no - - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED -confidence= - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -#enable= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once).You can also use "--disable=all" to -# disable everything first and then re-enable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use"--disable=all --enable=classes -# --disable=W" -disable=abstract-method, - apply-builtin, - arguments-differ, - attribute-defined-outside-init, - backtick, - bad-option-value, - basestring-builtin, - buffer-builtin, - c-extension-no-member, - consider-using-enumerate, - consider-using-with, - cmp-builtin, - cmp-method, - coerce-builtin, - coerce-method, - delslice-method, - div-method, - duplicate-code, - eq-without-hash, - execfile-builtin, - file-builtin, - filter-builtin-not-iterating, - fixme, - getslice-method, - global-statement, - hex-method, - idiv-method, - implicit-str-concat-in-sequence, - import-error, - import-self, - import-star-module-level, - inconsistent-return-statements, - input-builtin, - intern-builtin, - invalid-str-codec, - locally-disabled, - long-builtin, - long-suffix, - map-builtin-not-iterating, - misplaced-comparison-constant, - missing-function-docstring, - missing-module-docstring, - missing-class-docstring, - metaclass-assignment, - next-method-called, - next-method-defined, - no-absolute-import, - no-else-break, - no-else-continue, - no-else-raise, - no-else-return, - no-init, # added - no-member, - no-name-in-module, - no-self-use, - nonzero-method, - oct-method, - old-division, - old-ne-operator, - old-octal-literal, - old-raise-syntax, - parameter-unpacking, - print-statement, - raising-string, - range-builtin-not-iterating, - raw_input-builtin, - rdiv-method, - reduce-builtin, - relative-import, - reload-builtin, - round-builtin, - setslice-method, - signature-differs, - standarderror-builtin, - suppressed-message, - sys-max-int, - too-few-public-methods, - too-many-ancestors, - too-many-arguments, - too-many-boolean-expressions, - too-many-branches, - too-many-instance-attributes, - too-many-locals, - too-many-nested-blocks, - too-many-public-methods, - too-many-return-statements, - too-many-statements, - trailing-newlines, - unichr-builtin, - unicode-builtin, - unnecessary-pass, - unpacking-in-except, - unused-argument, - useless-else-on-loop, - useless-object-inheritance, - useless-suppression, - using-cmp-argument, - wrong-import-order, - xrange-builtin, - zip-builtin-not-iterating, - logging-not-lazy, - logging-fstring-interpolation - - -[REPORTS] - -# Set the output format. 
Available formats are text, parseable, colorized, msvs -# (visual studio) and html. You can also give a reporter class, eg -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Put messages in a separate file for each module / package specified on the -# command line instead of printing them on stdout. Reports (if any) will be -# written in a file name "pylint_global.[txt|html]". This option is deprecated -# and it will be removed in Pylint 2.0. -# files-output=no - -# Tells whether to display a full report or only the messages -reports=no - -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details -#msg-template= - - -[BASIC] - -# Good variable names which should always be accepted, separated by a comma -good-names=main,_ - -# Bad variable names which should always be refused, separated by a comma -bad-names= - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Include a hint for the correct naming format with invalid-name -include-naming-hint=no - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -property-classes=abc.abstractproperty,cached_property.cached_property,cached_property.threaded_cached_property,cached_property.cached_property_with_ttl,cached_property.threaded_cached_property_with_ttl - -# Regular expression matching correct function names -function-rgx=^(?:(?P<exempt>setUp|tearDown|setUpModule|tearDownModule)|(?P<camel_case>_?[A-Z][a-zA-Z0-9]*)|(?P<snake_case>_?[a-z][a-z0-9_]*))$ - -# Regular expression matching correct variable names -variable-rgx=^[a-z][a-z0-9_]*$ - -# Regular expression matching correct constant names -const-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$ - -# Regular expression matching correct attribute names -attr-rgx=^_{0,2}[a-z][a-z0-9_]*$ - -# Regular expression matching correct argument names -argument-rgx=^[a-z][a-z0-9_]*$ - -# Regular expression matching correct class attribute names -class-attribute-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$ - -# Regular expression matching correct inline iteration names -inlinevar-rgx=^[a-z][a-z0-9_]*$ - -# Regular expression matching correct class names -class-rgx=^_?[A-Z][a-zA-Z0-9]*$ - -# Regular expression matching correct module names -module-rgx=^(_?[a-z][a-z0-9_]*|__init__)$ - -# Regular expression matching correct method names -method-rgx=(?x)^(?:(?P<exempt>_[a-z0-9_]+__|runTest|setUp|tearDown|setUpTestCase|tearDownTestCase|setupSelf|tearDownClass|setUpClass|(test|assert)_*[A-Z0-9][a-zA-Z0-9_]*|next)|(?P<camel_case>_{0,2}[A-Z][a-zA-Z0-9_]*)|(?P<snake_case>_{0,2}[a-z][a-z0-9_]*))$ - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=(__.*__|main|test.*|.*test|.*Test)$ - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. 
-docstring-min-length=10 - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager,contextlib2.contextmanager - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis. It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules= - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members= - - -[FORMAT] - -# Maximum number of characters on a single line. -max-line-length=120 - -# TODO(https://github.com/PyCQA/pylint/issues/3352): Direct pylint to exempt -# lines made too long by directives to pytype. - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=(?x)( - ^\s*(\#\ )?<?https?://\S+>?$| - ^\s*(from\s+\S+\s+)?import\s+.+$) - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=yes - -# List of optional constructs for which whitespace checking is disabled. `dict- -# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. -# `trailing-comma` allows a space between comma and closing bracket: (a, ). -# `empty-line` allows space-only lines. -# no-space-check= - -# Maximum number of lines in a module -max-module-lines=99999 - -# String used as indentation unit. The internal Google style guide mandates 2 -# spaces. Google's externaly-published style guide says 4, consistent with -# PEP 8. Here, we use 2 spaces, for conformity with many open-sourced Google -# projects (like TensorFlow). -indent-string=' ' - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=TODO - - -[STRING] - -# This flag controls whether inconsistent-quotes generates a warning when the -# character used as a quote delimiter is used inconsistently within a module. -check-quote-consistency=yes - - -[VARIABLES] - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# A regular expression matching the name of dummy variables (i.e. expectedly -# not used). -dummy-variables-rgx=^\*{0,2}(_$|unused_|dummy_) - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid to define new builtins when possible. -additional-builtins= - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_,_cb - -# List of qualified module names which can have objects that can redefine -# builtins. 
-redefining-builtins-modules=six,six.moves,past.builtins,future.builtins,functools - - -[LOGGING] - -# Logging modules to check that the string format arguments are in logging -# function parameter format -logging-modules=logging,absl.logging,tensorflow.io.logging - - -[SIMILARITIES] - -# Minimum lines number of a similarity. -min-similarity-lines=4 - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes - -# Ignore imports when computing similarities. -ignore-imports=no - - -[SPELLING] - -# Spelling dictionary name. Available dictionaries: none. To make it working -# install python-enchant package. -spelling-dict= - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to indicated private dictionary in -# --spelling-private-dict-file option instead of raising a message. -spelling-store-unknown-words=no - - -[IMPORTS] - -# Deprecated modules which should not be used, separated by a comma -deprecated-modules=regsub, - TERMIOS, - Bastion, - rexec, - sets - -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report RP0402 must not be disabled) -import-graph= - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled) -ext-import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled) -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant, absl - -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no - - -[CLASSES] - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__, - __new__, - setUp - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict, - _fields, - _replace, - _source, - _make - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls, - class_ - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=mcs - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "Exception" -overgeneral-exceptions=builtins.StandardError, - builtins.BaseException \ No newline at end of file diff --git a/requirements-test.txt b/requirements-test.txt index a6e583343..fbf47bfa3 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -3,6 +3,7 @@ allure-pytest==2.8.33 coverage==5.4 pytest==6.2.5 pytest-cov==2.11.1 -pylint==2.9.3 +# pylint==2.9.3 +ruff ipython pre-commit \ No newline at end of file diff --git a/run-ci.sh b/run-ci.sh index 76d6e92c7..80c11f7a8 100755 --- a/run-ci.sh +++ b/run-ci.sh @@ -3,8 +3,8 @@ if [ -d htmlcov ]; then rm -rf htmlcov; fi if [ -d icloud ]; then rm -rf icloud; fi if [ -d session_data ]; then rm -rf session_data; fi if [ -f icloud.log ]; then rm -f icloud.log; fi -echo "Linting ..." 
&& - pylint src/ tests/ && +echo "Ruffing ..." && + ruff check --fix && echo "Testing ..." && ENV_CONFIG_FILE_PATH=./tests/data/test_config.yaml pytest && echo "Reporting ..." && diff --git a/src/__init__.py b/src/__init__.py index 94990dedd..51b84f95d 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -1,4 +1,5 @@ """Root module.""" + __author__ = "Mandar Patil (mandarons@pm.me)" import logging @@ -18,9 +19,7 @@ ENV_CONFIG_FILE_PATH_KEY = "ENV_CONFIG_FILE_PATH" DEFAULT_LOGGER_LEVEL = "info" DEFAULT_LOG_FILE_NAME = "icloud.log" -DEFAULT_CONFIG_FILE_PATH = os.path.join( - os.path.dirname(os.path.dirname(__file__)), DEFAULT_CONFIG_FILE_NAME -) +DEFAULT_CONFIG_FILE_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), DEFAULT_CONFIG_FILE_NAME) DEFAULT_COOKIE_DIRECTORY = "/config/session_data" warnings.filterwarnings("ignore", category=DeprecationWarning) @@ -34,9 +33,7 @@ def read_config(config_path=DEFAULT_CONFIG_FILE_PATH): with open(file=config_path, encoding="utf-8") as config_file: config = YAML().load(config_file) config["app"]["credentials"]["username"] = ( - config["app"]["credentials"]["username"].strip() - if config["app"]["credentials"]["username"] is not None - else "" + config["app"]["credentials"]["username"].strip() if config["app"]["credentials"]["username"] is not None else "" ) return config @@ -48,14 +45,10 @@ def get_logger_config(config): return None config_app_logger = config["app"]["logger"] logger_config["level"] = ( - config_app_logger["level"].strip().lower() - if "level" in config_app_logger - else DEFAULT_LOGGER_LEVEL + config_app_logger["level"].strip().lower() if "level" in config_app_logger else DEFAULT_LOGGER_LEVEL ) logger_config["filename"] = ( - config_app_logger["filename"].strip().lower() - if "filename" in config_app_logger - else DEFAULT_LOG_FILE_NAME + config_app_logger["filename"].strip().lower() if "filename" in config_app_logger else DEFAULT_LOG_FILE_NAME ) return logger_config @@ -117,18 +110,16 @@ def get_logger(): file_handler = logging.FileHandler(logger_config["filename"]) file_handler.setFormatter( logging.Formatter( - "%(asctime)s :: %(levelname)s :: %(name)s :: %(filename)s :: %(lineno)d :: %(message)s" - ) + "%(asctime)s :: %(levelname)s :: %(name)s :: %(filename)s :: %(lineno)d :: %(message)s", + ), ) logger.addHandler(file_handler) - if not log_handler_exists( - logger=logger, handler_type=logging.StreamHandler, stream=sys.stdout - ): + if not log_handler_exists(logger=logger, handler_type=logging.StreamHandler, stream=sys.stdout): console_handler = logging.StreamHandler(sys.stdout) console_handler.setFormatter( ColorfulConsoleFormatter( - "%(asctime)s :: %(levelname)s :: %(name)s :: %(filename)s :: %(lineno)d :: %(message)s" - ) + "%(asctime)s :: %(levelname)s :: %(name)s :: %(filename)s :: %(lineno)d :: %(message)s", + ), ) logger.addHandler(console_handler) return logger diff --git a/src/config_parser.py b/src/config_parser.py index 954cbd6f8..be680f7db 100644 --- a/src/config_parser.py +++ b/src/config_parser.py @@ -1,4 +1,5 @@ """Config file parser.""" + __author__ = "Mandar Patil (mandarons@pm.me)" import os @@ -43,9 +44,7 @@ def get_username(config): username = None config_path = ["app", "credentials", "username"] if not traverse_config_path(config=config, config_path=config_path): - LOGGER.error( - f"username is missing in {config_path_to_string(config_path)}. Please set the username." - ) + LOGGER.error(f"username is missing in {config_path_to_string(config_path)}. 
Please set the username.") else: username = get_config_value(config=config, config_path=config_path) username = username.strip() @@ -62,7 +61,7 @@ def get_retry_login_interval(config): if not traverse_config_path(config=config, config_path=config_path): LOGGER.warning( f"retry_login_interval not found in {config_path_to_string(config_path=config_path)}." - + f" Using default {retry_login_interval} seconds ..." + + f" Using default {retry_login_interval} seconds ...", ) else: retry_login_interval = get_config_value(config=config, config_path=config_path) @@ -77,7 +76,7 @@ def get_drive_sync_interval(config): if not traverse_config_path(config=config, config_path=config_path): LOGGER.warning( f"sync_interval is not found in {config_path_to_string(config_path=config_path)}." - + f" Using default sync_interval: {sync_interval} seconds ..." + + f" Using default sync_interval: {sync_interval} seconds ...", ) else: sync_interval = get_config_value(config=config, config_path=config_path) @@ -92,7 +91,7 @@ def get_photos_sync_interval(config): if not traverse_config_path(config=config, config_path=config_path): LOGGER.warning( f"sync_interval is not found in {config_path_to_string(config_path=config_path)}." - + f" Using default sync_interval: {sync_interval} seconds ..." + + f" Using default sync_interval: {sync_interval} seconds ...", ) else: sync_interval = get_config_value(config=config, config_path=config_path) @@ -161,9 +160,7 @@ def get_smtp_password(config): password = None config_path = ["app", "smtp", "password"] if not traverse_config_path(config=config, config_path=config_path): - LOGGER.warning( - f"Warning: password is not found in {config_path_to_string(config_path)}" - ) + LOGGER.warning(f"Warning: password is not found in {config_path_to_string(config_path)}") else: password = get_config_value(config=config, config_path=config_path) return password @@ -174,9 +171,7 @@ def get_smtp_host(config): host = None config_path = ["app", "smtp", "host"] if not traverse_config_path(config=config, config_path=config_path): - LOGGER.warning( - f"Warning: host is not found in {config_path_to_string(config_path)}" - ) + LOGGER.warning(f"Warning: host is not found in {config_path_to_string(config_path)}") else: host = get_config_value(config=config, config_path=config_path) return host @@ -187,9 +182,7 @@ def get_smtp_port(config): port = None config_path = ["app", "smtp", "port"] if not traverse_config_path(config=config, config_path=config_path): - LOGGER.warning( - f"Warning: port is not found in {config_path_to_string(config_path)}" - ) + LOGGER.warning(f"Warning: port is not found in {config_path_to_string(config_path)}") else: port = get_config_value(config=config, config_path=config_path) return port @@ -200,9 +193,7 @@ def get_smtp_no_tls(config): no_tls = False config_path = ["app", "smtp", "no_tls"] if not traverse_config_path(config=config, config_path=config_path): - LOGGER.warning( - f"Warning: no_tls is not found in {config_path_to_string(config_path)}" - ) + LOGGER.warning(f"Warning: no_tls is not found in {config_path_to_string(config_path)}") else: no_tls = get_config_value(config=config, config_path=config_path) return no_tls @@ -216,13 +207,11 @@ def prepare_drive_destination(config): if not traverse_config_path(config=config, config_path=config_path): LOGGER.warning( f"Warning: destination is missing in {config_path_to_string(config_path)}." - + f" Using default drive destination: {drive_destination}." 
+ + f" Using default drive destination: {drive_destination}.", ) else: drive_destination = get_config_value(config=config, config_path=config_path) - drive_destination_path = os.path.abspath( - os.path.join(prepare_root_destination(config=config), drive_destination) - ) + drive_destination_path = os.path.abspath(os.path.join(prepare_root_destination(config=config), drive_destination)) os.makedirs(drive_destination_path, exist_ok=True) return drive_destination_path @@ -234,13 +223,11 @@ def get_drive_remove_obsolete(config): if not traverse_config_path(config=config, config_path=config_path): LOGGER.warning( f"Warning: remove_obsolete is not found in {config_path_to_string(config_path)}." - + " Not removing the obsolete files and folders." + + " Not removing the obsolete files and folders.", ) else: drive_remove_obsolete = get_config_value(config=config, config_path=config_path) - LOGGER.debug( - f"{'R' if drive_remove_obsolete else 'Not R'}emoving obsolete files and folders ..." - ) + LOGGER.debug(f"{'R' if drive_remove_obsolete else 'Not R'}emoving obsolete files and folders ...") return drive_remove_obsolete @@ -252,13 +239,11 @@ def prepare_photos_destination(config): if not traverse_config_path(config=config, config_path=config_path): LOGGER.warning( f"Warning: destination is missing in {photos_destination}." - + f" Using default photos destination: {config_path_to_string(config_path)}" + + f" Using default photos destination: {config_path_to_string(config_path)}", ) else: photos_destination = get_config_value(config=config, config_path=config_path) - photos_destination_path = os.path.abspath( - os.path.join(prepare_root_destination(config=config), photos_destination) - ) + photos_destination_path = os.path.abspath(os.path.join(prepare_root_destination(config=config), photos_destination)) os.makedirs(photos_destination_path, exist_ok=True) return photos_destination_path @@ -270,15 +255,11 @@ def get_photos_remove_obsolete(config): if not traverse_config_path(config=config, config_path=config_path): LOGGER.warning( f"Warning: remove_obsolete is not found in {config_path_to_string(config_path)}." - + " Not removing the obsolete photos." + + " Not removing the obsolete photos.", ) else: - photos_remove_obsolete = get_config_value( - config=config, config_path=config_path - ) - LOGGER.debug( - f"{'R' if photos_remove_obsolete else 'Not R'}emoving obsolete photos ..." - ) + photos_remove_obsolete = get_config_value(config=config, config_path=config_path) + LOGGER.debug(f"{'R' if photos_remove_obsolete else 'Not R'}emoving obsolete photos ...") return photos_remove_obsolete @@ -297,7 +278,7 @@ def get_photos_filters(config): if not traverse_config_path(config=config, config_path=config_path): LOGGER.warning( f"{config_path_to_string(config_path=config_path)} not found. \ - Downloading all libraries and albums with original size ..." + Downloading all libraries and albums with original size ...", ) return photos_filters @@ -308,13 +289,9 @@ def get_photos_filters(config): or not get_config_value(config=config, config_path=config_path) or len(get_config_value(config=config, config_path=config_path)) == 0 ): - LOGGER.warning( - f"{config_path_to_string(config_path=config_path)} not found. Downloading all libraries ..." - ) + LOGGER.warning(f"{config_path_to_string(config_path=config_path)} not found. 
Downloading all libraries ...") else: - photos_filters["libraries"] = get_config_value( - config=config, config_path=config_path - ) + photos_filters["libraries"] = get_config_value(config=config, config_path=config_path) # Parse albums config_path[2] = "albums" @@ -323,19 +300,15 @@ def get_photos_filters(config): or not get_config_value(config=config, config_path=config_path) or len(get_config_value(config=config, config_path=config_path)) == 0 ): - LOGGER.warning( - f"{config_path_to_string(config_path=config_path)} not found. Downloading all albums ..." - ) + LOGGER.warning(f"{config_path_to_string(config_path=config_path)} not found. Downloading all albums ...") else: - photos_filters["albums"] = get_config_value( - config=config, config_path=config_path - ) + photos_filters["albums"] = get_config_value(config=config, config_path=config_path) # Parse file sizes config_path[2] = "file_sizes" if not traverse_config_path(config=config, config_path=config_path): LOGGER.warning( - f"{config_path_to_string(config_path=config_path)} not found. Downloading original size photos ..." + f"{config_path_to_string(config_path=config_path)} not found. Downloading original size photos ...", ) else: file_sizes = get_config_value(config=config, config_path=config_path) @@ -343,7 +316,7 @@ def get_photos_filters(config): if file_size not in valid_file_sizes: LOGGER.warning( f"Skipping the invalid file size {file_size}, " - + f"valid file sizes are {','.join(valid_file_sizes)}." + + f"valid file sizes are {','.join(valid_file_sizes)}.", ) file_sizes.remove(file_size) if len(file_sizes) == 0: @@ -357,13 +330,9 @@ def get_photos_filters(config): or not get_config_value(config=config, config_path=config_path) or len(get_config_value(config=config, config_path=config_path)) == 0 ): - LOGGER.warning( - f"{config_path_to_string(config_path=config_path)} not found. Downloading all extensions ..." - ) + LOGGER.warning(f"{config_path_to_string(config_path=config_path)} not found. Downloading all extensions ...") else: - photos_filters["extensions"] = get_config_value( - config=config, config_path=config_path - ) + photos_filters["extensions"] = get_config_value(config=config, config_path=config_path) return photos_filters @@ -373,15 +342,13 @@ def get_region(config): region = "global" config_path = ["app", "region"] if not traverse_config_path(config=config, config_path=config_path): - LOGGER.warning( - f"{config_path_to_string(config_path=config_path)} not found. Using default value - global ..." - ) + LOGGER.warning(f"{config_path_to_string(config_path=config_path)} not found. Using default value - global ...") else: region = get_config_value(config=config, config_path=config_path) if region not in ["global", "china"]: LOGGER.error( f"{config_path_to_string(config_path=config_path)} is invalid. \ - Valid values are - global or china. Using default value - global ..." + Valid values are - global or china. Using default value - global ...", ) region = "global" @@ -403,9 +370,7 @@ def get_telegram_bot_token(config): bot_token = None config_path = ["app", "telegram", "bot_token"] if not traverse_config_path(config=config, config_path=config_path): - LOGGER.warning( - f"Warning: bot_token is not found in {config_path_to_string(config_path)}." 
- ) + LOGGER.warning(f"Warning: bot_token is not found in {config_path_to_string(config_path)}.") else: bot_token = get_config_value(config=config, config_path=config_path) return bot_token @@ -416,9 +381,7 @@ def get_telegram_chat_id(config): chat_id = None config_path = ["app", "telegram", "chat_id"] if not traverse_config_path(config=config, config_path=config_path): - LOGGER.warning( - f"Warning: chat_id is not found in {config_path_to_string(config_path)}." - ) + LOGGER.warning(f"Warning: chat_id is not found in {config_path_to_string(config_path)}.") else: chat_id = get_config_value(config=config, config_path=config_path) return chat_id @@ -430,9 +393,7 @@ def get_discord_webhook_url(config): webhook_url = None config_path = ["app", "discord", "webhook_url"] if not traverse_config_path(config=config, config_path=config_path): - LOGGER.warning( - f"Warning: webhook_url is not found in {config_path_to_string(config_path)}." - ) + LOGGER.warning(f"Warning: webhook_url is not found in {config_path_to_string(config_path)}.") else: webhook_url = get_config_value(config=config, config_path=config_path) return webhook_url @@ -444,9 +405,7 @@ def get_discord_username(config): username = None config_path = ["app", "discord", "username"] if not traverse_config_path(config=config, config_path=config_path): - LOGGER.warning( - f"Warning: username is not found in {config_path_to_string(config_path)}." - ) + LOGGER.warning(f"Warning: username is not found in {config_path_to_string(config_path)}.") else: username = get_config_value(config=config, config_path=config_path) return username diff --git a/src/email_message.py b/src/email_message.py index bd94a151d..e3a49997f 100644 --- a/src/email_message.py +++ b/src/email_message.py @@ -1,4 +1,5 @@ """Email message module.""" + import time import uuid from email.mime.text import MIMEText @@ -13,17 +14,15 @@ def __init__(self, **kwargs): for item in kwargs.items(): params[item[0]] = item[1] - self.to = params.get("to", None) - self.rto = params.get("rto", None) - self.cc = params.get("cc", None) - self.bcc = params.get("bcc", None) - self.sender = params.get("from", None) + self.to = params.get("to") + self.rto = params.get("rto") + self.cc = params.get("cc") + self.bcc = params.get("bcc") + self.sender = params.get("from") self.subject = params.get("subject", "") - self.body = params.get("body", None) - self.html = params.get("html", None) - self.date = params.get( - "date", time.strftime("%a, %d %b %Y %H:%M:%S %z", time.gmtime()) - ) + self.body = params.get("body") + self.html = params.get("html") + self.date = params.get("date", time.strftime("%a, %d %b %Y %H:%M:%S %z", time.gmtime())) self.charset = params.get("charset", "us-ascii") self.headers = params.get("headers", {}) diff --git a/src/main.py b/src/main.py index 02fbf52eb..cf34ba604 100644 --- a/src/main.py +++ b/src/main.py @@ -1,4 +1,5 @@ """Main module.""" + __author__ = "Mandar Patil (mandarons@pm.me)" from src import sync diff --git a/src/notify.py b/src/notify.py index 71dd8f829..d66282248 100644 --- a/src/notify.py +++ b/src/notify.py @@ -1,4 +1,5 @@ """Send an email if the 2FA is expired.""" + import datetime import smtplib @@ -30,9 +31,7 @@ def notify_telegram(config, message, last_send=None, dry_run=False): ): sent_on = None else: - LOGGER.warning( - "Not sending 2FA notification because Telegram is not configured." 
- ) + LOGGER.warning("Not sending 2FA notification because Telegram is not configured.") return sent_on @@ -75,9 +74,7 @@ def notify_discord(config, message, last_send=None, dry_run=False): if not post_message_to_discord(webhook_url, username, message): sent_on = None else: - LOGGER.warning( - "Not sending 2FA notification because Discord is not configured." - ) + LOGGER.warning("Not sending 2FA notification because Discord is not configured.") return sent_on @@ -89,9 +86,7 @@ def send(config, username, last_send=None, dry_run=False): `docker exec -it --user=abc icloud /bin/sh -c "icloud --session-directory=/config/session_data --username={username}"`.""" subject = f"icloud-docker: Two step authentication is required for {username}" - notify_telegram( - config=config, message=message, last_send=last_send, dry_run=dry_run - ) + notify_telegram(config=config, message=message, last_send=last_send, dry_run=dry_run) notify_discord(config=config, message=message, last_send=last_send, dry_run=dry_run) email = config_parser.get_smtp_email(config=config) to_email = config_parser.get_smtp_to_email(config=config) @@ -126,7 +121,7 @@ def send(config, username, last_send=None, dry_run=False): smtp.quit() except Exception as e: sent_on = None - LOGGER.error(f"Failed to send email: {str(e)}.") + LOGGER.error(f"Failed to send email: {e!s}.") else: LOGGER.warning("Not sending 2FA notification because SMTP is not configured") diff --git a/src/sync.py b/src/sync.py index 3e00a0a5c..98b0fd268 100644 --- a/src/sync.py +++ b/src/sync.py @@ -1,4 +1,5 @@ """Sync module.""" + __author__ = "Mandar Patil " import datetime import os @@ -57,45 +58,31 @@ def sync(): sleep_for = 10 while True: - config = read_config( - config_path=os.environ.get( - ENV_CONFIG_FILE_PATH_KEY, DEFAULT_CONFIG_FILE_PATH - ) - ) + config = read_config(config_path=os.environ.get(ENV_CONFIG_FILE_PATH_KEY, DEFAULT_CONFIG_FILE_PATH)) alive(config=config) username = config_parser.get_username(config=config) if username: try: if ENV_ICLOUD_PASSWORD_KEY in os.environ: password = os.environ.get(ENV_ICLOUD_PASSWORD_KEY) - utils.store_password_in_keyring( - username=username, password=password - ) + utils.store_password_in_keyring(username=username, password=password) else: password = utils.get_password_from_keyring(username=username) server_region = config_parser.get_region(config=config) - api = get_api_instance( - username=username, password=password, server_region=server_region - ) + api = get_api_instance(username=username, password=password, server_region=server_region) if not api.requires_2sa: if "drive" in config and enable_sync_drive: LOGGER.info("Syncing drive...") sync_drive.sync_drive(config=config, drive=api.drive) LOGGER.info("Drive synced") - drive_sync_interval = config_parser.get_drive_sync_interval( - config=config - ) + drive_sync_interval = config_parser.get_drive_sync_interval(config=config) if "photos" in config and enable_sync_photos: LOGGER.info("Syncing photos...") sync_photos.sync_photos(config=config, photos=api.photos) LOGGER.info("Photos synced") - photos_sync_interval = config_parser.get_photos_sync_interval( - config=config - ) + photos_sync_interval = config_parser.get_photos_sync_interval(config=config) if "drive" not in config and "photos" not in config: - LOGGER.warning( - "Nothing to sync. Please add drive: and/or photos: section in config.yaml file." - ) + LOGGER.warning("Nothing to sync. Please add drive: and/or photos: section in config.yaml file.") else: LOGGER.error("Error: 2FA is required. 
Please log in.") # Retry again @@ -103,30 +90,20 @@ def sync(): if sleep_for < 0: LOGGER.info("retry_login_interval is < 0, exiting ...") break - next_sync = ( - datetime.datetime.now() + datetime.timedelta(seconds=sleep_for) - ).strftime("%c") + next_sync = (datetime.datetime.now() + datetime.timedelta(seconds=sleep_for)).strftime("%c") LOGGER.info(f"Retrying login at {next_sync} ...") - last_send = notify.send( - config=config, username=username, last_send=last_send - ) + last_send = notify.send(config=config, username=username, last_send=last_send) sleep(sleep_for) continue except exceptions.ICloudPyNoStoredPasswordAvailableException: - LOGGER.error( - "Password is not stored in keyring. Please save the password in keyring." - ) + LOGGER.error("Password is not stored in keyring. Please save the password in keyring.") sleep_for = config_parser.get_retry_login_interval(config=config) if sleep_for < 0: LOGGER.info("retry_login_interval is < 0, exiting ...") break - next_sync = ( - datetime.datetime.now() + datetime.timedelta(seconds=sleep_for) - ).strftime("%c") + next_sync = (datetime.datetime.now() + datetime.timedelta(seconds=sleep_for)).strftime("%c") LOGGER.info(f"Retrying login at {next_sync} ...") - last_send = notify.send( - config=config, username=username, last_send=last_send - ) + last_send = notify.send(config=config, username=username, last_send=last_send) sleep(sleep_for) continue @@ -138,11 +115,7 @@ def sync(): sleep_for = drive_sync_interval enable_sync_drive = True enable_sync_photos = False - elif ( - "drive" in config - and "photos" in config - and drive_sync_interval <= photos_sync_interval - ): + elif "drive" in config and "photos" in config and drive_sync_interval <= photos_sync_interval: sleep_for = photos_sync_interval - drive_sync_interval photos_sync_interval -= drive_sync_interval enable_sync_drive = True @@ -152,9 +125,7 @@ def sync(): drive_sync_interval -= photos_sync_interval enable_sync_drive = False enable_sync_photos = True - next_sync = ( - datetime.datetime.now() + datetime.timedelta(seconds=sleep_for) - ).strftime("%c") + next_sync = (datetime.datetime.now() + datetime.timedelta(seconds=sleep_for)).strftime("%c") LOGGER.info(f"Resyncing at {next_sync} ...") if ( config_parser.get_drive_sync_interval(config=config) < 0 diff --git a/src/sync_drive.py b/src/sync_drive.py index b81a08377..c8cfc8c9e 100644 --- a/src/sync_drive.py +++ b/src/sync_drive.py @@ -1,4 +1,5 @@ """Sync drive module.""" + __author__ = "Mandar Patil (mandarons@pm.me)" import gzip @@ -47,16 +48,8 @@ def wanted_folder(filters, ignore, root, folder_path): # Something to filter folder_path = Path(folder_path) for folder in filters: - child_path = Path( - os.path.join( - os.path.abspath(root), str(folder).removeprefix("/").removesuffix("/") - ) - ) - if ( - folder_path in child_path.parents - or child_path in folder_path.parents - or folder_path == child_path - ): + child_path = Path(os.path.join(os.path.abspath(root), str(folder).removeprefix("/").removesuffix("/"))) + if folder_path in child_path.parents or child_path in folder_path.parents or folder_path == child_path: return True return False @@ -75,11 +68,7 @@ def wanted_parent_folder(filters, ignore, root, folder_path): return True folder_path = Path(folder_path) for folder in filters: - child_path = Path( - os.path.join( - os.path.abspath(root), folder.removeprefix("/").removesuffix("/") - ) - ) + child_path = Path(os.path.join(os.path.abspath(root), folder.removeprefix("/").removesuffix("/"))) if child_path in folder_path.parents 
or folder_path == child_path: return True return False @@ -91,9 +80,7 @@ def process_folder(item, destination_path, filters, ignore, root): return None new_directory = os.path.join(destination_path, item.name) new_directory_norm = unicodedata.normalize("NFC", new_directory) - if not wanted_folder( - filters=filters, ignore=ignore, folder_path=new_directory_norm, root=root - ): + if not wanted_folder(filters=filters, ignore=ignore, folder_path=new_directory_norm, root=root): LOGGER.debug(f"Skipping the unwanted folder {new_directory} ...") return None os.makedirs(new_directory_norm, exist_ok=True) @@ -105,25 +92,16 @@ def package_exists(item, local_package_path): if item and local_package_path and os.path.isdir(local_package_path): local_package_modified_time = int(os.path.getmtime(local_package_path)) remote_package_modified_time = int(item.date_modified.timestamp()) - local_package_size = sum( - f.stat().st_size - for f in Path(local_package_path).glob("**/*") - if f.is_file() - ) + local_package_size = sum(f.stat().st_size for f in Path(local_package_path).glob("**/*") if f.is_file()) remote_package_size = item.size - if ( - local_package_modified_time == remote_package_modified_time - and local_package_size == remote_package_size - ): - LOGGER.debug( - f"No changes detected. Skipping the package {local_package_path} ..." - ) + if local_package_modified_time == remote_package_modified_time and local_package_size == remote_package_size: + LOGGER.debug(f"No changes detected. Skipping the package {local_package_path} ...") return True else: LOGGER.info( f"Changes detected: local_modified_time is {local_package_modified_time}, " + f"remote_modified_time is {remote_package_modified_time}, " - + f"local_package_size is {local_package_size} and remote_package_size is {remote_package_size}." + + f"local_package_size is {local_package_size} and remote_package_size is {remote_package_size}.", ) rmtree(local_package_path) else: @@ -149,7 +127,7 @@ def file_exists(item, local_file): LOGGER.debug( f"Changes detected: local_modified_time is {local_file_modified_time}, " + f"remote_modified_time is {remote_file_modified_time}, " - + f"local_file_size is {local_file_size} and remote_file_size is {remote_file_size}." + + f"local_file_size is {local_file_size} and remote_file_size is {remote_file_size}.", ) else: LOGGER.debug(f"File {local_file} does not exist locally.") @@ -160,7 +138,7 @@ def process_package(local_file): """Process the package.""" archive_file = local_file magic_object = magic.Magic(mime=True) - if "application/zip" == magic_object.from_file(filename=local_file): + if magic_object.from_file(filename=local_file) == "application/zip": archive_file += ".zip" os.rename(local_file, archive_file) LOGGER.info(f"Unpacking {archive_file} to {os.path.dirname(archive_file)}") @@ -170,7 +148,7 @@ def process_package(local_file): os.rename(local_file, normalized_path) local_file = normalized_path os.remove(archive_file) - elif "application/gzip" == magic_object.from_file(filename=local_file): + elif magic_object.from_file(filename=local_file) == "application/gzip": archive_file += ".gz" os.rename(local_file, archive_file) LOGGER.info(f"Unpacking {archive_file} to {os.path.dirname(local_file)}") @@ -181,7 +159,7 @@ def process_package(local_file): process_package(local_file=local_file) else: LOGGER.error( - f"Unhandled file type - cannot unpack the package {magic_object.from_file(filename=archive_file)}." 
+ f"Unhandled file type - cannot unpack the package {magic_object.from_file(filename=archive_file)}.", ) return False LOGGER.info(f"Successfully unpacked the package {archive_file}.") @@ -211,7 +189,7 @@ def download_file(item, local_file): item_modified_time = time.mktime(item.date_modified.timetuple()) os.utime(local_file, (item_modified_time, item_modified_time)) except (exceptions.ICloudPyAPIResponseException, FileNotFoundError, Exception) as e: - LOGGER.error(f"Failed to download {local_file}: {str(e)}") + LOGGER.error(f"Failed to download {local_file}: {e!s}") return False return local_file @@ -281,9 +259,7 @@ def sync_directory( new_folder = process_folder( item=item, destination_path=destination_path, - filters=filters["folders"] - if filters and "folders" in filters - else None, + filters=filters["folders"] if filters and "folders" in filters else None, ignore=ignore, root=root, ) @@ -300,16 +276,14 @@ def sync_directory( top=False, filters=filters, ignore=ignore, - ) + ), ) except Exception: # Continue execution to next item, without crashing the app pass elif item.type == "file": if wanted_parent_folder( - filters=filters["folders"] - if filters and "folders" in filters - else None, + filters=filters["folders"] if filters and "folders" in filters else None, ignore=ignore, root=root, folder_path=destination_path, @@ -318,9 +292,7 @@ def sync_directory( process_file( item=item, destination_path=destination_path, - filters=filters["file_extensions"] - if filters and "file_extensions" in filters - else None, + filters=filters["file_extensions"] if filters and "file_extensions" in filters else None, ignore=ignore, files=files, ) @@ -341,11 +313,7 @@ def sync_drive(config, drive): root=destination_path, items=drive.dir(), top=True, - filters=config["drive"]["filters"] - if "drive" in config and "filters" in config["drive"] - else None, - ignore=config["drive"]["ignore"] - if "drive" in config and "ignore" in config["drive"] - else None, + filters=config["drive"]["filters"] if "drive" in config and "filters" in config["drive"] else None, + ignore=config["drive"]["ignore"] if "drive" in config and "ignore" in config["drive"] else None, remove=config_parser.get_drive_remove_obsolete(config=config), ) diff --git a/src/sync_photos.py b/src/sync_photos.py index 1d631a189..1cadad678 100644 --- a/src/sync_photos.py +++ b/src/sync_photos.py @@ -1,4 +1,5 @@ """Sync photos module.""" + ___author___ = "Mandar Patil " import base64 import os @@ -55,10 +56,7 @@ def get_name_and_extension(photo, file_size): if filetype in original_alt_filetype_to_extension: extension = original_alt_filetype_to_extension[filetype] else: - LOGGER.warning( - f"Unknown filetype {filetype} for " - f"original_alt version of {filename}" - ) + LOGGER.warning(f"Unknown filetype {filetype} for original_alt version of {filename}") return name, extension @@ -79,9 +77,7 @@ def generate_file_name(photo, file_size, destination_path, folder_format): file_path = os.path.join(destination_path, filename) file_size_path = os.path.join( destination_path, - f'{"__".join([name, file_size])}' - if extension == "" - else f'{"__".join([name, file_size])}.{extension}', + f'{"__".join([name, file_size])}' if extension == "" else f'{"__".join([name, file_size])}.{extension}', ) file_size_id_path = os.path.join( destination_path, @@ -121,9 +117,7 @@ def photo_exists(photo, file_size, local_path): LOGGER.debug(f"No changes detected. 
Skipping the file {local_path} ...") return True else: - LOGGER.debug( - f"Change detected: local_file_size is {local_size} and remote_file_size is {remote_size}." - ) + LOGGER.debug(f"Change detected: local_file_size is {local_size} and remote_file_size is {remote_size}.") return False @@ -139,7 +133,7 @@ def download_photo(photo, file_size, destination_path): local_modified_time = time.mktime(photo.added_date.timetuple()) os.utime(destination_path, (local_modified_time, local_modified_time)) except (exceptions.ICloudPyAPIResponseException, FileNotFoundError, Exception) as e: - LOGGER.error(f"Failed to download {destination_path}: {str(e)}") + LOGGER.error(f"Failed to download {destination_path}: {e!s}") return False return True @@ -153,9 +147,7 @@ def process_photo(photo, file_size, destination_path, files, folder_format): folder_format=folder_format, ) if file_size not in photo.versions: - LOGGER.warning( - f"File size {file_size} not found on server. Skipping the photo {photo_path} ..." - ) + LOGGER.warning(f"File size {file_size} not found on server. Skipping the photo {photo_path} ...") return False if files is not None: files.add(photo_path) @@ -165,9 +157,7 @@ def process_photo(photo, file_size, destination_path, files, folder_format): return True -def sync_album( - album, destination_path, file_sizes, extensions=None, files=None, folder_format=None -): +def sync_album(album, destination_path, file_sizes, extensions=None, files=None, folder_format=None): """Sync given album.""" if album is None or destination_path is None or file_sizes is None: return None @@ -212,9 +202,7 @@ def sync_photos(config, photos): filters = config_parser.get_photos_filters(config=config) files = set() download_all = config_parser.get_photos_all_albums(config=config) - libraries = ( - filters["libraries"] if filters["libraries"] is not None else photos.libraries - ) + libraries = filters["libraries"] if filters["libraries"] is not None else photos.libraries folder_format = config_parser.get_photos_folder_format(config=config) for library in libraries: if download_all and library == "PrimarySync": @@ -251,9 +239,7 @@ def sync_photos(config, photos): folder_format=folder_format, ) else: - LOGGER.warning( - f"Album {album} not found in {library}. Skipping the album {album} ..." - ) + LOGGER.warning(f"Album {album} not found in {library}. 
Skipping the album {album} ...") else: sync_album( album=photos.libraries[library].all, diff --git a/src/usage.py b/src/usage.py index c12a7dbba..f99f492c9 100644 --- a/src/usage.py +++ b/src/usage.py @@ -1,4 +1,5 @@ """To record usage of the app.""" + import json import os from datetime import datetime, timedelta @@ -62,11 +63,7 @@ def record_new_installation(previous_id=None): def already_installed(cached_data): """Check if already installed.""" - return ( - "id" in cached_data - and "app_version" in cached_data - and cached_data["app_version"] == APP_VERSION - ) + return "id" in cached_data and "app_version" in cached_data and cached_data["app_version"] == APP_VERSION def install(cached_data): diff --git a/tests/__init__.py b/tests/__init__.py index 8bc3cdda5..41f09d9a4 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,4 +1,5 @@ """Tests module.""" + __author__ = "Mandar Patil (mandarons@pm.me)" import os @@ -18,9 +19,7 @@ def update_config(data): """Update config test config path.""" - return YAML().dump( - data=data, stream=open(file=CONFIG_PATH, mode="w", encoding="utf-8") - ) + return YAML().dump(data=data, stream=open(file=CONFIG_PATH, mode="w", encoding="utf-8")) def mocked_usage_post(*args, **kwargs): diff --git a/tests/data/__init__.py b/tests/data/__init__.py index b1ee8a520..6a446aad2 100644 --- a/tests/data/__init__.py +++ b/tests/data/__init__.py @@ -1,4 +1,5 @@ """Fixtures for tests.""" + __author__ = "Mandar Patil (mandarons@pm.me)" import json @@ -80,7 +81,7 @@ "isEligibleForZoneShare": True, "isEligibleForHierarchicalShare": False, }, - ] + ], } LOGIN_WORKING = { @@ -213,19 +214,13 @@ "pcsEnabled": True, "configBag": { "urls": { - "accountCreateUI": "https://appleid.apple.com/widget/account/?widgetKey=" - + WIDGET_KEY - + "#!create", - "accountLoginUI": "https://idmsa.apple.com/appleauth/auth/signin?widgetKey=" - + WIDGET_KEY, + "accountCreateUI": "https://appleid.apple.com/widget/account/?widgetKey=" + WIDGET_KEY + "#!create", + "accountLoginUI": "https://idmsa.apple.com/appleauth/auth/signin?widgetKey=" + WIDGET_KEY, "accountLogin": "https://setup.icloud.com/setup/ws/1/accountLogin", - "accountRepairUI": "https://appleid.apple.com/widget/account/?widgetKey=" - + WIDGET_KEY - + "#!repair", + "accountRepairUI": "https://appleid.apple.com/widget/account/?widgetKey=" + WIDGET_KEY + "#!repair", "downloadICloudTerms": "https://setup.icloud.com/setup/ws/1/downloadLiteTerms", "repairDone": "https://setup.icloud.com/setup/ws/1/repairDone", - "accountAuthorizeUI": "https://idmsa.apple.com/appleauth/auth/authorize/signin?client_id=" - + WIDGET_KEY, + "accountAuthorizeUI": "https://idmsa.apple.com/appleauth/auth/authorize/signin?client_id=" + WIDGET_KEY, "vettingUrlForEmail": "https://id.apple.com/IDMSEmailVetting/vetShareEmail", "accountCreate": "https://setup.icloud.com/setup/ws/1/createLiteAccount", "getICloudTerms": "https://setup.icloud.com/setup/ws/1/getTerms", @@ -406,19 +401,13 @@ "pcsEnabled": True, "configBag": { "urls": { - "accountCreateUI": "https://appleid.apple.com/widget/account/?widgetKey=" - + WIDGET_KEY - + "#!create", - "accountLoginUI": "https://idmsa.apple.com/appleauth/auth/signin?widgetKey=" - + WIDGET_KEY, + "accountCreateUI": "https://appleid.apple.com/widget/account/?widgetKey=" + WIDGET_KEY + "#!create", + "accountLoginUI": "https://idmsa.apple.com/appleauth/auth/signin?widgetKey=" + WIDGET_KEY, "accountLogin": "https://setup.icloud.com/setup/ws/1/accountLogin", - "accountRepairUI": "https://appleid.apple.com/widget/account/?widgetKey=" - + 
WIDGET_KEY - + "#!repair", + "accountRepairUI": "https://appleid.apple.com/widget/account/?widgetKey=" + WIDGET_KEY + "#!repair", "downloadICloudTerms": "https://setup.icloud.com/setup/ws/1/downloadLiteTerms", "repairDone": "https://setup.icloud.com/setup/ws/1/repairDone", - "accountAuthorizeUI": "https://idmsa.apple.com/appleauth/auth/authorize/signin?client_id=" - + WIDGET_KEY, + "accountAuthorizeUI": "https://idmsa.apple.com/appleauth/auth/authorize/signin?client_id=" + WIDGET_KEY, "vettingUrlForEmail": "https://id.apple.com/IDMSEmailVetting/vetShareEmail", "accountCreate": "https://setup.icloud.com/setup/ws/1/createLiteAccount", "getICloudTerms": "https://setup.icloud.com/setup/ws/1/getTerms", @@ -555,7 +544,7 @@ "webPrefs": { "id": "web_prefs", "selectedDeviceId": "iPhone4,1", - } + }, }, "content": [ { @@ -2719,20 +2708,18 @@ "type": "FILE", }, ], - } + }, ], "numberOfItems": 1, "status": "OK", }, ], "numberOfItems": 5, - } + }, ] # App specific folder (Keynote, Numbers, Pages, Preview ...) type=APP_LIBRARY -DRIVE_ROOT_INVALID = [ - {"drivewsid": "FOLDER::com.apple.CloudDocs::documents", "status": "ID_INVALID"} -] +DRIVE_ROOT_INVALID = [{"drivewsid": "FOLDER::com.apple.CloudDocs::documents", "status": "ID_INVALID"}] DRIVE_FOLDER_WORKING = [ { @@ -2762,7 +2749,7 @@ "shareCount": 0, "shareAliasCount": 0, "directChildrenCount": 2, - } + }, ], "numberOfItems": 1, }, @@ -3280,9 +3267,9 @@ "size": 14, "etag": "4ioq::4eu3", "type": "FILE", - } + }, ], - } + }, ], "numberOfItems": 1, "status": "OK", @@ -3319,7 +3306,7 @@ "Alternatives", "000", "WindowImage.jpg", - ) + ), ), "etag": "2k::2j", "extension": "pdf", @@ -3343,7 +3330,7 @@ "Alternatives", "000", "WindowImage.jpg", - ) + ), ), "etag": "32::2x", "extension": "pdf", @@ -3353,7 +3340,7 @@ }, ], "numberOfItems": 2, - } + }, ] DRIVE_SUBFOLDER_UNWANTED_WORKING = [ @@ -3372,7 +3359,7 @@ "directChildrenCount": 0, "items": [], "numberOfItems": 0, - } + }, ] DRIVE_PACKAGE_SPECIAL_CHARS_WORKING = { @@ -3693,7 +3680,7 @@ "ownerRecordName": "_fvhhqlzef1uvsgxnrw119mylkpjut1a0", "zoneType": "REGULAR_CUSTOM_ZONE", }, - } + }, ], "syncToken": "AQAAAAAAArKjf//////////fSxWSKv5JfZASQT7N0a1m", } @@ -3757,10 +3744,7 @@ def request(self, method, url, **kwargs): if self.service.auth_endpoint in url: if "signin" in url and method == "POST": - if ( - data.get("accountName") not in VALID_USERS - or data.get("password") != VALID_PASSWORD - ): + if data.get("accountName") not in VALID_USERS or data.get("password") != VALID_PASSWORD: self._raise_error(None, "Unknown reason") if data.get("accountName") == REQUIRES_2FA_USER: self.service.session_data["session_token"] = REQUIRES_2FA_TOKEN @@ -3788,31 +3772,19 @@ def request(self, method, url, **kwargs): return ResponseMock(ACCOUNT_STORAGE_WORKING) # Drive - if ( - "retrieveItemDetailsInFolders" in url - and method == "POST" - and data[0].get("drivewsid") - ): + if "retrieveItemDetailsInFolders" in url and method == "POST" and data[0].get("drivewsid"): if data[0].get("drivewsid") == "FOLDER::com.apple.CloudDocs::root": return ResponseMock(DRIVE_ROOT_WORKING) if data[0].get("drivewsid") == "FOLDER::com.apple.CloudDocs::documents": return ResponseMock(DRIVE_ROOT_INVALID) if ( - data[0].get("drivewsid") - == "FOLDER::com.apple.CloudDocs::1C7F1760-D940-480F-8C4F-005824A4E05B" - or data[0].get("drivewsid") - == "FOLDER::com.apple.CloudDocs::1C7F1760-D940-480F-8C4F-005824A4E05D" + data[0].get("drivewsid") == "FOLDER::com.apple.CloudDocs::1C7F1760-D940-480F-8C4F-005824A4E05B" + or data[0].get("drivewsid") == 
"FOLDER::com.apple.CloudDocs::1C7F1760-D940-480F-8C4F-005824A4E05D" ): return ResponseMock(DRIVE_FOLDER_WORKING) - if ( - data[0].get("drivewsid") - == "FOLDER::com.apple.CloudDocs::D5AA0425-E84F-4501-AF5D-60F1D92648CF" - ): + if data[0].get("drivewsid") == "FOLDER::com.apple.CloudDocs::D5AA0425-E84F-4501-AF5D-60F1D92648CF": return ResponseMock(DRIVE_SUBFOLDER_WORKING) - if ( - data[0].get("drivewsid") - == "FOLDER::com.apple.CloudDocs::1C7F1760-D940-480F-8C4F-005824A4E05C" - ): + if data[0].get("drivewsid") == "FOLDER::com.apple.CloudDocs::1C7F1760-D940-480F-8C4F-005824A4E05C": return ResponseMock(DRIVE_SUBFOLDER_UNWANTED_WORKING) # Drive download if "com.apple.CloudDocs/download/by_id" in url and method == "GET": @@ -3854,9 +3826,7 @@ def request(self, method, url, **kwargs): {}, url="/packageDownload?", raw=open( - os.path.join( - os.path.dirname(__file__), "Fotoksiążka-Wzór.xmcf.zip" - ), + os.path.join(os.path.dirname(__file__), "Fotoksiążka-Wzór.xmcf.zip"), "rb", ), ) @@ -3889,44 +3859,23 @@ def request(self, method, url, **kwargs): if url.endswith("remapEnums=True&getCurrentSyncToken=True"): if data.get("query").get("recordType") == "CheckIndexingState": return ResponseMock( - photos_data.DATA[ - "query?remapEnums=True&getCurrentSyncToken=True" - ][0]["response"] + photos_data.DATA["query?remapEnums=True&getCurrentSyncToken=True"][0]["response"], ) - if ( - data.get("query").get("recordType") - == "CPLAssetAndMasterHiddenByAssetDate" - ): + if data.get("query").get("recordType") == "CPLAssetAndMasterHiddenByAssetDate": return ResponseMock( - photos_data.DATA[ - "query?remapEnums=True&getCurrentSyncToken=True" - ][5]["response"] + photos_data.DATA["query?remapEnums=True&getCurrentSyncToken=True"][5]["response"], ) - if ( - data.get("query").get("recordType") - == "CPLAssetAndMasterDeletedByExpungedDate" - ): + if data.get("query").get("recordType") == "CPLAssetAndMasterDeletedByExpungedDate": return ResponseMock( - photos_data.DATA[ - "query?remapEnums=True&getCurrentSyncToken=True" - ][5]["response"] + photos_data.DATA["query?remapEnums=True&getCurrentSyncToken=True"][5]["response"], ) - if ( - data.get("query").get("recordType") - == "CPLBurstStackAssetAndMasterByAssetDate" - ): + if data.get("query").get("recordType") == "CPLBurstStackAssetAndMasterByAssetDate": return ResponseMock( - photos_data.DATA[ - "query?remapEnums=True&getCurrentSyncToken=True" - ][5]["response"] + photos_data.DATA["query?remapEnums=True&getCurrentSyncToken=True"][5]["response"], ) - if data.get("query").get("recordType") in ( - "CPLAssetAndMasterInSmartAlbumByAssetDate" - ): - if "filterBy" in data["query"] and data.get("query").get( - "filterBy" - )[2]["fieldValue"]["value"] in ( + if data.get("query").get("recordType") in ("CPLAssetAndMasterInSmartAlbumByAssetDate"): + if "filterBy" in data["query"] and data.get("query").get("filterBy")[2]["fieldValue"]["value"] in ( "TIMELAPSE", "LIVE", "VIDEO", @@ -3937,19 +3886,14 @@ def request(self, method, url, **kwargs): "PANORAMA", ): return ResponseMock( - photos_data.DATA[ - "query?remapEnums=True&getCurrentSyncToken=True" - ][5]["response"] + photos_data.DATA["query?remapEnums=True&getCurrentSyncToken=True"][5]["response"], ) if ( "filterBy" in data["query"] - and data.get("query").get("filterBy")[2]["fieldValue"]["value"] - == "VIDEO" + and data.get("query").get("filterBy")[2]["fieldValue"]["value"] == "VIDEO" ): return ResponseMock( - photos_data.DATA[ - "query?remapEnums=True&getCurrentSyncToken=True" - ][5]["response"] + 
photos_data.DATA["query?remapEnums=True&getCurrentSyncToken=True"][5]["response"], ) if data.get("query").get("recordType") == "CPLAlbumByPositionLive": if ( @@ -3958,9 +3902,7 @@ def request(self, method, url, **kwargs): == "E4RT4FB7-4A35-4958-1D42-5769E66BE407" ): return ResponseMock( - photos_data.DATA[ - "query?remapEnums=True&getCurrentSyncToken=True" - ][4]["response"] + photos_data.DATA["query?remapEnums=True&getCurrentSyncToken=True"][4]["response"], ) if ( @@ -3969,9 +3911,7 @@ def request(self, method, url, **kwargs): == "CB3DB78F-D683-42D5-A340-A5DECC7397F6" ): return ResponseMock( - photos_data.DATA[ - "query?remapEnums=True&getCurrentSyncToken=True" - ][5]["response"] + photos_data.DATA["query?remapEnums=True&getCurrentSyncToken=True"][5]["response"], ) if ( "filterBy" in data["query"] @@ -3979,9 +3919,7 @@ def request(self, method, url, **kwargs): == "E803E065-D8A4-4398-DE23-23F8FD0886EB" ): return ResponseMock( - photos_data.DATA[ - "query?remapEnums=True&getCurrentSyncToken=True" - ][6]["response"] + photos_data.DATA["query?remapEnums=True&getCurrentSyncToken=True"][6]["response"], ) if ( "filterBy" in data["query"] @@ -3989,89 +3927,58 @@ def request(self, method, url, **kwargs): == "E803E065-D8A4-4398-DE23-23F8FD0886EC" ): return ResponseMock( - photos_data.DATA[ - "query?remapEnums=True&getCurrentSyncToken=True" - ][7]["response"] + photos_data.DATA["query?remapEnums=True&getCurrentSyncToken=True"][7]["response"], ) if ( "zoneID" in data - and data.get("zoneID").get("zoneName") - == "SharedSync-9DD9B767-9F30-4D6F-B658-F17DBA16D107" + and data.get("zoneID").get("zoneName") == "SharedSync-9DD9B767-9F30-4D6F-B658-F17DBA16D107" ): return ResponseMock( - photos_data.DATA[ - "query?remapEnums=True&getCurrentSyncToken=True" - ][8]["response"] + photos_data.DATA["query?remapEnums=True&getCurrentSyncToken=True"][8]["response"], ) return ResponseMock( - photos_data.DATA[ - "query?remapEnums=True&getCurrentSyncToken=True" - ][1]["response"] + photos_data.DATA["query?remapEnums=True&getCurrentSyncToken=True"][1]["response"], ) - if ( - data.get("query").get("recordType") - == "CPLAssetAndMasterByAddedDate" - ): + if data.get("query").get("recordType") == "CPLAssetAndMasterByAddedDate": if data.get("query").get("filterBy")[0]["fieldValue"]["value"] == 0: return ResponseMock( - photos_data.DATA[ - "query?remapEnums=True&getCurrentSyncToken=True" - ][9]["response"] + photos_data.DATA["query?remapEnums=True&getCurrentSyncToken=True"][9]["response"], ) return ResponseMock( - photos_data.DATA[ - "query?remapEnums=True&getCurrentSyncToken=True" - ][8]["response"] + photos_data.DATA["query?remapEnums=True&getCurrentSyncToken=True"][8]["response"], ) - if ( - data.get("query").get("recordType") - == "CPLContainerRelationLiveByAssetDate" - ): + if data.get("query").get("recordType") == "CPLContainerRelationLiveByAssetDate": if data.get("query").get("filterBy")[0]["fieldValue"]["value"] == 0: return ResponseMock( - photos_data.DATA[ - "query?remapEnums=True&getCurrentSyncToken=True" - ][2]["response"] + photos_data.DATA["query?remapEnums=True&getCurrentSyncToken=True"][2]["response"], ) if data.get("query").get("filterBy")[0]["fieldValue"]["value"] == 7: return ResponseMock( - photos_data.DATA[ - "query?remapEnums=True&getCurrentSyncToken=True" - ][3]["response"] + photos_data.DATA["query?remapEnums=True&getCurrentSyncToken=True"][3]["response"], ) if "query/batch?remapEnums=True&getCurrentSyncToken=True" in url: return ResponseMock( - photos_data.DATA[ - 
"query/batch?remapEnums=True&getCurrentSyncToken=True" - ][0]["response"] + photos_data.DATA["query/batch?remapEnums=True&getCurrentSyncToken=True"][0]["response"], ) # Photos download if ( # IMG_3327.JPG - "https://cvws.icloud-content.com/B/ARKzBUr-DdmTaP_SAVglTurWtsmrAb5Vyk36t2jwuON7WSxvon_DvGtK" - in url + "https://cvws.icloud-content.com/B/ARKzBUr-DdmTaP_SAVglTurWtsmrAb5Vyk36t2jwuON7WSxvon_DvGtK" in url # IMG_3322.JPG - or "https://cvws.icloud-content.com/B/ASTSuc7S58IPmVCJIUslbeCRjsnoASIoOBi88potAS0gE8tfnojuSlrb" - in url + or "https://cvws.icloud-content.com/B/ASTSuc7S58IPmVCJIUslbeCRjsnoASIoOBi88potAS0gE8tfnojuSlrb" in url # IMG_3306.JPG - or "https://cvws.icloud-content.com/B/ATTRy6p-Q3U1HqcF6BUKrrOMnjvnATqG89bMsXhtmMRMw009uhyJc_Kh" - in url + or "https://cvws.icloud-content.com/B/ATTRy6p-Q3U1HqcF6BUKrrOMnjvnATqG89bMsXhtmMRMw009uhyJc_Kh" in url # IMG_3148.JPG - or "https://cvws.icloud-content.com/B/ARZd_GzpY62XRtXt+jP6UsV4fBZHATi6BbOzDuHl6RONNFCub9eqZqSm" - in url + or "https://cvws.icloud-content.com/B/ARZd_GzpY62XRtXt+jP6UsV4fBZHATi6BbOzDuHl6RONNFCub9eqZqSm" in url # no-extension - or "https://cvws.icloud-content.com/B/ARZd_GzpY62XRtXt+jP6UsV4fBZIATi6BbOzDuHl6RONNFCub9eqZqSn" - in url + or "https://cvws.icloud-content.com/B/ARZd_GzpY62XRtXt+jP6UsV4fBZIATi6BbOzDuHl6RONNFCub9eqZqSn" in url # IMG_3328.JPG - or "https://cvws.icloud-content.com/B/EeGlt2PppPTgd0Q7mp8GenIugSh7AQYmx-DRYXnMs0tkDZ3rorp4IB99" - in url + or "https://cvws.icloud-content.com/B/EeGlt2PppPTgd0Q7mp8GenIugSh7AQYmx-DRYXnMs0tkDZ3rorp4IB99" in url # IMG_3148.JPG another device - or "https://cvws.icloud-content.com/B/ATTRy6p-Q3U1HqcF6BUKrrOMnjvoATqG89bMsXhtmMRMw009uhyJc_Kh" - in url + or "https://cvws.icloud-content.com/B/ATTRy6p-Q3U1HqcF6BUKrrOMnjvoATqG89bMsXhtmMRMw009uhyJc_Kh" in url # IMG_5513.HEIC Shared Library - or "https://cvws.icloud-content.com/B/AQDN6auXvelQyb_btBqkNNjA97E2AZ_h3_ZBuSDV7J1SfMKpllmP-FGN" - in url + or "https://cvws.icloud-content.com/B/AQDN6auXvelQyb_btBqkNNjA97E2AZ_h3_ZBuSDV7J1SfMKpllmP-FGN" in url ): return ResponseMock( {}, @@ -4079,61 +3986,43 @@ def request(self, method, url, **kwargs): ) if ( # IMG_3327.JPG - "https://cvws.icloud-content.com/B/AUvKU8j-Z5pfqGI_fe-9tibuqfVRAd0I2qxdsqlGuSLlqtTBgoKndHE_" - in url + "https://cvws.icloud-content.com/B/AUvKU8j-Z5pfqGI_fe-9tibuqfVRAd0I2qxdsqlGuSLlqtTBgoKndHE_" in url # IMG_3322.JPG - or "https://cvws.icloud-content.com/B/AUxVFT2yVsQ5739tmU5c1497duFDAdkoZP1534bwlULpwCdn2fd44LAt" - in url + or "https://cvws.icloud-content.com/B/AUxVFT2yVsQ5739tmU5c1497duFDAdkoZP1534bwlULpwCdn2fd44LAt" in url # IMG_3306.JPG - or "https://cvws.icloud-content.com/B/Ab_8kUAhnGzSxnl9yWvh8JKBpOvVAVLSGMHt-PAQ9_krqqfXATNX57d5" - in url + or "https://cvws.icloud-content.com/B/Ab_8kUAhnGzSxnl9yWvh8JKBpOvVAVLSGMHt-PAQ9_krqqfXATNX57d5" in url # IMG_3148.JPG - or "https://cvws.icloud-content.com/B/AVx3_VKkbWPdNbWw68mrWzSuemXgAdUIDFzHC2rVOvwTz0jPi_tKihnb" - in url + or "https://cvws.icloud-content.com/B/AVx3_VKkbWPdNbWw68mrWzSuemXgAdUIDFzHC2rVOvwTz0jPi_tKihnb" in url # no-extension - or "https://cvws.icloud-content.com/B/AVx3_VKkbWPdNbWw68mrWzSuemXiAdUIDFzHC2rVOvwTz0jPi_tKihnn" - in url + or "https://cvws.icloud-content.com/B/AVx3_VKkbWPdNbWw68mrWzSuemXiAdUIDFzHC2rVOvwTz0jPi_tKihnn" in url # IMG_3328.JPG - or "https://cvws.icloud-content.com/B/YN1v8eGiHYYZ_aKUkMuGtSf0P1BNAXKVYPcDa-9Mjvnap0ZS-p2Z24V3" - in url + or "https://cvws.icloud-content.com/B/YN1v8eGiHYYZ_aKUkMuGtSf0P1BNAXKVYPcDa-9Mjvnap0ZS-p2Z24V3" in url # IMG_3148.JPG another device - or 
"https://cvws.icloud-content.com/B/Ab_8kUAhnGzSxnl9yWvh8JKBpOvWAVLSGMHt-PAQ9_krqqfXATNX57d5" - in url + or "https://cvws.icloud-content.com/B/Ab_8kUAhnGzSxnl9yWvh8JKBpOvWAVLSGMHt-PAQ9_krqqfXATNX57d5" in url # IMG_5513.HEIC Shared Library - or "https://cvws.icloud-content.com/B/AY4eS1ezj9pmMHzfVzwC2CLmBwZOAXKLBx985QzfCKCGyN0wbGs6SuTf" - in url + or "https://cvws.icloud-content.com/B/AY4eS1ezj9pmMHzfVzwC2CLmBwZOAXKLBx985QzfCKCGyN0wbGs6SuTf" in url ): return ResponseMock( {}, - raw=open( - os.path.join(os.path.dirname(__file__), "original.jpeg"), "rb" - ), + raw=open(os.path.join(os.path.dirname(__file__), "original.jpeg"), "rb"), ) if ( # IMG_3327.JPG - "https://cvws.icloud-content.com/B/ASy6f_leU1-xkR1aPmQyvYmwHUpEARHOzkI3sbX3SZDmNQgttNJ9DqQa" - in url + "https://cvws.icloud-content.com/B/ASy6f_leU1-xkR1aPmQyvYmwHUpEARHOzkI3sbX3SZDmNQgttNJ9DqQa" in url # IMG_3322.JPG - or "https://cvws.icloud-content.com/B/ASPVZ_Pft6gIN2VEA_oUbqQzh6WyAXd258pYF6LLhmADLoZAumNqI-8M" - in url + or "https://cvws.icloud-content.com/B/ASPVZ_Pft6gIN2VEA_oUbqQzh6WyAXd258pYF6LLhmADLoZAumNqI-8M" in url # IMG_3306.JPG - or "https://cvws.icloud-content.com/B/AQNND5zpteAXnnBP2BmDd0ropjY9AV2Zh7WygJu74eNWVuuMT4lM8qme" - in url + or "https://cvws.icloud-content.com/B/AQNND5zpteAXnnBP2BmDd0ropjY9AV2Zh7WygJu74eNWVuuMT4lM8qme" in url # IMG_3148.JPG - or "https://cvws.icloud-content.com/B/ARpHiouI3Ib_ziuZYTCiSikohvMYARtMrcvA8cbMefPDnmwSWQwe-mBd" - in url + or "https://cvws.icloud-content.com/B/ARpHiouI3Ib_ziuZYTCiSikohvMYARtMrcvA8cbMefPDnmwSWQwe-mBd" in url # no-extension - or "https://cvws.icloud-content.com/B/ARpHiouI3Ib_ziuZYTCiSikohvMZARtMrcvA8cbMefPDnmwSWQwe-mBe" - in url + or "https://cvws.icloud-content.com/B/ARpHiouI3Ib_ziuZYTCiSikohvMZARtMrcvA8cbMefPDnmwSWQwe-mBe" in url # IMG_3328.JPG - or "https://cvws.icloud-content.com/B/DmK0xzSiAUSFrAsYYAvby7QHrMDeAR5TiM9Qko4rHwmoDH1BgNRVZpF4" - in url + or "https://cvws.icloud-content.com/B/DmK0xzSiAUSFrAsYYAvby7QHrMDeAR5TiM9Qko4rHwmoDH1BgNRVZpF4" in url # IMG_3148.JPG another device - or "https://cvws.icloud-content.com/B/AQNND5zpteAXnnBP2BmDd0ropjY0AV2Zh7WygJu74eNWVuuMT4lM8qme" - in url + or "https://cvws.icloud-content.com/B/AQNND5zpteAXnnBP2BmDd0ropjY0AV2Zh7WygJu74eNWVuuMT4lM8qme" in url # IMG_5513.HEIC Shared Library - or "https://cvws.icloud-content.com/B/Aa_QVPVEM9bvm5Owy3GRFNyqbKuXAbgec55EhUFp9db5znXM3Xz-nq1X" - in url + or "https://cvws.icloud-content.com/B/Aa_QVPVEM9bvm5Owy3GRFNyqbKuXAbgec55EhUFp9db5znXM3Xz-nq1X" in url ): return ResponseMock( {}, diff --git a/tests/data/photos_data.py b/tests/data/photos_data.py index 0573f0f7f..5461992ce 100644 --- a/tests/data/photos_data.py +++ b/tests/data/photos_data.py @@ -1,4 +1,5 @@ """Photos fixtures.""" + DATA = { "query?remapEnums=True&getCurrentSyncToken=True": [ { @@ -33,7 +34,7 @@ "ownerRecordName": "_1d5r3c201b3a4r5daac8ff7e7fbc0c23", "zoneType": "REGULAR_CUSTOM_ZONE", }, - } + }, ], "syncToken": "AQAAAAAAArKjf//////////fSxWSKv5JfZ34edrt875d", }, @@ -382,7 +383,7 @@ "wrappingKey": "amaCdL9Z+QxfzgD4+aYATg==", "referenceChecksum": "AQYmx+DRYXnMs0tkDZ3rorp4IB99", # pylint: disable=C0321 - "downloadURL": 
"https://cvws.icloud-content.com/B/EeGlt2PppPTgd0Q7mp8GenIugSh7AQYmx-DRYXnMs0tkDZ3rorp4IB99/${f}?o=Ai6vEWSVp5w5zaBTm7XvC55prdq006u5yUW5EfZs4KLT&v=1&x=3&a=CAogvhLnXY3DD7gxkuzbuKlak-NMlKvq37s7a-beQRlkZCsSbRCbkq2nty8Ym--IqbcvIgEAUgQugSh6WgR4IB99aiYQWP8altHEtfDsXqRVOJ19O49YwikLbHn5Ha6IeAIHhXVRK7Fpa3ImBoK0z2Usv0QeZBog1G6uVLc1ZapiFVtXuoc52Ijt3dpb4J3VMIA&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=amaCdL9Z-QxfzgD4-aYATg&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=J9LA0hC_xBV3TqYvwg_zAPWPwH8", # noqa: E501 + "downloadURL": "https://cvws.icloud-content.com/B/EeGlt2PppPTgd0Q7mp8GenIugSh7AQYmx-DRYXnMs0tkDZ3rorp4IB99/${f}?o=Ai6vEWSVp5w5zaBTm7XvC55prdq006u5yUW5EfZs4KLT&v=1&x=3&a=CAogvhLnXY3DD7gxkuzbuKlak-NMlKvq37s7a-beQRlkZCsSbRCbkq2nty8Ym--IqbcvIgEAUgQugSh6WgR4IB99aiYQWP8altHEtfDsXqRVOJ19O49YwikLbHn5Ha6IeAIHhXVRK7Fpa3ImBoK0z2Usv0QeZBog1G6uVLc1ZapiFVtXuoc52Ijt3dpb4J3VMIA&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=amaCdL9Z-QxfzgD4-aYATg&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=J9LA0hC_xBV3TqYvwg_zAPWPwH8", }, "type": "ASSETID", }, @@ -395,7 +396,7 @@ "wrappingKey": "Y40xDPUr6DmxfeoSqxaQ7A==", "referenceChecksum": "AXKVYPcDa+9Mjvnap0ZS+p2Z24V3", # pylint: disable=C0321 - "downloadURL": "https://cvws.icloud-content.com/B/YN1v8eGiHYYZ_aKUkMuGtSf0P1BNAXKVYPcDa-9Mjvnap0ZS-p2Z24V3/${f}?o=Ame-Q1e_1nWqIn7YG7VfVZk-XAs8bVdcHo-owaNRmfPn&v=1&x=3&a=CAogwS503Q9EkCdnzvD-kLG0VNwrlEmARONCS-hADMtqg1QSbRCckq2nty8YnO-IqbcvIgEAUgT0P1BNWgSZ24V3aiYLdjzdjGLXPtKfjwtH_PG0ralgbDDBOIftNXxyRxdhzz8OuZztNnImb65YPlo1qUOy4i7tW1pcyAZcjqS8kYfxPQD6SKIAKNk3dUid7mE&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=Y40xDPUr6DmxfeoSqxaQ7A&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=X91oiOo0Avp6TR4d27MGupd_cqY", # noqa: E501 + "downloadURL": "https://cvws.icloud-content.com/B/YN1v8eGiHYYZ_aKUkMuGtSf0P1BNAXKVYPcDa-9Mjvnap0ZS-p2Z24V3/${f}?o=Ame-Q1e_1nWqIn7YG7VfVZk-XAs8bVdcHo-owaNRmfPn&v=1&x=3&a=CAogwS503Q9EkCdnzvD-kLG0VNwrlEmARONCS-hADMtqg1QSbRCckq2nty8YnO-IqbcvIgEAUgT0P1BNWgSZ24V3aiYLdjzdjGLXPtKfjwtH_PG0ralgbDDBOIftNXxyRxdhzz8OuZztNnImb65YPlo1qUOy4i7tW1pcyAZcjqS8kYfxPQD6SKIAKNk3dUid7mE&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=Y40xDPUr6DmxfeoSqxaQ7A&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=X91oiOo0Avp6TR4d27MGupd_cqY", }, "type": "ASSETID", }, @@ -416,7 +417,7 @@ "wrappingKey": "Y40xDPUr6DmxfeoSqxaQ7A==", "referenceChecksum": "AXKVYPcDa+9Mjvnap0ZS+p2Z24V3", # pylint: disable=C0321 - "downloadURL": "https://cvws.icloud-content.com/B/YN1v8eGiHYYZ_aKUkMuGtSf0P1BNAXKVYPcDa-9Mjvnap0ZS-p2Z24V3/${f}?o=Ame-Q1e_1nWqIn7YG7VfVZk-XAs8bVdcHo-owaNRmfPn&v=1&x=3&a=CAogwS503Q9EkCdnzvD-kLG0VNwrlEmARONCS-hADMtqg1QSbRCckq2nty8YnO-IqbcvIgEAUgT0P1BNWgSZ24V3aiYLdjzdjGLXPtKfjwtH_PG0ralgbDDBOIftNXxyRxdhzz8OuZztNnImb65YPlo1qUOy4i7tW1pcyAZcjqS8kYfxPQD6SKIAKNk3dUid7mE&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=Y40xDPUr6DmxfeoSqxaQ7A&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=X91oiOo0Avp6TR4d27MGupd_cqY", # noqa: E501 + "downloadURL": 
"https://cvws.icloud-content.com/B/YN1v8eGiHYYZ_aKUkMuGtSf0P1BNAXKVYPcDa-9Mjvnap0ZS-p2Z24V3/${f}?o=Ame-Q1e_1nWqIn7YG7VfVZk-XAs8bVdcHo-owaNRmfPn&v=1&x=3&a=CAogwS503Q9EkCdnzvD-kLG0VNwrlEmARONCS-hADMtqg1QSbRCckq2nty8YnO-IqbcvIgEAUgT0P1BNWgSZ24V3aiYLdjzdjGLXPtKfjwtH_PG0ralgbDDBOIftNXxyRxdhzz8OuZztNnImb65YPlo1qUOy4i7tW1pcyAZcjqS8kYfxPQD6SKIAKNk3dUid7mE&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=Y40xDPUr6DmxfeoSqxaQ7A&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=X91oiOo0Avp6TR4d27MGupd_cqY", }, "type": "ASSETID", }, @@ -449,7 +450,7 @@ "wrappingKey": "r7EeA3tyPsWdcECp6X9dHA==", "referenceChecksum": "AR5TiM9Qko4rHwmoDH1BgNRVZpF4", # pylint: disable=C0321 - "downloadURL": "https://cvws.icloud-content.com/B/DmK0xzSiAUSFrAsYYAvby7QHrMDeAR5TiM9Qko4rHwmoDH1BgNRVZpF4/${f}?o=AjM3SMy6F9O-5AWTv2HnEp_GiL7ycAx1ls3yOqypKX-3&v=1&x=3&a=CAogiUnx0vJRhNr8Xt_dbOGrxiu8gKNAz_l_8Z5TVGmok64SbRCckq2nty8YnO-IqbcvIgEAUgQHrMObWgRVZpF4aiYwQUojYj2kyD-EyrtVjkw5sVJ60NK0x8nKjsjNXzTYH__dA6VcCHImQduy0Vis9tCiB3ox2KXKiyf3NOaih9TbQ8KfJ8H_8sFzdtXHw8I&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=r7EeA3tyPsWdcECp6X9dHA&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=m2Z7uOxYG9iNHiAZbLm6OE2O6hE", # noqa: E501 + "downloadURL": "https://cvws.icloud-content.com/B/DmK0xzSiAUSFrAsYYAvby7QHrMDeAR5TiM9Qko4rHwmoDH1BgNRVZpF4/${f}?o=AjM3SMy6F9O-5AWTv2HnEp_GiL7ycAx1ls3yOqypKX-3&v=1&x=3&a=CAogiUnx0vJRhNr8Xt_dbOGrxiu8gKNAz_l_8Z5TVGmok64SbRCckq2nty8YnO-IqbcvIgEAUgQHrMObWgRVZpF4aiYwQUojYj2kyD-EyrtVjkw5sVJ60NK0x8nKjsjNXzTYH__dA6VcCHImQduy0Vis9tCiB3ox2KXKiyf3NOaih9TbQ8KfJ8H_8sFzdtXHw8I&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=r7EeA3tyPsWdcECp6X9dHA&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=m2Z7uOxYG9iNHiAZbLm6OE2O6hE", }, "type": "ASSETID", }, @@ -498,7 +499,7 @@ "wrappingKey": "eqUQfTajfGXgQHbBJh8Qwg==", "referenceChecksum": "Ab5Vyk36t2jwuON7WSxvon/DvGtK", # pylint: disable=C0321 - "downloadURL": "https://cvws.icloud-content.com/B/ARKzBUr-DdmTaP_SAVglTurWtsmrAb5Vyk36t2jwuON7WSxvon_DvGtK/${f}?o=AnHR_fOkcKyPuLPOCXcp9C52pM-oZefmc9efp0e0ahAO&v=1&x=3&a=CAogf7lfgZsLeoZdnUfnSzBMLzy9WrbD7vMgHjmY9CI7_uESbRCmkq2nty8Ypu-IqbcvIgEAUgTWtsmrWgTDvGtKaiYANfDoXLBqjbu3_O1AGa62AuKbnBEsqXqysujWIiFYxe-i-AiEb3ImWrZCs4OP45m3SoQL7fh49dD-aHcXkEMAfevtQ6xh5-RH-5bq3sQ&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=eqUQfTajfGXgQHbBJh8Qwg&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=ci4ad9AWukocHK1gYXbJrx-Ok9M", # noqa: E501 + "downloadURL": "https://cvws.icloud-content.com/B/ARKzBUr-DdmTaP_SAVglTurWtsmrAb5Vyk36t2jwuON7WSxvon_DvGtK/${f}?o=AnHR_fOkcKyPuLPOCXcp9C52pM-oZefmc9efp0e0ahAO&v=1&x=3&a=CAogf7lfgZsLeoZdnUfnSzBMLzy9WrbD7vMgHjmY9CI7_uESbRCmkq2nty8Ypu-IqbcvIgEAUgTWtsmrWgTDvGtKaiYANfDoXLBqjbu3_O1AGa62AuKbnBEsqXqysujWIiFYxe-i-AiEb3ImWrZCs4OP45m3SoQL7fh49dD-aHcXkEMAfevtQ6xh5-RH-5bq3sQ&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=eqUQfTajfGXgQHbBJh8Qwg&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=ci4ad9AWukocHK1gYXbJrx-Ok9M", }, "type": "ASSETID", }, @@ -511,7 +512,7 @@ "wrappingKey": "tPtP2Y7mGQ4yOsOCMFG/sg==", "referenceChecksum": "Ad0I2qxdsqlGuSLlqtTBgoKndHE/", # pylint: disable=C0321 - "downloadURL": 
"https://cvws.icloud-content.com/B/AUvKU8j-Z5pfqGI_fe-9tibuqfVRAd0I2qxdsqlGuSLlqtTBgoKndHE_/${f}?o=AuoILQZ6O-MHJ-g-prxkKJNvAz0wU24Va6re5l5JIhrW&v=1&x=3&a=CAogiV6FTwlLeQt348ipvPuax8JBYrtL7o0q7WMX775pR4YSbRCmkq2nty8Ypu-IqbcvIgEAUgTuqfVRWgSndHE_aiZN21K0DzyVdoR0roYdRIUTUdT16tIhKWq2fJfrIDzHjd0YU3MhW3ImUfLx8SZ3FmkyDDQA-J5nJkGVtdKMsxmegM4H68EIUA9-idz8C-g&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=tPtP2Y7mGQ4yOsOCMFG_sg&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=4MygIoxG8NYN-VK3zWB-a-wqy7c", # noqa: E501 + "downloadURL": "https://cvws.icloud-content.com/B/AUvKU8j-Z5pfqGI_fe-9tibuqfVRAd0I2qxdsqlGuSLlqtTBgoKndHE_/${f}?o=AuoILQZ6O-MHJ-g-prxkKJNvAz0wU24Va6re5l5JIhrW&v=1&x=3&a=CAogiV6FTwlLeQt348ipvPuax8JBYrtL7o0q7WMX775pR4YSbRCmkq2nty8Ypu-IqbcvIgEAUgTuqfVRWgSndHE_aiZN21K0DzyVdoR0roYdRIUTUdT16tIhKWq2fJfrIDzHjd0YU3MhW3ImUfLx8SZ3FmkyDDQA-J5nJkGVtdKMsxmegM4H68EIUA9-idz8C-g&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=tPtP2Y7mGQ4yOsOCMFG_sg&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=4MygIoxG8NYN-VK3zWB-a-wqy7c", }, "type": "ASSETID", }, @@ -539,7 +540,7 @@ "wrappingKey": "E1zCp4gxgoHQNQHWjS3Wag==", "referenceChecksum": "ARHOzkI3sbX3SZDmNQgttNJ9DqQa", # pylint: disable=C0321 - "downloadURL": "https://cvws.icloud-content.com/B/ASy6f_leU1-xkR1aPmQyvYmwHUpEARHOzkI3sbX3SZDmNQgttNJ9DqQa/${f}?o=AiE0LlRJclp9DPkfhwdmJUfxo_vgP7JLWn3qtvPUeTuS&v=1&x=3&a=CAogroLnmMZUEfwNczwEl6zmt6YvBGhwJnPwcJogyD0-dYESbRCmkq2nty8Ypu-IqbcvIgEAUgSwHUpEWgR9DqQaaiZ5SoSlUQbaa-uaRlv6ga8Vyh14lIf466mlURl-3DYa6jr_6SsjQnImlqDZof_hQbcHONiYRrB9MXnKpJ9akb7rPc8_GAwPduNtPHAhBBk&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=E1zCp4gxgoHQNQHWjS3Wag&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=5uuV1dFCaVoK0EhvreVHqYZiBNM", # noqa: E501 + "downloadURL": "https://cvws.icloud-content.com/B/ASy6f_leU1-xkR1aPmQyvYmwHUpEARHOzkI3sbX3SZDmNQgttNJ9DqQa/${f}?o=AiE0LlRJclp9DPkfhwdmJUfxo_vgP7JLWn3qtvPUeTuS&v=1&x=3&a=CAogroLnmMZUEfwNczwEl6zmt6YvBGhwJnPwcJogyD0-dYESbRCmkq2nty8Ypu-IqbcvIgEAUgSwHUpEWgR9DqQaaiZ5SoSlUQbaa-uaRlv6ga8Vyh14lIf466mlURl-3DYa6jr_6SsjQnImlqDZof_hQbcHONiYRrB9MXnKpJ9akb7rPc8_GAwPduNtPHAhBBk&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=E1zCp4gxgoHQNQHWjS3Wag&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=5uuV1dFCaVoK0EhvreVHqYZiBNM", }, "type": "ASSETID", }, @@ -593,7 +594,7 @@ "wrappingKey": "ysHoAuqERA8H3MadxO6+PA==", "referenceChecksum": "ASIoOBi88potAS0gE8tfnojuSlrb", # pylint: disable=C0321 - "downloadURL": "https://cvws.icloud-content.com/B/ASTSuc7S58IPmVCJIUslbeCRjsnoASIoOBi88potAS0gE8tfnojuSlrb/${f}?o=AotG4ZrSZDU3u6kP4yWDFXfZ3_6tKEyLXvh4gcpo4ELn&v=1&x=3&a=CAogJr6QVcZkN2p9iAoOF0Tr4qnsmeCHSHrFoTzxEHq7NQUSbRCwkq2nty8YsO-IqbcvIgEAUgSRjsnoWgTuSlrbaibzwCZQjqQ5JaCOYReCcRWatftcle04VKPDs1BnZT75v_W_X7tuyXImMWPY8r1ICHzS3Us89foRJ0jtqcXwvVd3nT7EM6EPexOdJAI4_qQ&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=ysHoAuqERA8H3MadxO6-PA&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=TiejMZ9ftbHECNhJzeowymOGu3I", # noqa: E501 + "downloadURL": 
"https://cvws.icloud-content.com/B/ASTSuc7S58IPmVCJIUslbeCRjsnoASIoOBi88potAS0gE8tfnojuSlrb/${f}?o=AotG4ZrSZDU3u6kP4yWDFXfZ3_6tKEyLXvh4gcpo4ELn&v=1&x=3&a=CAogJr6QVcZkN2p9iAoOF0Tr4qnsmeCHSHrFoTzxEHq7NQUSbRCwkq2nty8YsO-IqbcvIgEAUgSRjsnoWgTuSlrbaibzwCZQjqQ5JaCOYReCcRWatftcle04VKPDs1BnZT75v_W_X7tuyXImMWPY8r1ICHzS3Us89foRJ0jtqcXwvVd3nT7EM6EPexOdJAI4_qQ&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=ysHoAuqERA8H3MadxO6-PA&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=TiejMZ9ftbHECNhJzeowymOGu3I", }, "type": "ASSETID", }, @@ -606,7 +607,7 @@ "wrappingKey": "8ydPkuUeW1rXYBf+8EUhWQ==", "referenceChecksum": "AdkoZP1534bwlULpwCdn2fd44LAt", # pylint: disable=C0321 - "downloadURL": "https://cvws.icloud-content.com/B/AUxVFT2yVsQ5739tmU5c1497duFDAdkoZP1534bwlULpwCdn2fd44LAt/${f}?o=AtyLCU3HpSWYfXnAPtzDXkhqPhQVX_2KI1m03qQMcrX8&v=1&x=3&a=CAoguSOx9xIzgn8O-3JZPJEFmuCqSpNEdkQUdkK-kdTjfyQSbRCwkq2nty8YsO-IqbcvIgEAUgR7duFDWgR44LAtaiYyf1-bnKqpPXGMfJ_iZeMO0Ar6T3qqD2Nwc5hia_fAPn-qOLNguXImMEz0ks6Sun_tBea1p7Gs39vk_ERXdi-KrSpKwpkhrUNPNhAl3t4&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=8ydPkuUeW1rXYBf-8EUhWQ&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=hYKaX0XrV6ghQqijbnjdyNejnec", # noqa: E501 + "downloadURL": "https://cvws.icloud-content.com/B/AUxVFT2yVsQ5739tmU5c1497duFDAdkoZP1534bwlULpwCdn2fd44LAt/${f}?o=AtyLCU3HpSWYfXnAPtzDXkhqPhQVX_2KI1m03qQMcrX8&v=1&x=3&a=CAoguSOx9xIzgn8O-3JZPJEFmuCqSpNEdkQUdkK-kdTjfyQSbRCwkq2nty8YsO-IqbcvIgEAUgR7duFDWgR44LAtaiYyf1-bnKqpPXGMfJ_iZeMO0Ar6T3qqD2Nwc5hia_fAPn-qOLNguXImMEz0ks6Sun_tBea1p7Gs39vk_ERXdi-KrSpKwpkhrUNPNhAl3t4&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=8ydPkuUeW1rXYBf-8EUhWQ&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=hYKaX0XrV6ghQqijbnjdyNejnec", }, "type": "ASSETID", }, @@ -634,7 +635,7 @@ "wrappingKey": "3EAjgkS2+Mr38eqQFk7C0A==", "referenceChecksum": "AXd258pYF6LLhmADLoZAumNqI+8M", # pylint: disable=C0321 - "downloadURL": "https://cvws.icloud-content.com/B/ASPVZ_Pft6gIN2VEA_oUbqQzh6WyAXd258pYF6LLhmADLoZAumNqI-8M/${f}?o=AtA8VtFypX8-ayXsVGPnssT56G8EM8ZdWr3nFmug2dPM&v=1&x=3&a=CAogZYCSkK2TW_pwByMq7sMg791XyNwx5u0lgyxvV0cKs3YSbRCxkq2nty8Yse-IqbcvIgEAUgQzh6WyWgRqI-8MaibbeMKtZNDpNkTWx6jOoPY4npOZt2t0xHw4QfV3u2b-KI47DIvSOXImJp7IX_oKObj3XLk-Gvvg-9z_e9JYfqbOJyUNAxz_e5wdxr6wQxs&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=3EAjgkS2-Mr38eqQFk7C0A&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=PbVsX_tz5q6QJFzf_TxHZ1SH79I", # noqa: E501 + "downloadURL": "https://cvws.icloud-content.com/B/ASPVZ_Pft6gIN2VEA_oUbqQzh6WyAXd258pYF6LLhmADLoZAumNqI-8M/${f}?o=AtA8VtFypX8-ayXsVGPnssT56G8EM8ZdWr3nFmug2dPM&v=1&x=3&a=CAogZYCSkK2TW_pwByMq7sMg791XyNwx5u0lgyxvV0cKs3YSbRCxkq2nty8Yse-IqbcvIgEAUgQzh6WyWgRqI-8MaibbeMKtZNDpNkTWx6jOoPY4npOZt2t0xHw4QfV3u2b-KI47DIvSOXImJp7IX_oKObj3XLk-Gvvg-9z_e9JYfqbOJyUNAxz_e5wdxr6wQxs&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=3EAjgkS2-Mr38eqQFk7C0A&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=PbVsX_tz5q6QJFzf_TxHZ1SH79I", }, "type": "ASSETID", }, @@ -688,7 +689,7 @@ "wrappingKey": "pYDpdhqdaL9SAxHilZEj3Q==", "referenceChecksum": "ATqG89bMsXhtmMRMw009uhyJc/Kh", # pylint: disable=C0321 - "downloadURL": 
"https://cvws.icloud-content.com/B/ATTRy6p-Q3U1HqcF6BUKrrOMnjvnATqG89bMsXhtmMRMw009uhyJc_Kh/${f}?o=AovA4TUyNl2kYkqOdInhEXGZ_6Lgkx1fTEsqpkkMh3hm&v=1&x=3&a=CAogVnr-sKWlefxaxarlxJ-k7EPRB-Q851T9df9zyhCvis0SbRC7kq2nty8Yu--IqbcvIgEAUgSMnjvnWgSJc_KhaiZCiMEZuykdl4ex2Ra8y53DbEEtJi6ItoX1e6b8TOoWXYiLA-mkr3Im7aDvMFg_m7tYuslgLZFXL8hxJftHL4oTy1ZpuVaP__2nTQTPLp4&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=pYDpdhqdaL9SAxHilZEj3Q&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=IKz0oqClwHpM9shdTb3e5liYV5E", # noqa: E501 + "downloadURL": "https://cvws.icloud-content.com/B/ATTRy6p-Q3U1HqcF6BUKrrOMnjvnATqG89bMsXhtmMRMw009uhyJc_Kh/${f}?o=AovA4TUyNl2kYkqOdInhEXGZ_6Lgkx1fTEsqpkkMh3hm&v=1&x=3&a=CAogVnr-sKWlefxaxarlxJ-k7EPRB-Q851T9df9zyhCvis0SbRC7kq2nty8Yu--IqbcvIgEAUgSMnjvnWgSJc_KhaiZCiMEZuykdl4ex2Ra8y53DbEEtJi6ItoX1e6b8TOoWXYiLA-mkr3Im7aDvMFg_m7tYuslgLZFXL8hxJftHL4oTy1ZpuVaP__2nTQTPLp4&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=pYDpdhqdaL9SAxHilZEj3Q&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=IKz0oqClwHpM9shdTb3e5liYV5E", }, "type": "ASSETID", }, @@ -701,7 +702,7 @@ "wrappingKey": "YIFaf0awZsX16khQaJ5pHw==", "referenceChecksum": "AVLSGMHt+PAQ9/krqqfXATNX57d5", # pylint: disable=C0321 - "downloadURL": "https://cvws.icloud-content.com/B/Ab_8kUAhnGzSxnl9yWvh8JKBpOvVAVLSGMHt-PAQ9_krqqfXATNX57d5/${f}?o=AvHBwurT0LTKni3mzYNLu4FnSeLeXfxYgSThZ4ImxjO8&v=1&x=3&a=CAogTIPbEVbPukTNLTRMbKPr3KEw-OwlmwJ6E2P4TWSVmS0SbRC7kq2nty8Yu--IqbcvIgEAUgSBpOvVWgRX57d5aibAoDs2oxjwpsMmZzKDj2ndE0sAhXdcwzBu-U_oZGpb059mW6D0dnImIjbNA_Bqcyw_VKQmNxeLtnGtGwyFB16OPwFKYcs1KsSFvHFAD7Y&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=YIFaf0awZsX16khQaJ5pHw&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=1bdqdYxBN6JqLAkjMyHSEGNkqDA", # noqa: E501 + "downloadURL": "https://cvws.icloud-content.com/B/Ab_8kUAhnGzSxnl9yWvh8JKBpOvVAVLSGMHt-PAQ9_krqqfXATNX57d5/${f}?o=AvHBwurT0LTKni3mzYNLu4FnSeLeXfxYgSThZ4ImxjO8&v=1&x=3&a=CAogTIPbEVbPukTNLTRMbKPr3KEw-OwlmwJ6E2P4TWSVmS0SbRC7kq2nty8Yu--IqbcvIgEAUgSBpOvVWgRX57d5aibAoDs2oxjwpsMmZzKDj2ndE0sAhXdcwzBu-U_oZGpb059mW6D0dnImIjbNA_Bqcyw_VKQmNxeLtnGtGwyFB16OPwFKYcs1KsSFvHFAD7Y&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=YIFaf0awZsX16khQaJ5pHw&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=1bdqdYxBN6JqLAkjMyHSEGNkqDA", }, "type": "ASSETID", }, @@ -729,7 +730,7 @@ "wrappingKey": "lxLQBw46n1nvea4s30UY+A==", "referenceChecksum": "AV2Zh7WygJu74eNWVuuMT4lM8qme", # pylint: disable=C0321 - "downloadURL": "https://cvws.icloud-content.com/B/AQNND5zpteAXnnBP2BmDd0ropjY9AV2Zh7WygJu74eNWVuuMT4lM8qme/${f}?o=ArcD2SL9b5Gy5zcnrnT2luycDRZzFLjOiX-8u9IWdQM2&v=1&x=3&a=CAogfF6-sW-XhnsFDy-vqHQjR8LXVO4OmxBUqG4CZf1zmOwSbRC8kq2nty8YvO-IqbcvIgEAUgTopjY9WgRM8qmeaibiG79B2YhfcchV4W9EgxQXAN4Bpi57NX82WXqo_YW-xi1qLAH9-HImRd8oYhd7r27sXPkUL3GT-rKGSKG-leLeNevi3ay090liNNZH-2U&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=lxLQBw46n1nvea4s30UY-A&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=34vgVK6vLEdlpYceHAmIfqIp1Fk", # noqa: E501 + "downloadURL": 
"https://cvws.icloud-content.com/B/AQNND5zpteAXnnBP2BmDd0ropjY9AV2Zh7WygJu74eNWVuuMT4lM8qme/${f}?o=ArcD2SL9b5Gy5zcnrnT2luycDRZzFLjOiX-8u9IWdQM2&v=1&x=3&a=CAogfF6-sW-XhnsFDy-vqHQjR8LXVO4OmxBUqG4CZf1zmOwSbRC8kq2nty8YvO-IqbcvIgEAUgTopjY9WgRM8qmeaibiG79B2YhfcchV4W9EgxQXAN4Bpi57NX82WXqo_YW-xi1qLAH9-HImRd8oYhd7r27sXPkUL3GT-rKGSKG-leLeNevi3ay090liNNZH-2U&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=lxLQBw46n1nvea4s30UY-A&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=34vgVK6vLEdlpYceHAmIfqIp1Fk", }, "type": "ASSETID", }, @@ -783,7 +784,7 @@ "wrappingKey": "dmZqsyvxEA4s3CvifNMApA==", "referenceChecksum": "ATi6BbOzDuHl6RONNFCub9eqZqSm", # pylint: disable=C0321 - "downloadURL": "https://cvws.icloud-content.com/B/ARZd_GzpY62XRtXt+jP6UsV4fBZHATi6BbOzDuHl6RONNFCub9eqZqSm/${f}?o=AkMi62tflbXgUrgSyQZ94SinXG9TYXZ6tydGOmDQx9HG&v=1&x=3&a=CAogc8yqpMgxKnf363cp0n1CujaxnsWY_KrZ3VEN9QchlhcSbRDGkq2nty8Yxu-IqbcvIgEAUgR4fBZHWgSqZqSmaiaxcg1zIsiESwGaEOecYR84r83ltACA6SY5ypGyvYxKD0M3LmqI8HIm7n2S2UL6EBM2Z3a9YFIGX8MrKABFDMA5TXFPUVUP6AfsnKigVMc&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=dmZqsyvxEA4s3CvifNMApA&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=rYmwpGBg6DPYHSGj6UAOnCfuMPk", # noqa: E501 + "downloadURL": "https://cvws.icloud-content.com/B/ARZd_GzpY62XRtXt+jP6UsV4fBZHATi6BbOzDuHl6RONNFCub9eqZqSm/${f}?o=AkMi62tflbXgUrgSyQZ94SinXG9TYXZ6tydGOmDQx9HG&v=1&x=3&a=CAogc8yqpMgxKnf363cp0n1CujaxnsWY_KrZ3VEN9QchlhcSbRDGkq2nty8Yxu-IqbcvIgEAUgR4fBZHWgSqZqSmaiaxcg1zIsiESwGaEOecYR84r83ltACA6SY5ypGyvYxKD0M3LmqI8HIm7n2S2UL6EBM2Z3a9YFIGX8MrKABFDMA5TXFPUVUP6AfsnKigVMc&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=dmZqsyvxEA4s3CvifNMApA&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=rYmwpGBg6DPYHSGj6UAOnCfuMPk", }, "type": "ASSETID", }, @@ -796,7 +797,7 @@ "wrappingKey": "Nz2a7ohpe3KPptCk0J0lWA==", "referenceChecksum": "AdUIDFzHC2rVOvwTz0jPi/tKihnb", # pylint: disable=C0321 - "downloadURL": "https://cvws.icloud-content.com/B/AVx3_VKkbWPdNbWw68mrWzSuemXgAdUIDFzHC2rVOvwTz0jPi_tKihnb/${f}?o=AksMTyqi4NosuW50ei90oXcv82fP1r-6QocLorp20RpO&v=1&x=3&a=CAogfvU0-_8L-3qRcy6jZsj3Vuqt4aL2rk5xVXF7lwVV6A8SbRDGkq2nty8Yxu-IqbcvIgEAUgSuemXgWgRKihnbaiZoWboa3qYl3KVDo1VGIHrRDoySixw8lzXtf1Y-AnoVN1Pd4hLkPnImXYuLGS8iK7BRJcQg25R5hk54OD04duy2TscnYu1mACOSERXpXEI&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=Nz2a7ohpe3KPptCk0J0lWA&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=t3NT5mCLmsRjPqAGvROVsMrAjfg", # noqa: E501 + "downloadURL": "https://cvws.icloud-content.com/B/AVx3_VKkbWPdNbWw68mrWzSuemXgAdUIDFzHC2rVOvwTz0jPi_tKihnb/${f}?o=AksMTyqi4NosuW50ei90oXcv82fP1r-6QocLorp20RpO&v=1&x=3&a=CAogfvU0-_8L-3qRcy6jZsj3Vuqt4aL2rk5xVXF7lwVV6A8SbRDGkq2nty8Yxu-IqbcvIgEAUgSuemXgWgRKihnbaiZoWboa3qYl3KVDo1VGIHrRDoySixw8lzXtf1Y-AnoVN1Pd4hLkPnImXYuLGS8iK7BRJcQg25R5hk54OD04duy2TscnYu1mACOSERXpXEI&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=Nz2a7ohpe3KPptCk0J0lWA&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=t3NT5mCLmsRjPqAGvROVsMrAjfg", }, "type": "ASSETID", }, @@ -824,7 +825,7 @@ "wrappingKey": "UiIQr3rRvyIcoAz/sxDugQ==", "referenceChecksum": "ARtMrcvA8cbMefPDnmwSWQwe+mBd", # pylint: disable=C0321 - "downloadURL": 
"https://cvws.icloud-content.com/B/ARpHiouI3Ib_ziuZYTCiSikohvMYARtMrcvA8cbMefPDnmwSWQwe-mBd/${f}?o=Auh2MA-6wuqdRGUDQ4kZL3fuuuMVWVVnTnTcThej9ad5&v=1&x=3&a=CAogaHp1wKKc8QF3MI-2OrLYdQx8V4PIVZvFQyuN1m6pXFMSbRDHkq2nty8Yx--IqbcvIgEAUgQohvMYWgQe-mBdaibQsOQuSEfHUK0xs9nLWG6nHKAvRCwkkmsvXL1Ku9aCARYpDg4mWHImDCoL_RiyOC-KXU_0Jpntuid9MdC08bvpHUp5hkzlctbjsBvT654&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=UiIQr3rRvyIcoAz_sxDugQ&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=Rx2sZoVhs_Phm_Ps3RvVwJ2mgvA", # noqa: E501 + "downloadURL": "https://cvws.icloud-content.com/B/ARpHiouI3Ib_ziuZYTCiSikohvMYARtMrcvA8cbMefPDnmwSWQwe-mBd/${f}?o=Auh2MA-6wuqdRGUDQ4kZL3fuuuMVWVVnTnTcThej9ad5&v=1&x=3&a=CAogaHp1wKKc8QF3MI-2OrLYdQx8V4PIVZvFQyuN1m6pXFMSbRDHkq2nty8Yx--IqbcvIgEAUgQohvMYWgQe-mBdaibQsOQuSEfHUK0xs9nLWG6nHKAvRCwkkmsvXL1Ku9aCARYpDg4mWHImDCoL_RiyOC-KXU_0Jpntuid9MdC08bvpHUp5hkzlctbjsBvT654&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=UiIQr3rRvyIcoAz_sxDugQ&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=Rx2sZoVhs_Phm_Ps3RvVwJ2mgvA", }, "type": "ASSETID", }, @@ -878,7 +879,7 @@ "wrappingKey": "dmZqsyvxEA4s3CvifNMApA==", "referenceChecksum": "ATi6BbOzDuHl6RONNFCub9eqZqSm", # pylint: disable=C0321 - "downloadURL": "https://cvws.icloud-content.com/B/ARZd_GzpY62XRtXt+jP6UsV4fBZIATi6BbOzDuHl6RONNFCub9eqZqSn/${f}?o=AkMi62tflbXgUrgSyQZ94SinXG9TYXZ6tydGOmDQx9HG&v=1&x=3&a=CAogc8yqpMgxKnf363cp0n1CujaxnsWY_KrZ3VEN9QchlhcSbRDGkq2nty8Yxu-IqbcvIgEAUgR4fBZHWgSqZqSmaiaxcg1zIsiESwGaEOecYR84r83ltACA6SY5ypGyvYxKD0M3LmqI8HIm7n2S2UL6EBM2Z3a9YFIGX8MrKABFDMA5TXFPUVUP6AfsnKigVMc&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=dmZqsyvxEA4s3CvifNMApA&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=rYmwpGBg6DPYHSGj6UAOnCfuMPk", # noqa: E501 + "downloadURL": "https://cvws.icloud-content.com/B/ARZd_GzpY62XRtXt+jP6UsV4fBZIATi6BbOzDuHl6RONNFCub9eqZqSn/${f}?o=AkMi62tflbXgUrgSyQZ94SinXG9TYXZ6tydGOmDQx9HG&v=1&x=3&a=CAogc8yqpMgxKnf363cp0n1CujaxnsWY_KrZ3VEN9QchlhcSbRDGkq2nty8Yxu-IqbcvIgEAUgR4fBZHWgSqZqSmaiaxcg1zIsiESwGaEOecYR84r83ltACA6SY5ypGyvYxKD0M3LmqI8HIm7n2S2UL6EBM2Z3a9YFIGX8MrKABFDMA5TXFPUVUP6AfsnKigVMc&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=dmZqsyvxEA4s3CvifNMApA&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=rYmwpGBg6DPYHSGj6UAOnCfuMPk", }, "type": "ASSETID", }, @@ -891,7 +892,7 @@ "wrappingKey": "Nz2a7ohpe3KPptCk0J0lWA==", "referenceChecksum": "AdUIDFzHC2rVOvwTz0jPi/tKihnb", # pylint: disable=C0321 - "downloadURL": "https://cvws.icloud-content.com/B/AVx3_VKkbWPdNbWw68mrWzSuemXiAdUIDFzHC2rVOvwTz0jPi_tKihnn/${f}?o=AksMTyqi4NosuW50ei90oXcv82fP1r-6QocLorp20RpO&v=1&x=3&a=CAogfvU0-_8L-3qRcy6jZsj3Vuqt4aL2rk5xVXF7lwVV6A8SbRDGkq2nty8Yxu-IqbcvIgEAUgSuemXgWgRKihnbaiZoWboa3qYl3KVDo1VGIHrRDoySixw8lzXtf1Y-AnoVN1Pd4hLkPnImXYuLGS8iK7BRJcQg25R5hk54OD04duy2TscnYu1mACOSERXpXEI&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=Nz2a7ohpe3KPptCk0J0lWA&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=t3NT5mCLmsRjPqAGvROVsMrAjfg", # noqa: E501 + "downloadURL": 
"https://cvws.icloud-content.com/B/AVx3_VKkbWPdNbWw68mrWzSuemXiAdUIDFzHC2rVOvwTz0jPi_tKihnn/${f}?o=AksMTyqi4NosuW50ei90oXcv82fP1r-6QocLorp20RpO&v=1&x=3&a=CAogfvU0-_8L-3qRcy6jZsj3Vuqt4aL2rk5xVXF7lwVV6A8SbRDGkq2nty8Yxu-IqbcvIgEAUgSuemXgWgRKihnbaiZoWboa3qYl3KVDo1VGIHrRDoySixw8lzXtf1Y-AnoVN1Pd4hLkPnImXYuLGS8iK7BRJcQg25R5hk54OD04duy2TscnYu1mACOSERXpXEI&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=Nz2a7ohpe3KPptCk0J0lWA&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=t3NT5mCLmsRjPqAGvROVsMrAjfg", }, "type": "ASSETID", }, @@ -919,7 +920,7 @@ "wrappingKey": "UiIQr3rRvyIcoAz/sxDugQ==", "referenceChecksum": "ARtMrcvA8cbMefPDnmwSWQwe+mBd", # pylint: disable=C0321 - "downloadURL": "https://cvws.icloud-content.com/B/ARpHiouI3Ib_ziuZYTCiSikohvMZARtMrcvA8cbMefPDnmwSWQwe-mBe/${f}?o=Auh2MA-6wuqdRGUDQ4kZL3fuuuMVWVVnTnTcThej9ad5&v=1&x=3&a=CAogaHp1wKKc8QF3MI-2OrLYdQx8V4PIVZvFQyuN1m6pXFMSbRDHkq2nty8Yx--IqbcvIgEAUgQohvMYWgQe-mBdaibQsOQuSEfHUK0xs9nLWG6nHKAvRCwkkmsvXL1Ku9aCARYpDg4mWHImDCoL_RiyOC-KXU_0Jpntuid9MdC08bvpHUp5hkzlctbjsBvT654&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=UiIQr3rRvyIcoAz_sxDugQ&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=Rx2sZoVhs_Phm_Ps3RvVwJ2mgvA", # noqa: E501 + "downloadURL": "https://cvws.icloud-content.com/B/ARpHiouI3Ib_ziuZYTCiSikohvMZARtMrcvA8cbMefPDnmwSWQwe-mBe/${f}?o=Auh2MA-6wuqdRGUDQ4kZL3fuuuMVWVVnTnTcThej9ad5&v=1&x=3&a=CAogaHp1wKKc8QF3MI-2OrLYdQx8V4PIVZvFQyuN1m6pXFMSbRDHkq2nty8Yx--IqbcvIgEAUgQohvMYWgQe-mBdaibQsOQuSEfHUK0xs9nLWG6nHKAvRCwkkmsvXL1Ku9aCARYpDg4mWHImDCoL_RiyOC-KXU_0Jpntuid9MdC08bvpHUp5hkzlctbjsBvT654&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=UiIQr3rRvyIcoAz_sxDugQ&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=Rx2sZoVhs_Phm_Ps3RvVwJ2mgvA", }, "type": "ASSETID", }, @@ -970,7 +971,7 @@ "wrappingKey": "pYDpdhqdaL9SAxHilZEj3Q==", "referenceChecksum": "ATqG89bMsXhtmMRMw009uhyJc/Kh", # pylint: disable=C0321 - "downloadURL": "https://cvws.icloud-content.com/B/ATTRy6p-Q3U1HqcF6BUKrrOMnjvoATqG89bMsXhtmMRMw009uhyJc_Kh/${f}?o=AovA4TUyNl2kYkqOdInhEXGZ_6Lgkx1fTEsqpkkMh3hm&v=1&x=3&a=CAogVnr-sKWlefxaxarlxJ-k7EPRB-Q851T9df9zyhCvis0SbRC7kq2nty8Yu--IqbcvIgEAUgSMnjvnWgSJc_KhaiZCiMEZuykdl4ex2Ra8y53DbEEtJi6ItoX1e6b8TOoWXYiLA-mkr3Im7aDvMFg_m7tYuslgLZFXL8hxJftHL4oTy1ZpuVaP__2nTQTPLp4&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=pYDpdhqdaL9SAxHilZEj3Q&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=IKz0oqClwHpM9shdTb3e5liYV5E", # noqa: E501 + "downloadURL": "https://cvws.icloud-content.com/B/ATTRy6p-Q3U1HqcF6BUKrrOMnjvoATqG89bMsXhtmMRMw009uhyJc_Kh/${f}?o=AovA4TUyNl2kYkqOdInhEXGZ_6Lgkx1fTEsqpkkMh3hm&v=1&x=3&a=CAogVnr-sKWlefxaxarlxJ-k7EPRB-Q851T9df9zyhCvis0SbRC7kq2nty8Yu--IqbcvIgEAUgSMnjvnWgSJc_KhaiZCiMEZuykdl4ex2Ra8y53DbEEtJi6ItoX1e6b8TOoWXYiLA-mkr3Im7aDvMFg_m7tYuslgLZFXL8hxJftHL4oTy1ZpuVaP__2nTQTPLp4&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=pYDpdhqdaL9SAxHilZEj3Q&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=IKz0oqClwHpM9shdTb3e5liYV5E", }, "type": "ASSETID", }, @@ -992,7 +993,7 @@ "wrappingKey": "YIFaf0awZsX16khQaJ5pHw==", "referenceChecksum": "AVLSGMHt+PAQ9/krqqfXATNX57d5", # pylint: disable=C0321 - "downloadURL": 
"https://cvws.icloud-content.com/B/Ab_8kUAhnGzSxnl9yWvh8JKBpOvWAVLSGMHt-PAQ9_krqqfXATNX57d5/${f}?o=AvHBwurT0LTKni3mzYNLu4FnSeLeXfxYgSThZ4ImxjO8&v=1&x=3&a=CAogTIPbEVbPukTNLTRMbKPr3KEw-OwlmwJ6E2P4TWSVmS0SbRC7kq2nty8Yu--IqbcvIgEAUgSBpOvWWgRX57d5aibAoDs2oxjwpsMmZzKDj2ndE0sAhXdcwzBu-U_oZGpb059mW6D0dnImIjbNA_Bqcyw_VKQmNxeLtnGtGwyFB16OPwFKYcs1KsSFvHFAD7Y&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=YIFaf0awZsX16khQaJ5pHw&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=1bdqdYxBN6JqLAkjMyHSEGNkqDA", # noqa: E501 + "downloadURL": "https://cvws.icloud-content.com/B/Ab_8kUAhnGzSxnl9yWvh8JKBpOvWAVLSGMHt-PAQ9_krqqfXATNX57d5/${f}?o=AvHBwurT0LTKni3mzYNLu4FnSeLeXfxYgSThZ4ImxjO8&v=1&x=3&a=CAogTIPbEVbPukTNLTRMbKPr3KEw-OwlmwJ6E2P4TWSVmS0SbRC7kq2nty8Yu--IqbcvIgEAUgSBpOvWWgRX57d5aibAoDs2oxjwpsMmZzKDj2ndE0sAhXdcwzBu-U_oZGpb059mW6D0dnImIjbNA_Bqcyw_VKQmNxeLtnGtGwyFB16OPwFKYcs1KsSFvHFAD7Y&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=YIFaf0awZsX16khQaJ5pHw&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=1bdqdYxBN6JqLAkjMyHSEGNkqDA", }, "type": "ASSETID", }, @@ -1023,7 +1024,7 @@ "wrappingKey": "lxLQBw46n1nvea4s30UY+A==", "referenceChecksum": "AV2Zh7WygJu74eNWVuuMT4lM8qme", # pylint: disable=C0321 - "downloadURL": "https://cvws.icloud-content.com/B/AQNND5zpteAXnnBP2BmDd0ropjY0AV2Zh7WygJu74eNWVuuMT4lM8qme/${f}?o=ArcD2SL9b5Gy5zcnrnT2luycDRZzFLjOiX-8u9IWdQM2&v=1&x=3&a=CAogfF6-sW-XhnsFDy-vqHQjR8LXVO4OmxBUqG4CZf1zmOwSbRC8kq2nty8YvO-IqbcvIgEAUgTopjY9WgRM8qmeaibiG79B2YhfcchV4W9EgxQXAN4Bpi57NX82WXqo_YW-xi1qLAH9-HImRd8oYhd7r27sXPkUL3GT-rKGSKG-leLeNevi3ay090liNNZH-2U&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=lxLQBw46n1nvea4s30UY-A&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=34vgVK6vLEdlpYceHAmIfqIp1Fk", # noqa: E501 + "downloadURL": "https://cvws.icloud-content.com/B/AQNND5zpteAXnnBP2BmDd0ropjY0AV2Zh7WygJu74eNWVuuMT4lM8qme/${f}?o=ArcD2SL9b5Gy5zcnrnT2luycDRZzFLjOiX-8u9IWdQM2&v=1&x=3&a=CAogfF6-sW-XhnsFDy-vqHQjR8LXVO4OmxBUqG4CZf1zmOwSbRC8kq2nty8YvO-IqbcvIgEAUgTopjY9WgRM8qmeaibiG79B2YhfcchV4W9EgxQXAN4Bpi57NX82WXqo_YW-xi1qLAH9-HImRd8oYhd7r27sXPkUL3GT-rKGSKG-leLeNevi3ay090liNNZH-2U&e=1629757781&fl=&r=4d5c62f6-c81b-4e60-a785-4139aad087a7-1&k=lxLQBw46n1nvea4s30UY-A&ckc=com.apple.photos.cloud&ckz=PrimarySync&y=1&p=104&s=34vgVK6vLEdlpYceHAmIfqIp1Fk", }, "type": "ASSETID", }, @@ -1983,7 +1984,7 @@ "zoneType": "REGULAR_CUSTOM_ZONE", }, }, - ] + ], }, }, { @@ -2545,7 +2546,7 @@ "wrappingKey": "amaCdL9Z+QxfzgD4+aYATg==", "referenceChecksum": "AQYmx+DRYXnMs0tkDZ3rorp4IB99", # pylint: disable=C0321 - "downloadURL": "https://cvws.icloud-content.com/B/AQDN6auXvelQyb_btBqkNNjA97E2AZ_h3_ZBuSDV7J1SfMKpllmP-FGN/${f}?o=AkpvVanzvRePQKJ40Dm4YSvdVd9PwJFyrTyTz2oz9JPo&v=1&x=3&a=CAogVj_YvgW5M2-UxrKh2Fvp9mXaucTfmoEtSZ68oOF-ufUSbRDcod2wuDEY3P64srgxIgEAUgTA97E2WgSP-FGNaiZJrOc38Lxkny5PD-mG8_GkvEdMW-iCBLF63Zqz7ylzJVygR3BCr3ImsXnj-RgqNBe6eE3QOGEIQYnSIw1cO9KLmioI3AD_m-fQ-RL4Pfo&e=1698765356&fl=&r=1c7a694a-6450-4be0-85be-b8da170ce390-1&k=S2x_JT65p4HaFzealKoCTg&ckc=com.apple.photos.cloud&ckz=SharedSync-9DD9B767-9F30-4D6F-B658-F17DBA16D107&y=1&p=104&s=aw1qXk9b-vv16ld1xJ9OJ2lHSHk", # noqa: E501 + "downloadURL": 
"https://cvws.icloud-content.com/B/AQDN6auXvelQyb_btBqkNNjA97E2AZ_h3_ZBuSDV7J1SfMKpllmP-FGN/${f}?o=AkpvVanzvRePQKJ40Dm4YSvdVd9PwJFyrTyTz2oz9JPo&v=1&x=3&a=CAogVj_YvgW5M2-UxrKh2Fvp9mXaucTfmoEtSZ68oOF-ufUSbRDcod2wuDEY3P64srgxIgEAUgTA97E2WgSP-FGNaiZJrOc38Lxkny5PD-mG8_GkvEdMW-iCBLF63Zqz7ylzJVygR3BCr3ImsXnj-RgqNBe6eE3QOGEIQYnSIw1cO9KLmioI3AD_m-fQ-RL4Pfo&e=1698765356&fl=&r=1c7a694a-6450-4be0-85be-b8da170ce390-1&k=S2x_JT65p4HaFzealKoCTg&ckc=com.apple.photos.cloud&ckz=SharedSync-9DD9B767-9F30-4D6F-B658-F17DBA16D107&y=1&p=104&s=aw1qXk9b-vv16ld1xJ9OJ2lHSHk", }, "type": "ASSETID", }, @@ -2558,7 +2559,7 @@ "wrappingKey": "Y40xDPUr6DmxfeoSqxaQ7A==", "referenceChecksum": "AXKVYPcDa+9Mjvnap0ZS+p2Z24V3", # pylint: disable=C0321 - "downloadURL": "https://cvws.icloud-content.com/B/AY4eS1ezj9pmMHzfVzwC2CLmBwZOAXKLBx985QzfCKCGyN0wbGs6SuTf/${f}?o=AsBiWhNj_AjH-3vh0riCjYfhRtBfd-mUS6PQhxj4zYTH&v=1&x=3&a=CAogEeo3WZxkrjW9vg53X0JbMLiadhXSjhuENTh402ga9sISbRDdod2wuDEY3f64srgxIgEAUgTmBwZNWgQ6SuTfaiZ-CDCZGCdwB5-2MeuQ6hLpY6mmSYhtwgliGihERuDLhb89zqlO4HImWLHBIgl_7lBgvs7t6Ur5Vu_UAGoIQvIv1UEPwakPbNcGSCVnR5E&e=1698765356&fl=&r=1c7a694a-6450-4be0-85be-b8da170ce390-1&k=TF9BsVkUpEEdK0QpxQupmA&ckc=com.apple.photos.cloud&ckz=SharedSync-9DD9B767-9F30-4D6F-B658-F17DBA16D107&y=1&p=104&s=3nnkdl_tJa2baG_ccWB6PwXdlUM", # noqa: E501 + "downloadURL": "https://cvws.icloud-content.com/B/AY4eS1ezj9pmMHzfVzwC2CLmBwZOAXKLBx985QzfCKCGyN0wbGs6SuTf/${f}?o=AsBiWhNj_AjH-3vh0riCjYfhRtBfd-mUS6PQhxj4zYTH&v=1&x=3&a=CAogEeo3WZxkrjW9vg53X0JbMLiadhXSjhuENTh402ga9sISbRDdod2wuDEY3f64srgxIgEAUgTmBwZNWgQ6SuTfaiZ-CDCZGCdwB5-2MeuQ6hLpY6mmSYhtwgliGihERuDLhb89zqlO4HImWLHBIgl_7lBgvs7t6Ur5Vu_UAGoIQvIv1UEPwakPbNcGSCVnR5E&e=1698765356&fl=&r=1c7a694a-6450-4be0-85be-b8da170ce390-1&k=TF9BsVkUpEEdK0QpxQupmA&ckc=com.apple.photos.cloud&ckz=SharedSync-9DD9B767-9F30-4D6F-B658-F17DBA16D107&y=1&p=104&s=3nnkdl_tJa2baG_ccWB6PwXdlUM", }, "type": "ASSETID", }, @@ -2569,7 +2570,7 @@ "wrappingKey": "Y40xDPUr6DmxfeoSqxaQ7A==", "referenceChecksum": "AXKVYPcDa+9Mjvnap0ZS+p2Z24V3", # pylint: disable=C0321 - "downloadURL": "https://cvws.icloud-content.com/B/AY4eS1ezj9pmMHzfVzwC2CLmBwZOAXKLBx985QzfCKCGyN0wbGs6SuTf/${f}?o=AsBiWhNj_AjH-3vh0riCjYfhRtBfd-mUS6PQhxj4zYTH&v=1&x=3&a=CAogEeo3WZxkrjW9vg53X0JbMLiadhXSjhuENTh402ga9sISbRDdod2wuDEY3f64srgxIgEAUgTmBwZNWgQ6SuTfaiZ-CDCZGCdwB5-2MeuQ6hLpY6mmSYhtwgliGihERuDLhb89zqlO4HImWLHBIgl_7lBgvs7t6Ur5Vu_UAGoIQvIv1UEPwakPbNcGSCVnR5E&e=1698765356&fl=&r=1c7a694a-6450-4be0-85be-b8da170ce390-1&k=TF9BsVkUpEEdK0QpxQupmA&ckc=com.apple.photos.cloud&ckz=SharedSync-9DD9B767-9F30-4D6F-B658-F17DBA16D107&y=1&p=104&s=3nnkdl_tJa2baG_ccWB6PwXdlUM", # noqa: E501 + "downloadURL": "https://cvws.icloud-content.com/B/AY4eS1ezj9pmMHzfVzwC2CLmBwZOAXKLBx985QzfCKCGyN0wbGs6SuTf/${f}?o=AsBiWhNj_AjH-3vh0riCjYfhRtBfd-mUS6PQhxj4zYTH&v=1&x=3&a=CAogEeo3WZxkrjW9vg53X0JbMLiadhXSjhuENTh402ga9sISbRDdod2wuDEY3f64srgxIgEAUgTmBwZNWgQ6SuTfaiZ-CDCZGCdwB5-2MeuQ6hLpY6mmSYhtwgliGihERuDLhb89zqlO4HImWLHBIgl_7lBgvs7t6Ur5Vu_UAGoIQvIv1UEPwakPbNcGSCVnR5E&e=1698765356&fl=&r=1c7a694a-6450-4be0-85be-b8da170ce390-1&k=TF9BsVkUpEEdK0QpxQupmA&ckc=com.apple.photos.cloud&ckz=SharedSync-9DD9B767-9F30-4D6F-B658-F17DBA16D107&y=1&p=104&s=3nnkdl_tJa2baG_ccWB6PwXdlUM", }, "type": "ASSETID", }, @@ -2602,7 +2603,7 @@ "wrappingKey": "r7EeA3tyPsWdcECp6X9dHA==", "referenceChecksum": "AR5TiM9Qko4rHwmoDH1BgNRVZpF4", # pylint: disable=C0321 - "downloadURL": 
"https://cvws.icloud-content.com/B/Aa_QVPVEM9bvm5Owy3GRFNyqbKuXAbgec55EhUFp9db5znXM3Xz-nq1X/${f}?o=AucoksyzdsJiFVmSYa_ajMHD_2grSFU6qbVTFdggYHgq&v=1&x=3&a=CAogsS3em6hEfTnsZDnYIlTaWjcN28xkDpe_JZFfm-ELLooSbRDdod2wuDEY3f64srgxIgEAUgSqbKuXWgT-nq1XaiZcxyPbeVdgxw86mBeD2193kkQ33hkJJh_JN1ApKm98JOLBPtcV4nImJH-ycgL-oJavf11qz9IfC5_g6vlq5shkb4Ohcl6xwdHgbBIcFYE&e=1698765356&fl=&r=1c7a694a-6450-4be0-85be-b8da170ce390-1&k=n8u2-NoAEqPijfVUBMbN6Q&ckc=com.apple.photos.cloud&ckz=SharedSync-9DD9B767-9F30-4D6F-B658-F17DBA16D107&y=1&p=104&s=m_19S8GYm58_yhhskaWeQNM6Nug", # noqa: E501 + "downloadURL": "https://cvws.icloud-content.com/B/Aa_QVPVEM9bvm5Owy3GRFNyqbKuXAbgec55EhUFp9db5znXM3Xz-nq1X/${f}?o=AucoksyzdsJiFVmSYa_ajMHD_2grSFU6qbVTFdggYHgq&v=1&x=3&a=CAogsS3em6hEfTnsZDnYIlTaWjcN28xkDpe_JZFfm-ELLooSbRDdod2wuDEY3f64srgxIgEAUgSqbKuXWgT-nq1XaiZcxyPbeVdgxw86mBeD2193kkQ33hkJJh_JN1ApKm98JOLBPtcV4nImJH-ycgL-oJavf11qz9IfC5_g6vlq5shkb4Ohcl6xwdHgbBIcFYE&e=1698765356&fl=&r=1c7a694a-6450-4be0-85be-b8da170ce390-1&k=n8u2-NoAEqPijfVUBMbN6Q&ckc=com.apple.photos.cloud&ckz=SharedSync-9DD9B767-9F30-4D6F-B658-F17DBA16D107&y=1&p=104&s=m_19S8GYm58_yhhskaWeQNM6Nug", }, "type": "ASSETID", }, @@ -2693,7 +2694,7 @@ "wrappingKey": "KaI99WFTkCwtFMUF+tYrSQ==", "referenceChecksum": "AYsP/+uTEhISEYYHCK+nasKEFIie", # pylint: disable=C0321 - "downloadURL": "https://cvws.icloud-content.com/B/AVUZeNmSEsGH47OEyy0AojnozAK8AYsP_-uTEhISEYYHCK-nasKEFIie/${f}?o=AkNOIvUnrfPDQ5THreKWKcvRplOk2ggvZF6KsaY7fd-A&v=1&x=3&a=CAogmm1yJGk7Y1lG58D_XQVryIZ6o11DEv3NIaN6iIPSKtMSbRCBpevFuDEYgYLHx7gxIgEAUgTozAK8WgSEFIieaiaHabDKDv5faXcmtytIOOSUbK4AWjU1YWdR46bN82z7JbfcangVG3ImUXOt030VnBVdnXbi-kYl85xzaurON3eMgxiRpnHZOHf0eem7cHY&e=1698809626&fl=&r=b39216b7-2243-48c2-92cb-b598e98e0d60-1&k=KaI99WFTkCwtFMUF-tYrSQ&ckc=com.apple.photos.cloud&ckz=SharedSync-9DD9B767-9F30-4D6F-B658-F17DBA16D106&y=1&p=104&s=7ATOXQ_ALygO3TEGReKS2XM-L_Q", # noqa: E501 + "downloadURL": "https://cvws.icloud-content.com/B/AVUZeNmSEsGH47OEyy0AojnozAK8AYsP_-uTEhISEYYHCK-nasKEFIie/${f}?o=AkNOIvUnrfPDQ5THreKWKcvRplOk2ggvZF6KsaY7fd-A&v=1&x=3&a=CAogmm1yJGk7Y1lG58D_XQVryIZ6o11DEv3NIaN6iIPSKtMSbRCBpevFuDEYgYLHx7gxIgEAUgTozAK8WgSEFIieaiaHabDKDv5faXcmtytIOOSUbK4AWjU1YWdR46bN82z7JbfcangVG3ImUXOt030VnBVdnXbi-kYl85xzaurON3eMgxiRpnHZOHf0eem7cHY&e=1698809626&fl=&r=b39216b7-2243-48c2-92cb-b598e98e0d60-1&k=KaI99WFTkCwtFMUF-tYrSQ&ckc=com.apple.photos.cloud&ckz=SharedSync-9DD9B767-9F30-4D6F-B658-F17DBA16D106&y=1&p=104&s=7ATOXQ_ALygO3TEGReKS2XM-L_Q", }, "type": "ASSETID", }, @@ -2718,7 +2719,7 @@ "wrappingKey": "OhxadeoR+QL9gyG5vjchQQ==", "referenceChecksum": "AVIWcEe38h3rZ6jdx0xradApmsm5", # pylint: disable=C0321 - "downloadURL": "https://cvws.icloud-content.com/B/AfDQhuBu4hXYZG1MKMZww5hr0XFtAVIWcEe38h3rZ6jdx0xradApmsm5/${f}?o=Av--_tK-30ay30YC4r-3ioGEUXi0lv_jiR4L9IqMnIN1&v=1&x=3&a=CAogyaYaP5z_6cAm4ENgJsdpmA7mEWWm0bNV2NE_uHkHO5QSbRCCpevFuDEYgoLHx7gxIgEAUgRr0XFtWgQpmsm5aibYv1Ug7RFU3CVYUoQwEQushhNxX3GrnGlEPU0JXThR4gnzI-UuA3ImA-Q5OOhz9lBTE-KMkuTizKv8KxrchdoxZyDpoPXumnzP-Lz8Z9Q&e=1698809626&fl=&r=b39216b7-2243-48c2-92cb-b598e98e0d60-1&k=OhxadeoR-QL9gyG5vjchQQ&ckc=com.apple.photos.cloud&ckz=SharedSync-9DD9B767-9F30-4D6F-B658-F17DBA16D106&y=1&p=104&s=XERpve2UemTKkPC2qUKPjEAq_fI", # noqa: E501 + "downloadURL": 
"https://cvws.icloud-content.com/B/AfDQhuBu4hXYZG1MKMZww5hr0XFtAVIWcEe38h3rZ6jdx0xradApmsm5/${f}?o=Av--_tK-30ay30YC4r-3ioGEUXi0lv_jiR4L9IqMnIN1&v=1&x=3&a=CAogyaYaP5z_6cAm4ENgJsdpmA7mEWWm0bNV2NE_uHkHO5QSbRCCpevFuDEYgoLHx7gxIgEAUgRr0XFtWgQpmsm5aibYv1Ug7RFU3CVYUoQwEQushhNxX3GrnGlEPU0JXThR4gnzI-UuA3ImA-Q5OOhz9lBTE-KMkuTizKv8KxrchdoxZyDpoPXumnzP-Lz8Z9Q&e=1698809626&fl=&r=b39216b7-2243-48c2-92cb-b598e98e0d60-1&k=OhxadeoR-QL9gyG5vjchQQ&ckc=com.apple.photos.cloud&ckz=SharedSync-9DD9B767-9F30-4D6F-B658-F17DBA16D106&y=1&p=104&s=XERpve2UemTKkPC2qUKPjEAq_fI", }, "type": "ASSETID", }, @@ -2762,7 +2763,7 @@ "fieldValue": { "type": "STRING_LIST", "value": [ - "CPLContainerRelationNotDeletedByAssetDate:E4RT4FB7-4A35-4958-1D42-5769E66BE407" + "CPLContainerRelationNotDeletedByAssetDate:E4RT4FB7-4A35-4958-1D42-5769E66BE407", ], }, "comparator": "IN", @@ -2771,8 +2772,8 @@ }, "zoneWide": True, "zoneID": {"zoneName": "PrimarySync"}, - } - ] + }, + ], }, "response": { "batch": [ @@ -2780,7 +2781,7 @@ "records": [ { # pylint: disable=C0321 - "recordName": "CPLContainerRelationNotDeletedByAssetDate:E4RT4FB7-4A35-4958-1D42-5769E66BE407", # noqa: E501 + "recordName": "CPLContainerRelationNotDeletedByAssetDate:E4RT4FB7-4A35-4958-1D42-5769E66BE407", "recordType": "IndexCountResult", "fields": {"itemCount": {"value": 5, "type": "INT64"}}, "pluginFields": {}, @@ -2801,13 +2802,13 @@ "ownerRecordName": "_1d5r3c201b3a4r5daac8ff7e7fbc0c23", "zoneType": "REGULAR_CUSTOM_ZONE", }, - } + }, ], "syncToken": "AQAAAAAAArKjf//////////fSxWSKv5JfZ34edrt875d", - } - ] + }, + ], }, - } + }, ], "https://cvws.icloud-content.com/B/": [], } diff --git a/tests/test_config_parser.py b/tests/test_config_parser.py index ce9c141e1..b166e06aa 100644 --- a/tests/test_config_parser.py +++ b/tests/test_config_parser.py @@ -1,4 +1,5 @@ """Tests for config_parser.py file.""" + __author__ = "Mandar Patil (mandarons@pm.me)" import os @@ -33,9 +34,7 @@ def test_read_config_default_config_path(self): def test_read_config_env_config_path(self): """Test for ENV_CONFIG_FILE_PATH.""" os.environ[ENV_CONFIG_FILE_PATH_KEY] = tests.ENV_CONFIG_PATH - config = read_config( - config_path=os.environ.get(ENV_CONFIG_FILE_PATH_KEY, tests.CONFIG_PATH) - ) + config = read_config(config_path=os.environ.get(ENV_CONFIG_FILE_PATH_KEY, tests.CONFIG_PATH)) self.assertEqual( config["app"]["logger"]["filename"], "config_loaded_using_env_config_path.log", @@ -131,9 +130,7 @@ def test_prepare_drive_destination_given(self): # Given destination actual = config_parser.prepare_drive_destination(config=config) self.assertEqual( - os.path.abspath( - os.path.join(config["app"]["root"], config["drive"]["destination"]) - ), + os.path.abspath(os.path.join(config["app"]["root"], config["drive"]["destination"])), actual, ) self.assertTrue(os.path.exists(actual)) @@ -151,7 +148,7 @@ def test_prepare_drive_destination_default(self): os.path.join( DEFAULT_ROOT_DESTINATION, DEFAULT_DRIVE_DESTINATION, - ) + ), ), actual, ) @@ -167,7 +164,7 @@ def test_prepare_drive_destination_none_config(self): os.path.join( DEFAULT_ROOT_DESTINATION, DEFAULT_DRIVE_DESTINATION, - ) + ), ), actual, ) @@ -224,9 +221,7 @@ def test_get_smtp_empty(self): def test_get_smtp_email(self): """Given email.""" config = {"app": {"smtp": {"email": "user@test.com"}}} - self.assertEqual( - config["app"]["smtp"]["email"], config_parser.get_smtp_email(config=config) - ) + self.assertEqual(config["app"]["smtp"]["email"], config_parser.get_smtp_email(config=config)) def test_get_smtp_username(self): """If present, get smtp username 
else None.""" @@ -246,9 +241,7 @@ def test_smtp_email_none_config(self): def test_get_smtp_to_email(self): """Given email.""" config = {"app": {"smtp": {"to": "receiver@test.com"}}} - self.assertEqual( - config["app"]["smtp"]["to"], config_parser.get_smtp_to_email(config=config) - ) + self.assertEqual(config["app"]["smtp"]["to"], config_parser.get_smtp_to_email(config=config)) def test_get_smtp_to_email_default(self): """Given email.""" @@ -265,9 +258,7 @@ def test_smtp_to_email_none_config(self): def test_get_smtp_host(self): """Given host.""" config = {"app": {"smtp": {"host": "smtp.test.com"}}} - self.assertEqual( - config["app"]["smtp"]["host"], config_parser.get_smtp_host(config=config) - ) + self.assertEqual(config["app"]["smtp"]["host"], config_parser.get_smtp_host(config=config)) def test_smtp_host_none_config(self): """Test for None config.""" @@ -276,9 +267,7 @@ def test_smtp_host_none_config(self): def test_get_smtp_port(self): """Test for Given port.""" config = {"app": {"smtp": {"port": 587}}} - self.assertEqual( - config["app"]["smtp"]["port"], config_parser.get_smtp_port(config=config) - ) + self.assertEqual(config["app"]["smtp"]["port"], config_parser.get_smtp_port(config=config)) def test_smtp_port_none_config(self): """Test for None config.""" @@ -302,9 +291,7 @@ def test_prepare_photos_destination(self): # Given destination actual = config_parser.prepare_photos_destination(config=config) self.assertEqual( - os.path.abspath( - os.path.join(config["app"]["root"], config["photos"]["destination"]) - ), + os.path.abspath(os.path.join(config["app"]["root"], config["photos"]["destination"])), actual, ) self.assertTrue(os.path.exists(actual)) @@ -322,7 +309,7 @@ def test_prepare_photos_destination_default(self): os.path.join( DEFAULT_ROOT_DESTINATION, DEFAULT_PHOTOS_DESTINATION, - ) + ), ), actual, ) @@ -338,7 +325,7 @@ def test_prepare_photos_destination_none_config(self): os.path.join( DEFAULT_ROOT_DESTINATION, DEFAULT_PHOTOS_DESTINATION, - ) + ), ), actual, ) @@ -449,9 +436,7 @@ def test_get_photos_filters_libraries_empty(self): """Photos > library is missing in config.""" config = read_config(config_path=tests.CONFIG_PATH) del config["photos"]["filters"]["libraries"] - self.assertEqual( - config_parser.get_photos_filters(config=config)["libraries"], None - ) + self.assertEqual(config_parser.get_photos_filters(config=config)["libraries"], None) def test_get_photos_filters_libraries_specified(self): """Photos > library is specified as shared.""" diff --git a/tests/test_notify.py b/tests/test_notify.py index d5f7d2496..33307748f 100644 --- a/tests/test_notify.py +++ b/tests/test_notify.py @@ -1,4 +1,5 @@ """Test for notify.py file.""" + import datetime import unittest from unittest.mock import patch @@ -28,7 +29,7 @@ def setUp(self) -> None: "password": "password", }, "telegram": {"bot_token": "bot_token", "chat_id": "chat_id"}, - } + }, } self.message_body = "message body" @@ -48,16 +49,12 @@ def test_no_smtp_config(self): def test_dry_run_send(self): """Test for send returns the datetime of the request.""" - self.assertIsInstance( - notify.send(self.config, None, dry_run=True), datetime.datetime - ) + self.assertIsInstance(notify.send(self.config, None, dry_run=True), datetime.datetime) def test_build_message(self): """Test for building a valid email.""" subject = "icloud-docker: Two step authentication required" - message = ( - "Two-step authentication for iCloud Drive, Photos (Docker) is required." 
- ) + message = "Two-step authentication for iCloud Drive, Photos (Docker) is required." msg = notify.build_message( email=self.config["app"]["smtp"]["email"], @@ -143,11 +140,7 @@ def test_send_fail(self): def test_notify_telegram_success(self): """Test for successful notification.""" - config = { - "app": { - "telegram": {"bot_token": "your-bot-token", "chat_id": "your-chat-id"} - } - } + config = {"app": {"telegram": {"bot_token": "your-bot-token", "chat_id": "your-chat-id"}}} with patch("src.notify.post_message_to_telegram") as post_message_mock: notify_telegram(config, self.message_body, None, False) @@ -161,11 +154,7 @@ def test_notify_telegram_success(self): def test_notify_telegram_fail(self): """Test for failed notification.""" - config = { - "app": { - "telegram": {"bot_token": "your-bot-token", "chat_id": "your-chat-id"} - } - } + config = {"app": {"telegram": {"bot_token": "your-bot-token", "chat_id": "your-chat-id"}}} with patch("src.notify.post_message_to_telegram") as post_message_mock: post_message_mock.return_value = False @@ -180,9 +169,7 @@ def test_notify_telegram_fail(self): def test_notify_telegram_throttling(self): """Test for throttled notification.""" - config = { - "telegram": {"bot_token": "your-bot-token", "chat_id": "your-chat-id"} - } + config = {"telegram": {"bot_token": "your-bot-token", "chat_id": "your-chat-id"}} last_send = datetime.datetime.now() - datetime.timedelta(hours=2) dry_run = False @@ -194,9 +181,7 @@ def test_notify_telegram_throttling(self): def test_notify_telegram_dry_run(self): """Test for dry run mode.""" - config = { - "telegram": {"bot_token": "your-bot-token", "chat_id": "your-chat-id"} - } + config = {"telegram": {"bot_token": "your-bot-token", "chat_id": "your-chat-id"}} last_send = datetime.datetime.now() dry_run = True @@ -246,9 +231,7 @@ def test_post_message_to_telegram_fail(self): def test_notify_discord_success(self): """Test for successful notification.""" - config = { - "app": {"discord": {"webhook_url": "webhook-url", "username": "username"}} - } + config = {"app": {"discord": {"webhook_url": "webhook-url", "username": "username"}}} with patch("src.notify.post_message_to_discord") as post_message_mock: notify_discord(config, self.message_body, None, False) @@ -263,9 +246,7 @@ def test_notify_discord_success(self): def test_notify_discord_fail(self): """Test for failed notification.""" - config = { - "app": {"discord": {"webhook_url": "webhook-url", "username": "username"}} - } + config = {"app": {"discord": {"webhook_url": "webhook-url", "username": "username"}}} with patch("src.notify.post_message_to_discord") as post_message_mock: post_message_mock.return_value = False @@ -280,9 +261,7 @@ def test_notify_discord_fail(self): def test_notify_discord_throttling(self): """Test for throttled notification.""" - config = { - "app": {"discord": {"webhook_url": "webhook-url", "username": "username"}} - } + config = {"app": {"discord": {"webhook_url": "webhook-url", "username": "username"}}} last_send = datetime.datetime.now() - datetime.timedelta(hours=2) dry_run = False @@ -294,9 +273,7 @@ def test_notify_discord_throttling(self): def test_notify_discord_dry_run(self): """Test for dry run mode.""" - config = { - "app": {"discord": {"webhook_url": "webhook-url", "username": "username"}} - } + config = {"app": {"discord": {"webhook_url": "webhook-url", "username": "username"}}} last_send = datetime.datetime.now() dry_run = True diff --git a/tests/test_src_init.py b/tests/test_src_init.py index c0f60dc4d..3b9af0647 100644 --- 
a/tests/test_src_init.py
+++ b/tests/test_src_init.py
@@ -1,4 +1,5 @@
 """Tests for sync module."""
+
 __author__ = "Mandar Patil (mandarons@pm.me)"

 import logging
@@ -25,9 +26,7 @@ def test_get_logger_no_config(self, mock_read_config):
         del config["app"]["logger"]
         mock_read_config.return_value = config
         logger = get_logger()
-        self.assertTrue(
-            len([h for h in logger.handlers if isinstance(h, logging.NullHandler)]) > 0
-        )
+        self.assertTrue(len([h for h in logger.handlers if isinstance(h, logging.NullHandler)]) > 0)

     @patch("src.read_config")
     def test_get_logger(self, mock_read_config):
diff --git a/tests/test_sync.py b/tests/test_sync.py
index 2e01c493b..6fcb21d60 100644
--- a/tests/test_sync.py
+++ b/tests/test_sync.py
@@ -1,4 +1,5 @@
 """Tests for sync.py file."""
+
 __author__ = "Mandar Patil (mandarons@pm.me)"

 import os
@@ -32,18 +33,14 @@ def setUp(self) -> None:
         self.root_dir = tests.TEMP_DIR
         self.config["app"]["root"] = self.root_dir
         os.makedirs(tests.TEMP_DIR, exist_ok=True)
-        self.service = data.ICloudPyServiceMock(
-            data.AUTHENTICATED_USER, data.VALID_PASSWORD
-        )
+        self.service = data.ICloudPyServiceMock(data.AUTHENTICATED_USER, data.VALID_PASSWORD)

     def tearDown(self) -> None:
         """Remove temp directories."""
         self.remove_temp()

     @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD)
-    @patch(
-        target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER
-    )
+    @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER)
     @patch("icloudpy.ICloudPyService")
     @patch("src.sync.read_config")
     @patch("requests.post", side_effect=tests.mocked_usage_post)
@@ -64,9 +61,7 @@ def test_sync(
         self.assertTrue(os.path.isdir("/config/session_data"))

     @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD)
-    @patch(
-        target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER
-    )
+    @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER)
     @patch("icloudpy.ICloudPyService")
     @patch("src.sync.read_config")
     @patch("requests.post", side_effect=tests.mocked_usage_post)
@@ -88,15 +83,11 @@ def test_sync_photos_only(
         mock_read_config.return_value = config
         self.assertIsNone(sync.sync())
         dir_length = len(os.listdir(self.root_dir))
-        self.assertTrue(1 == dir_length)
-        self.assertTrue(
-            os.path.isdir(os.path.join(self.root_dir, config["photos"]["destination"]))
-        )
+        self.assertTrue(dir_length == 1)
+        self.assertTrue(os.path.isdir(os.path.join(self.root_dir, config["photos"]["destination"])))

     @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD)
-    @patch(
-        target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER
-    )
+    @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER)
     @patch("icloudpy.ICloudPyService")
     @patch("src.sync.read_config")
     @patch("requests.post", side_effect=tests.mocked_usage_post)
@@ -118,16 +109,12 @@ def test_sync_drive_only(
         self.remove_temp()
         mock_read_config.return_value = config
         self.assertIsNone(sync.sync())
-        self.assertTrue(
-            os.path.isdir(os.path.join(self.root_dir, config["drive"]["destination"]))
-        )
+        self.assertTrue(os.path.isdir(os.path.join(self.root_dir, config["drive"]["destination"])))
         dir_length = len(os.listdir(self.root_dir))
-        self.assertTrue(1 == dir_length)
+        self.assertTrue(dir_length == 1)

     @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD)
-    @patch(
-        target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER
-    )
+
@patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.sync.read_config") @patch("requests.post", side_effect=tests.mocked_usage_post) @@ -154,9 +141,7 @@ def test_sync_empty( @patch("src.sync.sleep") @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.sync.read_config") @patch("requests.post", side_effect=tests.mocked_usage_post) @@ -188,9 +173,7 @@ def test_sync_2fa_required( @patch("src.sync.sleep") @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.sync.read_config") @patch("requests.post", side_effect=tests.mocked_usage_post) @@ -221,11 +204,10 @@ def test_sync_password_missing_in_keyring( [ e for e in captured[1] - if "Password is not stored in keyring. Please save the password in keyring." - in e - ] + if "Password is not stored in keyring. Please save the password in keyring." in e + ], ) - > 0 + > 0, ) @patch("src.sync.sleep") @@ -254,22 +236,11 @@ def test_sync_password_as_environment_variable( with patch.dict(os.environ, {ENV_ICLOUD_PASSWORD_KEY: data.VALID_PASSWORD}): with self.assertRaises(Exception): sync.sync() - self.assertTrue( - len( - [ - e - for e in captured[1] - if "Error: 2FA is required. Please log in." in e - ] - ) - > 0 - ) + self.assertTrue(len([e for e in captured[1] if "Error: 2FA is required. Please log in." 
in e]) > 0) @patch("src.sync.sleep") @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.sync.read_config") @patch("requests.post", side_effect=tests.mocked_usage_post) @@ -298,9 +269,7 @@ def test_sync_exception_thrown( @patch(target="sys.stdout", new_callable=StringIO) @patch("src.sync.sleep") @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.sync.read_config") @patch("requests.post", side_effect=tests.mocked_usage_post) @@ -353,9 +322,7 @@ def test_get_api_instance_default( if ENV_ICLOUD_PASSWORD_KEY in os.environ: del os.environ[ENV_ICLOUD_PASSWORD_KEY] - actual = sync.get_api_instance( - username=data.AUTHENTICATED_USER, password=data.VALID_PASSWORD - ) + actual = sync.get_api_instance(username=data.AUTHENTICATED_USER, password=data.VALID_PASSWORD) self.assertNotIn(".com.cn", actual.home_endpoint) self.assertNotIn(".com.cn", actual.setup_endpoint) @@ -371,17 +338,13 @@ def test_get_api_instance_china_region( if ENV_ICLOUD_PASSWORD_KEY in os.environ: del os.environ[ENV_ICLOUD_PASSWORD_KEY] - actual = sync.get_api_instance( - username=data.AUTHENTICATED_USER, password=data.VALID_PASSWORD - ) + actual = sync.get_api_instance(username=data.AUTHENTICATED_USER, password=data.VALID_PASSWORD) self.assertNotIn(".com.cn", actual.home_endpoint) self.assertNotIn(".com.cn", actual.setup_endpoint) @patch("src.sync.sleep") @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.sync.read_config") @patch("requests.post", side_effect=tests.mocked_usage_post) @@ -409,25 +372,14 @@ def test_sync_negative_retry_login_interval( sync.sync() self.assertTrue(len(captured.records) > 1) self.assertTrue(len([e for e in captured[1] if "2FA is required" in e]) > 0) - self.assertTrue( - len( - [ - e - for e in captured[1] - if "retry_login_interval is < 0, exiting ..." in e - ] - ) - > 0 - ) + self.assertTrue(len([e for e in captured[1] if "retry_login_interval is < 0, exiting ..." in e]) > 0) @patch("src.sync.sleep") @patch( target="keyring.get_password", side_effect=exceptions.ICloudPyNoStoredPasswordAvailableException, ) - @patch( - target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.sync.read_config") @patch("requests.post", side_effect=tests.mocked_usage_post) @@ -454,17 +406,5 @@ def test_sync_negative_retry_login_interval_without_keyring_password( ] sync.sync() self.assertTrue(len(captured.records) > 1) - self.assertTrue( - len([e for e in captured[1] if "Password is not stored in keyring." in e]) - > 0 - ) - self.assertTrue( - len( - [ - e - for e in captured[1] - if "retry_login_interval is < 0, exiting ..." 
in e - ] - ) - > 0 - ) + self.assertTrue(len([e for e in captured[1] if "Password is not stored in keyring." in e]) > 0) + self.assertTrue(len([e for e in captured[1] if "retry_login_interval is < 0, exiting ..." in e]) > 0) diff --git a/tests/test_sync_drive.py b/tests/test_sync_drive.py index 7f266ca63..5a80f6943 100644 --- a/tests/test_sync_drive.py +++ b/tests/test_sync_drive.py @@ -1,4 +1,5 @@ """Tests for sync_drive.py file.""" + __author__ = "Mandar Patil (mandarons@pm.me)" import os @@ -25,17 +26,13 @@ def setUp(self) -> None: self.root = tests.DRIVE_DIR self.destination_path = self.root os.makedirs(self.destination_path, exist_ok=True) - self.service = data.ICloudPyServiceMock( - data.AUTHENTICATED_USER, data.VALID_PASSWORD - ) + self.service = data.ICloudPyServiceMock(data.AUTHENTICATED_USER, data.VALID_PASSWORD) self.drive = self.service.drive self.items = self.drive.dir() self.folder_item = self.drive[self.items[5]] self.file_item = self.drive[self.items[4]]["Test"]["Scanned document 1.pdf"] self.package_item = self.drive[self.items[6]]["Sample"]["Project.band"] - self.special_chars_package_item = self.drive[self.items[6]]["Sample"][ - "Fotoksiążka-Wzór.xmcf" - ] + self.special_chars_package_item = self.drive[self.items[6]]["Sample"]["Fotoksiążka-Wzór.xmcf"] self.package_item_nested = self.drive[self.items[6]]["Sample"]["ms.band"] self.file_name = "Scanned document 1.pdf" self.package_name = "Project.band" @@ -56,7 +53,7 @@ def test_wanted_parent_folder_none_filters(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir1/dir11"), - ) + ), ) def test_wanted_parent_folder(self): @@ -68,7 +65,7 @@ def test_wanted_parent_folder(self): filters=self.filters["folders"], root=self.root, folder_path=os.path.join(self.root, "dir1/dir11/some/dirs/file.ext"), - ) + ), ) def test_wanted_parent_folder_missing_parent_folder(self): @@ -80,7 +77,7 @@ def test_wanted_parent_folder_missing_parent_folder(self): filters=self.filters["folders"], root=self.root, folder_path=os.path.join(self.root, "dir1"), - ) + ), ) def test_wanted_folder_single_variations(self): @@ -92,7 +89,7 @@ def test_wanted_folder_single_variations(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir1"), - ) + ), ) self.filters["folders"] = ["/dir1"] self.assertTrue( @@ -101,7 +98,7 @@ def test_wanted_folder_single_variations(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir1"), - ) + ), ) self.filters["folders"] = ["dir1/"] self.assertTrue( @@ -110,7 +107,7 @@ def test_wanted_folder_single_variations(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir1"), - ) + ), ) self.filters["folders"] = ["/dir1/"] self.assertTrue( @@ -119,7 +116,7 @@ def test_wanted_folder_single_variations(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir1"), - ) + ), ) def test_wanted_folder_single_path(self): @@ -131,7 +128,7 @@ def test_wanted_folder_single_path(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir1"), - ) + ), ) self.assertTrue( sync_drive.wanted_folder( @@ -139,7 +136,7 @@ def test_wanted_folder_single_path(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir1", "dir2"), - ) + ), ) self.assertTrue( sync_drive.wanted_folder( @@ -147,7 +144,7 @@ def test_wanted_folder_single_path(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir1", "dir2", "dir3"), - ) + ), ) self.assertFalse( sync_drive.wanted_folder( @@ -155,7 +152,7 @@ def 
test_wanted_folder_single_path(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir1", "dir3"), - ) + ), ) self.assertFalse( sync_drive.wanted_folder( @@ -163,7 +160,7 @@ def test_wanted_folder_single_path(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir2"), - ) + ), ) self.assertFalse( sync_drive.wanted_folder( @@ -171,7 +168,7 @@ def test_wanted_folder_single_path(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir3"), - ) + ), ) self.filters["folders"] = ["dir1//dir2/dir3//"] self.assertTrue( @@ -180,7 +177,7 @@ def test_wanted_folder_single_path(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir1"), - ) + ), ) self.assertTrue( sync_drive.wanted_folder( @@ -188,7 +185,7 @@ def test_wanted_folder_single_path(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir1", "dir2"), - ) + ), ) self.assertTrue( sync_drive.wanted_folder( @@ -196,7 +193,7 @@ def test_wanted_folder_single_path(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir1", "dir2", "dir3"), - ) + ), ) self.assertFalse( sync_drive.wanted_folder( @@ -204,7 +201,7 @@ def test_wanted_folder_single_path(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir1", "dir3"), - ) + ), ) self.assertFalse( sync_drive.wanted_folder( @@ -212,7 +209,7 @@ def test_wanted_folder_single_path(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir2"), - ) + ), ) self.assertFalse( sync_drive.wanted_folder( @@ -220,7 +217,7 @@ def test_wanted_folder_single_path(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir3"), - ) + ), ) def test_wanted_folder_multiple(self): @@ -232,7 +229,7 @@ def test_wanted_folder_multiple(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir1"), - ) + ), ) self.assertTrue( sync_drive.wanted_folder( @@ -240,7 +237,7 @@ def test_wanted_folder_multiple(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir2"), - ) + ), ) self.assertFalse( sync_drive.wanted_folder( @@ -248,7 +245,7 @@ def test_wanted_folder_multiple(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir3"), - ) + ), ) def test_wanted_folder_multiple_paths(self): @@ -260,7 +257,7 @@ def test_wanted_folder_multiple_paths(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir1"), - ) + ), ) self.assertTrue( sync_drive.wanted_folder( @@ -268,7 +265,7 @@ def test_wanted_folder_multiple_paths(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir1", "dir2"), - ) + ), ) self.assertTrue( sync_drive.wanted_folder( @@ -276,7 +273,7 @@ def test_wanted_folder_multiple_paths(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir1", "dir2", "dir3"), - ) + ), ) self.assertFalse( sync_drive.wanted_folder( @@ -284,7 +281,7 @@ def test_wanted_folder_multiple_paths(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir1", "dir3"), - ) + ), ) self.assertFalse( sync_drive.wanted_folder( @@ -292,7 +289,7 @@ def test_wanted_folder_multiple_paths(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir2"), - ) + ), ) self.assertFalse( sync_drive.wanted_folder( @@ -300,7 +297,7 @@ def test_wanted_folder_multiple_paths(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir3"), - ) + ), ) self.assertTrue( sync_drive.wanted_folder( @@ -308,7 +305,7 @@ def 
test_wanted_folder_multiple_paths(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dirA"), - ) + ), ) self.assertTrue( sync_drive.wanted_folder( @@ -316,7 +313,7 @@ def test_wanted_folder_multiple_paths(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dirA", "dirB"), - ) + ), ) self.assertTrue( sync_drive.wanted_folder( @@ -324,7 +321,7 @@ def test_wanted_folder_multiple_paths(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dirA", "dirB", "dirC"), - ) + ), ) self.assertFalse( sync_drive.wanted_folder( @@ -332,7 +329,7 @@ def test_wanted_folder_multiple_paths(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dirA", "dirC"), - ) + ), ) self.assertFalse( sync_drive.wanted_folder( @@ -340,7 +337,7 @@ def test_wanted_folder_multiple_paths(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dirB"), - ) + ), ) self.assertFalse( sync_drive.wanted_folder( @@ -348,7 +345,7 @@ def test_wanted_folder_multiple_paths(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dirC"), - ) + ), ) def test_wanted_folder_ignore(self): @@ -360,7 +357,7 @@ def test_wanted_folder_ignore(self): ignore=self.ignore, root=self.root, folder_path=os.path.join(self.root, "dir1"), - ) + ), ) self.assertTrue( sync_drive.wanted_folder( @@ -368,7 +365,7 @@ def test_wanted_folder_ignore(self): ignore=self.ignore, root=self.root, folder_path=os.path.join(self.root, "dir1", "dir3"), - ) + ), ) self.assertFalse( sync_drive.wanted_folder( @@ -376,7 +373,7 @@ def test_wanted_folder_ignore(self): ignore=self.ignore, root=self.root, folder_path=os.path.join(self.root, "dir1", "dir2", "dir3"), - ) + ), ) self.assertFalse( sync_drive.wanted_folder( @@ -384,7 +381,7 @@ def test_wanted_folder_ignore(self): ignore=self.ignore, root=self.root, folder_path=os.path.join(self.root, "dir2", "dir1"), - ) + ), ) def test_wanted_folder_ignore_multiple_paths(self): @@ -396,7 +393,7 @@ def test_wanted_folder_ignore_multiple_paths(self): ignore=self.ignore, root=self.root, folder_path=os.path.join(self.root, "dir1"), - ) + ), ) self.assertTrue( sync_drive.wanted_folder( @@ -404,7 +401,7 @@ def test_wanted_folder_ignore_multiple_paths(self): ignore=self.ignore, root=self.root, folder_path=os.path.join(self.root, "dir1", "dir4"), - ) + ), ) self.assertFalse( sync_drive.wanted_folder( @@ -412,7 +409,7 @@ def test_wanted_folder_ignore_multiple_paths(self): ignore=self.ignore, root=self.root, folder_path=os.path.join(self.root, "dir1", "dir2", "dir3"), - ) + ), ) self.assertFalse( sync_drive.wanted_folder( @@ -420,7 +417,7 @@ def test_wanted_folder_ignore_multiple_paths(self): ignore=["dir3"], root=self.root, folder_path=os.path.join(self.root, "dir1", "dir3"), - ) + ), ) self.assertFalse( sync_drive.wanted_folder( @@ -428,7 +425,7 @@ def test_wanted_folder_ignore_multiple_paths(self): ignore=self.ignore, root=self.root, folder_path=os.path.join(self.root, "dir2", "dir1"), - ) + ), ) def test_wanted_folder_ignore_takes_precedence_to_filters(self): @@ -441,7 +438,7 @@ def test_wanted_folder_ignore_takes_precedence_to_filters(self): ignore=self.ignore, root=self.root, folder_path=os.path.join(self.root, "dir2", "dir3"), - ) + ), ) def test_wanted_folder_empty(self): @@ -454,7 +451,7 @@ def test_wanted_folder_empty(self): ignore=None, root=self.root, folder_path=os.path.join(self.root, "dir1"), - ) + ), ) self.filters = dict(original_filters) @@ -466,16 +463,12 @@ def test_wanted_folder_none_folder_path(self): ignore=None, 
root=self.root, folder_path=None, - ) + ), ) def test_wanted_folder_none_filters(self): """Test for wanted folder filters as None.""" - self.assertTrue( - sync_drive.wanted_folder( - filters=None, ignore=None, root=self.root, folder_path="dir1" - ) - ) + self.assertTrue(sync_drive.wanted_folder(filters=None, ignore=None, root=self.root, folder_path="dir1")) def test_wanted_folder_none_root(self): """Test for wanted folder root as None.""" @@ -485,16 +478,14 @@ def test_wanted_folder_none_root(self): ignore=None, root=None, folder_path="dir1", - ) + ), ) def test_wanted_file(self): """Test for a valid wanted file.""" self.filters["file_extensions"] = ["py"] self.assertTrue( - sync_drive.wanted_file( - filters=self.filters["file_extensions"], ignore=None, file_path=__file__ - ) + sync_drive.wanted_file(filters=self.filters["file_extensions"], ignore=None, file_path=__file__), ) def test_wanted_file_missing(self): @@ -504,7 +495,7 @@ def test_wanted_file_missing(self): filters=self.filters["file_extensions"], ignore=None, file_path=tests.CONFIG_PATH, - ) + ), ) def test_wanted_file_check_log(self): @@ -516,29 +507,19 @@ def test_wanted_file_check_log(self): file_path=tests.CONFIG_PATH, ) self.assertTrue(len(captured.records) > 0) - self.assertIn( - "Skipping the unwanted file", captured.records[0].getMessage() - ) + self.assertIn("Skipping the unwanted file", captured.records[0].getMessage()) def test_wanted_file_none_file_path(self): """Test for unexpected wanted file path.""" - self.assertTrue( - sync_drive.wanted_file(filters=None, ignore=None, file_path=__file__) - ) - self.assertFalse( - sync_drive.wanted_file( - filters=self.filters["file_extensions"], ignore=None, file_path=None - ) - ) + self.assertTrue(sync_drive.wanted_file(filters=None, ignore=None, file_path=__file__)) + self.assertFalse(sync_drive.wanted_file(filters=self.filters["file_extensions"], ignore=None, file_path=None)) def test_wanted_file_empty_file_extensions(self): """Test for empty file extensions in wanted file.""" original_filters = dict(self.filters) self.filters["file_extensions"] = [] self.assertTrue( - sync_drive.wanted_file( - filters=self.filters["file_extensions"], ignore=None, file_path=__file__ - ) + sync_drive.wanted_file(filters=self.filters["file_extensions"], ignore=None, file_path=__file__), ) self.filters = dict(original_filters) @@ -546,9 +527,7 @@ def test_wanted_file_case_variations_extensions(self): """Test for wanted file extensions case variations.""" self.filters["file_extensions"] = ["pY"] self.assertTrue( - sync_drive.wanted_file( - filters=self.filters["file_extensions"], ignore=None, file_path=__file__ - ) + sync_drive.wanted_file(filters=self.filters["file_extensions"], ignore=None, file_path=__file__), ) self.filters["file_extensions"] = ["pY"] self.assertTrue( @@ -556,7 +535,7 @@ def test_wanted_file_case_variations_extensions(self): filters=self.filters["file_extensions"], ignore=None, file_path=os.path.join(os.path.dirname(__file__), "file.Py"), - ) + ), ) def test_wanted_file_ignore(self): @@ -567,21 +546,21 @@ def test_wanted_file_ignore(self): filters=None, ignore=self.ignore, file_path=os.path.join(self.root, "/dir1/README.md"), - ) + ), ) self.assertFalse( sync_drive.wanted_file( filters=None, ignore=self.ignore, file_path=os.path.join(self.root, "/.git/index"), - ) + ), ) self.assertTrue( sync_drive.wanted_file( filters=None, ignore=self.ignore, file_path=os.path.join(os.path.dirname(__file__), "/dir1/index.html"), - ) + ), ) def 
test_wanted_file_ignore_takes_precedences_over_filters(self): @@ -593,7 +572,7 @@ def test_wanted_file_ignore_takes_precedences_over_filters(self): filters=self.filters["file_extensions"], ignore=self.ignore, file_path=os.path.join(self.root, "/dir1/index.py"), - ) + ), ) def test_process_folder_wanted(self): @@ -629,7 +608,7 @@ def test_process_folder_none_item(self): filters=self.filters["folders"], ignore=None, root=self.root, - ) + ), ) def test_process_folder_none_destination_path(self): @@ -641,7 +620,7 @@ def test_process_folder_none_destination_path(self): filters=self.filters["folders"], ignore=None, root=self.root, - ) + ), ) def test_process_folder_none_root(self): @@ -653,37 +632,29 @@ def test_process_folder_none_root(self): filters=self.filters["folders"], ignore=None, root=None, - ) + ), ) def test_file_non_existing_file(self): """Test for file does not exist.""" - self.assertFalse( - sync_drive.file_exists(item=self.file_item, local_file=self.local_file_path) - ) + self.assertFalse(sync_drive.file_exists(item=self.file_item, local_file=self.local_file_path)) def test_file_existing_file(self): """Test for file exists.""" sync_drive.download_file(item=self.file_item, local_file=self.local_file_path) - actual = sync_drive.file_exists( - item=self.file_item, local_file=self.local_file_path - ) + actual = sync_drive.file_exists(item=self.file_item, local_file=self.local_file_path) self.assertTrue(actual) # Verbose sync_drive.download_file(item=self.file_item, local_file=self.local_file_path) with self.assertLogs(logger=LOGGER, level="DEBUG") as captured: - actual = sync_drive.file_exists( - item=self.file_item, local_file=self.local_file_path - ) + actual = sync_drive.file_exists(item=self.file_item, local_file=self.local_file_path) self.assertTrue(actual) self.assertTrue(len(captured.records) > 0) self.assertIn("No changes detected.", captured.records[0].getMessage()) def test_file_exists_none_item(self): """Test if item is None.""" - self.assertFalse( - sync_drive.file_exists(item=None, local_file=self.local_file_path) - ) + self.assertFalse(sync_drive.file_exists(item=None, local_file=self.local_file_path)) def test_file_exists_none_local_file(self): """Test if local_file is None.""" @@ -691,27 +662,17 @@ def test_file_exists_none_local_file(self): def test_download_file(self): """Test for valid file download.""" - self.assertTrue( - sync_drive.download_file( - item=self.file_item, local_file=self.local_file_path - ) - ) + self.assertTrue(sync_drive.download_file(item=self.file_item, local_file=self.local_file_path)) # Verbose with self.assertLogs() as captured: - self.assertTrue( - sync_drive.download_file( - item=self.file_item, local_file=self.local_file_path - ) - ) + self.assertTrue(sync_drive.download_file(item=self.file_item, local_file=self.local_file_path)) self.assertTrue(len(captured.records) > 0) self.assertIn("Downloading ", captured.records[0].getMessage()) def test_download_file_none_item(self): """Test for item as None.""" - self.assertFalse( - sync_drive.download_file(item=None, local_file=self.local_file_path) - ) + self.assertFalse(sync_drive.download_file(item=None, local_file=self.local_file_path)) def test_download_file_none_local_file(self): """Test for local_file as None.""" @@ -722,21 +683,15 @@ def test_download_file_non_existing(self): self.assertFalse( sync_drive.download_file( item=self.file_item, - local_file=os.path.join( - self.destination_path, "non-existent-folder", self.file_name - ), - ) + local_file=os.path.join(self.destination_path, 
"non-existent-folder", self.file_name), + ), ) def test_download_file_key_error_data_token(self): """Test for data token key error.""" with patch.object(self.file_item, "open") as mock_item: mock_item.side_effect = KeyError("data_token") - self.assertFalse( - sync_drive.download_file( - item=self.file_item, local_file=self.local_file_path - ) - ) + self.assertFalse(sync_drive.download_file(item=self.file_item, local_file=self.local_file_path)) def test_process_file_non_existing(self): """Test for non-existing file.""" @@ -749,7 +704,7 @@ def test_process_file_non_existing(self): filters=self.filters["file_extensions"], ignore=None, files=files, - ) + ), ) self.assertTrue(len(files) == 1) @@ -771,7 +726,7 @@ def test_process_file_existing(self): filters=self.filters["file_extensions"], ignore=None, files=files, - ) + ), ) def test_process_file_not_wanted(self): @@ -784,7 +739,7 @@ def test_process_file_not_wanted(self): filters=self.filters, ignore=None, files=files, - ) + ), ) def test_process_file_none_item(self): @@ -797,7 +752,7 @@ def test_process_file_none_item(self): filters=self.filters["file_extensions"], ignore=None, files=files, - ) + ), ) def test_process_file_none_destination_path(self): @@ -810,7 +765,7 @@ def test_process_file_none_destination_path(self): filters=self.filters["file_extensions"], ignore=None, files=files, - ) + ), ) def test_process_file_none_filters(self): @@ -823,7 +778,7 @@ def test_process_file_none_filters(self): filters=None, ignore=None, files=files, - ) + ), ) def test_process_file_none_files(self): @@ -835,7 +790,7 @@ def test_process_file_none_files(self): filters=self.filters["file_extensions"], ignore=None, files=None, - ) + ), ) def test_process_file_existing_file(self): @@ -850,7 +805,7 @@ def test_process_file_existing_file(self): filters=self.filters["file_extensions"], ignore=None, files=files, - ) + ), ) # Locally modified file shutil.copyfile( @@ -864,7 +819,7 @@ def test_process_file_existing_file(self): filters=self.filters["file_extensions"], ignore=None, files=files, - ) + ), ) def test_remove_obsolete_file(self): @@ -876,9 +831,7 @@ def test_remove_obsolete_file(self): # Remove the file files = set() files.add(obsolete_path) - actual = sync_drive.remove_obsolete( - destination_path=self.destination_path, files=files - ) + actual = sync_drive.remove_obsolete(destination_path=self.destination_path, files=files) self.assertTrue(len(actual) == 1) self.assertFalse(os.path.isfile(obsolete_file_path)) @@ -893,17 +846,13 @@ def test_remove_obsolete_directory(self): shutil.copyfile(__file__, obsolete_file_path) # Remove the directory files.remove(obsolete_path) - actual = sync_drive.remove_obsolete( - destination_path=self.destination_path, files=files - ) + actual = sync_drive.remove_obsolete(destination_path=self.destination_path, files=files) self.assertTrue(len(actual) == 1) self.assertFalse(os.path.isdir(obsolete_path)) # Remove the directory with file os.mkdir(obsolete_path) shutil.copyfile(__file__, obsolete_file_path) - actual = sync_drive.remove_obsolete( - destination_path=self.destination_path, files=files - ) + actual = sync_drive.remove_obsolete(destination_path=self.destination_path, files=files) self.assertTrue(len(actual) > 0) self.assertFalse(os.path.isdir(obsolete_path)) self.assertFalse(os.path.isfile(obsolete_file_path)) @@ -911,9 +860,7 @@ def test_remove_obsolete_directory(self): with self.assertLogs() as captured: os.mkdir(obsolete_path) shutil.copyfile(__file__, obsolete_file_path) - actual = sync_drive.remove_obsolete( - 
destination_path=self.destination_path, files=files - ) + actual = sync_drive.remove_obsolete(destination_path=self.destination_path, files=files) self.assertTrue(len(actual) > 0) self.assertFalse(os.path.isdir(obsolete_path)) self.assertFalse(os.path.isfile(obsolete_file_path)) @@ -921,44 +868,31 @@ def test_remove_obsolete_directory(self): self.assertIn("Removing ", captured.records[0].getMessage()) @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.read_config") - def test_remove_obsolete_package( - self, mock_read_config, mock_service, mock_get_username, mock_get_password - ): + def test_remove_obsolete_package(self, mock_read_config, mock_service, mock_get_username, mock_get_password): """Test for removing obsolete package.""" mock_service = self.service config = self.config.copy() config["drive"]["remove_obsolete"] = True config["drive"]["destination"] = self.destination_path mock_read_config.return_value = config - ms_band_package_local_path = os.path.join( - self.destination_path, "Obsidian", "Sample", "ms.band" - ) + ms_band_package_local_path = os.path.join(self.destination_path, "Obsidian", "Sample", "ms.band") files = sync_drive.sync_drive(config=config, drive=mock_service.drive) self.assertIsNotNone(files) files.remove(ms_band_package_local_path) - files = sync_drive.remove_obsolete( - destination_path=self.destination_path, files=files - ) + files = sync_drive.remove_obsolete(destination_path=self.destination_path, files=files) self.assertFalse(os.path.exists(ms_band_package_local_path)) def test_remove_obsolete_none_destination_path(self): """Test for destination path as None.""" - self.assertTrue( - len(sync_drive.remove_obsolete(destination_path=None, files=set())) == 0 - ) + self.assertTrue(len(sync_drive.remove_obsolete(destination_path=None, files=set())) == 0) def test_remove_obsolete_none_files(self): """Test for files as None.""" obsolete_path = os.path.join(self.destination_path, "obsolete") - self.assertTrue( - len(sync_drive.remove_obsolete(destination_path=obsolete_path, files=None)) - == 0 - ) + self.assertTrue(len(sync_drive.remove_obsolete(destination_path=obsolete_path, files=None)) == 0) def test_sync_directory_without_remove(self): """Test for remove as False.""" @@ -973,30 +907,18 @@ def test_sync_directory_without_remove(self): ) self.assertTrue(len(actual) == 49) self.assertTrue(os.path.isdir(os.path.join(self.destination_path, "icloudpy"))) + self.assertTrue(os.path.isdir(os.path.join(self.destination_path, "icloudpy", "Test"))) self.assertTrue( - os.path.isdir(os.path.join(self.destination_path, "icloudpy", "Test")) + os.path.isfile(os.path.join(self.destination_path, "icloudpy", "Test", "Document scanne 2.pdf")), ) self.assertTrue( - os.path.isfile( - os.path.join( - self.destination_path, "icloudpy", "Test", "Document scanne 2.pdf" - ) - ) - ) - self.assertTrue( - os.path.isfile( - os.path.join( - self.destination_path, "icloudpy", "Test", "Scanned document 1.pdf" - ) - ) + os.path.isfile(os.path.join(self.destination_path, "icloudpy", "Test", "Scanned document 1.pdf")), ) def test_sync_directory_with_remove(self): """Test for remove as True.""" os.mkdir(os.path.join(self.destination_path, "obsolete")) - shutil.copyfile( - __file__, os.path.join(self.destination_path, "obsolete", "obsolete.py") - ) + 
shutil.copyfile(__file__, os.path.join(self.destination_path, "obsolete", "obsolete.py")) actual = sync_drive.sync_directory( drive=self.drive, destination_path=self.destination_path, @@ -1009,22 +931,12 @@ def test_sync_directory_with_remove(self): ) self.assertTrue(len(actual) == 49) self.assertTrue(os.path.isdir(os.path.join(self.destination_path, "icloudpy"))) + self.assertTrue(os.path.isdir(os.path.join(self.destination_path, "icloudpy", "Test"))) self.assertTrue( - os.path.isdir(os.path.join(self.destination_path, "icloudpy", "Test")) + os.path.isfile(os.path.join(self.destination_path, "icloudpy", "Test", "Document scanne 2.pdf")), ) self.assertTrue( - os.path.isfile( - os.path.join( - self.destination_path, "icloudpy", "Test", "Document scanne 2.pdf" - ) - ) - ) - self.assertTrue( - os.path.isfile( - os.path.join( - self.destination_path, "icloudpy", "Test", "Scanned document 1.pdf" - ) - ) + os.path.isfile(os.path.join(self.destination_path, "icloudpy", "Test", "Scanned document 1.pdf")), ) def test_sync_directory_without_folder_filter(self): @@ -1043,22 +955,12 @@ def test_sync_directory_without_folder_filter(self): ) self.assertTrue(len(actual) == 53) self.assertTrue(os.path.isdir(os.path.join(self.destination_path, "icloudpy"))) + self.assertTrue(os.path.isdir(os.path.join(self.destination_path, "icloudpy", "Test"))) self.assertTrue( - os.path.isdir(os.path.join(self.destination_path, "icloudpy", "Test")) + os.path.isfile(os.path.join(self.destination_path, "icloudpy", "Test", "Document scanne 2.pdf")), ) self.assertTrue( - os.path.isfile( - os.path.join( - self.destination_path, "icloudpy", "Test", "Document scanne 2.pdf" - ) - ) - ) - self.assertTrue( - os.path.isfile( - os.path.join( - self.destination_path, "icloudpy", "Test", "Scanned document 1.pdf" - ) - ) + os.path.isfile(os.path.join(self.destination_path, "icloudpy", "Test", "Scanned document 1.pdf")), ) self.assertTrue(os.path.isdir(os.path.join(self.destination_path, "unwanted"))) @@ -1067,8 +969,7 @@ def test_sync_directory_without_folder_filter(self): def test_sync_directory_none_drive(self): """Test for drive as None.""" self.assertTrue( - 0 - == len( + len( sync_drive.sync_directory( drive=None, destination_path=self.destination_path, @@ -1078,15 +979,15 @@ def test_sync_directory_none_drive(self): filters=self.filters, ignore=self.ignore, remove=False, - ) + ), ) + == 0, ) def test_sync_directory_none_destination(self): """Test for destination as None.""" self.assertTrue( - 0 - == len( + len( sync_drive.sync_directory( drive=self.drive, destination_path=None, @@ -1096,15 +997,15 @@ def test_sync_directory_none_destination(self): filters=self.filters, ignore=self.ignore, remove=False, - ) + ), ) + == 0, ) def test_sync_directory_none_root(self): """Test for root as None.""" self.assertTrue( - 0 - == len( + len( sync_drive.sync_directory( drive=self.drive, destination_path=self.destination_path, @@ -1114,15 +1015,15 @@ def test_sync_directory_none_root(self): filters=self.filters, ignore=self.ignore, remove=False, - ) + ), ) + == 0, ) def test_sync_directory_none_items(self): """Test for items as None.""" self.assertTrue( - 0 - == len( + len( sync_drive.sync_directory( drive=self.drive, destination_path=self.destination_path, @@ -1132,71 +1033,42 @@ def test_sync_directory_none_items(self): filters=self.filters, ignore=self.ignore, remove=False, - ) + ), ) + == 0, ) @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", 
return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.read_config") - def test_sync_drive_valids( - self, mock_read_config, mock_service, mock_get_username, mock_get_password - ): + def test_sync_drive_valids(self, mock_read_config, mock_service, mock_get_username, mock_get_password): """Test for valid sync_drive.""" mock_service = self.service config = self.config.copy() config["drive"]["destination"] = self.destination_path mock_read_config.return_value = config - self.assertIsNotNone( - sync_drive.sync_drive(config=config, drive=mock_service.drive) - ) + self.assertIsNotNone(sync_drive.sync_drive(config=config, drive=mock_service.drive)) self.assertTrue(os.path.isdir(os.path.join(self.destination_path, "icloudpy"))) + self.assertTrue(os.path.isdir(os.path.join(self.destination_path, "icloudpy", "Test"))) self.assertTrue( - os.path.isdir(os.path.join(self.destination_path, "icloudpy", "Test")) - ) - self.assertTrue( - os.path.isfile( - os.path.join( - self.destination_path, "icloudpy", "Test", "Document scanne 2.pdf" - ) - ) + os.path.isfile(os.path.join(self.destination_path, "icloudpy", "Test", "Document scanne 2.pdf")), ) self.assertTrue( - os.path.isfile( - os.path.join( - self.destination_path, "icloudpy", "Test", "Scanned document 1.pdf" - ) - ) + os.path.isfile(os.path.join(self.destination_path, "icloudpy", "Test", "Scanned document 1.pdf")), ) self.assertTrue(os.path.isdir(os.path.join(self.destination_path, "Obsidian"))) - self.assertTrue( - os.path.isdir(os.path.join(self.destination_path, "Obsidian", "Sample")) - ) - self.assertTrue( - os.path.isfile( - os.path.join( - self.destination_path, "Obsidian", "Sample", "This is a title.md" - ) - ) - ) + self.assertTrue(os.path.isdir(os.path.join(self.destination_path, "Obsidian", "Sample"))) + self.assertTrue(os.path.isfile(os.path.join(self.destination_path, "Obsidian", "Sample", "This is a title.md"))) self.assertEqual( sum( f.stat().st_size - for f in Path( - os.path.join( - self.destination_path, "Obsidian", "Sample", "Project.band" - ) - ).glob("**/*") + for f in Path(os.path.join(self.destination_path, "Obsidian", "Sample", "Project.band")).glob("**/*") if f.is_file() ), sum( f.stat().st_size - for f in Path( - os.path.join(tests.DATA_DIR, "Project_original.band") - ).glob("**/*") + for f in Path(os.path.join(tests.DATA_DIR, "Project_original.band")).glob("**/*") if f.is_file() ), ) @@ -1212,16 +1084,14 @@ def test_process_file_special_chars_package(self): filters=self.filters["file_extensions"], ignore=None, files=files, - ) + ), ) def test_process_file_existing_package(self): """Test for existing package.""" files = set() # Existing package - sync_drive.download_file( - item=self.package_item, local_file=self.local_package_path - ) + sync_drive.download_file(item=self.package_item, local_file=self.local_package_path) # Do not download the package self.assertFalse( sync_drive.process_file( @@ -1230,7 +1100,7 @@ def test_process_file_existing_package(self): filters=self.filters["file_extensions"], ignore=None, files=files, - ) + ), ) # Modify local package shutil.copyfile( @@ -1245,7 +1115,7 @@ def test_process_file_existing_package(self): filters=self.filters["file_extensions"], ignore=None, files=files, - ) + ), ) def test_process_file_nested_package_extraction(self): @@ -1258,39 +1128,31 @@ def test_process_file_nested_package_extraction(self): filters=self.filters["file_extensions"], ignore=None, 
files=files, - ) + ), ) self.assertTrue(os.path.exists(os.path.join(self.destination_path, "ms.band"))) self.assertEqual( sum( f.stat().st_size - for f in Path(os.path.join(self.destination_path, "ms.band")).glob( - "**/*" - ) - if f.is_file() - ), - sum( - f.stat().st_size - for f in Path(os.path.join(tests.DATA_DIR, "ms.band")).glob("**/*") + for f in Path(os.path.join(self.destination_path, "ms.band")).glob("**/*") if f.is_file() ), + sum(f.stat().st_size for f in Path(os.path.join(tests.DATA_DIR, "ms.band")).glob("**/*") if f.is_file()), ) def test_process_package_invalid_package_type(self): """Test for invalid package type.""" - self.assertFalse( - sync_drive.process_package(local_file=os.path.join(DATA_DIR, "medium.jpeg")) - ) + self.assertFalse(sync_drive.process_package(local_file=os.path.join(DATA_DIR, "medium.jpeg"))) def test_execution_continuation_on_icloudpy_exception(self): """Test for icloudpy exception.""" with patch.object(self.file_item, "open") as mocked_file_method, patch.object( - self.folder_item, "dir" + self.folder_item, "dir", ) as mocked_folder_method: - mocked_file_method.side_effect = ( - mocked_folder_method.side_effect - ) = ICloudPyAPIResponseException("Exception occurred.") + mocked_file_method.side_effect = mocked_folder_method.side_effect = ICloudPyAPIResponseException( + "Exception occurred.", + ) filters = dict(self.filters) filters["folders"].append("unwanted") actual = sync_drive.sync_directory( @@ -1304,12 +1166,8 @@ def test_execution_continuation_on_icloudpy_exception(self): remove=False, ) self.assertTrue(len(actual) == 50) - self.assertTrue( - os.path.isdir(os.path.join(self.destination_path, "icloudpy")) - ) - self.assertTrue( - os.path.isdir(os.path.join(self.destination_path, "icloudpy", "Test")) - ) + self.assertTrue(os.path.isdir(os.path.join(self.destination_path, "icloudpy"))) + self.assertTrue(os.path.isdir(os.path.join(self.destination_path, "icloudpy", "Test"))) self.assertTrue( os.path.isfile( os.path.join( @@ -1317,8 +1175,8 @@ def test_execution_continuation_on_icloudpy_exception(self): "icloudpy", "Test", "Document scanne 2.pdf", - ) - ) + ), + ), ) self.assertFalse( os.path.isfile( @@ -1327,57 +1185,35 @@ def test_execution_continuation_on_icloudpy_exception(self): "icloudpy", "Test", "Scanned document 1.pdf", - ) - ) + ), + ), ) @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.read_config") - def test_child_ignored_folder( - self, mock_read_config, mock_service, mock_get_username, mock_get_password - ): + def test_child_ignored_folder(self, mock_read_config, mock_service, mock_get_username, mock_get_password): """Test for child ignored folder.""" mock_service = self.service config = self.config.copy() config["drive"]["destination"] = self.destination_path config["drive"]["ignore"] = ["icloudpy/*"] mock_read_config.return_value = config - self.assertIsNotNone( - sync_drive.sync_drive(config=config, drive=mock_service.drive) - ) - self.assertFalse( - os.path.exists(os.path.join(self.destination_path, "icloudpy", "Test")) - ) + self.assertIsNotNone(sync_drive.sync_drive(config=config, drive=mock_service.drive)) + self.assertFalse(os.path.exists(os.path.join(self.destination_path, "icloudpy", "Test"))) self.assertTrue(os.path.isdir(os.path.join(self.destination_path, "Obsidian"))) - 
self.assertTrue( - os.path.isdir(os.path.join(self.destination_path, "Obsidian", "Sample")) - ) - self.assertTrue( - os.path.isfile( - os.path.join( - self.destination_path, "Obsidian", "Sample", "This is a title.md" - ) - ) - ) + self.assertTrue(os.path.isdir(os.path.join(self.destination_path, "Obsidian", "Sample"))) + self.assertTrue(os.path.isfile(os.path.join(self.destination_path, "Obsidian", "Sample", "This is a title.md"))) self.assertEqual( sum( f.stat().st_size - for f in Path( - os.path.join( - self.destination_path, "Obsidian", "Sample", "Project.band" - ) - ).glob("**/*") + for f in Path(os.path.join(self.destination_path, "Obsidian", "Sample", "Project.band")).glob("**/*") if f.is_file() ), sum( f.stat().st_size - for f in Path( - os.path.join(tests.DATA_DIR, "Project_original.band") - ).glob("**/*") + for f in Path(os.path.join(tests.DATA_DIR, "Project_original.band")).glob("**/*") if f.is_file() ), ) diff --git a/tests/test_sync_photos.py b/tests/test_sync_photos.py index 8c10582cc..b069d147c 100644 --- a/tests/test_sync_photos.py +++ b/tests/test_sync_photos.py @@ -1,4 +1,5 @@ """Tests for sync_photos.py file.""" + __author__ = "Mandar Patil (mandarons@pm.me)" import glob @@ -26,9 +27,7 @@ def setUp(self) -> None: self.destination_path = self.root os.makedirs(self.destination_path, exist_ok=True) - self.service = data.ICloudPyServiceMock( - data.AUTHENTICATED_USER, data.VALID_PASSWORD - ) + self.service = data.ICloudPyServiceMock(data.AUTHENTICATED_USER, data.VALID_PASSWORD) def tearDown(self) -> None: """Remove temp directory.""" @@ -37,14 +36,10 @@ def tearDown(self) -> None: shutil.rmtree(tests.PHOTOS_DIR) @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.read_config") - def test_sync_photos_original( - self, mock_read_config, mock_service, mock_get_username, mock_get_password - ): + def test_sync_photos_original(self, mock_read_config, mock_service, mock_get_username, mock_get_password): """Test for successful original photo size download.""" mock_service = self.service config = self.config.copy() @@ -52,9 +47,7 @@ def test_sync_photos_original( config["photos"]["filters"]["libraries"] = ["PrimarySync"] mock_read_config.return_value = config # Sync original photos - self.assertIsNone( - sync_photos.sync_photos(config=config, photos=mock_service.photos) - ) + self.assertIsNone(sync_photos.sync_photos(config=config, photos=mock_service.photos)) album_2_path = os.path.join(self.destination_path, "album 2") album_1_1_path = os.path.join(album_2_path, "album-1-1") album_1_path = os.path.join(self.destination_path, "album-1") @@ -65,13 +58,11 @@ def test_sync_photos_original( self.assertTrue(len(os.listdir(album_1_path)) > 0) @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.read_config") def test_sync_photos_all_albums_filtered( - self, mock_read_config, mock_service, mock_get_username, mock_get_password + self, mock_read_config, mock_service, mock_get_username, mock_get_password, ): """Test for successful original photo size download.""" mock_service = self.service @@ -80,26 
+71,18 @@ def test_sync_photos_all_albums_filtered( config["photos"]["all_albums"] = True mock_read_config.return_value = config # Sync original photos - self.assertIsNone( - sync_photos.sync_photos(config=config, photos=mock_service.photos) - ) - album_0_path = os.path.join( - self.destination_path, config["photos"]["filters"]["albums"][0] - ) - album_1_path = os.path.join( - self.destination_path, config["photos"]["filters"]["albums"][1] - ) + self.assertIsNone(sync_photos.sync_photos(config=config, photos=mock_service.photos)) + album_0_path = os.path.join(self.destination_path, config["photos"]["filters"]["albums"][0]) + album_1_path = os.path.join(self.destination_path, config["photos"]["filters"]["albums"][1]) self.assertFalse(os.path.isdir(album_0_path)) self.assertFalse(os.path.isdir(album_1_path)) @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.read_config") def test_sync_photos_all_albums_not_filtered( - self, mock_read_config, mock_service, mock_get_username, mock_get_password + self, mock_read_config, mock_service, mock_get_username, mock_get_password, ): """Test for successful original photo size download.""" mock_service = self.service @@ -107,31 +90,21 @@ def test_sync_photos_all_albums_not_filtered( config["photos"]["destination"] = self.destination_path config["photos"]["all_albums"] = True mock_read_config.return_value = config - album_0_path = os.path.join( - self.destination_path, config["photos"]["filters"]["albums"][0] - ) - album_1_path = os.path.join( - self.destination_path, config["photos"]["filters"]["albums"][1] - ) + album_0_path = os.path.join(self.destination_path, config["photos"]["filters"]["albums"][0]) + album_1_path = os.path.join(self.destination_path, config["photos"]["filters"]["albums"][1]) config["photos"]["filters"]["albums"] = None # Sync original photos - self.assertIsNone( - sync_photos.sync_photos(config=config, photos=mock_service.photos) - ) + self.assertIsNone(sync_photos.sync_photos(config=config, photos=mock_service.photos)) self.assertTrue(os.path.isdir(album_0_path)) self.assertTrue(os.path.isdir(album_1_path)) self.assertTrue(len(os.listdir(album_0_path)) > 1) self.assertTrue(len(os.listdir(album_1_path)) > 0) @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.read_config") - def test_sync_photos_folder_format( - self, mock_read_config, mock_service, mock_get_username, mock_get_password - ): + def test_sync_photos_folder_format(self, mock_read_config, mock_service, mock_get_username, mock_get_password): """Test for successful original photo size download with folder format.""" mock_service = self.service config = self.config.copy() @@ -139,22 +112,14 @@ def test_sync_photos_folder_format( config["photos"]["folder_format"] = "%Y/%m" mock_read_config.return_value = config # Sync original photos - self.assertIsNone( - sync_photos.sync_photos(config=config, photos=mock_service.photos) - ) - album_0_path = os.path.join( - self.destination_path, config["photos"]["filters"]["albums"][0] - ) - album_1_path = os.path.join( - self.destination_path, 
config["photos"]["filters"]["albums"][1] - ) + self.assertIsNone(sync_photos.sync_photos(config=config, photos=mock_service.photos)) + album_0_path = os.path.join(self.destination_path, config["photos"]["filters"]["albums"][0]) + album_1_path = os.path.join(self.destination_path, config["photos"]["filters"]["albums"][1]) self.assertTrue(os.path.isdir(os.path.join(album_0_path, "2020", "08"))) self.assertTrue(os.path.isdir(os.path.join(album_1_path, "2020", "07"))) @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.read_config") def test_sync_photos_missing_photo_download( @@ -169,36 +134,29 @@ def test_sync_photos_missing_photo_download( config = self.config.copy() config["photos"]["destination"] = self.destination_path mock_read_config.return_value = config - album_0_path = os.path.join( - self.destination_path, config["photos"]["filters"]["albums"][0] - ) - album_1_path = os.path.join( - self.destination_path, config["photos"]["filters"]["albums"][1] - ) + album_0_path = os.path.join(self.destination_path, config["photos"]["filters"]["albums"][0]) + album_1_path = os.path.join(self.destination_path, config["photos"]["filters"]["albums"][1]) sync_photos.sync_photos(config=config, photos=mock_service.photos) os.remove( os.path.join( album_1_path, "IMG_3148__medium__QVZ4My9WS2tiV1BkTmJXdzY4bXJXelN1ZW1YZw==.JPG", - ) + ), ) # Download missing file with self.assertLogs() as captured: - self.assertIsNone( - sync_photos.sync_photos(config=config, photos=mock_service.photos) - ) + self.assertIsNone(sync_photos.sync_photos(config=config, photos=mock_service.photos)) self.assertTrue(len(captured.records) > 0) self.assertIsNotNone( next( ( s for s in captured[1] - if "album-1/IMG_3148__medium__QVZ4My9WS2tiV1BkTmJXdzY4bXJXelN1ZW1YZw==.JPG ..." - in s + if "album-1/IMG_3148__medium__QVZ4My9WS2tiV1BkTmJXdzY4bXJXelN1ZW1YZw==.JPG ..." 
in s ), None, - ) + ), ) self.assertTrue(os.path.isdir(album_0_path)) self.assertTrue(os.path.isdir(album_1_path)) @@ -206,9 +164,7 @@ def test_sync_photos_missing_photo_download( self.assertTrue(len(os.listdir(album_1_path)) > 0) @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.read_config") def test_sync_photos_download_changed_photo( @@ -223,39 +179,32 @@ def test_sync_photos_download_changed_photo( config = self.config.copy() config["photos"]["destination"] = self.destination_path mock_read_config.return_value = config - album_0_path = os.path.join( - self.destination_path, config["photos"]["filters"]["albums"][0] - ) - album_1_path = os.path.join( - self.destination_path, config["photos"]["filters"]["albums"][1] - ) + album_0_path = os.path.join(self.destination_path, config["photos"]["filters"]["albums"][0]) + album_1_path = os.path.join(self.destination_path, config["photos"]["filters"]["albums"][1]) sync_photos.sync_photos(config=config, photos=mock_service.photos) # Download changed file os.remove( os.path.join( album_1_path, "IMG_3148__medium__QVZ4My9WS2tiV1BkTmJXdzY4bXJXelN1ZW1YZw==.JPG", - ) + ), ) shutil.copyfile( os.path.join(DATA_DIR, "thumb.jpeg"), os.path.join(album_1_path, "IMG_3148.JPG"), ) with self.assertLogs() as captured: - self.assertIsNone( - sync_photos.sync_photos(config=config, photos=mock_service.photos) - ) + self.assertIsNone(sync_photos.sync_photos(config=config, photos=mock_service.photos)) self.assertTrue(len(captured.records) > 0) self.assertIsNotNone( next( ( s for s in captured[1] - if "album-1/IMG_3148__medium__QVZ4My9WS2tiV1BkTmJXdzY4bXJXelN1ZW1YZw==.JPG ..." - in s + if "album-1/IMG_3148__medium__QVZ4My9WS2tiV1BkTmJXdzY4bXJXelN1ZW1YZw==.JPG ..." 
in s ), None, - ) + ), ) self.assertTrue(os.path.isdir(album_0_path)) self.assertTrue(os.path.isdir(album_1_path)) @@ -263,9 +212,7 @@ def test_sync_photos_download_changed_photo( self.assertTrue(len(os.listdir(album_1_path)) > 0) @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.read_config") def test_sync_photos_nothing_to_download( @@ -285,18 +232,12 @@ def test_sync_photos_nothing_to_download( # No files to download with self.assertLogs(logger=LOGGER, level="DEBUG") as captured: - self.assertIsNone( - sync_photos.sync_photos(config=config, photos=mock_service.photos) - ) + self.assertIsNone(sync_photos.sync_photos(config=config, photos=mock_service.photos)) self.assertTrue(len(captured.records) > 0) - self.assertIsNone( - next((s for s in captured[1] if "Downloading /" in s), None) - ) + self.assertIsNone(next((s for s in captured[1] if "Downloading /" in s), None)) @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.read_config") def test_sync_photos_rename_previous_original_photos( @@ -311,9 +252,7 @@ def test_sync_photos_rename_previous_original_photos( config = self.config.copy() config["photos"]["destination"] = self.destination_path mock_read_config.return_value = config - album_1_path = os.path.join( - self.destination_path, config["photos"]["filters"]["albums"][1] - ) + album_1_path = os.path.join(self.destination_path, config["photos"]["filters"]["albums"][1]) sync_photos.sync_photos(config=config, photos=mock_service.photos) # Rename previous original files - upgrade to newer version @@ -326,13 +265,9 @@ def test_sync_photos_rename_previous_original_photos( ) with self.assertLogs(logger=LOGGER, level="DEBUG") as captured: - self.assertIsNone( - sync_photos.sync_photos(config=config, photos=mock_service.photos) - ) + self.assertIsNone(sync_photos.sync_photos(config=config, photos=mock_service.photos)) self.assertTrue(len(captured.records) > 0) - self.assertIsNone( - next((s for s in captured[1] if "Downloading /" in s), None) - ) + self.assertIsNone(next((s for s in captured[1] if "Downloading /" in s), None)) self.assertFalse(os.path.exists(os.path.join(album_1_path, "IMG_3148.JPG"))) @@ -346,18 +281,12 @@ def test_sync_photos_rename_previous_original_photos( ) with self.assertLogs(logger=LOGGER, level="DEBUG") as captured: - self.assertIsNone( - sync_photos.sync_photos(config=config, photos=mock_service.photos) - ) + self.assertIsNone(sync_photos.sync_photos(config=config, photos=mock_service.photos)) self.assertTrue(len(captured.records) > 0) - self.assertIsNone( - next((s for s in captured[1] if "Downloading /" in s), None) - ) + self.assertIsNone(next((s for s in captured[1] if "Downloading /" in s), None)) @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.read_config") def test_sync_photos_rename_original_photos_obsolete_false( @@ 
-373,9 +302,7 @@ def test_sync_photos_rename_original_photos_obsolete_false( config["photos"]["destination"] = self.destination_path config["photos"]["remove_obsolete"] = False mock_read_config.return_value = config - album_1_path = os.path.join( - self.destination_path, config["photos"]["filters"]["albums"][1] - ) + album_1_path = os.path.join(self.destination_path, config["photos"]["filters"]["albums"][1]) sync_photos.sync_photos(config=config, photos=mock_service.photos) # Rename previous original files - upgrade to newer version @@ -392,9 +319,7 @@ def test_sync_photos_rename_original_photos_obsolete_false( self.assertTrue(os.path.exists(os.path.join(album_1_path, "delete_me.JPG"))) @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.read_config") def test_sync_photos_rename_original_photos_obsolete_true( @@ -410,9 +335,7 @@ def test_sync_photos_rename_original_photos_obsolete_true( config["photos"]["destination"] = self.destination_path config["photos"]["remove_obsolete"] = True mock_read_config.return_value = config - album_1_path = os.path.join( - self.destination_path, config["photos"]["filters"]["albums"][1] - ) + album_1_path = os.path.join(self.destination_path, config["photos"]["filters"]["albums"][1]) sync_photos.sync_photos(config=config, photos=mock_service.photos) # Rename previous original files - upgrade to newer version @@ -430,22 +353,15 @@ def test_sync_photos_rename_original_photos_obsolete_true( def test_remove_obsolete_none_destination_path(self): """Test for destination path as None.""" - self.assertTrue( - len(sync_photos.remove_obsolete(destination_path=None, files=set())) == 0 - ) + self.assertTrue(len(sync_photos.remove_obsolete(destination_path=None, files=set())) == 0) def test_remove_obsolete_none_files(self): """Test for files as None.""" obsolete_path = os.path.join(self.destination_path, "obsolete") - self.assertTrue( - len(sync_photos.remove_obsolete(destination_path=obsolete_path, files=None)) - == 0 - ) + self.assertTrue(len(sync_photos.remove_obsolete(destination_path=obsolete_path, files=None)) == 0) @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.read_config") def test_sync_photos_empty_albums_list( @@ -462,29 +378,24 @@ def test_sync_photos_empty_albums_list( config["photos"]["filters"]["albums"] = [] mock_read_config.return_value = config with self.assertLogs() as captured: - self.assertIsNone( - sync_photos.sync_photos(config=config, photos=mock_service.photos) - ) + self.assertIsNone(sync_photos.sync_photos(config=config, photos=mock_service.photos)) self.assertTrue(len(captured.records) > 0) self.assertIsNotNone( next( ( s for s in captured[1] - if "all/IMG_3148__original__QVZ4My9WS2tiV1BkTmJXdzY4bXJXelN1ZW1YZw==.JPG ..." - in s + if "all/IMG_3148__original__QVZ4My9WS2tiV1BkTmJXdzY4bXJXelN1ZW1YZw==.JPG ..." 
in s ), None, - ) + ), ) self.assertTrue(os.path.isdir(os.path.join(self.destination_path, "all"))) def test_download_photo_none_photo(self): """Test if download_photo has photo as None.""" - self.assertFalse( - sync_photos.download_photo(None, ["original"], self.destination_path) - ) + self.assertFalse(sync_photos.download_photo(None, ["original"], self.destination_path)) def test_download_photo_none_file_size(self): """Test if download_photo has file size as None.""" @@ -493,9 +404,7 @@ class MockPhoto: def download(self, quality): raise icloudpy.exceptions.ICloudPyAPIResponseException - self.assertFalse( - sync_photos.download_photo(MockPhoto(), None, self.destination_path) - ) + self.assertFalse(sync_photos.download_photo(MockPhoto(), None, self.destination_path)) def test_download_photo_none_destination_path(self): """Test if download_photo has destination path as None.""" @@ -513,15 +422,11 @@ class MockPhoto: def download(self, quality): raise icloudpy.exceptions.ICloudPyAPIResponseException - self.assertFalse( - sync_photos.download_photo(MockPhoto(), ["original"], self.destination_path) - ) + self.assertFalse(sync_photos.download_photo(MockPhoto(), ["original"], self.destination_path)) def test_sync_album_none_album(self): """Test if album is None.""" - self.assertIsNone( - sync_photos.sync_album(None, self.destination_path, ["original"]) - ) + self.assertIsNone(sync_photos.sync_album(None, self.destination_path, ["original"])) def test_sync_album_none_destination_path(self): """Test if destination path is None.""" @@ -554,7 +459,7 @@ def versions(self): destination_path=self.destination_path, files=None, folder_format=None, - ) + ), ) def test_missing_thumb_photo_sizes(self): @@ -580,18 +485,14 @@ def versions(self): destination_path=self.destination_path, files=None, folder_format=None, - ) + ), ) @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.read_config") - def test_photo_wanted_extensions_jpg( - self, mock_read_config, mock_service, mock_get_username, mock_get_password - ): + def test_photo_wanted_extensions_jpg(self, mock_read_config, mock_service, mock_get_username, mock_get_password): """Test for JPG extension filter.""" mock_service = self.service config = self.config.copy() @@ -599,29 +500,19 @@ def test_photo_wanted_extensions_jpg( config["photos"]["filters"]["extensions"] = ["JpG"] mock_read_config.return_value = config # Sync original photos - self.assertIsNone( - sync_photos.sync_photos(config=config, photos=mock_service.photos) - ) - album_0_path = os.path.join( - self.destination_path, config["photos"]["filters"]["albums"][0] - ) - album_1_path = os.path.join( - self.destination_path, config["photos"]["filters"]["albums"][1] - ) + self.assertIsNone(sync_photos.sync_photos(config=config, photos=mock_service.photos)) + album_0_path = os.path.join(self.destination_path, config["photos"]["filters"]["albums"][0]) + album_1_path = os.path.join(self.destination_path, config["photos"]["filters"]["albums"][1]) self.assertTrue(os.path.isdir(album_0_path)) self.assertTrue(os.path.isdir(album_1_path)) self.assertTrue(len(os.listdir(album_0_path)) > 1) self.assertTrue(len(os.listdir(album_1_path)) > 0) @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", 
return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.read_config") - def test_photo_wanted_extensions_png( - self, mock_read_config, mock_service, mock_get_username, mock_get_password - ): + def test_photo_wanted_extensions_png(self, mock_read_config, mock_service, mock_get_username, mock_get_password): """Test for PNG extension filter.""" mock_service = self.service config = self.config.copy() @@ -629,28 +520,20 @@ def test_photo_wanted_extensions_png( config["photos"]["filters"]["extensions"] = ["PnG"] mock_read_config.return_value = config # Sync original photos - self.assertIsNone( - sync_photos.sync_photos(config=config, photos=mock_service.photos) - ) - album_0_path = os.path.join( - self.destination_path, config["photos"]["filters"]["albums"][0] - ) - album_1_path = os.path.join( - self.destination_path, config["photos"]["filters"]["albums"][1] - ) + self.assertIsNone(sync_photos.sync_photos(config=config, photos=mock_service.photos)) + album_0_path = os.path.join(self.destination_path, config["photos"]["filters"]["albums"][0]) + album_1_path = os.path.join(self.destination_path, config["photos"]["filters"]["albums"][1]) self.assertTrue(os.path.isdir(album_0_path)) self.assertTrue(os.path.isdir(album_1_path)) self.assertTrue(len(os.listdir(album_0_path)) == 1) self.assertTrue(len(os.listdir(album_1_path)) == 0) @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.read_config") def test_photo_download_with_shared_libraries( - self, mock_read_config, mock_service, mock_get_username, mock_get_password + self, mock_read_config, mock_service, mock_get_username, mock_get_password, ): """Test for downloading photos from shared libraries.""" mock_service = self.service @@ -661,9 +544,7 @@ def test_photo_download_with_shared_libraries( del config["photos"]["filters"]["libraries"] mock_read_config.return_value = config # Sync original photos - self.assertIsNone( - sync_photos.sync_photos(config=config, photos=mock_service.photos) - ) + self.assertIsNone(sync_photos.sync_photos(config=config, photos=mock_service.photos)) all_path = os.path.join(self.destination_path, "all") self.assertTrue(os.path.isdir(all_path)) # Check for PrimarySync photo @@ -672,13 +553,11 @@ def test_photo_download_with_shared_libraries( self.assertTrue(len(glob.glob(os.path.join(all_path, "IMG_5513*.HEIC"))) > 0) @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.read_config") def test_sync_photos_all_albums_filtered_missing_primary_sync( - self, mock_read_config, mock_service, mock_get_username, mock_get_password + self, mock_read_config, mock_service, mock_get_username, mock_get_password, ): """Test for successful original photo size download.""" mock_service = self.service @@ -688,31 +567,19 @@ def test_sync_photos_all_albums_filtered_missing_primary_sync( config["photos"]["filters"]["albums"] += ["Favorites"] mock_read_config.return_value = config # Sync original photos - self.assertIsNone( - 
sync_photos.sync_photos(config=config, photos=mock_service.photos) - ) - album_0_path = os.path.join( - self.destination_path, config["photos"]["filters"]["albums"][0] - ) - album_1_path = os.path.join( - self.destination_path, config["photos"]["filters"]["albums"][1] - ) - album_2_path = os.path.join( - self.destination_path, config["photos"]["filters"]["albums"][2] - ) + self.assertIsNone(sync_photos.sync_photos(config=config, photos=mock_service.photos)) + album_0_path = os.path.join(self.destination_path, config["photos"]["filters"]["albums"][0]) + album_1_path = os.path.join(self.destination_path, config["photos"]["filters"]["albums"][1]) + album_2_path = os.path.join(self.destination_path, config["photos"]["filters"]["albums"][2]) self.assertTrue(os.path.isdir(album_0_path)) self.assertTrue(os.path.isdir(album_1_path)) self.assertTrue(os.path.isdir(album_2_path)) @patch(target="keyring.get_password", return_value=data.VALID_PASSWORD) - @patch( - target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER - ) + @patch(target="src.config_parser.get_username", return_value=data.AUTHENTICATED_USER) @patch("icloudpy.ICloudPyService") @patch("src.read_config") - def test_get_name_and_extension( - self, mock_read_config, mock_service, mock_get_username, mock_get_password - ): + def test_get_name_and_extension(self, mock_read_config, mock_service, mock_get_username, mock_get_password): """Test for successful original_alt photo size download.""" mock_service = self.service config = self.config.copy() @@ -720,15 +587,9 @@ def test_get_name_and_extension( config["photos"]["filters"]["file_sizes"] = ["original_alt"] mock_read_config.return_value = config # Sync original photos - self.assertIsNone( - sync_photos.sync_photos(config=config, photos=mock_service.photos) - ) - album_0_path = os.path.join( - self.destination_path, config["photos"]["filters"]["albums"][0] - ) - album_1_path = os.path.join( - self.destination_path, config["photos"]["filters"]["albums"][1] - ) + self.assertIsNone(sync_photos.sync_photos(config=config, photos=mock_service.photos)) + album_0_path = os.path.join(self.destination_path, config["photos"]["filters"]["albums"][0]) + album_1_path = os.path.join(self.destination_path, config["photos"]["filters"]["albums"][1]) self.assertTrue(os.path.isdir(album_0_path)) self.assertTrue(os.path.isdir(album_1_path)) @@ -740,8 +601,6 @@ class MockPhoto: filename = "mock_filename.xed" versions = {"original_alt": {"type": "invalid"}} - name, extension = sync_photos.get_name_and_extension( - photo=MockPhoto(), file_size="original_alt" - ) + name, extension = sync_photos.get_name_and_extension(photo=MockPhoto(), file_size="original_alt") self.assertEqual(name, "mock_filename") self.assertEqual(extension, "xed") diff --git a/tests/test_usage.py b/tests/test_usage.py index 05b305e66..53f5ec42c 100644 --- a/tests/test_usage.py +++ b/tests/test_usage.py @@ -1,4 +1,5 @@ """Tests for usage.py file.""" + import datetime import os import unittest @@ -70,9 +71,7 @@ def test_post_new_installation_valid(self, mock_post): @patch("requests.post", side_effect=tests.mocked_usage_post) def test_post_new_installation_error(self, mock_post): """Test for post failure.""" - actual = usage.post_new_installation( - data=dict(self.new_installation_data), endpoint="Invalid" - ) + actual = usage.post_new_installation(data=dict(self.new_installation_data), endpoint="Invalid") self.assertIsNone(actual) @patch("requests.post", side_effect=tests.mocked_usage_post) @@ -124,9 +123,7 @@ def 
test_new_heartbeat_valid(self, mock_post):
         file_path = usage.init_cache(config=self.config)
         cached_data = usage.load_cache(file_path=file_path)
         fresh = usage.install(cached_data=cached_data)
-        actual = usage.post_new_heartbeat(
-            data={"installationId": fresh["id"], "data": None}
-        )
+        actual = usage.post_new_heartbeat(data={"installationId": fresh["id"], "data": None})
         self.assertTrue(actual)

     @patch("requests.post", side_effect=tests.mocked_usage_post)
@@ -135,9 +132,7 @@ def test_new_heartbeat_invalid(self, mock_post):
         file_path = usage.init_cache(config=self.config)
         cached_data = usage.load_cache(file_path=file_path)
         fresh = usage.install(cached_data=cached_data)
-        actual = usage.post_new_heartbeat(
-            data={"installationId": fresh["id"], "data": None}, endpoint="invalid"
-        )
+        actual = usage.post_new_heartbeat(data={"installationId": fresh["id"], "data": None}, endpoint="invalid")
         self.assertFalse(actual)

     @patch("requests.post", side_effect=tests.mocked_usage_post)
@@ -147,9 +142,7 @@ def test_new_heartbeat_post_exception(self, mock_post):
         file_path = usage.init_cache(config=self.config)
         cached_data = usage.load_cache(file_path=file_path)
         fresh = usage.install(cached_data=cached_data)
         mock_post.side_effect = Exception("Error occurred.")
-        actual = usage.post_new_heartbeat(
-            data={"installationId": fresh["id"], "data": None}
-        )
+        actual = usage.post_new_heartbeat(data={"installationId": fresh["id"], "data": None})
         self.assertFalse(actual)

     @patch("requests.post", side_effect=tests.mocked_usage_post)
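Nearly every test in this diff stacks the same patches (keyring.get_password, src.config_parser.get_username, icloudpy.ICloudPyService, src.read_config) and then hands a modified copy of the config back through mock_read_config.return_value before calling sync_drive.sync_drive or sync_photos.sync_photos. Below is a minimal, stdlib-only sketch of that shape; read_config, destination_from_config, and the config values are illustrative stand-ins, not code from this repository.

"""Sketch of the read_config override used throughout these tests (illustrative)."""

import unittest
from unittest.mock import patch


def read_config():
    """Stand-in for src.read_config, which the real tests patch by import path."""
    return {"photos": {"destination": "/photos"}}


def destination_from_config():
    """Toy consumer that reads whatever read_config (patched or not) returns."""
    return read_config()["photos"]["destination"]


class ConfigPatchTest(unittest.TestCase):
    @patch("__main__.read_config")  # target assumes this sketch runs as a script
    def test_destination_is_overridden(self, mock_read_config):
        # Mirror the diff's pattern: build a config, point its destination at a
        # test path, then return it from the patched read_config.
        config = {"photos": {"destination": "/tmp/sync-test"}}
        mock_read_config.return_value = config
        self.assertEqual(destination_from_config(), "/tmp/sync-test")


if __name__ == "__main__":
    unittest.main()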
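test_execution_continuation_on_icloudpy_exception forces ICloudPyAPIResponseException out of file_item.open and folder_item.dir via patch.object(...).side_effect and asserts that the sync still walks the remaining items. A self-contained sketch of that technique, with a plain RuntimeError standing in for the icloudpy exception and all names invented for illustration:

"""Sketch of forcing an exception through patch.object (stdlib only)."""

import unittest
from unittest.mock import patch


class Item:
    """Stand-in for a drive item; the real tests use icloudpy file/folder objects."""

    def open(self):
        return b"file-bytes"


def fetch(item):
    """Toy consumer that swallows API errors, as the sync code under test does."""
    try:
        return item.open()
    except RuntimeError:  # the real code catches ICloudPyAPIResponseException
        return None


class ContinuationTest(unittest.TestCase):
    def test_error_is_swallowed(self):
        item = Item()
        # patch.object + side_effect mirrors how the drive test forces the
        # exception out of file_item.open / folder_item.dir.
        with patch.object(item, "open") as mocked_open:
            mocked_open.side_effect = RuntimeError("Exception occurred.")
            self.assertIsNone(fetch(item))
        # Outside the patch the original method is restored.
        self.assertEqual(fetch(item), b"file-bytes")


if __name__ == "__main__":
    unittest.main()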
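The photo tests verify that a missing or changed file was re-downloaded by capturing logs with self.assertLogs() and scanning the formatted output for a "Downloading ... .JPG ..." entry. A minimal sketch of that check, assuming nothing beyond the standard library; the logger name, download function, and filename are made up:

"""Sketch of the assertLogs check used to confirm a re-download (illustrative)."""

import logging
import unittest

LOGGER = logging.getLogger("sketch")


def download(name):
    """Toy downloader that only logs, standing in for the photo download path."""
    LOGGER.info("Downloading %s ...", name)


class DownloadLogTest(unittest.TestCase):
    def test_download_is_logged(self):
        with self.assertLogs(logger=LOGGER, level="INFO") as captured:
            download("album-1/IMG_0001.JPG")
        # captured is a namedtuple of (records, output); the project tests read
        # the formatted strings through captured[1], which is the same field.
        self.assertTrue(len(captured.records) > 0)
        self.assertIsNotNone(
            next((s for s in captured.output if "IMG_0001.JPG ..." in s), None),
        )


if __name__ == "__main__":
    unittest.main()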
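Several drive and photo assertions compare a synced package (Project.band, ms.band) against reference data by summing st_size over every file beneath a directory. The helper below isolates that expression; tree_size and the temporary files are illustrative only:

"""Sketch of the package-size comparison used in the drive/photo tests."""

import tempfile
from pathlib import Path


def tree_size(root):
    """Return the combined size, in bytes, of every regular file under root."""
    return sum(f.stat().st_size for f in Path(root).glob("**/*") if f.is_file())


if __name__ == "__main__":
    with tempfile.TemporaryDirectory() as tmp:
        # Build a tiny tree so the helper has something to measure.
        (Path(tmp) / "nested").mkdir()
        (Path(tmp) / "nested" / "a.txt").write_text("hello")
        (Path(tmp) / "b.txt").write_text("world!")
        # Two files, 5 + 6 bytes: the tests compare this total for the synced
        # package tree against the reference copy under tests/data.
        print(tree_size(tmp))  # 11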