From 3c376662e836177d8d841f2f92514a1040486456 Mon Sep 17 00:00:00 2001 From: Arrrrr Date: Wed, 11 Dec 2024 16:00:51 -0800 Subject: [PATCH] first commit --- .coveragerc | 18 + .dockerignore | 46 + .env.example | 355 ++ .github/FUNDING.yml | 3 + .github/ISSUE_TEMPLAYE/---bug-report.yml | 39 + .github/ISSUE_TEMPLAYE/---feature-request.yml | 53 + .github/ISSUE_TEMPLAYE/config.yml | 11 + .github/dependabot.yml | 30 + .github/pull_request_template.md | 9 + .../workflows/backend-battery.yml.disabled | 52 + .github/workflows/conventional-commits.yml | 16 + .github/workflows/docker-build-dev.yml | 68 + .github/workflows/docker-build.yml | 69 + .github/workflows/notify-discord.yml | 23 + .github/workflows/release-please.yaml | 21 + .gitignore | 64 + CHANGELOG.md | 590 +++ CODE_OF_CONDUCT.md | 128 + CONTRIBUTING.md | 19 + Dockerfile | 70 + Dockerfile.slim | 54 + LICENSE.md | 674 ++++ README.md | 322 ++ assets/riven-dark.png | Bin 0 -> 3831 bytes assets/riven-light.png | Bin 0 -> 7116 bytes dev/attach-memray.sh | 5 + docker-compose-dev.yml | 20 + docker-compose-full.yml | 144 + docker-compose.yml | 60 + entrypoint.sh | 91 + poetry.lock | 3329 +++++++++++++++++ pyproject.toml | 120 + src/.gitignore | 203 + src/__init__.py | 0 src/alembic.ini | 12 + src/alembic/env.py | 124 + src/alembic/script.py.mako | 26 + ...41105_1300_c99709e3648f_baseline_schema.py | 179 + src/auth.py | 23 + src/main.py | 113 + src/program/__init__.py | 4 + src/program/apis/__init__.py | 45 + src/program/apis/listrr_api.py | 74 + src/program/apis/mdblist_api.py | 50 + src/program/apis/overseerr_api.py | 186 + src/program/apis/plex_api.py | 135 + src/program/apis/trakt_api.py | 366 ++ src/program/db/__init__.py | 0 src/program/db/db.py | 66 + src/program/db/db_functions.py | 506 +++ src/program/managers/event_manager.py | 327 ++ src/program/managers/sse_manager.py | 27 + src/program/media/__init__.py | 2 + src/program/media/item.py | 733 ++++ src/program/media/state.py | 15 + src/program/media/stream.py | 70 + src/program/media/subtitle.py | 46 + src/program/program.py | 478 +++ src/program/services/content/__init__.py | 32 + src/program/services/content/listrr.py | 69 + src/program/services/content/mdblist.py | 71 + src/program/services/content/overseerr.py | 61 + .../services/content/plex_watchlist.py | 71 + src/program/services/content/trakt.py | 177 + src/program/services/downloaders/__init__.py | 232 ++ src/program/services/downloaders/alldebrid.py | 246 ++ .../services/downloaders/realdebrid.py | 1510 ++++++++ src/program/services/downloaders/shared.py | 220 ++ src/program/services/downloaders/torbox.py | 334 ++ src/program/services/indexers/__init__.py | 1 + src/program/services/indexers/tmdb.py | 454 +++ src/program/services/indexers/trakt.py | 127 + src/program/services/libraries/__init__.py | 1 + src/program/services/libraries/symlink.py | 384 ++ .../services/post_processing/__init__.py | 52 + .../services/post_processing/subliminal.py | 152 + src/program/services/scrapers/__init__.py | 161 + src/program/services/scrapers/comet.py | 122 + src/program/services/scrapers/jackett.py | 279 ++ .../services/scrapers/knightcrawler.py | 107 + src/program/services/scrapers/mediafusion.py | 159 + src/program/services/scrapers/orionoid.py | 170 + src/program/services/scrapers/prowlarr.py | 290 ++ src/program/services/scrapers/shared.py | 169 + src/program/services/scrapers/torbox.py | 101 + src/program/services/scrapers/torrentio.py | 96 + src/program/services/scrapers/zilean.py | 94 + src/program/services/updaters/__init__.py | 51 + 
src/program/services/updaters/emby.py | 121 + src/program/services/updaters/jellyfin.py | 122 + src/program/services/updaters/plex.py | 118 + src/program/settings/__init__.py | 0 src/program/settings/manager.py | 88 + src/program/settings/migratable.py | 11 + src/program/settings/models.py | 377 ++ src/program/settings/versions.py | 36 + src/program/state_transition.py | 83 + src/program/symlink.py | 319 ++ src/program/types.py | 55 + src/program/utils/__init__.py | 38 + src/program/utils/cli.py | 67 + src/program/utils/logging.py | 137 + src/program/utils/notifications.py | 73 + src/program/utils/request.py | 357 ++ src/program/utils/useragents.py | 54 + src/pytest.ini | 6 + src/routers/__init__.py | 29 + src/routers/models/overseerr.py | 67 + src/routers/models/plex.py | 46 + src/routers/models/shared.py | 8 + src/routers/secure/__init__.py | 0 src/routers/secure/default.py | 276 ++ src/routers/secure/items.py | 435 +++ src/routers/secure/scrape.py | 415 ++ src/routers/secure/settings.py | 131 + src/routers/secure/stream.py | 38 + src/routers/secure/webhooks.py | 63 + src/routers/secure/ws.py | 14 + src/tests/test_alldebrid_downloader.py | 177 + src/tests/test_cache.sqlite | Bin 0 -> 24576 bytes src/tests/test_container.py | 142 + .../test_data/alldebrid_magnet_delete.json | 6 + .../test_data/alldebrid_magnet_instant.json | 16 + .../alldebrid_magnet_instant_unavailable.json | 12 + ...ldebrid_magnet_status_one_downloading.json | 25 + .../alldebrid_magnet_status_one_ready.json | 40 + .../alldebrid_magnet_upload_not_ready.json | 16 + .../alldebrid_magnet_upload_ready.json | 16 + src/tests/test_db_functions.py | 395 ++ src/tests/test_debrid_matching.py | 78 + src/tests/test_ranking.py | 26 + src/tests/test_rate_limiting.py | 297 ++ src/tests/test_requests.py | 186 + src/tests/test_settings_migration.py | 70 + src/tests/test_states_processing.py | 229 ++ src/tests/test_symlink_creation.py | 222 ++ src/tests/test_symlink_library.py | 100 + 137 files changed, 21943 insertions(+) create mode 100644 .coveragerc create mode 100644 .dockerignore create mode 100644 .env.example create mode 100644 .github/FUNDING.yml create mode 100644 .github/ISSUE_TEMPLAYE/---bug-report.yml create mode 100644 .github/ISSUE_TEMPLAYE/---feature-request.yml create mode 100644 .github/ISSUE_TEMPLAYE/config.yml create mode 100644 .github/dependabot.yml create mode 100644 .github/pull_request_template.md create mode 100644 .github/workflows/backend-battery.yml.disabled create mode 100644 .github/workflows/conventional-commits.yml create mode 100644 .github/workflows/docker-build-dev.yml create mode 100644 .github/workflows/docker-build.yml create mode 100644 .github/workflows/notify-discord.yml create mode 100644 .github/workflows/release-please.yaml create mode 100644 .gitignore create mode 100644 CHANGELOG.md create mode 100644 CODE_OF_CONDUCT.md create mode 100644 CONTRIBUTING.md create mode 100644 Dockerfile create mode 100644 Dockerfile.slim create mode 100644 LICENSE.md create mode 100644 README.md create mode 100644 assets/riven-dark.png create mode 100644 assets/riven-light.png create mode 100755 dev/attach-memray.sh create mode 100644 docker-compose-dev.yml create mode 100644 docker-compose-full.yml create mode 100644 docker-compose.yml create mode 100644 entrypoint.sh create mode 100644 poetry.lock create mode 100644 pyproject.toml create mode 100644 src/.gitignore create mode 100644 src/__init__.py create mode 100644 src/alembic.ini create mode 100644 src/alembic/env.py create mode 100644 
src/alembic/script.py.mako create mode 100644 src/alembic/versions/20241105_1300_c99709e3648f_baseline_schema.py create mode 100644 src/auth.py create mode 100644 src/main.py create mode 100644 src/program/__init__.py create mode 100644 src/program/apis/__init__.py create mode 100644 src/program/apis/listrr_api.py create mode 100644 src/program/apis/mdblist_api.py create mode 100644 src/program/apis/overseerr_api.py create mode 100644 src/program/apis/plex_api.py create mode 100644 src/program/apis/trakt_api.py create mode 100644 src/program/db/__init__.py create mode 100644 src/program/db/db.py create mode 100644 src/program/db/db_functions.py create mode 100644 src/program/managers/event_manager.py create mode 100644 src/program/managers/sse_manager.py create mode 100644 src/program/media/__init__.py create mode 100644 src/program/media/item.py create mode 100644 src/program/media/state.py create mode 100644 src/program/media/stream.py create mode 100644 src/program/media/subtitle.py create mode 100644 src/program/program.py create mode 100644 src/program/services/content/__init__.py create mode 100644 src/program/services/content/listrr.py create mode 100644 src/program/services/content/mdblist.py create mode 100644 src/program/services/content/overseerr.py create mode 100644 src/program/services/content/plex_watchlist.py create mode 100644 src/program/services/content/trakt.py create mode 100644 src/program/services/downloaders/__init__.py create mode 100644 src/program/services/downloaders/alldebrid.py create mode 100644 src/program/services/downloaders/realdebrid.py create mode 100644 src/program/services/downloaders/shared.py create mode 100644 src/program/services/downloaders/torbox.py create mode 100644 src/program/services/indexers/__init__.py create mode 100644 src/program/services/indexers/tmdb.py create mode 100644 src/program/services/indexers/trakt.py create mode 100644 src/program/services/libraries/__init__.py create mode 100644 src/program/services/libraries/symlink.py create mode 100644 src/program/services/post_processing/__init__.py create mode 100644 src/program/services/post_processing/subliminal.py create mode 100644 src/program/services/scrapers/__init__.py create mode 100644 src/program/services/scrapers/comet.py create mode 100644 src/program/services/scrapers/jackett.py create mode 100644 src/program/services/scrapers/knightcrawler.py create mode 100644 src/program/services/scrapers/mediafusion.py create mode 100644 src/program/services/scrapers/orionoid.py create mode 100644 src/program/services/scrapers/prowlarr.py create mode 100644 src/program/services/scrapers/shared.py create mode 100644 src/program/services/scrapers/torbox.py create mode 100644 src/program/services/scrapers/torrentio.py create mode 100644 src/program/services/scrapers/zilean.py create mode 100644 src/program/services/updaters/__init__.py create mode 100644 src/program/services/updaters/emby.py create mode 100644 src/program/services/updaters/jellyfin.py create mode 100644 src/program/services/updaters/plex.py create mode 100644 src/program/settings/__init__.py create mode 100644 src/program/settings/manager.py create mode 100644 src/program/settings/migratable.py create mode 100644 src/program/settings/models.py create mode 100644 src/program/settings/versions.py create mode 100644 src/program/state_transition.py create mode 100644 src/program/symlink.py create mode 100644 src/program/types.py create mode 100644 src/program/utils/__init__.py create mode 100644 src/program/utils/cli.py 
create mode 100644 src/program/utils/logging.py create mode 100644 src/program/utils/notifications.py create mode 100644 src/program/utils/request.py create mode 100644 src/program/utils/useragents.py create mode 100644 src/pytest.ini create mode 100644 src/routers/__init__.py create mode 100644 src/routers/models/overseerr.py create mode 100644 src/routers/models/plex.py create mode 100644 src/routers/models/shared.py create mode 100644 src/routers/secure/__init__.py create mode 100644 src/routers/secure/default.py create mode 100644 src/routers/secure/items.py create mode 100644 src/routers/secure/scrape.py create mode 100644 src/routers/secure/settings.py create mode 100644 src/routers/secure/stream.py create mode 100644 src/routers/secure/webhooks.py create mode 100644 src/routers/secure/ws.py create mode 100644 src/tests/test_alldebrid_downloader.py create mode 100644 src/tests/test_cache.sqlite create mode 100644 src/tests/test_container.py create mode 100644 src/tests/test_data/alldebrid_magnet_delete.json create mode 100644 src/tests/test_data/alldebrid_magnet_instant.json create mode 100644 src/tests/test_data/alldebrid_magnet_instant_unavailable.json create mode 100644 src/tests/test_data/alldebrid_magnet_status_one_downloading.json create mode 100644 src/tests/test_data/alldebrid_magnet_status_one_ready.json create mode 100644 src/tests/test_data/alldebrid_magnet_upload_not_ready.json create mode 100644 src/tests/test_data/alldebrid_magnet_upload_ready.json create mode 100644 src/tests/test_db_functions.py create mode 100644 src/tests/test_debrid_matching.py create mode 100644 src/tests/test_ranking.py create mode 100644 src/tests/test_rate_limiting.py create mode 100644 src/tests/test_requests.py create mode 100644 src/tests/test_settings_migration.py create mode 100644 src/tests/test_states_processing.py create mode 100644 src/tests/test_symlink_creation.py create mode 100644 src/tests/test_symlink_library.py diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..03e5f37 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,18 @@ +[run] +branch = True +source = src + +[report] +omit = + */tests/* + */__init__.py + +# Show missing lines in report output +show_missing = False + +exclude_lines = + pragma: no cover + +[html] +# Directory where HTML reports will be saved +directory = htmlcov \ No newline at end of file diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..9c83509 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,46 @@ +# Extras That Shouldn't Be Here +.git/ +.gitignore +.dockerignore +docker-compose* +Dockerfile +makefile +htmlcov/ +coverage.xml +.coverage* +*.svg +frontend/node_modules/ +bin/ +*.bin + +.vscode/ +.ruff_cache/ +*.dat +profile.svg + +# Frontend +.DS_Store +/build +/.svelte-kit +/package +.example* + +# Backend +logs/ +settings.json +__pycache__ +*.log +data +test* + +# Jupyter Notebooks +.ipynb_checkpoints + +# Environments +.env* +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..9f371e9 --- /dev/null +++ b/.env.example @@ -0,0 +1,355 @@ +# Example Environment Variables for Riven (backend only!) + +#------------------------------------- +# Non settings file variables +#------------------------------------- + +# This is used to force the use of the env variables all the time! +# By default, this is false, which only new setting files will be created +# using the env variables you specified. 
+RIVEN_FORCE_ENV=false + +# This will reset the database and recreate all tables, and then exit after running! +HARD_RESET=false + +# This will attempt to fix broken symlinks in the library, and then exit after running! +REPAIR_SYMLINKS=false + +# Manual api key, must be 32 characters long +API_KEY=1234567890qwertyuiopas + +# This is the number of workers to use for reindexing symlinks after a database reset. +# More workers = faster symlinking but uses more memory. +# For lower end machines, stick to around 1-3. +# For higher end machines, you can do as many as you want, or set it to the number of cores. +# If you are experiencing indexing issues after a database reset, try lowering this to 1. +SYMLINK_MAX_WORKERS=4 + +#------------------------------------- +# Riven Settings +#------------------------------------- + +RIVEN_VERSION=0.14.1 +RIVEN_DEBUG=true +RIVEN_LOG=true +RIVEN_FORCE_REFRESH=false +RIVEN_MAP_METADATA=true +RIVEN_TRACEMALLOC=false + +#------------------------------------- +# Symlink Settings +#------------------------------------- + +RIVEN_SYMLINK_RCLONE_PATH=. # The path to your rclone dir. If using Zurg, this should point to the `__all__` directory. +RIVEN_SYMLINK_LIBRARY_PATH=. # The path you want your symlinks to be placed in. +RIVEN_SYMLINK_SEPARATE_ANIME_DIRS=false # If you want to separate your anime symlinks into a separate directory. +RIVEN_SYMLINK_REPAIR_SYMLINKS=false # If you want to repair broken symlinks after a database reset. +RIVEN_SYMLINK_REPAIR_INTERVAL=6.0 # The interval at which to check for broken symlinks, in hours. + +#------------------------------------- +# Updaters +#------------------------------------- + +RIVEN_UPDATER_INTERVAL=120 +RIVEN_PLEX_ENABLED=false +RIVEN_PLEX_TOKEN= +RIVEN_PLEX_URL=http://localhost:32400 +RIVEN_JELLYFIN_ENABLED=false +RIVEN_JELLYFIN_API_KEY= +RIVEN_JELLYFIN_URL=http://localhost:8096 +RIVEN_EMBY_ENABLED=false +RIVEN_EMBY_API_KEY= +RIVEN_EMBY_URL=http://localhost:8096 + +#------------------------------------- +# Downloaders +#------------------------------------- + +RIVEN_DOWNLOADERS_VIDEO_EXTENSIONS=["mp4","mkv","avi"] +RIVEN_DOWNLOADERS_PREFER_SPEED_OVER_QUALITY=false +RIVEN_DOWNLOADERS_REAL_DEBRID_ENABLED=false +RIVEN_DOWNLOADERS_REAL_DEBRID_API_KEY= +RIVEN_DOWNLOADERS_REAL_DEBRID_PROXY_ENABLED=false +RIVEN_DOWNLOADERS_REAL_DEBRID_PROXY_URL= +RIVEN_DOWNLOADERS_ALL_DEBRID_ENABLED=false +RIVEN_DOWNLOADERS_ALL_DEBRID_API_KEY= +RIVEN_DOWNLOADERS_ALL_DEBRID_PROXY_ENABLED=false +RIVEN_DOWNLOADERS_ALL_DEBRID_PROXY_URL= +RIVEN_DOWNLOADERS_TORBOX_ENABLED=false +RIVEN_DOWNLOADERS_TORBOX_API_KEY= + +#------------------------------------- +# Content +#------------------------------------- + +RIVEN_CONTENT_OVERSEERR_ENABLED=false +RIVEN_CONTENT_OVERSEERR_API_KEY= +RIVEN_CONTENT_OVERSEERR_URL=http://localhost:5055 +RIVEN_CONTENT_OVERSEERR_USE_WEBHOOK=false +RIVEN_CONTENT_OVERSEERR_UPDATE_INTERVAL=60 +RIVEN_CONTENT_PLEX_WATCHLIST_ENABLED=false +RIVEN_CONTENT_PLEX_WATCHLIST_RSS= +RIVEN_CONTENT_PLEX_WATCHLIST_UPDATE_INTERVAL=60 +RIVEN_CONTENT_MDBLIST_ENABLED=false +RIVEN_CONTENT_MDBLIST_API_KEY= +RIVEN_CONTENT_MDBLIST_UPDATE_INTERVAL=300 +RIVEN_CONTENT_MDBLIST_LISTS=[] +RIVEN_CONTENT_LISTRR_ENABLED=false +RIVEN_CONTENT_LISTRR_API_KEY= +RIVEN_CONTENT_LISTRR_UPDATE_INTERVAL=300 +RIVEN_CONTENT_LISTRR_MOVIE_LISTS=[] +RIVEN_CONTENT_LISTRR_SHOW_LISTS=[] +RIVEN_CONTENT_TRAKT_ENABLED=false +RIVEN_CONTENT_TRAKT_API_KEY= +RIVEN_CONTENT_TRAKT_UPDATE_INTERVAL=300 + +#------------------------------------- +# Scrapers 
+#------------------------------------- + +# The interval to retry scraping an item after it's failed, in hours. +RIVEN_SCRAPING_AFTER_2=2.0 +RIVEN_SCRAPING_AFTER_5=6 +RIVEN_SCRAPING_AFTER_10=24 + +# This will enable debug mode for the scraper, which will log more information about the scraping process. +RIVEN_SCRAPING_PARSE_DEBUG=false + +# This will enable aliases for the scraper, which will allow for more accurate scraping. +# This can sometimes cause issues if there are alias titles that aren't exactly the same as the title in the library. +RIVEN_SCRAPING_ENABLE_ALIASES=true + +RIVEN_SCRAPING_TORRENTIO_ENABLED=false +RIVEN_SCRAPING_TORRENTIO_FILTER=sort=qualitysize%7Cqualityfilter=480p,scr,cam +RIVEN_SCRAPING_TORRENTIO_URL=http://torrentio.strem.fun +RIVEN_SCRAPING_TORRENTIO_TIMEOUT=30 +RIVEN_SCRAPING_TORRENTIO_RATELIMIT=true +RIVEN_SCRAPING_KNIGHTCRAWLER_ENABLED=false +RIVEN_SCRAPING_KNIGHTCRAWLER_FILTER=sort=qualitysize%7Cqualityfilter=480p,scr,cam +RIVEN_SCRAPING_KNIGHTCRAWLER_URL=https://knightcrawler.elfhosted.com +RIVEN_SCRAPING_KNIGHTCRAWLER_TIMEOUT=30 +RIVEN_SCRAPING_KNIGHTCRAWLER_RATELIMIT=true +RIVEN_SCRAPING_JACKETT_ENABLED=false +RIVEN_SCRAPING_JACKETT_URL=http://localhost:9117 +RIVEN_SCRAPING_JACKETT_API_KEY= +RIVEN_SCRAPING_JACKETT_TIMEOUT=30 +RIVEN_SCRAPING_JACKETT_RATELIMIT=true +RIVEN_SCRAPING_PROWLARR_ENABLED=false +RIVEN_SCRAPING_PROWLARR_URL=http://localhost:9696 +RIVEN_SCRAPING_PROWLARR_API_KEY= +RIVEN_SCRAPING_PROWLARR_TIMEOUT=30 +RIVEN_SCRAPING_PROWLARR_RATELIMIT=true +RIVEN_SCRAPING_PROWLARR_LIMITER_SECONDS=60 +RIVEN_SCRAPING_ORIONOID_ENABLED=false +RIVEN_SCRAPING_ORIONOID_API_KEY= +RIVEN_SCRAPING_ORIONOID_CACHED_RESULTS_ONLY=false +RIVEN_SCRAPING_ORIONOID_PARAMETERS_VIDEO3D=false # See the Orionoid API docs for more information on these parameters. +RIVEN_SCRAPING_ORIONOID_PARAMETERS_VIDEOQUALITY=sd_hd8k # See the Orionoid API docs for more information on these parameters. +RIVEN_SCRAPING_ORIONOID_PARAMETERS_LIMITCOUNT=5 # See the Orionoid API docs for more information on these parameters. 
+RIVEN_SCRAPING_ORIONOID_TIMEOUT=30 +RIVEN_SCRAPING_ORIONOID_RATELIMIT=true +RIVEN_SCRAPING_TORBOX_SCRAPER_ENABLED=false +RIVEN_SCRAPING_TORBOX_SCRAPER_TIMEOUT=30 +RIVEN_SCRAPING_MEDIAFUSION_ENABLED=false +RIVEN_SCRAPING_MEDIAFUSION_URL=https://mediafusion.elfhosted.com +RIVEN_SCRAPING_MEDIAFUSION_TIMEOUT=30 +RIVEN_SCRAPING_MEDIAFUSION_RATELIMIT=true +RIVEN_SCRAPING_MEDIAFUSION_CATALOGS=prowlarr_streams,torrentio_streams +RIVEN_SCRAPING_ZILEAN_ENABLED=false +RIVEN_SCRAPING_ZILEAN_URL=http://localhost:8181 +RIVEN_SCRAPING_ZILEAN_TIMEOUT=30 +RIVEN_SCRAPING_ZILEAN_RATELIMIT=true +RIVEN_SCRAPING_COMET_ENABLED=false +RIVEN_SCRAPING_COMET_URL=http://localhost:8000 +RIVEN_SCRAPING_COMET_INDEXERS=bitsearch,eztv,thepiratebay,therarbg,yts +RIVEN_SCRAPING_COMET_TIMEOUT=30 +RIVEN_SCRAPING_COMET_RATELIMIT=true + +#------------------------------------- +# Ranking +#------------------------------------- + +RIVEN_RANKING_PROFILE=default +RIVEN_RANKING_REQUIRE=[] +RIVEN_RANKING_EXCLUDE=[] +RIVEN_RANKING_PREFERRED=[] +RIVEN_RANKING_RESOLUTIONS_2160P=false +RIVEN_RANKING_RESOLUTIONS_1080P=true +RIVEN_RANKING_RESOLUTIONS_720P=true +RIVEN_RANKING_RESOLUTIONS_480P=false +RIVEN_RANKING_RESOLUTIONS_360P=false +RIVEN_RANKING_RESOLUTIONS_UNKNOWN=true +RIVEN_RANKING_OPTIONS_TITLE_SIMILARITY=0.85 +RIVEN_RANKING_OPTIONS_REMOVE_ALL_TRASH=true +RIVEN_RANKING_OPTIONS_REMOVE_RANKS_UNDER=-10000 +RIVEN_RANKING_OPTIONS_REMOVE_UNKNOWN_LANGUAGES=false +RIVEN_RANKING_OPTIONS_ALLOW_ENGLISH_IN_LANGUAGES=false +RIVEN_RANKING_LANGUAGES_REQUIRED=[] +RIVEN_RANKING_LANGUAGES_EXCLUDE=["common"] +RIVEN_RANKING_LANGUAGES_PREFERRED=[] +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_AV1_FETCH=false +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_AV1_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_AV1_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_AVC_FETCH=true +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_AVC_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_AVC_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_BLURAY_FETCH=true +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_BLURAY_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_BLURAY_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_DVD_FETCH=false +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_DVD_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_DVD_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_HDTV_FETCH=true +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_HDTV_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_HDTV_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_HEVC_FETCH=true +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_HEVC_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_HEVC_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_MPEG_FETCH=false +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_MPEG_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_MPEG_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_REMUX_FETCH=false +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_REMUX_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_REMUX_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_VHS_FETCH=false +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_VHS_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_VHS_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_WEB_FETCH=true +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_WEB_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_WEB_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_WEBDL_FETCH=true +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_WEBDL_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_WEBDL_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_WEBMUX_FETCH=false +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_WEBMUX_USE_CUSTOM_RANK=false 
+RIVEN_RANKING_CUSTOM_RANKS_QUALITY_WEBMUX_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_XVID_FETCH=false +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_XVID_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_QUALITY_XVID_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_RIPS_BDRIP_FETCH=false +RIVEN_RANKING_CUSTOM_RANKS_RIPS_BDRIP_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_RIPS_BDRIP_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_RIPS_BRRIP_FETCH=false +RIVEN_RANKING_CUSTOM_RANKS_RIPS_BRRIP_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_RIPS_BRRIP_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_RIPS_DVDRIP_FETCH=false +RIVEN_RANKING_CUSTOM_RANKS_RIPS_DVDRIP_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_RIPS_DVDRIP_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_RIPS_HDRIP_FETCH=false +RIVEN_RANKING_CUSTOM_RANKS_RIPS_HDRIP_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_RIPS_HDRIP_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_RIPS_PPVRIP_FETCH=false +RIVEN_RANKING_CUSTOM_RANKS_RIPS_PPVRIP_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_RIPS_PPVRIP_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_RIPS_SATRIP_FETCH=false +RIVEN_RANKING_CUSTOM_RANKS_RIPS_SATRIP_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_RIPS_SATRIP_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_RIPS_TVRIP_FETCH=false +RIVEN_RANKING_CUSTOM_RANKS_RIPS_TVRIP_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_RIPS_TVRIP_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_RIPS_UHDRIP_FETCH=false +RIVEN_RANKING_CUSTOM_RANKS_RIPS_UHDRIP_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_RIPS_UHDRIP_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_RIPS_VHSRIP_FETCH=false +RIVEN_RANKING_CUSTOM_RANKS_RIPS_VHSRIP_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_RIPS_VHSRIP_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_RIPS_WEBDLRIP_FETCH=false +RIVEN_RANKING_CUSTOM_RANKS_RIPS_WEBDLRIP_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_RIPS_WEBDLRIP_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_RIPS_WEBRIP_FETCH=true +RIVEN_RANKING_CUSTOM_RANKS_RIPS_WEBRIP_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_RIPS_WEBRIP_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_HDR_10BIT_FETCH=true +RIVEN_RANKING_CUSTOM_RANKS_HDR_10BIT_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_HDR_10BIT_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_HDR_DOLBY_VISION_FETCH=true +RIVEN_RANKING_CUSTOM_RANKS_HDR_DOLBY_VISION_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_HDR_DOLBY_VISION_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_HDR_HDR_FETCH=true +RIVEN_RANKING_CUSTOM_RANKS_HDR_HDR_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_HDR_HDR_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_HDR_HDR10PLUS_FETCH=true +RIVEN_RANKING_CUSTOM_RANKS_HDR_HDR10PLUS_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_HDR_HDR10PLUS_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_HDR_SDR_FETCH=true +RIVEN_RANKING_CUSTOM_RANKS_HDR_SDR_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_HDR_SDR_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_AAC_FETCH=true +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_AAC_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_AAC_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_AC3_FETCH=true +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_AC3_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_AC3_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_ATMOS_FETCH=true +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_ATMOS_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_ATMOS_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_DOLBY_DIGITAL_FETCH=true +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_DOLBY_DIGITAL_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_DOLBY_DIGITAL_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_DOLBY_DIGITAL_PLUS_FETCH=true 
+RIVEN_RANKING_CUSTOM_RANKS_AUDIO_DOLBY_DIGITAL_PLUS_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_DOLBY_DIGITAL_PLUS_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_DTS_LOSSY_FETCH=true +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_DTS_LOSSY_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_DTS_LOSSY_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_DTS_LOSSLESS_FETCH=true +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_DTS_LOSSLESS_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_DTS_LOSSLESS_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_EAC3_FETCH=true +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_EAC3_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_EAC3_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_FLAC_FETCH=true +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_FLAC_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_FLAC_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_MONO_FETCH=false +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_MONO_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_MONO_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_MP3_FETCH=false +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_MP3_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_MP3_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_STEREO_FETCH=true +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_STEREO_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_STEREO_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_SURROUND_FETCH=true +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_SURROUND_USE_CUSTOM_RANK=false +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_SURROUND_RANK=0 +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_TRUEHD_FETCH=true +RIVEN_RANKING_CUSTOM_RANKS_AUDIO_TRUEHD_USE_CUSTOM_RANK=false + +#------------------------------------- +# Logger personalisation +#------------------------------------- + +# If you don't like the colors or icons used in the logger, you can change them here. +# You can use either the color name, or a hex value. +# The format is RIVEN_LOGGER_<NAME>_FG= and RIVEN_LOGGER_<NAME>_ICON= +# You can find color names here: https://rich.readthedocs.io/en/stable/appendix/colors.html + +RIVEN_LOGGER_PROGRAM_FG= +RIVEN_LOGGER_PROGRAM_ICON= +RIVEN_LOGGER_DATABASE_FG= +RIVEN_LOGGER_DATABASE_ICON= +RIVEN_LOGGER_DEBRID_FG=FE6F47 +RIVEN_LOGGER_DEBRID_ICON=🔗 +RIVEN_LOGGER_SYMLINKER_FG=F9E79F +RIVEN_LOGGER_SYMLINKER_ICON=🔗 +RIVEN_LOGGER_SCRAPER_FG=D299EA +RIVEN_LOGGER_SCRAPER_ICON=👻 +RIVEN_LOGGER_COMPLETED_FG=FFFFFF +RIVEN_LOGGER_COMPLETED_ICON=🟢 +RIVEN_LOGGER_NOT_FOUND_FG=818589 +RIVEN_LOGGER_NOT_FOUND_ICON=🤷‍ +RIVEN_LOGGER_NEW_FG=e63946 +RIVEN_LOGGER_NEW_ICON=✨ +RIVEN_LOGGER_FILES_FG=FFFFE0 +RIVEN_LOGGER_FILES_ICON=🗃️ +RIVEN_LOGGER_ITEM_FG=92a1cf +RIVEN_LOGGER_ITEM_ICON=🗃️ +RIVEN_LOGGER_DISCOVERY_FG=e56c49 +RIVEN_LOGGER_DISCOVERY_ICON=🔍 +RIVEN_LOGGER_API_FG=006989 +RIVEN_LOGGER_API_ICON=👾 +RIVEN_LOGGER_PLEX_FG=DAD3BE +RIVEN_LOGGER_PLEX_ICON=📽️ +RIVEN_LOGGER_TRAKT_FG=1DB954 +RIVEN_LOGGER_TRAKT_ICON=🎵 \ No newline at end of file diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 0000000..b278662 --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1,3 @@ +github: [dreulavelle] +ko_fi: spoked # Replace with a single Ko-fi username +# patreon: # Replace with a single Patreon username diff --git a/.github/ISSUE_TEMPLATE/---bug-report.yml b/.github/ISSUE_TEMPLATE/---bug-report.yml new file mode 100644 index 0000000..33a4e6f --- /dev/null +++ b/.github/ISSUE_TEMPLATE/---bug-report.yml @@ -0,0 +1,39 @@ +name: "\U0001F41E Bug Report" +description: "Riven not working the way it is documented?"
+title: "[Bug]: " +labels: ["kind/bug", "status/triage"] +assignees: + - dreulavelle + +body: + - type: markdown + attributes: + value: | + Thank you for taking the time to file a complete bug report. + + - type: textarea + attributes: + label: Description + description: | + Please describe what happened, with as much pertinent information as you can. Feel free to use markdown syntax. + + Also, ensure that the issue is not already fixed in the latest release. + validations: + required: true + + - type: textarea + attributes: + label: Workarounds + description: | + Is there a mitigation or workaround that allows users to avoid the issue today? + validations: + required: true + + - type: textarea + attributes: + label: Attach Error Logs + description: | + Please attach logs or error messages that can help in debugging the issue. + render: 'bash session' + validations: + required: true \ No newline at end of file diff --git a/.github/ISSUE_TEMPLAYE/---feature-request.yml b/.github/ISSUE_TEMPLAYE/---feature-request.yml new file mode 100644 index 0000000..f7b52de --- /dev/null +++ b/.github/ISSUE_TEMPLAYE/---feature-request.yml @@ -0,0 +1,53 @@ +name: "\U0001F381 Feature Request" +description: "Did you find bugs, errors, or anything that isn't straightforward in the documentation?" +title: "[Feature]: " +labels: ["kind/feature", "status/triage"] +assignees: + - dreulavelle + +body: + - type: markdown + attributes: + value: | + Thank you for taking the time to file a complete bug report. + + Before submitting your issue, please search [issues](https://github.com/rivenmedia/riven/issues) to ensure this is not a duplicate. + + If the issue is trivial, why not submit a pull request instead? + + - type: dropdown + attributes: + label: Issue Kind + description: | + What best describes this issue? + options: + - "New Feature Request" + - "Change in current behaviour" + - "Other" + validations: + required: true + + - type: textarea + attributes: + label: Description + description: | + Please describe the issue, with as much pertinent information as you can. Feel free to use markdown syntax. + + validations: + required: true + + - type: textarea + attributes: + label: Impact + description: | + Please describe the motivation for this issue. Describe, as best you can, how this improves or impacts the users of Riven and why this is important. + validations: + required: true + + - type: textarea + attributes: + label: Workarounds + description: | + Is there a mitigation, workaround, or another application that allows users to achieve the same functionality today? + validations: + required: true \ No newline at end of file diff --git a/.github/ISSUE_TEMPLAYE/config.yml b/.github/ISSUE_TEMPLAYE/config.yml new file mode 100644 index 0000000..b94df9f --- /dev/null +++ b/.github/ISSUE_TEMPLAYE/config.yml @@ -0,0 +1,11 @@ +# Ref: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository#configuring-the-template-chooser +blank_issues_enabled: false +contact_links: +- name: '💬 Discussions' + url: https://github.com/dreulavelle/rank-torrent-name/discussions + about: | + Ask questions about using Riven, features and roadmap, or get support and feedback! +- name: '💬 Discord Server' + url: https://discord.gg/38SFhtN8ph + about: | + Chat with the community and Riven maintainers about both the usage of and development of the project. 
\ No newline at end of file diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..dd955a4 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,30 @@ +# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + +version: 2 + +updates: + + # Frontend (points to frontend/package.json) + - package-ecosystem: 'npm' + directory: '/frontend' + schedule: + interval: 'weekly' + ignore: + - dependency-name: '*' + update-types: ["version-update:semver-minor"] + commit-message: + prefix: 'chore' + include: 'scope' + assignees: + - 'AyushSehrawat' + + # Backend (points to pyproject.toml in root directory) + - package-ecosystem: 'pip' + directory: '/' + schedule: + interval: 'weekly' + commit-message: + prefix: 'chore' + include: 'scope' + assignees: + - 'dreulavelle' diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000..54f214c --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,9 @@ +# Pull Request Check List + +Resolves: #issue-number-here + +- [ ] Added **tests** for changed code. +- [ ] Updated **documentation** for changed code. + +## Description: + diff --git a/.github/workflows/backend-battery.yml.disabled b/.github/workflows/backend-battery.yml.disabled new file mode 100644 index 0000000..4f3c31f --- /dev/null +++ b/.github/workflows/backend-battery.yml.disabled @@ -0,0 +1,52 @@ +name: Linting and Testing + +on: + pull_request: + branches: [ main ] + +jobs: + battery: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [3.11] + + steps: + - uses: actions/checkout@v4.1.2 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5.1.0 + with: + python-version: ${{ matrix.python-version }} + + - name: Cache Poetry dependencies + uses: actions/cache@v4.0.2 + with: + path: | + ~/.cache/pypoetry + .venv + key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }} + restore-keys: | + ${{ runner.os }}-poetry- + + - name: Install dependencies + run: | + pip install poetry + poetry install --no-root --with dev + + - name: Ruff & Isort Check + run: | + poetry run ruff check ./src + poetry run isort --check-only ./src + + - name: Type check + run: poetry run pyright + + - name: Run Tests & Coverage + run: poetry run pytest --cov=./src --cov-report=xml + + - name: Upload Coverage Report to Codecov + uses: codecov/codecov-action@v4.1.1 + with: + token: ${{ secrets.CODECOV_TOKEN }} + file: ./coverage.xml diff --git a/.github/workflows/conventional-commits.yml b/.github/workflows/conventional-commits.yml new file mode 100644 index 0000000..5490fe6 --- /dev/null +++ b/.github/workflows/conventional-commits.yml @@ -0,0 +1,16 @@ +name: Conventional Commits + +on: + pull_request: + branches: [ main ] + +jobs: + build: + name: Conventional Commits + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: webiny/action-conventional-commits@v1.3.0 + #with: + #GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Optional, for private repositories. + #allowed-commit-types: "feat,fix," # Optional, set if you want a subset of commit types to be allowed. 
diff --git a/.github/workflows/docker-build-dev.yml b/.github/workflows/docker-build-dev.yml new file mode 100644 index 0000000..61edc14 --- /dev/null +++ b/.github/workflows/docker-build-dev.yml @@ -0,0 +1,68 @@ +name: Docker Build and Push Dev + +on: + push: + branches: + - main + workflow_dispatch: + +jobs: + build-and-push-dev: + runs-on: ubuntu-latest + permissions: + contents: write + packages: write + security-events: write + + steps: + - name: Checkout code + uses: actions/checkout@v4.1.2 + + - name: Docker Setup QEMU + uses: docker/setup-qemu-action@v3 + id: qemu + with: + platforms: amd64,arm64 + + - name: Log into ghcr.io registry + uses: docker/login-action@v3.1.0 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3.2.0 + + - name: Log in to Docker Hub + uses: docker/login-action@v3.1.0 + with: + username: ${{ secrets.DOCKER_HUB_USERNAME }} + password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} + + - name: Build Docker Metadata + id: docker-metadata + uses: docker/metadata-action@v5 + with: + images: | + ghcr.io/rivenmedia/riven + docker.io/spoked/riven + flavor: | + latest=auto + tags: | + type=ref,event=branch + type=sha,commit=${{ github.sha }} + type=raw,value=dev,enable={{is_default_branch}} + + - name: Push Dev Image to repo + uses: docker/build-push-action@v5 + with: + context: . + file: ./Dockerfile + push: true + provenance: mode=max + tags: ${{ steps.docker-metadata.outputs.tags }} + labels: ${{ steps.docker-metadata.outputs.labels }} + platforms: linux/amd64,linux/arm64 + cache-from: type=gha,scope=${{ github.workflow }} + cache-to: type=gha,mode=max,scope=${{ github.workflow }} diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml new file mode 100644 index 0000000..293e5c3 --- /dev/null +++ b/.github/workflows/docker-build.yml @@ -0,0 +1,69 @@ +name: Docker Build and Push + +on: + push: + tags: + - 'v[0-9]+.[0-9]+.[0-9]+' + workflow_dispatch: + +jobs: + build-and-push: + runs-on: ubuntu-latest + permissions: + contents: write + packages: write + security-events: write + + steps: + - name: Checkout code + uses: actions/checkout@v4.1.2 + + - name: Docker Setup QEMU + uses: docker/setup-qemu-action@v3 + id: qemu + with: + platforms: amd64,arm64 + + - name: Log into ghcr.io registry + uses: docker/login-action@v3.1.0 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3.2.0 + + - name: Log in to Docker Hub + uses: docker/login-action@v3.1.0 + with: + username: ${{ secrets.DOCKER_HUB_USERNAME }} + password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} + + - name: Build Docker Metadata + id: docker-metadata + uses: docker/metadata-action@v5 + with: + images: | + ghcr.io/rivenmedia/riven + docker.io/spoked/riven + flavor: | + latest=auto + tags: | + type=ref,event=tag + type=sha,commit=${{ github.sha }} + type=semver,pattern={{version}} + type=raw,value=latest,enable={{is_default_branch}} + + - name: Push Service Image to repo + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./Dockerfile + push: true + provenance: mode=max + tags: ${{ steps.docker-metadata.outputs.tags }} + labels: ${{ steps.docker-metadata.outputs.labels }} + platforms: linux/amd64,linux/arm64 + cache-from: type=gha,scope=${{ github.workflow }} + cache-to: type=gha,mode=max,scope=${{ github.workflow }} diff --git a/.github/workflows/notify-discord.yml b/.github/workflows/notify-discord.yml new file mode 100644 index 0000000..77fe8b0 --- /dev/null +++ b/.github/workflows/notify-discord.yml @@ -0,0 +1,23 @@ +name: "Notify Discord of Riven Release" + +on: + release: + types: [published] + +jobs: + github-releases-to-discord: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Github Releases To Discord + uses: SethCohen/github-releases-to-discord@v1.13.1 + with: + webhook_url: ${{ secrets.DISCORD_WEBHOOK }} + color: "5378091" + username: "Riven Release Changelog" + avatar_url: "https://raw.githubusercontent.com/rivenmedia/riven/main/assets/riven-light.png" + # content: "||@here||" + footer_title: "Riven (Backend) Changelog" + footer_icon_url: "https://raw.githubusercontent.com/rivenmedia/riven/main/assets/riven-light.png" + footer_timestamp: true diff --git a/.github/workflows/release-please.yaml b/.github/workflows/release-please.yaml new file mode 100644 index 0000000..a3e6e07 --- /dev/null +++ b/.github/workflows/release-please.yaml @@ -0,0 +1,21 @@ +name: "Release Please and Notify Discord" + +on: + push: + branches: + - main + workflow_dispatch: + +permissions: + contents: write + pull-requests: write + +jobs: + release-please: + runs-on: ubuntu-latest + steps: + - uses: googleapis/release-please-action@v4 + id: release + with: + token: ${{ secrets.RELEASE_PLEASE_TOKEN }} + release-type: python diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..47a8889 --- /dev/null +++ b/.gitignore @@ -0,0 +1,64 @@ +data/ +logs/ +settings.json +ignore.txt +.vscode +.git +makefile +.ruff_cache/ +*.dat +profile.svg +*.gz +*.zip +*.lockb +*.pkl +*.bak +bin/ +*.bin +.secrets* +event.json +*.patch + +# Python bytecode / Byte-compiled / optimized / DLL files +__pycache__/ +__pypackages__/ +*.pyc +*.pyo +*.pyd +/.venv/ +*.py[cod] +*$py.class +.ruff_cache/ + +# Local Poetry artifact cache +/.cache/pypoetry/ + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Environments +*.env +*.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Rider IDE +**/.idea/ + +# MacOs +**/.DS_Store \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..d7ba915 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,590 @@ +# Changelog + +## [0.20.0](https://github.com/rivenmedia/riven/compare/v0.19.0...v0.20.0) (2024-11-20) + + +### Features + +* add denied reasoning when trashing torrents and added adult parsing ([#888](https://github.com/rivenmedia/riven/issues/888)) ([d3b5293](https://github.com/rivenmedia/riven/commit/d3b5293dfdb07c7466ff77f7dba16754fbfa7d79)) + +## [0.19.0](https://github.com/rivenmedia/riven/compare/v0.18.0...v0.19.0) (2024-11-14) + + +### Features + +* add reindexing of movie/shows in unreleased or ongoing state ([139d936](https://github.com/rivenmedia/riven/commit/139d936442de4d5a37e32fb482beb2e65557464c)) +* added upload logs endpoint to be used by frontend ([3ad6cae](https://github.com/rivenmedia/riven/commit/3ad6caeb6b0299cf60314ca2f87a76e30eba57be)) +* implement filesize 
validation for movies and episodes ([#869](https://github.com/rivenmedia/riven/issues/869)) ([d1041db](https://github.com/rivenmedia/riven/commit/d1041db78c295873f8f5cf572d9f296704c85506)) + + +### Bug Fixes + +* added cleaner directory log when rebuilding symlinks ([bb85517](https://github.com/rivenmedia/riven/commit/bb85517197bf10e855c1cfaa41e0d765dfd298e1)) +* chunk initial symlinks on re-ingest ([#882](https://github.com/rivenmedia/riven/issues/882)) ([21cd393](https://github.com/rivenmedia/riven/commit/21cd393913253678f4f580330aa4e28e114fc16f)) +* correct Prowlarr capabilities ([#879](https://github.com/rivenmedia/riven/issues/879)) ([f2636e4](https://github.com/rivenmedia/riven/commit/f2636e408f66a730915cfb2f49f56e38b1faf8c9)) +* detecting multiple episodes in symlink library ([#862](https://github.com/rivenmedia/riven/issues/862)) ([ebd11fd](https://github.com/rivenmedia/riven/commit/ebd11fd7d94a7763f0869bde6ed9b545d499e14e)) +* disable reindexing. wip. change get items endpoint to use id instead of imdbid. ([5123567](https://github.com/rivenmedia/riven/commit/5123567d4fe9ce8ef65d4fc09fa130d19a714ef7)) +* more tweaks for scrapers and fine tuning. ([b25658d](https://github.com/rivenmedia/riven/commit/b25658d21a43d2e0a097abf608c7a96216ed90ec)) +* re-check ongoing/unreleased items ([#880](https://github.com/rivenmedia/riven/issues/880)) ([47f23fa](https://github.com/rivenmedia/riven/commit/47f23fa0d78c41473445140801f5c6a6a6e076aa)) +* skip unindexable items when resetting db ([98cb2c1](https://github.com/rivenmedia/riven/commit/98cb2c12acc40fd2f2c12f79af247f89aa5638fa)) +* update state filtering logic to allow 'All' as a valid state ([#870](https://github.com/rivenmedia/riven/issues/870)) ([4430d2d](https://github.com/rivenmedia/riven/commit/4430d2daf682f26b9141a3130fa869524840a2d9)) +* updated mediafusion and tweaked scrape func to be cleaner ([73c0bcc](https://github.com/rivenmedia/riven/commit/73c0bcc91eb99c4825764775e986057951c713ae)) +* updated torbox scraper to use api key. refactored scrapers slightly. added more logging to scrapers. 
([afdb9f6](https://github.com/rivenmedia/riven/commit/afdb9f6f202690dae9b04e7d2c8ce5e078b94d0c)) + +## [0.18.0](https://github.com/rivenmedia/riven/compare/v0.17.0...v0.18.0) (2024-11-06) + + +### Features + +* add retry policy and connection pool configuration to request utils ([#864](https://github.com/rivenmedia/riven/issues/864)) ([1713a51](https://github.com/rivenmedia/riven/commit/1713a5169805cabcc828b3f82204c05f796a9aa6)) + + +### Bug Fixes + +* add HTTP adapter configuration for Jackett and Prowlarr scrapers to manage connection pool size ([0c8057a](https://github.com/rivenmedia/riven/commit/0c8057aef45fcccd2c855a8413729b39020439db)) +* add HTTP adapter configuration for Jackett and Prowlarr scrapers… ([#865](https://github.com/rivenmedia/riven/issues/865)) ([0c8057a](https://github.com/rivenmedia/riven/commit/0c8057aef45fcccd2c855a8413729b39020439db)) +* fixed log for downloaded message ([656506f](https://github.com/rivenmedia/riven/commit/656506ffba7ed34256291a31eb882dee3b5f4de6)) +* remove orionoid sub check ([d2cb0d9](https://github.com/rivenmedia/riven/commit/d2cb0d9baa4be3421e5c56cafdbb6d5c024ca675)) +* removed unused functions relating to resolving duplicates ([5aec8fb](https://github.com/rivenmedia/riven/commit/5aec8fb036b9b549477304f46b6ff0548a72d7f7)) +* wrong headers attr and added orionoid sub check ([91d3f7d](https://github.com/rivenmedia/riven/commit/91d3f7d87c56a2cb4cb6898b57c480d1b4df94e9)) + +## [0.17.0](https://github.com/rivenmedia/riven/compare/v0.16.2...v0.17.0) (2024-11-05) + + +### Features + +* add manual torrent adding ([#785](https://github.com/rivenmedia/riven/issues/785)) ([acb22ce](https://github.com/rivenmedia/riven/commit/acb22ce9bb54a09a542e1a587181eb731700243e)) +* Add Most Wanted items from Trakt ([#777](https://github.com/rivenmedia/riven/issues/777)) ([325df42](https://github.com/rivenmedia/riven/commit/325df42989e8d6d841ab625284c54d78b9dc02d1)) +* add rate limiting tests and update dependencies ([#857](https://github.com/rivenmedia/riven/issues/857)) ([27c8534](https://github.com/rivenmedia/riven/commit/27c8534f3084404f80e6bf8fc01b1be0b9d98ad8)) +* auth bearer authentication ([0de32fd](https://github.com/rivenmedia/riven/commit/0de32fd9e7255c8c91aae4cecb428cabe180aea9)) +* database migrations, so long db resets ([#858](https://github.com/rivenmedia/riven/issues/858)) ([14e818f](https://github.com/rivenmedia/riven/commit/14e818f1b84870ce7cd0af62319685a62cc32c1a)) +* enhance session management and event processing ([#842](https://github.com/rivenmedia/riven/issues/842)) ([13aa94e](https://github.com/rivenmedia/riven/commit/13aa94e5587661770d385d634fa1a3cef9b0d882)) +* filesize filter ([d2f8374](https://github.com/rivenmedia/riven/commit/d2f8374ae95fc763842750a67d1d9b9f3c545a8d)) +* integrate dependency injection with kink library ([#859](https://github.com/rivenmedia/riven/issues/859)) ([ed5fb2c](https://github.com/rivenmedia/riven/commit/ed5fb2cb1a33ad00fa332c11bbbcd67017fe9695)) +* requests second pass ([#848](https://github.com/rivenmedia/riven/issues/848)) ([d41c2ff](https://github.com/rivenmedia/riven/commit/d41c2ff33cc1e88325da6c8f9e10c24199eeb291)) +* stream management endpoints ([d75149e](https://github.com/rivenmedia/riven/commit/d75149eb5b246bf7312ddb3d3fac85417e2cb215)) +* we now server sse via /stream ([efbc471](https://github.com/rivenmedia/riven/commit/efbc471e4f4429c098df2a601b3f3c42b98afbb7)) + + +### Bug Fixes + +* add default value for API_KEY 
([bc6ff28](https://github.com/rivenmedia/riven/commit/bc6ff28ff5b1d1632f2dd2ca64743c4012ccc396)) +* add python-dotenv to load .env variables ([65a4aec](https://github.com/rivenmedia/riven/commit/65a4aec275a1f7768a77ef0227d6fb402f9a8612)) +* correct type hint for incomplete_retries in StatsResponse ([#839](https://github.com/rivenmedia/riven/issues/839)) ([f91ffec](https://github.com/rivenmedia/riven/commit/f91ffece2a70af71967903847068642e58a4f51c)) +* duplicate item after scraping for media that isn't in the database already ([#834](https://github.com/rivenmedia/riven/issues/834)) ([4d7ac8d](https://github.com/rivenmedia/riven/commit/4d7ac8d62a22bf2453ed6e433f43f8ebdb969e5f)) +* ensure selected files are stored in session during manual selection ([#841](https://github.com/rivenmedia/riven/issues/841)) ([86e6fd0](https://github.com/rivenmedia/riven/commit/86e6fd0f1ddd5f89800d96569288a85238ba8c80)) +* files sometimes not found in mount ([02b7a81](https://github.com/rivenmedia/riven/commit/02b7a81f4b6f93d06e59f06791e99e1860e3ebe9)) +* future cancellation resulted in reset, retry endpoints fialing ([#817](https://github.com/rivenmedia/riven/issues/817)) ([19cedc8](https://github.com/rivenmedia/riven/commit/19cedc843382acb837c9cd23ddec522d342ed9f5)) +* handle removal of nested media items in remove_item function ([#840](https://github.com/rivenmedia/riven/issues/840)) ([2096a4e](https://github.com/rivenmedia/riven/commit/2096a4e85bd613136d9dfe353cdbd7ed0d765e3f)) +* hotfix blacklist active stream ([8631008](https://github.com/rivenmedia/riven/commit/86310082d77de6550d5277ffc21c7f0a28167502)) +* invalid rd instant availibility call if no infohashes should be checked ([#843](https://github.com/rivenmedia/riven/issues/843)) ([19cf38f](https://github.com/rivenmedia/riven/commit/19cf38fe0d8fefe1de341654401d0e8801b27bb1)) +* jackett again - my bad ([#860](https://github.com/rivenmedia/riven/issues/860)) ([703ad33](https://github.com/rivenmedia/riven/commit/703ad334c06671ecf3336beaf328e8a738bf0d87)) +* MediaFusion scraper. 
([#850](https://github.com/rivenmedia/riven/issues/850)) ([0bbde7d](https://github.com/rivenmedia/riven/commit/0bbde7d3c0e817321b7603f4e5acc1ae80ca9f58)) +* mediafusion sometimes throwing error when parsing response ([#844](https://github.com/rivenmedia/riven/issues/844)) ([9c093ac](https://github.com/rivenmedia/riven/commit/9c093ac817ba541aecc552c3e1a6170cf767d58d)) +* misleading message when manually adding a torrent ([#822](https://github.com/rivenmedia/riven/issues/822)) ([18cfa3b](https://github.com/rivenmedia/riven/commit/18cfa3b441dba2dc1040157b39b228db35693118)) +* overseerr outputting items without imdbid's ([45528a9](https://github.com/rivenmedia/riven/commit/45528a9ee6701190dcc7c5358b2ea22365afcd60)) +* remove accidental cache enablement ([877ffec](https://github.com/rivenmedia/riven/commit/877ffec4c9cbcd54906f9bb86a45467c2c3974c7)) +* retry api now resets scraped_at ([#816](https://github.com/rivenmedia/riven/issues/816)) ([2676fe8](https://github.com/rivenmedia/riven/commit/2676fe801fe2522b8558daaa0fbbd899c0df5dbe)) + +## [0.16.2](https://github.com/rivenmedia/riven/compare/v0.16.1...v0.16.2) (2024-10-20) + + +### Bug Fixes + +* fixed replace torrents ([8db6541](https://github.com/rivenmedia/riven/commit/8db6541f5820f52ebb8550b81010e28bf9be589a)) + +## [0.16.1](https://github.com/rivenmedia/riven/compare/v0.16.0...v0.16.1) (2024-10-19) + + +### Bug Fixes + +* check item instance before add from content services ([7aa48ed](https://github.com/rivenmedia/riven/commit/7aa48ede46dc553beb424d2c9d765a293e6cc7d2)) +* listrr outputting imdbids instead of items. solves [#802](https://github.com/rivenmedia/riven/issues/802) ([502e52b](https://github.com/rivenmedia/riven/commit/502e52b5ecff8ac869de28654963fdfad3a2aa13)) + +## [0.16.0](https://github.com/rivenmedia/riven/compare/v0.15.3...v0.16.0) (2024-10-18) + + +### Features + +* Add debugpy as optional to entrypoint script if DEBUG env variable is set to anything. 
([24904fc](https://github.com/rivenmedia/riven/commit/24904fcc27ccba96dfa13245f8eb3add096b36dd)) +* Types for the FastAPI API and API refactor ([#748](https://github.com/rivenmedia/riven/issues/748)) ([9eec02d](https://github.com/rivenmedia/riven/commit/9eec02dd65ace8598edc8822f1c1d69c5a5b1537)) + + +### Bug Fixes + +* address memory usage ([#787](https://github.com/rivenmedia/riven/issues/787)) ([612964e](https://github.com/rivenmedia/riven/commit/612964ee77395e99610db46febb14bd273aecc30)) +* changed default update interval from 5m to 24h on content list services ([7074fb0](https://github.com/rivenmedia/riven/commit/7074fb0e11ec16a34980bf9242bdb4cacd050760)) +* delete the movie relation before deleting the mediaitem ([#788](https://github.com/rivenmedia/riven/issues/788)) ([5bfe63a](https://github.com/rivenmedia/riven/commit/5bfe63aa31e78d418bb5df9a962b0ff4fe467bfe)) +* fix state filter in items endpoint ([#791](https://github.com/rivenmedia/riven/issues/791)) ([1f24e4f](https://github.com/rivenmedia/riven/commit/1f24e4fe787e174a366c4e1e20f94fef263db76e)) +* fixed wrongful checking of bad dirs and images when rebuilding symlink library ([8501c36](https://github.com/rivenmedia/riven/commit/8501c3634ff03b75b7fcc4419db1e4908580b360)) +* improved removing items from database ([e4b6e2b](https://github.com/rivenmedia/riven/commit/e4b6e2b61893517c01a35a272806a319c845dd77)) +* lower max events added to queue ([197713a](https://github.com/rivenmedia/riven/commit/197713ae9da78eb1d674e313489f0a378c29d03a)) +* minor fixes post merge ([01a506f](https://github.com/rivenmedia/riven/commit/01a506faabc675226d6a1412cb2cd3065e3437ca)) +* plex watchlist not returning any items ([bf34db5](https://github.com/rivenmedia/riven/commit/bf34db52bc1fc184597e1c6721968d7a33a5b15c)) +* remove add to recurring on plex watchlist ([943433c](https://github.com/rivenmedia/riven/commit/943433cba70dd9a3e51d7c51b4eb1e23d098345e)) +* reset the scraped time when replacing magnets ([82fe92d](https://github.com/rivenmedia/riven/commit/82fe92d952642408b98ea6a3f1fad51c86adffcb)) +* respect orm when removing items ([d6722fa](https://github.com/rivenmedia/riven/commit/d6722fa41e33bcfcb9ceaac32f4be4985af40b15)) +* serialize subtitles for api response ([0dd561a](https://github.com/rivenmedia/riven/commit/0dd561a11880ab4cfce4b6631b385b414b953f93)) +* service endpoint response for downloaders ([#782](https://github.com/rivenmedia/riven/issues/782)) ([f2020ed](https://github.com/rivenmedia/riven/commit/f2020ed8c0007e125871329e5cd3e821a9522494)) +* state filter in items endpoint ([1f24e4f](https://github.com/rivenmedia/riven/commit/1f24e4fe787e174a366c4e1e20f94fef263db76e)) +* stream results on stats endpoint ([ff14f85](https://github.com/rivenmedia/riven/commit/ff14f85532221997215e1a1f246a5b8041183e05)) +* switch to batched streaming stats endpoint for inc items ([a8a6aa9](https://github.com/rivenmedia/riven/commit/a8a6aa9f0670098441839042ab2ed3d4990860cd)) +* switch to generator for reset/retry endpoints ([bf4fc0e](https://github.com/rivenmedia/riven/commit/bf4fc0e79a31f2c4d8701e09ae662ebf3c5e2b3f)) +* update full compose with latest zilean changes ([d3ca7a4](https://github.com/rivenmedia/riven/commit/d3ca7a4abd2e0bc7cbef34ab5bbde201986acf55)) + + +### Documentation + +* remove duplicate service from readme ([8a9942a](https://github.com/rivenmedia/riven/commit/8a9942a20039281b00b2ddb261f75a543af13ac9)) + +## [0.15.3](https://github.com/rivenmedia/riven/compare/v0.15.2...v0.15.3) (2024-10-03) + + +### Bug Fixes + +* fixed comet unpack 
issue ([6ae2a68](https://github.com/rivenmedia/riven/commit/6ae2a686456c3c60390d635fcd6ddb24bdcd6a78)) + +## [0.15.2](https://github.com/rivenmedia/riven/compare/v0.15.1...v0.15.2) (2024-10-01) + + +### Bug Fixes + +* add log back to orion ([5a81a0c](https://github.com/rivenmedia/riven/commit/5a81a0c14b76f6b90b2d4224b53948707d867040)) +* changed to speed mode by default for downloaders ([7aeca0b](https://github.com/rivenmedia/riven/commit/7aeca0bf4fe38ec6ebe7d513ca8e305ef8223b08)) +* orionoid and mediafusion fixed ([52f466e](https://github.com/rivenmedia/riven/commit/52f466e35e2d2d3e2cfc9ce81f903a8c0df5e9f4)) +* prevent error when more than two streams with the same hash in set_torrent_rd ([c9b8010](https://github.com/rivenmedia/riven/commit/c9b80109c598a2083929214006114d3abe9d6b49)) +* refactor and re-enable alldebrid ([4ca9ca2](https://github.com/rivenmedia/riven/commit/4ca9ca2c27203e3ed5b7b9285a77b683db542a85)) +* refactor and re-enable alldebrid ([61bc680](https://github.com/rivenmedia/riven/commit/61bc6803eed86d138dd46836a1f271c1c53102c1)) +* support files in rclone root ([6ad6d4d](https://github.com/rivenmedia/riven/commit/6ad6d4ddbf01593453c12b39773c07cd028bd261)) + +## [0.15.1](https://github.com/rivenmedia/riven/compare/v0.15.0...v0.15.1) (2024-09-29) + + +### Bug Fixes + +* prevent error when more than two streams with the same hash in set_torrent_rd ([eaefd63](https://github.com/rivenmedia/riven/commit/eaefd631bf87cbdcd209204f36b716285a9c3046)) + +## [0.15.0](https://github.com/rivenmedia/riven/compare/v0.14.2...v0.15.0) (2024-09-26) + + +### Features + +* add magnets for use in frontend ([7fc5b1b](https://github.com/rivenmedia/riven/commit/7fc5b1b9be4b662a7ac3c2056cedab80e675a447)) +* added magnet handling for use in frontend ([40636dc](https://github.com/rivenmedia/riven/commit/40636dc35e5545ee5c3669145f40f1915c36b212)) + + +### Bug Fixes + +* housekeeping ([2308ce5](https://github.com/rivenmedia/riven/commit/2308ce5d2c1462f8dec2b5a0ebbd674d466cbf08)) + +## [0.14.2](https://github.com/rivenmedia/riven/compare/v0.14.1...v0.14.2) (2024-09-26) + + +### Bug Fixes + +* lower worker count on symlink repair from 8 to 4 workers ([8380b7c](https://github.com/rivenmedia/riven/commit/8380b7cecb47484730335946f8a2e0d8758c1ab3)) +* remove reverse on event sort ([13a278f](https://github.com/rivenmedia/riven/commit/13a278f3b76c9b28ef9fe43742c5f7d99f896fad)) + +## [0.14.1](https://github.com/rivenmedia/riven/compare/v0.14.0...v0.14.1) (2024-09-24) + + +### Bug Fixes + +* update notification workflow ([d768eb8](https://github.com/rivenmedia/riven/commit/d768eb8b845b771058f46216e8de267772f99394)) + +## [0.14.0](https://github.com/rivenmedia/riven/compare/v0.13.3...v0.14.0) (2024-09-24) + + +### Features + +* add manual scrape endpoint. fixed mdblist empty list issue. other small tweaks. ([57f23d6](https://github.com/rivenmedia/riven/commit/57f23d63ffeb575b32d6fe050fa72ea1ca21cc85)) + + +### Bug Fixes + +* torbox scraper missing setting issue fixed. ([f4619c4](https://github.com/rivenmedia/riven/commit/f4619c437786cb1f8761b2f4b1210207e8fb72aa)) + +## [0.13.3](https://github.com/rivenmedia/riven/compare/v0.13.2...v0.13.3) (2024-09-22) + + +### Bug Fixes + +* mdblist error on imdb_id as NoneType ([048cd71](https://github.com/rivenmedia/riven/commit/048cd718af36538eb2a4443ee5a9e0f57fe3d130)) + +## [0.13.2](https://github.com/rivenmedia/riven/compare/v0.13.1...v0.13.2) (2024-09-22) + + +### Features + +* add jellyfin & emby support. 
([b600b6c](https://github.com/rivenmedia/riven/commit/b600b6ccb0cd50ad15e7a36465151793c766270e)) + + +### Bug Fixes + +* forgot to add updater files..... ([805182a](https://github.com/rivenmedia/riven/commit/805182a8648191f8b34b85697e897b6e2ef5c57b)) + + +### Miscellaneous Chores + +* release 0.13.2 ([76ccbf3](https://github.com/rivenmedia/riven/commit/76ccbf3080c6cc5af267d5e8a8b59860cd26c97c)) + +## [0.13.1](https://github.com/rivenmedia/riven/compare/v0.13.0...v0.13.1) (2024-09-22) + + +### Bug Fixes + +* jackett isinstance using list instead of tuple ([c925a5b](https://github.com/rivenmedia/riven/commit/c925a5b75a4b90af16c1ff5b04c5f2869c232b0a)) + +## [0.13.0](https://github.com/rivenmedia/riven/compare/v0.12.8...v0.13.0) (2024-09-22) + + +### Features + +* add jellyfin & emby support. ([375302e](https://github.com/rivenmedia/riven/commit/375302ea761b157178de4383fb6ad9a61e07f1d6)) + + +### Bug Fixes + +* mdblist nonetype on imdb_id ([10f1044](https://github.com/rivenmedia/riven/commit/10f1044792356a982c6aa3b07682c418d2fa8550)) + +## [0.12.8](https://github.com/rivenmedia/riven/compare/v0.12.7...v0.12.8) (2024-09-22) + + +### Bug Fixes + +* fixed type on env var for symlink workers ([5c50cc6](https://github.com/rivenmedia/riven/commit/5c50cc60a086f22bc0bc07dfc54ecb4447e7712d)) + +## [0.12.7](https://github.com/rivenmedia/riven/compare/v0.12.6...v0.12.7) (2024-09-22) + + +### Bug Fixes + +* lowered symlink max workers to 4 on db init ([0481b98](https://github.com/rivenmedia/riven/commit/0481b982a2c70a1130c66c4d7e01b71dbe7649aa)) + +## [0.12.6](https://github.com/rivenmedia/riven/compare/v0.12.5...v0.12.6) (2024-09-21) + + +### Bug Fixes + +* remove missing attr ([5625307](https://github.com/rivenmedia/riven/commit/5625307a029bf0d59b6615958dbad2e020afb52e)) + +## [0.12.5](https://github.com/rivenmedia/riven/compare/v0.12.4...v0.12.5) (2024-09-21) + + +### Bug Fixes + +* corrected rate limit for Torrentio ([540ba52](https://github.com/rivenmedia/riven/commit/540ba528797637e77accb9f66f7e38c58869b9d1)) + +## [0.12.4](https://github.com/rivenmedia/riven/compare/v0.12.3...v0.12.4) (2024-09-21) + + +### Bug Fixes + +* plex rss startswith error ([9a2a0c1](https://github.com/rivenmedia/riven/commit/9a2a0c14211f68af523af4cdb3c8f742496a7722)) +* revert schema validation, this is causing issues. ([12f4a1a](https://github.com/rivenmedia/riven/commit/12f4a1aa7d55210e1e65744c4ee8d8e082f3d68a)) + +## [0.12.3](https://github.com/rivenmedia/riven/compare/v0.12.2...v0.12.3) (2024-09-21) + + +### Bug Fixes + +* mdblist list item validation fixed ([63fc95b](https://github.com/rivenmedia/riven/commit/63fc95b7ef69cb8ffb6aeadcfa20988d834ca65a)) + +## [0.12.2](https://github.com/rivenmedia/riven/compare/v0.12.1...v0.12.2) (2024-09-21) + + +### Bug Fixes + +* update api with json schema ([1b7365c](https://github.com/rivenmedia/riven/commit/1b7365c77d3d121b6e7dccea2bd011fabb408aa6)) + +## [0.12.1](https://github.com/rivenmedia/riven/compare/v0.12.0...v0.12.1) (2024-09-21) + + +### Bug Fixes + +* tweak db reset. fixed issue with mdblist. ([652924e](https://github.com/rivenmedia/riven/commit/652924eb8cf6d82aec90eb514628b3c51849ab98)) + +## [0.12.0](https://github.com/rivenmedia/riven/compare/v0.11.1...v0.12.0) (2024-09-20) + + +### Features + +* add alias support in parsing when scraping torrents. several other tweaks. 
([365f022](https://github.com/rivenmedia/riven/commit/365f02239cbed0f3e441a2e60abee31e78a05553)) +* improvements to reset/retry/remove endpoints ([98f9e49](https://github.com/rivenmedia/riven/commit/98f9e49581bf43e3602d8dcb74f14a5bed1d529d)) +* move symlink db init to progress bar. added threading to speed it up. needs testing! ([71fb859](https://github.com/rivenmedia/riven/commit/71fb8592528c9b1a60856ed5cedc069a3faf8b2c)) +* update RTN to latest ([bbc5ce7](https://github.com/rivenmedia/riven/commit/bbc5ce75487ed87a989253b444f53c71d757f7db)) + + +### Bug Fixes + +* add infohash to scraped log msg. added exclude for unreleased to retry lib. ([9491e53](https://github.com/rivenmedia/riven/commit/9491e53045d97585afd57d73523bebe1997a3509)) +* add sleep between event retries ([01e71f0](https://github.com/rivenmedia/riven/commit/01e71f021643348dc7dddc4b172cf0ecb548342d)) +* add torrent name and infohash to download log. update deps to resolve parsing bugs. ([aecaf37](https://github.com/rivenmedia/riven/commit/aecaf3725075879c16651434fa6add10ef56fcff)) +* anime movies not showing in correct dir ([44e0161](https://github.com/rivenmedia/riven/commit/44e0161c3234da3b6d26ce41ecaa50d557b1ff99)) +* content services now only output new items that arent in the db. tidied some initial startup logging. ([797778c](https://github.com/rivenmedia/riven/commit/797778ca36095b350ec336900e283a2a70b0a95f)) +* fixed bug with upscaled in parsett. update dep ([f3974ef](https://github.com/rivenmedia/riven/commit/f3974efc702fc351ddabfbbb8efa91d57d6b3d2c)) +* fixed completed items being added to queue on startup ([d45882f](https://github.com/rivenmedia/riven/commit/d45882f9ec405e9f3ee8423183e0ef38e6e63dd5)) +* moved log cleaning to scheduled func. fixed bug with new furiosa movie ([475f934](https://github.com/rivenmedia/riven/commit/475f9345ad40adbbb8e8b2a453cede253f86d2c0)) +* movie obj trying to index as show type ([c0e1e2c](https://github.com/rivenmedia/riven/commit/c0e1e2c4a1b1c068a1fe04bfc300a10dea927000)) +* ranking wasnt followed by downloader ([578ae8f](https://github.com/rivenmedia/riven/commit/578ae8f88b3865222e6ab6cca6e53ff73273ef12)) +* resetting a item would make it unresettable again ([f5c849f](https://github.com/rivenmedia/riven/commit/f5c849f0ccbb7028609221c397991e0f64380df5)) +* revert back to old way of retry library ([46a6510](https://github.com/rivenmedia/riven/commit/46a651043a65e5d42ecb8d104dcf7ac477985d18)) +* revert item in db check during state processing ([18f22c1](https://github.com/rivenmedia/riven/commit/18f22c1d1cb68ed1d8f8748bba9a63d122cf499d)) +* select biggest file for movie caches ([c6f9337](https://github.com/rivenmedia/riven/commit/c6f93375222dc32cc8b06060459be607e17758ba)) +* slow api calls due to calculating state for every item ([f5e08f8](https://github.com/rivenmedia/riven/commit/f5e08f8fd506eae2f6f693347e774929edbb24fe)) +* throw exception instead of error on plex validation ([17a579e](https://github.com/rivenmedia/riven/commit/17a579e1f129533e337e31990970978976bc7b91)) +* tweak logging for db init from symlinks. ([2f15fbd](https://github.com/rivenmedia/riven/commit/2f15fbd938dc70e8c1eb709a4d8debf281d9e2b0)) +* unhardcode orionoid limitcount. whoops! 
([f7668c6](https://github.com/rivenmedia/riven/commit/f7668c68bd7b787145ce212fb0479705608db191)) + +## [0.11.1](https://github.com/rivenmedia/riven/compare/v0.11.0...v0.11.1) (2024-08-30) + + +### Miscellaneous Chores + +* release 0.11.1 ([4453a15](https://github.com/rivenmedia/riven/commit/4453a15d7d82edadbac8d9a96941217d09467798)) + +## [0.11.0](https://github.com/rivenmedia/riven/compare/v0.10.5...v0.11.0) (2024-08-30) + + +### Features + +* "Ongoing" and "Unreleased" states for shows ([6ee4742](https://github.com/rivenmedia/riven/commit/6ee47424fa5878bda99c0b4c57701ff24832af00)) +* Removal of Symlinks and Overseerr requests on removal of item from riven. ([276ed79](https://github.com/rivenmedia/riven/commit/276ed79f4374a0812300f78c1de42bae3a019bfd)) + + +### Bug Fixes + +* event updates for frontend ([6ee4742](https://github.com/rivenmedia/riven/commit/6ee47424fa5878bda99c0b4c57701ff24832af00)) +* get all content from content services (previously only one item was picked) ([6ee4742](https://github.com/rivenmedia/riven/commit/6ee47424fa5878bda99c0b4c57701ff24832af00)) +* remove local updater and stop possibility of looping with symlinked state ([6ee4742](https://github.com/rivenmedia/riven/commit/6ee47424fa5878bda99c0b4c57701ff24832af00)) +* trakt indexer not picking up shows ([6ee4742](https://github.com/rivenmedia/riven/commit/6ee47424fa5878bda99c0b4c57701ff24832af00)) +* trakt indexing was not copying correct item attributes in previous release ([6ee4742](https://github.com/rivenmedia/riven/commit/6ee47424fa5878bda99c0b4c57701ff24832af00)) +* updated settings.json variables for opensubtitles ([71012ef](https://github.com/rivenmedia/riven/commit/71012efe405ad2a26420ed331ceeb27ca49e580e)) +* validate subtitle providers on init, remove addic7ed and napiprojekt providers ([6ee4742](https://github.com/rivenmedia/riven/commit/6ee47424fa5878bda99c0b4c57701ff24832af00)) + +## [0.10.5](https://github.com/rivenmedia/riven/compare/v0.10.4...v0.10.5) (2024-08-19) + + +### Features + +* add a subtitle provider (subliminal) ([f96fe54](https://github.com/rivenmedia/riven/commit/f96fe54aa1ff6efe8ffcef161a173b74a7ca81c4)) + + +### Bug Fixes + +* address high memory usage ([f96fe54](https://github.com/rivenmedia/riven/commit/f96fe54aa1ff6efe8ffcef161a173b74a7ca81c4)) +* various small bug fixes ([f96fe54](https://github.com/rivenmedia/riven/commit/f96fe54aa1ff6efe8ffcef161a173b74a7ca81c4)) + + +### Miscellaneous Chores + +* bump version to 0.10.5 ([5c3c39f](https://github.com/rivenmedia/riven/commit/5c3c39f1eafd66e9a20b21a2cdb8215d7f2aebbb)) +* release 0.10.4 ([cacbc46](https://github.com/rivenmedia/riven/commit/cacbc46f35096956aab1f77d794942d68d76de16)) + +## [0.10.4](https://github.com/rivenmedia/riven/compare/v0.10.4...v0.10.4) (2024-08-19) + + +### Features + +* add a subtitle provider (subliminal) ([f96fe54](https://github.com/rivenmedia/riven/commit/f96fe54aa1ff6efe8ffcef161a173b74a7ca81c4)) + + +### Bug Fixes + +* address high memory usage ([f96fe54](https://github.com/rivenmedia/riven/commit/f96fe54aa1ff6efe8ffcef161a173b74a7ca81c4)) +* various small bug fixes ([f96fe54](https://github.com/rivenmedia/riven/commit/f96fe54aa1ff6efe8ffcef161a173b74a7ca81c4)) + + +### Miscellaneous Chores + +* release 0.10.4 ([cacbc46](https://github.com/rivenmedia/riven/commit/cacbc46f35096956aab1f77d794942d68d76de16)) + +## [0.10.3](https://github.com/rivenmedia/riven/compare/v0.10.2...v0.10.3) (2024-08-17) + + +### Bug Fixes + +* address memory leak by closing SQLAlchemy sessions and add connection pool options 
([0ebd38f](https://github.com/rivenmedia/riven/commit/0ebd38fb3802d143b1bd9266f248d34c532d78e7)) + +## [0.10.2](https://github.com/rivenmedia/riven/compare/v0.10.1...v0.10.2) (2024-08-15) + + +### Bug Fixes + +* correct attribute names in zilean scraper ([6e26304](https://github.com/rivenmedia/riven/commit/6e26304f89cfb5456714d424cf8e6b75c8a4a3bc)) + +## [0.10.1](https://github.com/rivenmedia/riven/compare/v0.10.0...v0.10.1) (2024-08-11) + + +### Bug Fixes + +* add cascade drop on alembic table ([b110cac](https://github.com/rivenmedia/riven/commit/b110cac68b24a92ee196317b7a4df3a5718d475e)) + +## [0.10.0](https://github.com/rivenmedia/riven/compare/v0.9.2...v0.10.0) (2024-08-11) + + +### Features + +* release 0.9.3 ([a072821](https://github.com/rivenmedia/riven/commit/a072821c3d1ee82e8580494906881338f30d8691)) + +## [0.9.2](https://github.com/rivenmedia/riven/compare/v0.9.1...v0.9.2) (2024-07-31) + + +### Features + +* add ignore hash feature ([d8e565f](https://github.com/rivenmedia/riven/commit/d8e565f946e4bb75c6f4fa9736b36c59d3c8aef1)) + + +### Bug Fixes + +* moved blacklisting to an attr of item ([989bf8b](https://github.com/rivenmedia/riven/commit/989bf8bc56c0bc7271aa000de454ecaf784b6e3a)) +* removed lazy from mapped_column on blacklisted_streams ([aca5a0f](https://github.com/rivenmedia/riven/commit/aca5a0f07e9bea50583efb9fc8f4d093372dbd83)) + +## [0.9.1](https://github.com/rivenmedia/riven/compare/v0.9.0...v0.9.1) (2024-07-31) + + +### Bug Fixes + +* add libtorrent to docker image ([af88478](https://github.com/rivenmedia/riven/commit/af88478add731a351420595aafb2577bf721d7c0)) +* merged changes with db fixes ([f3103b6](https://github.com/rivenmedia/riven/commit/f3103b6f9dda4d078be32ccd5fad09f5d041bbce)) + + +### Documentation + +* Update ElfHosted details in README ([#578](https://github.com/rivenmedia/riven/issues/578)) ([6047b96](https://github.com/rivenmedia/riven/commit/6047b96edcbbdd5fcaf2f73ecdba9c6c6f0c93a2)) + +## [0.9.0](https://github.com/rivenmedia/riven/compare/v0.8.4...v0.9.0) (2024-07-27) + + +### Features + +* add automatic dev builds in pipeline ([d55e061](https://github.com/rivenmedia/riven/commit/d55e06173b3a35de6c0b586fd9aee0216e9455da)) + + +### Bug Fixes + +* add alembic reinit to hard reset ([91ba58b](https://github.com/rivenmedia/riven/commit/91ba58bfa24a50759115cd9e7190f81b7ddb58fe)) +* add extra logging to track issue. added mutex to add_to_running ([87c3241](https://github.com/rivenmedia/riven/commit/87c324189a1dd78fed0b06e502e10eba4ae1db58)) +* add hard reset to cli ([e3366a6](https://github.com/rivenmedia/riven/commit/e3366a630e0b2774cded15e7197187712e9561a4)) +* add parent object into stream ([16c1ceb](https://github.com/rivenmedia/riven/commit/16c1ceb3bd071be501d4436ba29e8ba90820c588)) +* include stream in db, rework blacklisting ([03c6023](https://github.com/rivenmedia/riven/commit/03c602362ac07122cd5e0153226a7136b1eb330a)) +* plex watchlist updated to work with new api changes. added db guards. improved trakt id detection. changed rd blacklisting to only blacklist on movie/episode items or on empty rd cache ([ce074b3](https://github.com/rivenmedia/riven/commit/ce074b3268f075365ad406af4cf629d1715458ec)) +* remove state logging where state is not available ([76fdd89](https://github.com/rivenmedia/riven/commit/76fdd8949f0c9620ad421c8b870e518823fcff04)) +* tidied push_event_queue. this func has been causing looping issues we're seeing. 
([5c7943d](https://github.com/rivenmedia/riven/commit/5c7943d8b9255f49da01834c39cc901c401507c9)) +* update rollback ([e57d06c](https://github.com/rivenmedia/riven/commit/e57d06c4966b3e0178a56bfdce848872abf8b81a)) +* wrong symlink count at startup. corrected post symlink handling ([cbe9012](https://github.com/rivenmedia/riven/commit/cbe901260eeaa2465b93708134e715297ee0d998)) + +## [0.8.4](https://github.com/rivenmedia/riven/compare/v0.8.3...v0.8.4) (2024-07-25) + + +### Bug Fixes + +* Release 0.8.4 ([266cf0c](https://github.com/rivenmedia/riven/commit/266cf0cb455354d54edcb2e47ffc632f6c8e6b7b)) +* tweaked comet scraper. removed poetry venv from entrypoint. ([32be8fc](https://github.com/rivenmedia/riven/commit/32be8fc174eca148c2577a3941005da41e7f8513)) + +## [0.8.3](https://github.com/rivenmedia/riven/compare/v0.8.2...v0.8.3) (2024-07-25) + + +### Miscellaneous Chores + +* release 0.8.3 ([66085da](https://github.com/rivenmedia/riven/commit/66085da71a86f507d09cf21df121a24a2b2a0537)) + +## [0.8.2](https://github.com/rivenmedia/riven/compare/v0.8.1...v0.8.2) (2024-07-24) + + +### Bug Fixes + +* api port back to 8080 ([6a7cf4f](https://github.com/rivenmedia/riven/commit/6a7cf4fb16fc39142ab613afa05afca64908bfca)) + +## [0.8.1](https://github.com/rivenmedia/riven/compare/v0.8.0...v0.8.1) (2024-07-24) + + +### Bug Fixes + +* moved poetry files to root workdir ([a0eb41b](https://github.com/rivenmedia/riven/commit/a0eb41b7aa93a635deaf04a56f57a0201c91d418)) +* revert appendleft on push_event_queue ([8becb59](https://github.com/rivenmedia/riven/commit/8becb5923b1ef103ddd4cb76f59778b7c1f2269f)) + +## 0.8.0 (2024-07-24) + + +### ⚠ BREAKING CHANGES + +* add BACKEND_URL environment variable to support for custom backend URL ([#518](https://github.com/rivenmedia/riven/issues/518)) + +### Features + +* add BACKEND_URL environment variable to support for custom backend URL ([#518](https://github.com/rivenmedia/riven/issues/518)) ([e48ee93](https://github.com/rivenmedia/riven/commit/e48ee932823ad38732533ebaeb3de6937d416354)) +* add changelog. 
add version.txt ([#562](https://github.com/rivenmedia/riven/issues/562)) ([14eff8d](https://github.com/rivenmedia/riven/commit/14eff8d7c01f57f2659eddf4c619d30690b23001)) +* Add endpoint to manually request items ([#551](https://github.com/rivenmedia/riven/issues/551)) ([652671e](https://github.com/rivenmedia/riven/commit/652671e15379846700ec1f1c86651a6c1463f5b9)) +* add lazy loading for images in statistics and home pages ([#502](https://github.com/rivenmedia/riven/issues/502)) ([fadab73](https://github.com/rivenmedia/riven/commit/fadab73b6e8b3d9e6453f64e25a480b0f299a24a)) +* add support for mdblist urls ([#402](https://github.com/rivenmedia/riven/issues/402)) ([282eb35](https://github.com/rivenmedia/riven/commit/282eb3565b213c52aea66a597092e998e27708fa)) +* add top rated section ([#505](https://github.com/rivenmedia/riven/issues/505)) ([5ef689b](https://github.com/rivenmedia/riven/commit/5ef689bebc70d2fbe71485f876698a37a09083be)) +* added content settings and other minor improvements ([#88](https://github.com/rivenmedia/riven/issues/88)) ([f3444cc](https://github.com/rivenmedia/riven/commit/f3444ccfadeb5e0375f9331968d81bf079a0fcd3)) +* added tmdb api support ([#410](https://github.com/rivenmedia/riven/issues/410)) ([adc4e9a](https://github.com/rivenmedia/riven/commit/adc4e9a0622b2cf4deff5dc8daed56e4b03c0d5f)) +* enforce conventional commits ([5ffddc1](https://github.com/rivenmedia/riven/commit/5ffddc106a42dea5d406f7ae1a6bcd887cddcab0)) +* finish up trakt integration ([#333](https://github.com/rivenmedia/riven/issues/333)) ([5ca02a4](https://github.com/rivenmedia/riven/commit/5ca02a48fd22daff35230e5ed49cba5f7ee88efe)) +* fixed size of command palette on large device ([#98](https://github.com/rivenmedia/riven/issues/98)) ([c3326dd](https://github.com/rivenmedia/riven/commit/c3326dd92da82c196416ce6e8d45a53601b05a3d)) +* formatted using black & prettier (in frontend) and moved to crlf ([#51](https://github.com/rivenmedia/riven/issues/51)) ([315f310](https://github.com/rivenmedia/riven/commit/315f31096569e72e6cc3080f32b3e1e63bc26817)) +* frontend and backend improvements ([#197](https://github.com/rivenmedia/riven/issues/197)) ([080d02c](https://github.com/rivenmedia/riven/commit/080d02cf465456d230528b0b9b2aef94f071595e)) +* frontend backend and ui improvements ([#358](https://github.com/rivenmedia/riven/issues/358)) ([8a9e941](https://github.com/rivenmedia/riven/commit/8a9e941f4fd92e80c1093a74e562e46c80201a3e)) +* frontend fixes and improvements ([#29](https://github.com/rivenmedia/riven/issues/29)) ([fd19f8a](https://github.com/rivenmedia/riven/commit/fd19f8a8c599d5f0ddc50704b01d926255a5b1ca)) +* frontend improvements ([#158](https://github.com/rivenmedia/riven/issues/158)) ([1e714bf](https://github.com/rivenmedia/riven/commit/1e714bfcddb3fc97133d47060be31df2f5bff00e)) +* frontend improvements ([#159](https://github.com/rivenmedia/riven/issues/159)) ([b6c2699](https://github.com/rivenmedia/riven/commit/b6c269999e2883c50630a2c1690c93b323045156)) +* frontend improvements ([#16](https://github.com/rivenmedia/riven/issues/16)) ([d958a4b](https://github.com/rivenmedia/riven/commit/d958a4bae419d9245d1f983f9566375e5e1983a0)) +* frontend improvements ([#50](https://github.com/rivenmedia/riven/issues/50)) ([ffec1c4](https://github.com/rivenmedia/riven/commit/ffec1c4766f423392910830bf0c7be9962eb9530)) +* frontend improvements,, added settings! 
([#86](https://github.com/rivenmedia/riven/issues/86)) ([2641de0](https://github.com/rivenmedia/riven/commit/2641de0f39eab2debe0b5fb998545f153280a24d)) +* frontend rewrite to sveltekit with basic features ([#13](https://github.com/rivenmedia/riven/issues/13)) ([8c519d7](https://github.com/rivenmedia/riven/commit/8c519d7b2a39af4cceb0352c46024475d90d645e)) +* improved frontend ui ([#195](https://github.com/rivenmedia/riven/issues/195)) ([77e7ad7](https://github.com/rivenmedia/riven/commit/77e7ad7309f4775f24aad49b6a904e8c7f08e38e)) +* improved ui ([#422](https://github.com/rivenmedia/riven/issues/422)) ([71e6365](https://github.com/rivenmedia/riven/commit/71e6365d1c96d224e2e946040f41901f13abb4c0)) +* Listrr Support Added ([#136](https://github.com/rivenmedia/riven/issues/136)) ([943b098](https://github.com/rivenmedia/riven/commit/943b098f396426c67848f28f2ad226e8f055fb8b)) + + +### Bug Fixes + +* add BACKEND_URL arg to avoid build error ([#519](https://github.com/rivenmedia/riven/issues/519)) ([b7309c4](https://github.com/rivenmedia/riven/commit/b7309c4916a330356d429afb6a1e20cff56eebcc)) +* add BACKEND_URL arg to avoid build error ([#520](https://github.com/rivenmedia/riven/issues/520)) ([ffad7e3](https://github.com/rivenmedia/riven/commit/ffad7e31d493f4306d4d8f33bb7afd1d780a76d9)) +* add new settings changes to frontend ([#416](https://github.com/rivenmedia/riven/issues/416)) ([38c1b75](https://github.com/rivenmedia/riven/commit/38c1b751eae37cec489c18bcf0a531ec23ee2a05)) +* add try-catch to submit_job for runtime errors ([d09f512](https://github.com/rivenmedia/riven/commit/d09f512a1667a73cb63193eb29d7a4bf9fc1fed5)) +* change mdblist str to int ([#382](https://github.com/rivenmedia/riven/issues/382)) ([b88c475](https://github.com/rivenmedia/riven/commit/b88c475459c140bd9b5ae95cdd1583c41dee94f9)) +* change Path objs to str ([#389](https://github.com/rivenmedia/riven/issues/389)) ([41bc74e](https://github.com/rivenmedia/riven/commit/41bc74e4fdb1f03dd988923b82dec19985c9b1e1)) +* change version filename in dockerfile ([5bf802d](https://github.com/rivenmedia/riven/commit/5bf802d399516633ec4683f4940ad3b649038386)) +* comet validation needed is_ok on response instead of ok ([#557](https://github.com/rivenmedia/riven/issues/557)) ([5f8d8c4](https://github.com/rivenmedia/riven/commit/5f8d8c42a8d02f586121da072697d40c8e5313ad)) +* continue instead of exit on failed to enhance metadata ([#560](https://github.com/rivenmedia/riven/issues/560)) ([657068f](https://github.com/rivenmedia/riven/commit/657068f8e1c4e241d096eaadd52e850eafb27aba)) +* convert str to path first ([#388](https://github.com/rivenmedia/riven/issues/388)) ([2944bf0](https://github.com/rivenmedia/riven/commit/2944bf07398972e3271e98cabcb64febd828addc)) +* correct parsing of external id's ([#163](https://github.com/rivenmedia/riven/issues/163)) ([b155e60](https://github.com/rivenmedia/riven/commit/b155e606ffbb130b1df4ad15246ca74bad490699)) +* crash on failed metadata enhancement ([88b7f0b](https://github.com/rivenmedia/riven/commit/88b7f0b98c1df574a06fd43cdbaaed50a69a0dc9)) +* disable ruff in ci ([5ffddc1](https://github.com/rivenmedia/riven/commit/5ffddc106a42dea5d406f7ae1a6bcd887cddcab0)) +* docker metadata from release please ([08b7144](https://github.com/rivenmedia/riven/commit/08b7144bb319986185d3cb1975dbef77a9945690)) +* docker metadata from release please ([e48659f](https://github.com/rivenmedia/riven/commit/e48659ff574f7caf6ab37c7d2a035c4bbe4edf01)) +* episode attr error when checking Show type 
([#387](https://github.com/rivenmedia/riven/issues/387)) ([3e0a575](https://github.com/rivenmedia/riven/commit/3e0a5758910adc4b02d90bb2839f77ec3e6f6d3f)) +* fix around 200 ruff errors ([d30679d](https://github.com/rivenmedia/riven/commit/d30679d9adcfd41f751349328f658187a8285072)) +* fix around 200 ruff errors ([a73fbfd](https://github.com/rivenmedia/riven/commit/a73fbfd6a6f0e1464cf05e55492c3b69876363c0)) +* fixed about page github errors and other minor improvements ([#347](https://github.com/rivenmedia/riven/issues/347)) ([0c87f47](https://github.com/rivenmedia/riven/commit/0c87f47bbbe69de33c7bab9bdecc61d845f597fa)) +* fixed the errors in frontend to make it working, still some changes and rewrite needed for improvements ([#346](https://github.com/rivenmedia/riven/issues/346)) ([03cd45c](https://github.com/rivenmedia/riven/commit/03cd45c2cfe4f04d49f2bea754a5a641c68ba9f2)) +* handle bad quality manually in parser ([#145](https://github.com/rivenmedia/riven/issues/145)) ([6101511](https://github.com/rivenmedia/riven/commit/6101511b2589b7731025052db403b2c0adfd0376)) +* lower the z index and increase z index of header ([#504](https://github.com/rivenmedia/riven/issues/504)) ([41e2c71](https://github.com/rivenmedia/riven/commit/41e2c716db8e0ead3291e7f71fca9f20dd99ca94)) +* min/max filesize being returned undefined ([fadab73](https://github.com/rivenmedia/riven/commit/fadab73b6e8b3d9e6453f64e25a480b0f299a24a)) +* minor fix to hooks.server.ts ([#355](https://github.com/rivenmedia/riven/issues/355)) ([8edb0ce](https://github.com/rivenmedia/riven/commit/8edb0ce766dc5079b0f6ede269e7e2b2461f1d0d)) +* minor ui improvements ([#503](https://github.com/rivenmedia/riven/issues/503)) ([8085f15](https://github.com/rivenmedia/riven/commit/8085f15d424ca671b1f0293fbda70559682c5923)) +* remove frontend ci ([#552](https://github.com/rivenmedia/riven/issues/552)) ([eeb2d00](https://github.com/rivenmedia/riven/commit/eeb2d00610e2f4f7f3c1cfeb3922600fb645739a)) +* revert trakt/item modules back to 0.7.4 ([864535b](https://github.com/rivenmedia/riven/commit/864535b01dc790142e21284d24f71335dd116e38)) +* RTN import incorrect after updating package ([#415](https://github.com/rivenmedia/riven/issues/415)) ([f2b86e0](https://github.com/rivenmedia/riven/commit/f2b86e08d73479addf7bada77b23c8cfd72752a3)) +* switch to dynamic private env ([#522](https://github.com/rivenmedia/riven/issues/522)) ([eb8d3d0](https://github.com/rivenmedia/riven/commit/eb8d3d0a9010a9389d68dff8c4dd9cbdd6b71944)) +* switch to dynamic private env ([#523](https://github.com/rivenmedia/riven/issues/523)) ([0355e64](https://github.com/rivenmedia/riven/commit/0355e6485c6e43f66a04165a85a890aaf1d8c0c3)) +* text color on light theme ([#506](https://github.com/rivenmedia/riven/issues/506)) ([5379784](https://github.com/rivenmedia/riven/commit/5379784e7f84f97955fc4728cdb3301919c6f0ac)) +* tidy parser. add lint/test to makefile. 
([#241](https://github.com/rivenmedia/riven/issues/241)) ([bd82b23](https://github.com/rivenmedia/riven/commit/bd82b2392330da31e443e66e780b01bc26f3a60d)) +* update packages ([15df41d](https://github.com/rivenmedia/riven/commit/15df41d3d30a03f9371bf90f99eedc96b32f41c7)) +* validate rd user data and updater settings on startup ([6016c54](https://github.com/rivenmedia/riven/commit/6016c54e1518a850102b6d09c6b51b3cef721a2d)) +* versioning to come from pyproject.toml ([d30679d](https://github.com/rivenmedia/riven/commit/d30679d9adcfd41f751349328f658187a8285072)) + + +### Documentation + +* minor improvements ([#160](https://github.com/rivenmedia/riven/issues/160)) ([0d0a12f](https://github.com/rivenmedia/riven/commit/0d0a12f5516254acd8be81fb97cd7694e9010d21)) +* minor improvements ([#161](https://github.com/rivenmedia/riven/issues/161)) ([2ad7986](https://github.com/rivenmedia/riven/commit/2ad79866e93336f2977fa1d6762bc867a26a1571)) +* minor improvements ([#162](https://github.com/rivenmedia/riven/issues/162)) ([bac8284](https://github.com/rivenmedia/riven/commit/bac8284f38f1cbe7e1d1b05dd486ba7eae68d5b2)) + + +### Miscellaneous Chores + +* release 0.8.0 ([091d0bc](https://github.com/rivenmedia/riven/commit/091d0bc13dad19dbbf4b3e8d870458e3cddcf246)) diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..cb6dd7e --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,128 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our +community a harassment-free experience for everyone, regardless of age, body +size, visible or invisible disability, ethnicity, sex characteristics, gender +identity and expression, level of experience, education, socio-economic status, +nationality, personal appearance, race, religion, or sexual identity +and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, +diverse, inclusive, and healthy community. + +## Our Standards + +Examples of behavior that contributes to a positive environment for our +community include: + +* Demonstrating empathy and kindness toward other people +* Being respectful of differing opinions, viewpoints, and experiences +* Giving and gracefully accepting constructive feedback +* Accepting responsibility and apologizing to those affected by our mistakes, + and learning from the experience +* Focusing on what is best not just for us as individuals, but for the + overall community + +Examples of unacceptable behavior include: + +* The use of sexualized language or imagery, and sexual attention or + advances of any kind +* Trolling, insulting or derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or email + address, without their explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior that they deem inappropriate, threatening, offensive, +or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, and will communicate reasons for moderation +decisions when appropriate. 
+ +## Scope + +This Code of Conduct applies within all community spaces, and also applies when +an individual is officially representing the community in public spaces. +Examples of representing our community include using an official e-mail address, +posting via an official social media account, or acting as an appointed +representative at an online or offline event. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported to the community leaders responsible for enforcement at +admin@debrid.wiki. +All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the +reporter of any incident. + +## Enforcement Guidelines + +Community leaders will follow these Community Impact Guidelines in determining +the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Correction + +**Community Impact**: Use of inappropriate language or other behavior deemed +unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing +clarity around the nature of the violation and an explanation of why the +behavior was inappropriate. A public apology may be requested. + +### 2. Warning + +**Community Impact**: A violation through a single incident or series +of actions. + +**Consequence**: A warning with consequences for continued behavior. No +interaction with the people involved, including unsolicited interaction with +those enforcing the Code of Conduct, for a specified period of time. This +includes avoiding interactions in community spaces as well as external channels +like social media. Violating these terms may lead to a temporary or +permanent ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. + +**Consequence**: A temporary ban from any sort of interaction or public +communication with the community for a specified period of time. No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +### 4. Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within +the community. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], +version 2.0, available at +https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. + +Community Impact Guidelines were inspired by [Mozilla's code of conduct +enforcement ladder](https://github.com/mozilla/diversity). + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see the FAQ at +https://www.contributor-covenant.org/faq. Translations are available at +https://www.contributor-covenant.org/translations. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..28f1f9a --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,19 @@ +# Contributing + +We want to make contributing to this project as easy and transparent as +possible. + +### Submitting Changes + +1. 
**Open an Issue**: For major changes, start by opening an issue to discuss your proposed modifications. This helps us understand your intentions and provide feedback early in the process. +2. **Pull Requests**: Once your changes are ready, submit a pull request. Ensure your code adheres to our coding standards and passes all tests. Commits should follow the [conventional-commits](https://www.conventionalcommits.org/) specification. + +### Code Formatting + +- **Backend**: We use [Black](https://black.readthedocs.io/en/stable/) for code formatting. Run `black` on your code before submitting. +- **Line Endings**: Use CRLF line endings unless the file is a shell script or another format that requires LF line endings. + +## License + +By contributing to this project, you agree that your contributions will be licensed +under the LICENSE file in the root directory of this source tree. diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..e49a822 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,70 @@ +# Builder Image for Python Dependencies +FROM python:3.11-alpine AS builder + +# Install necessary build dependencies +RUN apk add --no-cache \ + gcc \ + musl-dev \ + libffi-dev \ + python3-dev \ + build-base \ + curl + +# Upgrade pip and install poetry +RUN pip install --upgrade pip && pip install poetry==1.8.3 + +ENV POETRY_NO_INTERACTION=1 \ + POETRY_VIRTUALENVS_IN_PROJECT=1 \ + POETRY_VIRTUALENVS_CREATE=1 \ + POETRY_CACHE_DIR=/tmp/poetry_cache + +WORKDIR /app + +COPY pyproject.toml poetry.lock ./ +RUN touch README.md +RUN poetry install --without dev --no-root && rm -rf $POETRY_CACHE_DIR + +# Final Image +FROM python:3.11-alpine +LABEL name="Riven" \ + description="Riven Media Server" \ + url="https://github.com/rivenmedia/riven" + +# Install system dependencies and Node.js +ENV PYTHONUNBUFFERED=1 +RUN apk add --no-cache \ + curl \ + shadow \ + rclone \ + unzip \ + gcc \ + musl-dev \ + libffi-dev \ + python3-dev \ + libpq-dev \ + libtorrent + +# Install Poetry +RUN pip install poetry==1.8.3 + +# Set environment variable to force color output +ENV FORCE_COLOR=1 +ENV TERM=xterm-256color + +# Set working directory +WORKDIR /riven + +# Copy the virtual environment from the builder stage +COPY --from=builder /app/.venv /app/.venv +ENV VIRTUAL_ENV=/app/.venv +ENV PATH="/app/.venv/bin:$PATH" + +# Copy the rest of the application code +COPY src/ /riven/src +COPY pyproject.toml poetry.lock /riven/ +COPY entrypoint.sh /riven/ + +# Ensure entrypoint script is executable +RUN chmod +x /riven/entrypoint.sh + +ENTRYPOINT ["/riven/entrypoint.sh"] diff --git a/Dockerfile.slim b/Dockerfile.slim new file mode 100644 index 0000000..9a18c10 --- /dev/null +++ b/Dockerfile.slim @@ -0,0 +1,54 @@ +# Riven src Builder + +FROM python:3.11.9-alpine3.19 as Base +LABEL name="Riven" \ + description="Riven Debrid Downloader" \ + url="https://github.com/rivenmedia/riven" + +# Install system dependencies +RUN apk --update add --no-cache curl bash shadow gcc python3-dev musl-dev linux-headers patchelf clang ccache && \ + rm -rf /var/cache/apk/* +RUN pip install --upgrade pip && pip install poetry==1.8.3 + +ENV POETRY_NO_INTERACTION=1 \ + POETRY_VIRTUALENVS_IN_PROJECT=1 \ + POETRY_VIRTUALENVS_CREATE=1 \ + POETRY_CACHE_DIR=/tmp/poetry_cache + +# Install Poetry globally +ENV POETRY_HOME="/etc/poetry" +ENV PATH="$POETRY_HOME/bin:$PATH" +#RUN curl -sSL https://install.python-poetry.org | python3 - --yes + +# Setup the application directory +WORKDIR /riven + +# Expose ports +EXPOSE 8080 + +# Set environment variable to force color 
output +ENV FORCE_COLOR=1 +ENV TERM=xterm-256color + +# Copy the Python project files +COPY pyproject.toml poetry.lock* /riven/ + +# Install Python dependencies +RUN poetry install --without dev --no-root && rm -rf $POETRY_CACHE_DIR + +# Copy src code and other necessary files +COPY src/ /riven/src +COPY VERSION entrypoint.sh /riven/ + +RUN cd /riven/src && poetry add nuitka && \ + poetry run python3 -m nuitka --standalone --onefile --onefile-tempdir-spec=/onefile_%PID%_%TIME% --python-flag=nosite,-O --nofollow-import-to=pytest --clang --warn-implicit-exceptions --warn-unusual-code --prefer-source-code main.py + +FROM scratch + +COPY --from=Base /riven/src/main.bin /main.bin +COPY VERSION / +VOLUME /data +COPY --from=Base /lib/ /lib/ +# Ensure entrypoint script is executable + +ENTRYPOINT ["/main.bin"] \ No newline at end of file diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 0000000..22a8bec --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. 
This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. 
+ + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. 
+ + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + Riven: Handle your Media Library with ease + Copyright (C) 2023-2024 Dreu Lavelle + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Riven Copyright (C) 2023-2024 Dreu Lavelle + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. diff --git a/README.md b/README.md new file mode 100644 index 0000000..fc7c59f --- /dev/null +++ b/README.md @@ -0,0 +1,322 @@ + + +
+<!-- Badges: GitHub Repo stars · Issues · License · Contributors · Discord -->
+
+Plex torrent streaming through Real Debrid and 3rd party services like Overseerr, Mdblist, etc.
+
+
+Services currently supported:
+
+| Service            | Supported |
+| ------------------ | --------- |
+| Real Debrid        | ✅        |
+| Plex               | ✅        |
+| Overseerr          | ✅        |
+| Mdblist            | ✅        |
+| Trakt              | ✅        |
+| Jackett            | ✅        |
+| Plex Watchlist RSS | ✅        |
+| Torrentio          | ✅        |
+| Orionoid           | ✅        |
+| Listrr             | ✅        |
+
+...and more to come!
+
+Check out our [Project Board](https://github.com/users/dreulavelle/projects/2) to stay informed!
+
+Please add feature requests and issues over on our [Issue Tracker](https://github.com/rivenmedia/riven/issues) or join our [Discord](https://discord.gg/rivenmedia) to chat with us!
+
+We are constantly adding features and improvements as we go along and squashing bugs as they arise.
+
+---
+
+## Table of Contents
+
+- [Table of Contents](#table-of-contents)
+- [ElfHosted](#elfhosted)
+- [Self Hosted](#self-hosted)
+  - [Docker Compose](#docker-compose)
+    - [What is ORIGIN ?](#what-is-origin-)
+  - [Running outside of Docker](#running-outside-of-docker)
+    - [First terminal:](#first-terminal)
+    - [Second terminal:](#second-terminal)
+  - [Symlinking settings](#symlinking-settings)
+- [Development](#development)
+  - [Prerequisites](#prerequisites)
+  - [Initial Setup](#initial-setup)
+  - [Using `make` for Development](#using-make-for-development)
+  - [Development without `make`](#development-without-make)
+  - [Additional Tips](#additional-tips)
+- [Contributing](#contributing)
+  - [Submitting Changes](#submitting-changes)
+  - [Code Formatting](#code-formatting)
+  - [Dependency Management](#dependency-management)
+    - [Setting Up Your Environment](#setting-up-your-environment)
+    - [Adding or Updating Dependencies](#adding-or-updating-dependencies)
+  - [Running Tests and Linters](#running-tests-and-linters)
+- [License](#license)
+
+---
+
+## ElfHosted
+
+[ElfHosted](https://elfhosted.com) is a geeky [open-source](https://elfhosted.com/open/) PaaS which provides all the "plumbing" (_hosting, security, updates, etc_) for your self-hosted apps.
+
+> [!IMPORTANT]
+> **ElfHosted ❤️ Riven 100%**
+>
+> [Riven](https://elfhosted.com/app/riven/) is an "Elf-icial" app in the [ElfHosted app catalogue](https://elfhosted.com/apps/) - A whopping :heart_eyes_cat::heart_eyes_cat: 100% :heart_eyes_cat::heart_eyes_cat: of your subscription goes directly to Riven developers, who can usually be found in the [#elf-riven](https://discord.com/channels/396055506072109067/1253110932062601276) channel in the [ElfHosted Discord Server](https://discord.elfhosted.com).
+
+Curious how it works? Here's an [explainer video](https://www.youtube.com/watch?v=ZHZAEhLuJqk)!
+
+> [!TIP]
+> **ElfHosted "Infinite Streaming" bundles**
+>
+> Riven is pre-packaged with Plex, Zurg, and symlinks, and ready-to-go, in these convenient bundles:
+> * [Starter Kit](https://store.elfhosted.com/product/plex-riven-infinite-streaming-starter-kit) (*quick and easy setup*)
+> * [Hobbit Bundle](https://store.elfhosted.com/product/hobbit-riven-real-debrid-infinite-streaming-bundle) (*12.5% dedicated node, with extras*)
+> * [Ranger Bundle](https://store.elfhosted.com/product/plex-riven-infinite-streaming-plus-bundle) (*25% dedicated node, with extras*)
+
+## Self Hosted
+
+### Docker Compose
+
+Copy over the contents of [docker-compose.yml](docker-compose.yml) to your `docker-compose.yml` file.
+
+> [!NOTE]
+> You can check out the [docker-compose-full.yml](docker-compose-full.yml) file to get an idea of how things tie together.
+
+Then run `docker compose up -d` to start the container in the background.
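+For example, assuming you saved the compose file as `docker-compose.yml` in your current working directory, a first start might look like this (the backend service is named `riven` in the compose file above; adjust the name if yours differs):
+
+```sh
+# start (or update) the stack in the background
+docker compose up -d
+
+# optionally follow the backend logs while it initializes (Ctrl+C to stop following)
+docker compose logs -f riven
+```
+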
You can then access the web interface at `http://localhost:3000` or whatever port and origin you set in the `docker-compose.yml` file. + +> [!TIP] +> On first run, Riven creates a `settings.json` file in the `data` directory. You can edit the settings from frontend, or manually edit the file and restart the container or use `.env` or docker-compose environment variables to set the settings (see `.env.example` for reference). + +#### What is ORIGIN ? + +`ORIGIN` is the URL of the frontend on which you will access it from anywhere. If you are hosting Riven on a vps with IP address `123.45.67.890` then you will need to set the `ORIGIN` to `http://123.45.67.890:3000` (no trailing slash). Similarly, if using a domain name, you will need to set the `ORIGIN` to `http://riven.example.com:3000` (no trailing slash). If you change the port in the `docker-compose.yml` file, you will need to change it in the `ORIGIN` as well. + +### Running outside of Docker + +To run outside of docker you will need to have node (v18.13+) and python (3.10+) installed. Then clone the repository + +```sh +git clone https://github.com/rivenmedia/riven.git && cd riven +``` + +and open two terminals in the root of the project and run the following commands in each. + +#### First terminal: + +```sh +cd frontend +npm install +npm run build +ORIGIN=http://localhost:3000 BACKEND_URL=http://127.0.0.1 node build +``` + +Read above for more info on `ORIGIN`. + +#### Second terminal: + +```sh +pip install poetry +poetry install --without dev +poetry run python backend/main.py +``` + +--- + +### Symlinking settings + +`rclone_path` should point to your rclone mount that has your torrents on your host. + +`library_path` should point to the location of the mount in plex container + +```json + "symlink": { + "rclone_path": "/mnt/zurg", + "library_path": "/mnt/library" + } +``` + +Plex libraries that are currently required to have sections: + +| Type | Categories | +| ------ | ------------------------ | +| Movies | `movies`, `anime_movies` | +| Shows | `shows`, `anime_shows` | + +> [!NOTE] +> Currently, these Plex library requirements are mandatory. However, we plan to make them customizable in the future to support additional libraries as per user preferences. + +--- + +## Development + +Welcome to the development section! Here, you'll find all the necessary steps to set up your development environment and start contributing to the project. + +### Prerequisites + +Ensure you have the following installed on your system: + +- **Node.js** (v18.13+) +- **Python** (3.10+) +- **Poetry** (for Python dependency management) +- **Docker** (optional, for containerized development) + +### Initial Setup + +1. **Clone the Repository:** + + ```sh + git clone https://github.com/rivenmedia/riven.git && cd riven + ``` + +2. **Install Backend Dependencies:** + + ```sh + pip install poetry + poetry install + ``` + +3. **Install Frontend Dependencies:** + ```sh + cd frontend + npm install + cd .. + ``` + +### Using `make` for Development + +We provide a `Makefile` to simplify common development tasks. Here are some useful commands: + +- **Initialize the Project:** + + ```sh + make + ``` + +- **Start the Development Environment:** + This command stops any previous containers, removes old images, and rebuilds the image using cached layers. Any changes in the code will trigger a rebuild. 
+ + ```sh + make start + ``` + +- **Restart the Container:** + + ```sh + make restart + ``` + +- **View Logs:** + ```sh + make logs + ``` + +### Development without `make` + +If you prefer not to use `make` and Docker, you can manually set up the development environment with the following steps: + +1. **Start the Backend:** + + ```sh + poetry run python backend/main.py + ``` + +2. **Start the Frontend:** + ```sh + cd frontend + npm run dev + ``` + +### Additional Tips + +- **Environment Variables:** + Ensure you set the `ORIGIN` environment variable to the URL where the frontend will be accessible. For example: + + ```sh + export ORIGIN=http://localhost:3000 + ``` + +- **Code Formatting:** + We use `Black` for Python and `Prettier` for JavaScript. Make sure to format your code before submitting any changes. + +- **Running Tests:** + ```sh + poetry run pytest + ``` + +By following these guidelines, you'll be able to set up your development environment smoothly and start contributing to the project. Happy coding! + +--- + +## Contributing + +We welcome contributions from the community! To ensure a smooth collaboration, please follow these guidelines: + +### Submitting Changes + +1. **Open an Issue**: For major changes, start by opening an issue to discuss your proposed modifications. This helps us understand your intentions and provide feedback early in the process. +2. **Pull Requests**: Once your changes are ready, submit a pull request. Ensure your code adheres to our coding standards and passes all tests. Commits should follow [conventional-commits](https://www.conventionalcommits.org/) specification. + +### Code Formatting + +- **Backend**: We use [Black](https://black.readthedocs.io/en/stable/) for code formatting. Run `black` on your code before submitting. +- **Frontend**: We use [Prettier](https://prettier.io/) for code formatting. Run `prettier` on your code before submitting. +- **Line Endings**: Use CRLF line endings unless the file is a shell script or another format that requires LF line endings. + +### Dependency Management + +We use [Poetry](https://python-poetry.org/) for managing dependencies. Poetry simplifies dependency management by automatically handling package versions and resolving conflicts, ensuring consistency across all environments. + +#### Setting Up Your Environment + +1. **Install Poetry**: If you haven't already, install Poetry using `pip install poetry`. +2. **Install Dependencies**: After cloning the repository, navigate to the project's root directory and run `poetry install`. This command installs all necessary dependencies as defined in the `pyproject.toml` file and creates an isolated virtual environment. +3. **Activate Virtual Environment**: You can activate the virtual environment using `poetry shell` or run commands directly using `poetry run `. + +#### Adding or Updating Dependencies + +- **Add a Dependency**: Use `poetry add ` to add a new dependency. +- **Update a Dependency**: Use `poetry update ` to update an existing dependency. + +### Running Tests and Linters + +Before submitting a pull request, ensure your changes are compatible with the project's dependencies and coding standards. Use the following commands to run tests and linters: + +- **Run Tests**: `poetry run pytest` +- **Run Linters**: `poetry run ruff check backend` and `poetry run isort --check-only backend` + +By following these guidelines, you help us maintain a high-quality codebase and streamline the review process. Thank you for contributing! + +--- + + + + + +--- + +
+<!-- Riven Analytics image -->
+ +## License + +This project is licensed under the GNU GPLv3 License - see the [LICENSE](LICENSE) file for details diff --git a/assets/riven-dark.png b/assets/riven-dark.png new file mode 100644 index 0000000000000000000000000000000000000000..f1cf2d3576a9cf98f1c9f341a57b050e70cfcd66 GIT binary patch literal 3831 zcmbtX_g@p~`h6!u1|-4gsw6aH0|5n$N)=4jhF}2$Hi`t}N(~^e1Qp4!SH*i3Z0y2P zB)Ucs3sD3SP)00RAtF^N!9r7t8#*L&XT10R1K;lt`OM^f&U?;z&ht(_nSXk2b=A`~ z)CB2LK{&A)td07r*Gd{lsNj)W+R00OTAZKTv=062h5A7%rRMN|3hC<2)Pa`bjOva2g z>o?8c=o!IA4(E=v9BFh6y7{27@B+JiG)(6(H%Kby(jKezk!cr*-=zWl_Bl zY)mo$d;x;a&Vc4o_x~TiHn|kp2(&C@hm*vB0W+XRl$lgO$$jA{x&QHt8`C{;$>4Xs zdn0<9!lv|Be3s>EUm$9s=V|BnF9)e<{0UR#Zsj9tWEqq&uWKr2`A$>~*AMe}b%ujK{!fcf2WD_%VdsM7NA zt|N1GB%H5Jz}EQ0bYSD;H~JE?O&?ndo`;F9-VY3)<;FlaSXx}_Q1@*EtJ z{L6}$bqDghG}wdBuA$tufR)iQ5BzGpD=&)!tancQ2l1V*q!?Y$czj>V%aTKWH#Vrb zLYl%X959>4ejuJU12b<2b?#o8=K%z}n>ma-59s8X!vdd_ceq6{nDfN|OL}<~_j0Cr za_rDmMSQ%biQ;`qPsxLc)o{qN5{cTn@V~d9#6Ro@L-4N%*5~6$@+x|r;ifRsaS$|% z?>>6RsC6zfmvRfTp#)G@pO4W*KYbPUo|wOMzV8uLjYaiC+iqcz&1I^EEDUpPRzJHk zX>+i(D=8?tI`@6=`$K_n(|hu~$X^^B!!^ zcXv@AWD3Ezy9pCq(|xq2NKFOO7WI*Ty2E4+wgRoaigPcw{2Z27h|aOc|HK1F+iivp zqIyz4b;Z;EqorxftfW|{_SiI~ddaOuQy8@E3~sG~BwABp7l-F#yD<$G+4n~B4CEbw zZ0977;LX{pB^X=ZhmA;_qAJF@ZhyXE3qyeZPu>Z((U&58f4B-L564F1`1*ll$(2iFEt12Q<&IA-pGBwYXY+qYSm7 za08SAPM`yA?$h!=JuV~n;zn4niy^)x`X3bat$iusQFReR4wN_dN&8MKq zS3D`u2LoAbN(!`%BCVfboO(w=nx}%+oPr*=Q}fi{aK8tV4bqQ=>UOxzThKZqSMP)Z z`shnLSGg}<7s@TLVPs8%7yAng!#MAEeZ_}P zZWTw6)ImWH^m09~Qe(@wddER z(&RS-tf(Ywc;4V+E;GN5Qra}t<$xG>F9+yVxyGaU@B643W9pWY6(wy1tdT6q*4rI1 z&`tK(mS}p3mlzVLEBHNyXj>>gzY2{hr2%Jkg?9(BV9#|xTCOw_7PFEi`%9)zeRvr* z;-G4O#8JliQ+XZ9k_~w{cHK%jy2CGexS;>mjk?_7RihHuvbWoTXR)T;TpnTT*i`{( z_8ya!IuDkuxyDgOC|^*%>c10$v}Huh17cLNxPvP2(nf7@{@7=q zRE12Y6yXG@==K>QuMk3q13u_FB6p2JgG6N3%Tnwk3EWCJ(iZ@mC~&Gwfo>uMDC?vH zvXV)Fw(Ez|gRKej>$g0GypzNjciO&vhF+l%=nY2=;7naYC3I~DM_NRJN;e95D@c%# z`0;w9MGzF3K4B^Ngi8G>b0KdQS^Mia8_v`r_q`hw@|KWGF$+#X^)SKtOwR@JCff`B zpb+wCWUX)m8_p+9<*?%Dd96gP_mN!4>m&6JE<6p@lU}Onj!0cNp*OtmIY+vm)Jxe` zg<6sXSC1RPp|(UUhgP<*JeQCJ)hC2lCt2${ZU~32Bx`ncEYF2xtsz*5T_I|X^Nt(9 z#SsK&iXej{_)6*>UYG)9Q3)^qFxX3Px&!UPZO_erhFX!<15YhS!ha&*dij$a!F7aS zI8n-mi**V1<0~pzp3VfjpVl+^%Ig(1p!NrrV5dX6O6SZWNrleq*AIU%#B5h}m+M$pac3G~16 z$~aPQl5Oj0A=XPa8s(n?nHm!4v%lIRX*vYDvR6dkWlNxcQoLo!<6yaFlSv8`rcGcv zhF2mt^a=eQJO5Kqm>q#0%Cvrl+7f-*^bIdZZjgP3@Kr+0hp=MPdTpE|xJkI%q2%KuFfzeJaFii7BSPFN&w{G6Wvh2@fudT#7zFEp$cGYQH8$ z!jTRiFD(*%(#Eo4s<|1ekEVJ`W|V5v&BHEb51d||1H07X^P4O`F7A35} zDw(eIU_RVaTdd#rR(6`QSjk2McE@i!_j1xz*-+uZOb+Q`hrL(_8Opap}%leya{{b4%7R#~5+i-zB** ynhGzbpD9>09Fh{iu)gN?|9oZp-|L=QM_u-7Q>^(AbS7OJRiU`%(y#J=vEmStc4wWG_lJ*OF*e_H`^}7e%t0D3PKP zNm*jDlq{17WA1OhzrXIi@44^!ob$ftdCxh|lWK2k!No4h4gdhx2}@H)005b~sVz@ZoUr>foR zlHj6BXDn*}#Kg&ZE8Dfc*wdf(t173MzartT3uuqpzt}jadh@&ud$~OwFDKw@$!p}W z{>!>?dTXz9VQ)Nc*h4eef9!4Tv<)u6aO_xcT|kj`+~zg04QD)~AzBS{*M{!fJ=l$c~(}1azhR<|5M1JpkD)Zr#zUvV*VZBzT z@2%P^OtcAbLYV(c;<(a)z^@QYhA9PuV?FPeApGgUuJG~zZxI&(jk{NMClI5Rh}T3D zmZz<)b4w1OCsM}!F@%E3b5h|ue=Ov`NiC!<@290>@fgY77?WZ@^ld=zq6#S}w^^i1 za;%IOG6$~1%|V>t9Za-2(3}{=`7~cAm(;Xfp#r)c3%qqB`S%mT%?NKVDCNocjxdxA z3o~7O?I-ZbFe^INDzFHM^$T=+_Jx2EhOn3%CuncIjS^{80QvagOKJh}^pDr(#jDSAmKw1)Crj42ox?AT&-0_A$% zv#YNG@AHpfbcT+(5;HEG)Q+utVC;dila));Ovi)goN3eTAJ`_9r0cizWr0??wW(?D z8F}D2rj05);)Her6H+GZmC$;!@CBu#8#A5cqQ9h@*H!A)EefL~O0=JyN`{pn6Ml+l zMg^7GYP&Ms6HopZJ>$s|o|Ecl|EgI`rRq>#q|*VAx4}9ckrq`G#IGf4-?v~66Am)p zCh4{pvhdRFJ5SVT_`eF30*sws^^?nCN=l!V~4zL3!u zzbMkc)Qmw(e;i4Wl6ZZrbj2HAav{B3M0LlqL0a46FkljgT7E>A75gF6>O1nq8LyWt 
zXpWQSC^^B>D>UU@eful7a;vauhO#-3bcC&a9k<_plUWpueAVIl3?fass0lYl#puw1eoh=q;nRV=Y1V zs^BDw%ILGd>KP9u`q7hnjuPU7lQt5nUy~278a^b_=77h#O07L44pw zg0&P4;aw?lfwvgL0G%J6zLoMq7W6!^tE;oq$9Kl(gNixub@CzghLOy{W3}vSQ{DU& zA)o{1q_#%qU;DB2d#H8#{_Dl25-cSPc#Mjzv64{zEY@2ES5kd0{zMJ6l2SG=$sQua zLj6hziDPDL5qEmE(u##TwC_8WoVNFUB|G?)n(o)FcK!)Wbb~YbfOo#Uo^us^F!-C;zpM9M{)}+7 zt`xN2nCj@xGU3PjZJ?NbV1lY_OLP&$dm%1HTmDwD0jrH5d2eytoBy8JQaR%S2sy&1)f7 z(e{|F+x3iVk z9;vsFqN~awxxVWYXRmw`%DcR?b!HHjWGjA1cddeK9ZQ?#DqH^e98zB%fgRWgn;S-* z0AM;=K<%XRehGq)jYzHI@#f3KgIZfk-H|8z#~L z@^#5_D04}8nP#Moi95yc?y5K4HRlm=TGZEjUB2* z%kECRZ*po&Ql5nSzI0C>7>aak^tpQbU)`RKz&qP=b4D{`7qZt{nNV`ya4BvK% zRcdd?peYKdoQa2DE5K58n)qgM?oGdgnZaTIyd(<=)!?&1r7KEo?&NSA+Bkf#7=(;XG3Zui<=OVz+NB&ZC+D;V5W zA-6igq#vZCL9q(B>Y>~{Z#bH!sdUTsnj)%aJ$jpnTi{Sl-}lU$!y(075in zzoI%502J+;p`SGTcvnw+m2@%euYsaASFqHK-Gqw>NiN@#3n>J0(R)I z>w6VGl|fe2!d+SWi45+p$8X?G3_&tQ+lw)f8xX00Kc;?ka43(i4-q;KP}IfoM~HYk zbojL$RsJ~zD#?xMCqkWIRqP_o0UdV%{}geXH8;~S*|VMO4CRQsy^2M>i% z!3$gD%`-R`8bZz3y$Xo4x5``{;eLnT*_s_7d$J~FkD7_R?|>{HAdO#tVU??o?uB`> zIG_A4jY_AO72@ZDgDoOt2|t8-&ZO0ujZ&X$;FdpIp?P&~Px$`LUkloC?R=>iuAlZZ zx%J}g5xVc4^(h}zxJ)x*W%O5_&m+nUvJ`-3X^61$GaAWnp1XZi779WO-zC|3*Q)H=nb`Q2xs{~oWhW$k zynh@J(Ks4mxqG5g8J^nhhSu(-h8JD9H!d{V&k*5j(3o-55r;>VK+A?5tDeOyL^io% ziX+8xLwB)9zx(}TL6hk38aV$?HstdEV}g&zgGzcMT51wnKF3hP%jM@bUr#6N7DxYi zd!Wxsq*%sy9$RMx$lK0)GS18=O)$5C*hkfHtk1tNROlbY&UL?xr64=hP(`# z+^O$w!Bz|jl>kRjJXVhCeEj7n=7Rl)E@Dkeoa=bSYU3|bY$2HBE5mw!`RZi`&5k|x z`YKDM!P@Dbw9~9=sZWPS;0*Y~mI`o3M3+WRwM(ZV&Mfv*Si)@NL`PL2f|iIA4F^)J zSQ^Yxp8Eq>FE`SV$U>GOD9?t$CdF(kz$&oe!pBGZYPz-5d#SP>e{N561nMy>3c-hc zBI$z;{$I9hX(Mmk;Y?6JJnso+lzpv)M=Z=39|t z)i&p)_o0h#Jz8jZ){b->+nnkapg3!E7JZI#61OG#&JGP1J#*XFF3@zntrD^<5iX(! zXkKy{vwabtFFCb4y>Bi2RugS2+V|m9akoK7#8qu96*j9+S7Rzkhe?I5!kf487dd@i(9YB z6u7uUo?ZH_xh90lF_hX21xTDTJQqzE67W*hi?Wg6S3|92S=&+57)2C&Ht@vZer{cU zH=Y01uf;tLLoPzs0o-r4Hs26qF!0h}*9jYgI~F{;1~tqnLX<7gTIRuq4?hvFhz@5o z`e>}2W2|&@a!TFSuybgA1Mqluhp&kL?yr^w(vb-hx$LV%EwgSS@J_V`ZQsWduNkze z=)?E`L+^6OZr;N?5%J0PA;-aJRYdb&CYcSjm7|Orw@VKIosny)=ODFhqwrJvJ$L$v zFgxTn0Seh6Cd&tRVv6GjjaW1ZYgOEXaQI@wi#Fz!B4E7xo}=adY8)(MIow5;S`gFU z3wRwdEf#6Ol{57!{3e;T@~TKbl{C#(asEOu5kK4IwC(s_@8dZ^nEr#@dBBDdJZQ+W zsr6S2&#U?l9Ui_z?tk$b&ZP@+78fOpKH;=K0USo+tZFWfY0QoQb?CIewBhSf!wq)C zuc>~G87Y*d<5W{ ziSW7t5p}e{rc391j1vhLl^12BP)WwX>Ce$aet{uU}15~Joihx;Jqml74YJBaS<48kfc%&c^4+AudvH^rp{3#f> za%~-K^y7DBO?&I;!0PPE4E2c@j*lGTmK1o>H%8#}o|4NV#qq%MO81p-6DbLpjVtn% z3SbUh=(ghlg#(a5Hu#JUPo)jtUT8Pm8{WU9v!(=nBvJ;XT?XL^G=Vn#Kn04Atl-bq ze07Wqr&9ZK1ki;DI}cmv0TUL{McN0@@r@d|ZBdGNOSK(}ih)Hi7k7ryZ}VKu@THt? z-*2BI;%y+p0EzWG_n%1;zcx{~@rx@X&ozfG_>T0K#&uVFc;=V>9_PG?qk085X{1C|zYu!{B}IJ8w%! 
z0?QjdJ;(3dmV6>|Lg`tyMzJ(FzSM0>{UUshx6+H%5^zXjUIdVy1RI6~81>X*|5uG$ zHqzLwLw*};T7)7X>1wMIB85gC{c85IVcbf3%Egz6=ONtZ1MQ0b5tt8+i`hbi52Yh~*bqNy$+T9^mTBGWIfuDx?_@9p zGluLsv1HKb#QK@SeR!Oo$eQ-Tx6A@w6+YOMEr4cJ@hc=m8Pi19!qq^<-i*|6W(G0u zE4`%q$$G9lzo~<;6tTdbHOU}1|G$~~CEb`aE*q^r|3DEn^k%k}h}U68yI#+GpT=X( zug^SKsnWn`1w!nZJvMyq-Pa3!ihxTO=TVxQu}H(pB@rkm0^TxYMna*)&*f|d;uR3p zeyc1Kbg=7snh9#4!)F_>&`7AF$-S^67H!QPuQYH%U}YBKWl^I1SQPp|1Z)J(%A+E& z_&mDL#gYN!-1qbRPQ1mLV8;D&9gDYVk2rZ{0o`qq)vd!w)fvCv+*r9d<9_Cu{xlM* z?LNiTf1HUWWz-Eb*p;;bL+Ekt;QYVo1{dk{Kl7G25*PliK7IDTqk8`U@&#WVx8rl_ zgvVhAto@>pw+P;AvAi%=1ZTtS#huaAXK;w3h|0mlep^+cdSkN=1M1r(N3kzUMw+%` z`HJ6k_{SacL$`7?Krd zHre{Kv?kSpMR5PL@Q%!khv}Vt7aN40e*IYCbW$N>t3VP}YqL87pM~BlBd1f; z(j}B*<;To_6Ja#iq>T6(1n+Tz5cGrytT(7_PXcVvdr9QHluVLeWoo>zvE2N$b~bo| zSv`ynAFLe?#H_smW~aJY)VC*L=l>w*q4o9h7tG5n@MY?QH%t@LJJr&?Q5TJQN`C*c z!OMd1gp+XoUt^TC7yHe_z)gBZ-T7vh*&h-d7AAz9Ys)J&9_*BvOi(Ftco67obHY%l zMuT=^v1ieNMBw~j!3*^^PD@Et}20^ENi$J4d2~*=%4zNI&DQ5-=wBYIK3p#_as&Xjyd~Ox0*9WhS?{X z?RH7B7yFx+gh@X+?Yp&3(H=7G=3{{lcBpZ5nmeQMRLC8RWYGHY;f$E=CN#lmc&;1RhKJ?w<{+qtzPdJaNT=`0=wSN&HQNiy{;p zux~Z&+yAN7rlezRZCU%zHj6@kFb#tMD;RQZUHbD6iKt3RyeVbz81TtTB>b9L+DQOm zpAj#u_@w0|s8M~}VeXJja=)HzsMfjqR~KvR1Mjm@;t=}epKYPVwf%x}Vq;TH386}qi4^xb1LpQ#eDC2(yA)- zZ;qG30n*WwKgNmTNSv8)WwH+8&;|MP&B483YpsCz){%A-ijSl3cNpj)W7nq;4mie< zb?lqDaQM5E%KrIhJUD+&n%+twF{yNwYVETuYFvw*Bvp9WX`fwK_Wn~roxNiV;zl9I zca?jDlTSC@OIT!QeAC@x`I+JgsRwf;un;|N`gDE7SWl-0mlJUH; z5ksheyiqf3fa*X4aNwy0x(Q3xh`akX_4)4rfbmjy%gYw?gpFVU{m1DO^n`EI^*w{= z)l#xy&H0job8RaQ`P}rW-v#YMX0kVdi_&YX01v7qQNL<_^MdiCq1wE?_hsjEJLug2 zqepkk=U2`<37Q2zs&OOR={bZ&%t2Bw~j4 z4vi&9LoEfL;z&#iwgQC#VxXLqlHG19=L*Q_7&*zYGkydA=d%=$0!#nsC=CD{!8d z$entmf(=UXDf(+|OyMis=aU#K0W{!UGO<}XJ348;_0z9h0=P}XODG2IR XLsGd}@BWuec@8*XW^4M`n2_*4v%U?s literal 0 HcmV?d00001 diff --git a/dev/attach-memray.sh b/dev/attach-memray.sh new file mode 100755 index 0000000..fbf6fd2 --- /dev/null +++ b/dev/attach-memray.sh @@ -0,0 +1,5 @@ +#!/bin/bash +# Attach memray to the running main.py process +# Usage: ./attach-memray.sh + +pgrep -f "main.py" | head -n 1 | xargs -I{} memray attach {} diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml new file mode 100644 index 0000000..6e8f19e --- /dev/null +++ b/docker-compose-dev.yml @@ -0,0 +1,20 @@ +services: + riven: + build: + context: . + dockerfile: Dockerfile + image: riven:dev + container_name: riven + restart: unless-stopped + network_mode: host + tty: true + environment: + - PUID=1000 + - PGID=1000 + - TZ=UTC + - ORIGIN=${RIVEN_ORIGIN:-http://localhost:8080} + - RIVEN_FORCE_ENV=true + - RIVEN_DATABASE_HOST=sqlite:////riven/data/media.db + volumes: + - ./data:/riven/data + - /mnt:/mnt \ No newline at end of file diff --git a/docker-compose-full.yml b/docker-compose-full.yml new file mode 100644 index 0000000..7ab2b6a --- /dev/null +++ b/docker-compose-full.yml @@ -0,0 +1,144 @@ +# This is a full setup for Riven with Plex, Overseerr, and Zilean. +# This compose assumes you already setup rclone and zurg. See notes below! + +## Notes: + +# Zurg & Rclone will have to be supplied as well and visible to Riven as well as Plex. +# Rclone should be mounted to: /mnt/zurg (optional directory) +# You will need to set the rclone_path in riven to use the `/mnt/zurg/__all__` dir though +# so that Riven can see all the torrents from their parent directory. 
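+#
+# As a rough sketch (not part of this compose file), mounting a Zurg WebDAV
+# remote with rclone often looks something like the line below; the remote
+# name "zurg:" and the flags are assumptions, so adjust them to your own
+# rclone/zurg configuration:
+#
+#   rclone mount zurg: /mnt/zurg --allow-other --dir-cache-time 10s
+#
+# With that mount in place, point Riven's rclone_path at /mnt/zurg/__all__ as
+# described above.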
+ +services: + riven-frontend: + image: spoked/riven-frontend:latest + container_name: riven-frontend + restart: unless-stopped + ports: + - "3000:3000" + tty: true + environment: + - PUID=1000 + - PGID=1000 + - ORIGIN=http://localhost:3000 # Set to IP or FQDN of the server + - BACKEND_URL=http://riven:8080 + - DIALECT=postgres + - DATABASE_URL=postgres://postgres:postgres@riven-db/riven + - TZ=America/New_York + depends_on: + riven: + condition: service_healthy + + riven: + image: spoked/riven:latest + container_name: riven + restart: unless-stopped + ports: + - "8080:8080" + tty: true + environment: + - PUID=1000 + - PGID=1000 + - TZ=Etc/UTC + - RIVEN_FORCE_ENV=true # forces the use of env vars to be always used! + - RIVEN_SYMLINK_RCLONE_PATH=/mnt/zurg/__all__ # Set this to your rclone's mount `__all__` dir if using Zurg + - RIVEN_SYMLINK_LIBRARY_PATH=/mnt/library # This is the path that symlinks will be placed in + - RIVEN_DATABASE_HOST=postgresql+psycopg2://postgres:postgres@riven-db/riven + - RIVEN_DOWNLOADERS_REAL_DEBRID_ENABLED=true + - RIVEN_DOWNLOADERS_REAL_DEBRID_API_KEY=xxxxx # set your real debrid api key + - RIVEN_UPDATERS_PLEX_ENABLED=true + - RIVEN_UPDATERS_PLEX_URL=http://plex:32400 + - RIVEN_UPDATERS_PLEX_TOKEN=xxxxx # set your plex token + - RIVEN_CONTENT_OVERSEERR_ENABLED=true + - RIVEN_CONTENT_OVERSEERR_URL=http://overseerr:5055 + - RIVEN_CONTENT_OVERSEERR_API_KEY=xxxxx # set your overseerr token + - RIVEN_SCRAPING_TORRENTIO_ENABLED=true + - RIVEN_SCRAPING_ZILEAN_ENABLED=true + - RIVEN_SCRAPING_ZILEAN_URL=http://zilean:8181 + healthcheck: + test: curl -s http://localhost:8080 >/dev/null || exit 1 + interval: 30s + timeout: 10s + retries: 10 + volumes: + - ./data:/riven/data + - /mnt:/mnt + depends_on: + riven_postgres: + condition: service_healthy + + riven_postgres: + image: postgres:16.3-alpine3.20 + container_name: riven-db + restart: unless-stopped + environment: + PGDATA: /var/lib/postgresql/data/pgdata + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: riven + volumes: + - ./riven-db:/var/lib/postgresql/data/pgdata + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 10s + timeout: 5s + retries: 5 + + ## Plex (optional media server) + + plex: + image: plexinc/pms-docker:latest + container_name: plex + restart: unless-stopped + ports: + - "32400:32400" + environment: + - PUID=1000 + - PGID=1000 + - TZ=Etc/UTC + - VERSION=docker + volumes: + - ./config:/config + - /mnt:/mnt + devices: + - "/dev/dri:/dev/dri" + + ## Overseerr (optional content service) + + overseerr: + image: lscr.io/linuxserver/overseerr:latest + container_name: overseerr + restart: unless-stopped + environment: + - PUID=1000 + - PGID=1000 + - TZ=Etc/UTC + volumes: + - ./config:/config + ports: + - 5055:5055 + + ## Zilean (optional scraper service) + + zilean: + image: ipromknight/zilean:latest + container_name: zilean + restart: unless-stopped + ports: + - "8181:8181" + volumes: + - zilean_data:/app/data + environment: + # You may have to create the zilean database manually with the following command: + # docker exec -it riven-db createdb -U postgres -W zilean + Zilean__Database__ConnectionString: "Host=riven-db;Port=5432;Database=zilean;Username=postgres;Password=postgres" + healthcheck: + test: curl --connect-timeout 10 --silent --show-error --fail http://localhost:8181/healthchecks/ping + timeout: 60s + interval: 30s + retries: 10 + depends_on: + riven_postgres: + condition: service_healthy + +volumes: + zilean_data: diff --git a/docker-compose.yml 
b/docker-compose.yml new file mode 100644 index 0000000..933c9d1 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,60 @@ +services: + riven-frontend: + image: spoked/riven-frontend:latest + container_name: riven-frontend + restart: unless-stopped + ports: + - "3000:3000" + tty: true + environment: + - PUID=1000 + - PGID=1000 + - TZ=America/New_York + - ORIGIN=http://localhost:3000 # set to the url or ip where the frontend is hosted + - BACKEND_URL=http://riven:8080 + - DIALECT=postgres + - DATABASE_URL=postgres://postgres:postgres@riven-db/riven + depends_on: + riven: + condition: service_healthy + + riven: + image: spoked/riven:latest + container_name: riven + restart: unless-stopped + ports: + - "8080:8080" + tty: true + environment: + - PUID=1000 + - PGID=1000 + - TZ=America/New_York + - RIVEN_FORCE_ENV=true + - RIVEN_DATABASE_HOST=postgresql+psycopg2://postgres:postgres@riven-db/riven + healthcheck: + test: curl -s http://localhost:8080 >/dev/null || exit 1 + interval: 30s + timeout: 10s + retries: 10 + volumes: + - ./data:/riven/data + - /mnt:/mnt + depends_on: + riven_postgres: + condition: service_healthy + + riven_postgres: + image: postgres:17.0-alpine3.20 + container_name: riven-db + environment: + PGDATA: /var/lib/postgresql/data/pgdata + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: riven + volumes: + - ./riven-db:/var/lib/postgresql/data/pgdata + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 10s + timeout: 5s + retries: 5 diff --git a/entrypoint.sh b/entrypoint.sh new file mode 100644 index 0000000..604326a --- /dev/null +++ b/entrypoint.sh @@ -0,0 +1,91 @@ +#!/bin/sh + +# Default PUID and PGID to 1000 if not set +PUID=${PUID:-1000} +PGID=${PGID:-1000} + +echo "Starting Container with $PUID:$PGID permissions..." + +if [ "$PUID" = "0" ]; then + echo "Running as root user" + USER_HOME="/root" + mkdir -p "$USER_HOME" +else + # Validate PUID and PGID are integers + if ! echo "$PUID" | grep -qE '^[0-9]+$'; then + echo "PUID is not a valid integer. Exiting..." + exit 1 + fi + + if ! echo "$PGID" | grep -qE '^[0-9]+$'; then + echo "PGID is not a valid integer. Exiting..." + exit 1 + fi + + # Default USERNAME and GROUPNAME if not set + USERNAME=${USERNAME:-riven} + GROUPNAME=${GROUPNAME:-riven} + + # Create group if it doesn't exist + if ! getent group "$PGID" > /dev/null; then + addgroup --gid "$PGID" "$GROUPNAME" + if [ $? -ne 0 ]; then + echo "Failed to create group. Exiting..." + exit 1 + fi + else + GROUPNAME=$(getent group "$PGID" | cut -d: -f1) + fi + + # Create user if it doesn't exist + if ! getent passwd "$USERNAME" > /dev/null; then + adduser -D -h "$USER_HOME" -u "$PUID" -G "$GROUPNAME" "$USERNAME" + if [ $? -ne 0 ]; then + echo "Failed to create user. Exiting..." + exit 1 + fi + else + if [ "$PUID" -ne 0 ]; then + usermod -u "$PUID" -g "$PGID" "$USERNAME" + if [ $? -ne 0 ]; then + echo "Failed to modify user UID/GID. Exiting..." + exit 1 + fi + else + echo "Skipping usermod for root user." + fi + fi + + USER_HOME="/home/$USERNAME" + mkdir -p "$USER_HOME" + chown -R "$PUID:$PGID" "$USER_HOME" + chown -R "$PUID:$PGID" /riven/data +fi + +umask 002 + +export XDG_CONFIG_HOME="$USER_HOME/.config" +export XDG_DATA_HOME="$USER_HOME/.local/share" +export POETRY_CACHE_DIR="$USER_HOME/.cache/pypoetry" +export HOME="$USER_HOME" + +# Ensure poetry is in the PATH +export PATH="$PATH:/app/.venv/bin" + +echo "Container Initialization complete." + +echo "Starting Riven (Backend)..." 
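+# The block below chooses how to launch the backend: if DEBUG is set to any
+# non-empty value, debugpy is added on the fly via `poetry add` and main.py is
+# started under debugpy listening on 0.0.0.0:5678 so an external debugger can
+# attach; otherwise main.py is started normally through poetry. For non-root
+# PUIDs the process is launched via `su` as the user created above.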
+if [ "$PUID" = "0" ]; then + if [ "${DEBUG}" != "" ]; then # check if DEBUG is set to a truthy value + cd /riven/src && poetry add debugpy && poetry run python3 -m debugpy --listen 0.0.0.0:5678 main.py + else + cd /riven/src && poetry run python3 main.py + fi +else + if [ "${DEBUG}" != "" ]; then # check if DEBUG is set to a truthy value + poetry add debugpy + exec su -m $USERNAME -c "cd /riven/src && poetry run python3 -m debugpy --listen 0.0.0.0:5678 main.py" + else + su -m "$USERNAME" -c "cd /riven/src && poetry run python3 main.py" + fi +fi \ No newline at end of file diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..a1e4412 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,3329 @@ +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. + +[[package]] +name = "alembic" +version = "1.14.0" +description = "A database migration tool for SQLAlchemy." +optional = false +python-versions = ">=3.8" +files = [ + {file = "alembic-1.14.0-py3-none-any.whl", hash = "sha256:99bd884ca390466db5e27ffccff1d179ec5c05c965cfefc0607e69f9e411cb25"}, + {file = "alembic-1.14.0.tar.gz", hash = "sha256:b00892b53b3642d0b8dbedba234dbf1924b69be83a9a769d5a624b01094e304b"}, +] + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" + +[package.extras] +tz = ["backports.zoneinfo"] + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.6.2.post1" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.9" +files = [ + {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, + {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] + +[[package]] +name = "apprise" +version = "1.9.0" +description = "Push Notifications that work with just about every platform!" 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "apprise-1.9.0-py3-none-any.whl", hash = "sha256:7192c953eeb282a7afee012512d3de0104b5a6a11bdda29283435df5a79dfe7f"}, + {file = "apprise-1.9.0.tar.gz", hash = "sha256:b5c93afd6331afe4b63a55d1cea9076e47becb4ba89b562b181c13e25bb0c7d6"}, +] + +[package.dependencies] +certifi = "*" +click = ">=5.0" +markdown = "*" +PyYAML = "*" +requests = "*" +requests-oauthlib = "*" + +[[package]] +name = "apscheduler" +version = "3.10.4" +description = "In-process task scheduler with Cron-like capabilities" +optional = false +python-versions = ">=3.6" +files = [ + {file = "APScheduler-3.10.4-py3-none-any.whl", hash = "sha256:fb91e8a768632a4756a585f79ec834e0e27aad5860bac7eaa523d9ccefd87661"}, + {file = "APScheduler-3.10.4.tar.gz", hash = "sha256:e6df071b27d9be898e486bc7940a7be50b4af2e9da7c08f0744a96d4bd4cef4a"}, +] + +[package.dependencies] +pytz = "*" +six = ">=1.4.0" +tzlocal = ">=2.0,<3.dev0 || >=4.dev0" + +[package.extras] +doc = ["sphinx", "sphinx-rtd-theme"] +gevent = ["gevent"] +mongodb = ["pymongo (>=3.0)"] +redis = ["redis (>=3.0)"] +rethinkdb = ["rethinkdb (>=2.4.0)"] +sqlalchemy = ["sqlalchemy (>=1.4)"] +testing = ["pytest", "pytest-asyncio", "pytest-cov", "pytest-tornado5"] +tornado = ["tornado (>=4.3)"] +twisted = ["twisted"] +zookeeper = ["kazoo"] + +[[package]] +name = "arrow" +version = "1.3.0" +description = "Better dates & times for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, + {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, +] + +[package.dependencies] +python-dateutil = ">=2.7.0" +types-python-dateutil = ">=2.8.10" + +[package.extras] +doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] +test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] + +[[package]] +name = "attrs" +version = "24.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, +] + +[package.extras] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] + +[[package]] +name = "babelfish" +version = "0.6.1" +description = "A module to work with countries and languages" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "babelfish-0.6.1-py3-none-any.whl", hash = 
"sha256:512f1501d4c8f7d38f0921f48660be7542de1a7b24abb6a6a65324a670150293"}, + {file = "babelfish-0.6.1.tar.gz", hash = "sha256:decb67a4660888d48480ab6998309837174158d0f1aa63bebb1c2e11aab97aab"}, +] + +[[package]] +name = "beautifulsoup4" +version = "4.12.3" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "cachetools" +version = "5.5.0" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, +] + +[[package]] +name = "cattrs" +version = "24.1.2" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-24.1.2-py3-none-any.whl", hash = "sha256:67c7495b760168d931a10233f979b28dc04daf853b30752246f4f8471c6d68d0"}, + {file = "cattrs-24.1.2.tar.gz", hash = "sha256:8028cfe1ff5382df59dd36474a86e02d817b06eaf8af84555441bac915d2ef85"}, +] + +[package.dependencies] +attrs = ">=23.1.0" + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +msgspec = ["msgspec (>=0.18.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.8.30" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, +] + +[[package]] +name = "chardet" +version = "5.2.0" +description = "Universal encoding detector for Python 3" +optional = false +python-versions = ">=3.7" +files = [ + {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, + {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, 
+ {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + 
{file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = 
"sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "click-option-group" +version = "0.5.6" +description = "Option groups missing in Click" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "click-option-group-0.5.6.tar.gz", hash = "sha256:97d06703873518cc5038509443742b25069a3c7562d1ea72ff08bfadde1ce777"}, + {file = "click_option_group-0.5.6-py3-none-any.whl", hash = "sha256:38a26d963ee3ad93332ddf782f9259c5bdfe405e73408d943ef5e7d0c3767ec7"}, +] + +[package.dependencies] +Click = ">=7.0,<9" + +[package.extras] +docs = ["Pallets-Sphinx-Themes", "m2r2", "sphinx"] +tests = ["pytest"] +tests-cov = ["coverage", "coveralls", "pytest", "pytest-cov"] + +[[package]] +name = "codecov" +version = "2.1.13" +description = "Hosted coverage reports for GitHub, Bitbucket and Gitlab" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "codecov-2.1.13-py2.py3-none-any.whl", hash = "sha256:c2ca5e51bba9ebb43644c43d0690148a55086f7f5e6fd36170858fa4206744d5"}, + {file = "codecov-2.1.13.tar.gz", hash = "sha256:2362b685633caeaf45b9951a9b76ce359cd3581dd515b430c6c3f5dfb4d92a8c"}, +] + +[package.dependencies] +coverage = "*" +requests = ">=2.7.9" + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.6.7" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "coverage-7.6.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:108bb458827765d538abcbf8288599fee07d2743357bdd9b9dad456c287e121e"}, + {file = "coverage-7.6.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c973b2fe4dc445cb865ab369df7521df9c27bf40715c837a113edaa2aa9faf45"}, + {file = "coverage-7.6.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c6b24007c4bcd0b19fac25763a7cac5035c735ae017e9a349b927cfc88f31c1"}, + {file = "coverage-7.6.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acbb8af78f8f91b3b51f58f288c0994ba63c646bc1a8a22ad072e4e7e0a49f1c"}, + {file = "coverage-7.6.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad32a981bcdedb8d2ace03b05e4fd8dace8901eec64a532b00b15217d3677dd2"}, + {file = "coverage-7.6.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:34d23e28ccb26236718a3a78ba72744212aa383141961dd6825f6595005c8b06"}, + {file = "coverage-7.6.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e25bacb53a8c7325e34d45dddd2f2fbae0dbc230d0e2642e264a64e17322a777"}, + {file = "coverage-7.6.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af05bbba896c4472a29408455fe31b3797b4d8648ed0a2ccac03e074a77e2314"}, + {file = "coverage-7.6.7-cp310-cp310-win32.whl", hash = "sha256:796c9b107d11d2d69e1849b2dfe41730134b526a49d3acb98ca02f4985eeff7a"}, + {file = "coverage-7.6.7-cp310-cp310-win_amd64.whl", hash = 
"sha256:987a8e3da7da4eed10a20491cf790589a8e5e07656b6dc22d3814c4d88faf163"}, + {file = "coverage-7.6.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7e61b0e77ff4dddebb35a0e8bb5a68bf0f8b872407d8d9f0c726b65dfabe2469"}, + {file = "coverage-7.6.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1a5407a75ca4abc20d6252efeb238377a71ce7bda849c26c7a9bece8680a5d99"}, + {file = "coverage-7.6.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df002e59f2d29e889c37abd0b9ee0d0e6e38c24f5f55d71ff0e09e3412a340ec"}, + {file = "coverage-7.6.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:673184b3156cba06154825f25af33baa2671ddae6343f23175764e65a8c4c30b"}, + {file = "coverage-7.6.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e69ad502f1a2243f739f5bd60565d14a278be58be4c137d90799f2c263e7049a"}, + {file = "coverage-7.6.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:60dcf7605c50ea72a14490d0756daffef77a5be15ed1b9fea468b1c7bda1bc3b"}, + {file = "coverage-7.6.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9c2eb378bebb2c8f65befcb5147877fc1c9fbc640fc0aad3add759b5df79d55d"}, + {file = "coverage-7.6.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3c0317288f032221d35fa4cbc35d9f4923ff0dfd176c79c9b356e8ef8ef2dff4"}, + {file = "coverage-7.6.7-cp311-cp311-win32.whl", hash = "sha256:951aade8297358f3618a6e0660dc74f6b52233c42089d28525749fc8267dccd2"}, + {file = "coverage-7.6.7-cp311-cp311-win_amd64.whl", hash = "sha256:5e444b8e88339a2a67ce07d41faabb1d60d1004820cee5a2c2b54e2d8e429a0f"}, + {file = "coverage-7.6.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f07ff574986bc3edb80e2c36391678a271d555f91fd1d332a1e0f4b5ea4b6ea9"}, + {file = "coverage-7.6.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:49ed5ee4109258973630c1f9d099c7e72c5c36605029f3a91fe9982c6076c82b"}, + {file = "coverage-7.6.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3e8796434a8106b3ac025fd15417315d7a58ee3e600ad4dbcfddc3f4b14342c"}, + {file = "coverage-7.6.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3b925300484a3294d1c70f6b2b810d6526f2929de954e5b6be2bf8caa1f12c1"}, + {file = "coverage-7.6.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c42ec2c522e3ddd683dec5cdce8e62817afb648caedad9da725001fa530d354"}, + {file = "coverage-7.6.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0266b62cbea568bd5e93a4da364d05de422110cbed5056d69339bd5af5685433"}, + {file = "coverage-7.6.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e5f2a0f161d126ccc7038f1f3029184dbdf8f018230af17ef6fd6a707a5b881f"}, + {file = "coverage-7.6.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c132b5a22821f9b143f87446805e13580b67c670a548b96da945a8f6b4f2efbb"}, + {file = "coverage-7.6.7-cp312-cp312-win32.whl", hash = "sha256:7c07de0d2a110f02af30883cd7dddbe704887617d5c27cf373362667445a4c76"}, + {file = "coverage-7.6.7-cp312-cp312-win_amd64.whl", hash = "sha256:fd49c01e5057a451c30c9b892948976f5d38f2cbd04dc556a82743ba8e27ed8c"}, + {file = "coverage-7.6.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:46f21663e358beae6b368429ffadf14ed0a329996248a847a4322fb2e35d64d3"}, + {file = "coverage-7.6.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:40cca284c7c310d622a1677f105e8507441d1bb7c226f41978ba7c86979609ab"}, + {file = 
"coverage-7.6.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77256ad2345c29fe59ae861aa11cfc74579c88d4e8dbf121cbe46b8e32aec808"}, + {file = "coverage-7.6.7-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87ea64b9fa52bf395272e54020537990a28078478167ade6c61da7ac04dc14bc"}, + {file = "coverage-7.6.7-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d608a7808793e3615e54e9267519351c3ae204a6d85764d8337bd95993581a8"}, + {file = "coverage-7.6.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdd94501d65adc5c24f8a1a0eda110452ba62b3f4aeaba01e021c1ed9cb8f34a"}, + {file = "coverage-7.6.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:82c809a62e953867cf57e0548c2b8464207f5f3a6ff0e1e961683e79b89f2c55"}, + {file = "coverage-7.6.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bb684694e99d0b791a43e9fc0fa58efc15ec357ac48d25b619f207c41f2fd384"}, + {file = "coverage-7.6.7-cp313-cp313-win32.whl", hash = "sha256:963e4a08cbb0af6623e61492c0ec4c0ec5c5cf74db5f6564f98248d27ee57d30"}, + {file = "coverage-7.6.7-cp313-cp313-win_amd64.whl", hash = "sha256:14045b8bfd5909196a90da145a37f9d335a5d988a83db34e80f41e965fb7cb42"}, + {file = "coverage-7.6.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f2c7a045eef561e9544359a0bf5784b44e55cefc7261a20e730baa9220c83413"}, + {file = "coverage-7.6.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5dd4e4a49d9c72a38d18d641135d2fb0bdf7b726ca60a103836b3d00a1182acd"}, + {file = "coverage-7.6.7-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c95e0fa3d1547cb6f021ab72f5c23402da2358beec0a8e6d19a368bd7b0fb37"}, + {file = "coverage-7.6.7-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f63e21ed474edd23f7501f89b53280014436e383a14b9bd77a648366c81dce7b"}, + {file = "coverage-7.6.7-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead9b9605c54d15be228687552916c89c9683c215370c4a44f1f217d2adcc34d"}, + {file = "coverage-7.6.7-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:0573f5cbf39114270842d01872952d301027d2d6e2d84013f30966313cadb529"}, + {file = "coverage-7.6.7-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:e2c8e3384c12dfa19fa9a52f23eb091a8fad93b5b81a41b14c17c78e23dd1d8b"}, + {file = "coverage-7.6.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:70a56a2ec1869e6e9fa69ef6b76b1a8a7ef709972b9cc473f9ce9d26b5997ce3"}, + {file = "coverage-7.6.7-cp313-cp313t-win32.whl", hash = "sha256:dbba8210f5067398b2c4d96b4e64d8fb943644d5eb70be0d989067c8ca40c0f8"}, + {file = "coverage-7.6.7-cp313-cp313t-win_amd64.whl", hash = "sha256:dfd14bcae0c94004baba5184d1c935ae0d1231b8409eb6c103a5fd75e8ecdc56"}, + {file = "coverage-7.6.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37a15573f988b67f7348916077c6d8ad43adb75e478d0910957394df397d2874"}, + {file = "coverage-7.6.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b6cce5c76985f81da3769c52203ee94722cd5d5889731cd70d31fee939b74bf0"}, + {file = "coverage-7.6.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ab9763d291a17b527ac6fd11d1a9a9c358280adb320e9c2672a97af346ac2c"}, + {file = "coverage-7.6.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cf96ceaa275f071f1bea3067f8fd43bec184a25a962c754024c973af871e1b7"}, + {file = 
"coverage-7.6.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aee9cf6b0134d6f932d219ce253ef0e624f4fa588ee64830fcba193269e4daa3"}, + {file = "coverage-7.6.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2bc3e45c16564cc72de09e37413262b9f99167803e5e48c6156bccdfb22c8327"}, + {file = "coverage-7.6.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:623e6965dcf4e28a3debaa6fcf4b99ee06d27218f46d43befe4db1c70841551c"}, + {file = "coverage-7.6.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:850cfd2d6fc26f8346f422920ac204e1d28814e32e3a58c19c91980fa74d8289"}, + {file = "coverage-7.6.7-cp39-cp39-win32.whl", hash = "sha256:c296263093f099da4f51b3dff1eff5d4959b527d4f2f419e16508c5da9e15e8c"}, + {file = "coverage-7.6.7-cp39-cp39-win_amd64.whl", hash = "sha256:90746521206c88bdb305a4bf3342b1b7316ab80f804d40c536fc7d329301ee13"}, + {file = "coverage-7.6.7-pp39.pp310-none-any.whl", hash = "sha256:0ddcb70b3a3a57581b450571b31cb774f23eb9519c2aaa6176d3a84c9fc57671"}, + {file = "coverage-7.6.7.tar.gz", hash = "sha256:d79d4826e41441c9a118ff045e4bccb9fdbdcb1d02413e7ea6eb5c87b5439d24"}, +] + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + +[[package]] +name = "deprecated" +version = "1.2.15" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +files = [ + {file = "Deprecated-1.2.15-py2.py3-none-any.whl", hash = "sha256:353bc4a8ac4bfc96800ddab349d89c25dec1079f65fd53acdcc1e0b975b21320"}, + {file = "deprecated-1.2.15.tar.gz", hash = "sha256:683e561a90de76239796e6b6feac66b99030d2dd3fcf61ef996330f14bbb9b0d"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "jinja2 (>=3.0.3,<3.1.0)", "setuptools", "sphinx (<2)", "tox"] + +[[package]] +name = "dill" +version = "0.3.9" +description = "serialize all of Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, + {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] +profile = ["gprof2dot (>=2022.7.29)"] + +[[package]] +name = "docker" +version = "7.1.0" +description = "A Python library for the Docker Engine API." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, + {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, +] + +[package.dependencies] +pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} +requests = ">=2.26.0" +urllib3 = ">=1.26.0" + +[package.extras] +dev = ["coverage (==7.2.7)", "pytest (==7.4.2)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.1.0)", "ruff (==0.1.8)"] +docs = ["myst-parser (==0.18.0)", "sphinx (==5.1.1)"] +ssh = ["paramiko (>=2.4.3)"] +websockets = ["websocket-client (>=1.3.0)"] + +[[package]] +name = "dogpile-cache" +version = "1.3.3" +description = "A caching front-end based on the Dogpile lock." +optional = false +python-versions = ">=3.8" +files = [ + {file = "dogpile.cache-1.3.3-py3-none-any.whl", hash = "sha256:5e211c4902ebdf88c678d268e22454b41e68071632daa9402d8ee24e825ed8ca"}, + {file = "dogpile.cache-1.3.3.tar.gz", hash = "sha256:f84b8ed0b0fb297d151055447fa8dcaf7bae566d4dbdefecdcc1f37662ab588b"}, +] + +[package.dependencies] +decorator = ">=4.0.0" +stevedore = ">=3.0.0" + +[package.extras] +pifpaf = ["pifpaf (>=2.5.0)", "setuptools"] + +[[package]] +name = "enzyme" +version = "0.5.2" +description = "Video metadata parser" +optional = false +python-versions = ">=3.8" +files = [ + {file = "enzyme-0.5.2-py3-none-any.whl", hash = "sha256:5a85306c136368d78f299bb74bf0c5f5d37e2689adc5caec5aba5ee2f029296b"}, + {file = "enzyme-0.5.2.tar.gz", hash = "sha256:7cf779148d9e66eb2838603eace140c53c3cefc8b8fe5d4d5a03a5fb5d57b3c1"}, +] + +[package.extras] +dev = ["doc8", "mypy", "ruff", "tox", "typos", "validate-pyproject"] +docs = ["myst-parser", "sphinx", "sphinx-rtd-theme"] +test = ["PyYAML", "importlib-metadata (>=4.6)", "mypy", "pytest (>=6.0)", "requests"] + +[[package]] +name = "fastapi" +version = "0.110.3" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastapi-0.110.3-py3-none-any.whl", hash = "sha256:fd7600612f755e4050beb74001310b5a7e1796d149c2ee363124abdfa0289d32"}, + {file = "fastapi-0.110.3.tar.gz", hash = "sha256:555700b0159379e94fdbfc6bb66a0f1c43f4cf7060f25239af3d84b63a656626"}, +] + +[package.dependencies] +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +starlette = ">=0.37.2,<0.38.0" +typing-extensions = ">=4.8.0" + +[package.extras] +all = ["email_validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] + +[[package]] +name = "greenlet" +version = "3.1.1" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, + {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, + {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, + {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, + {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, + {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = 
"greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "guessit" +version = "3.8.0" +description = "GuessIt - a library for guessing information from video filenames." +optional = false +python-versions = "*" +files = [ + {file = "guessit-3.8.0-py3-none-any.whl", hash = "sha256:eb5747b1d0fbca926562c1e5894dbc3f6507c35e8c0bd9e38148401cd9579d83"}, + {file = "guessit-3.8.0.tar.gz", hash = "sha256:6619fcbbf9a0510ec8c2c33744c4251cad0507b1d573d05c875de17edc5edbed"}, +] + +[package.dependencies] +babelfish = ">=0.6.0" +python-dateutil = "*" +rebulk = ">=3.2.0" + +[package.extras] +dev = ["mkdocs", "mkdocs-material", "pyinstaller", "python-semantic-release", "tox", "twine", "wheel"] +test = ["PyYAML", "pylint", "pytest", "pytest-benchmark", "pytest-cov", "pytest-mock"] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.7" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, + {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] + +[[package]] +name = "httptools" +version = "0.6.4" +description = "A collection of framework independent HTTP protocol utils." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}, + {file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4"}, + {file = "httptools-0.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988"}, + {file = "httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f"}, + {file = "httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0"}, + {file = "httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d3f0d369e7ffbe59c4b6116a44d6a8eb4783aae027f2c0b366cf0aa964185dba"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:94978a49b8f4569ad607cd4946b759d90b285e39c0d4640c6b36ca7a3ddf2efc"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40dc6a8e399e15ea525305a2ddba998b0af5caa2566bcd79dcbe8948181eeaff"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab9ba8dcf59de5181f6be44a77458e45a578fc99c31510b8c65b7d5acc3cf490"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fc411e1c0a7dcd2f902c7c48cf079947a7e65b5485dea9decb82b9105ca71a43"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d54efd20338ac52ba31e7da78e4a72570cf729fac82bc31ff9199bedf1dc7440"}, + {file = "httptools-0.6.4-cp38-cp38-win_amd64.whl", hash = "sha256:df959752a0c2748a65ab5387d08287abf6779ae9165916fe053e68ae1fbdc47f"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85797e37e8eeaa5439d33e556662cc370e474445d5fab24dcadc65a8ffb04003"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:db353d22843cf1028f43c3651581e4bb49374d85692a85f95f7b9a130e1b2cab"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1ffd262a73d7c28424252381a5b854c19d9de5f56f075445d33919a637e3547"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c346571fa50d2e9856a37d7cd9435a25e7fd15e236c397bf224afaa355fe9"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aafe0f1918ed07b67c1e838f950b1c1fabc683030477e60b335649b8020e1076"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0e563e54979e97b6d13f1bbc05a96109923e76b901f786a5eae36e99c01237bd"}, + {file = "httptools-0.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:b799de31416ecc589ad79dd85a0b2657a8fe39327944998dea368c1d4c9e55e6"}, + {file = "httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c"}, +] + +[package.extras] +test = ["Cython (>=0.29.24)"] + +[[package]] +name = "httpx" +version = "0.27.2" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "importlib-metadata" +version = "7.1.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, + {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "jsonschema" +version = "4.23.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, + {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +jsonschema-specifications = ">=2023.03.6" +referencing = ">=0.28.4" +rpds-py = ">=0.7.1" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] + +[[package]] +name = "jsonschema-specifications" +version = "2024.10.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +optional = false +python-versions = ">=3.9" +files = [ + {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, + {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, +] + +[package.dependencies] +referencing = ">=0.31.0" + +[[package]] +name = "kink" +version = "0.8.1" +description = "Dependency injection for python." +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "kink-0.8.1-py3-none-any.whl", hash = "sha256:c046be42395de6e18776daa93ac78280a70b3aa5c70b9ea5ca716cc71b3ff91a"}, + {file = "kink-0.8.1.tar.gz", hash = "sha256:9310fa5860ad4df3cdd4a2b998517a718cbc83ed4975c51b8ebd60f640a9702c"}, +] + +[package.dependencies] +typing_extensions = ">=4.9.0,<5.0.0" + +[[package]] +name = "levenshtein" +version = "0.25.1" +description = "Python extension for computing string edit distances and similarities." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "Levenshtein-0.25.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:eb4d1ec9f2dcbde1757c4b7fb65b8682bc2de45b9552e201988f287548b7abdf"}, + {file = "Levenshtein-0.25.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b4d9fa3affef48a7e727cdbd0d9502cd060da86f34d8b3627edd769d347570e2"}, + {file = "Levenshtein-0.25.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1b6cd186e58196ff8b402565317e9346b408d0c04fa0ed12ce4868c0fcb6d03"}, + {file = "Levenshtein-0.25.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82637ef5428384dd1812849dd7328992819bf0c4a20bff0a3b3ee806821af7ed"}, + {file = "Levenshtein-0.25.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e73656da6cc3e32a6e4bcd48562fcb64599ef124997f2c91f5320d7f1532c069"}, + {file = "Levenshtein-0.25.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5abff796f92cdfba69b9cbf6527afae918d0e95cbfac000bd84017f74e0bd427"}, + {file = "Levenshtein-0.25.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38827d82f2ca9cb755da6f03e686866f2f411280db005f4304272378412b4cba"}, + {file = "Levenshtein-0.25.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b989df1e3231261a87d68dfa001a2070771e178b09650f9cf99a20e3d3abc28"}, + {file = "Levenshtein-0.25.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2011d3b3897d438a2f88ef7aed7747f28739cae8538ec7c18c33dd989930c7a0"}, + {file = "Levenshtein-0.25.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6c375b33ec7acc1c6855e8ee8c7c8ac6262576ffed484ff5c556695527f49686"}, + {file = "Levenshtein-0.25.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ce0cb9dd012ef1bf4d5b9d40603e7709b6581aec5acd32fcea9b371b294ca7aa"}, + {file = "Levenshtein-0.25.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:9da9ecb81bae67d784defed7274f894011259b038ec31f2339c4958157970115"}, + {file = "Levenshtein-0.25.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3bd7be5dbe5f4a1b691f381e39512927b39d1e195bd0ad61f9bf217a25bf36c9"}, + {file = "Levenshtein-0.25.1-cp310-cp310-win32.whl", hash = "sha256:f6abb9ced98261de67eb495b95e1d2325fa42b0344ed5763f7c0f36ee2e2bdba"}, + {file = "Levenshtein-0.25.1-cp310-cp310-win_amd64.whl", hash = "sha256:97581af3e0a6d359af85c6cf06e51f77f4d635f7109ff7f8ed7fd634d8d8c923"}, + {file = "Levenshtein-0.25.1-cp310-cp310-win_arm64.whl", hash = "sha256:9ba008f490788c6d8d5a10735fcf83559965be97e4ef0812db388a84b1cc736a"}, + {file = "Levenshtein-0.25.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f57d9cf06dac55c2d2f01f0d06e32acc074ab9a902921dc8fddccfb385053ad5"}, + {file = "Levenshtein-0.25.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:22b60c6d791f4ca67a3686b557ddb2a48de203dae5214f220f9dddaab17f44bb"}, + {file = "Levenshtein-0.25.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d0444ee62eccf1e6cedc7c5bc01a9face6ff70cc8afa3f3ca9340e4e16f601a4"}, + {file = "Levenshtein-0.25.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e8758be8221a274c83924bae8dd8f42041792565a3c3bdd3c10e3f9b4a5f94e"}, + {file = "Levenshtein-0.25.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:147221cfb1d03ed81d22fdd2a4c7fc2112062941b689e027a30d2b75bbced4a3"}, + {file = "Levenshtein-0.25.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:a454d5bc4f4a289f5471418788517cc122fcc00d5a8aba78c54d7984840655a2"}, + {file = "Levenshtein-0.25.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c25f3778bbac78286bef2df0ca80f50517b42b951af0a5ddaec514412f79fac"}, + {file = "Levenshtein-0.25.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:181486cf465aff934694cc9a19f3898a1d28025a9a5f80fc1608217e7cd1c799"}, + {file = "Levenshtein-0.25.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b8db9f672a5d150706648b37b044dba61f36ab7216c6a121cebbb2899d7dfaa3"}, + {file = "Levenshtein-0.25.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f2a69fe5ddea586d439f9a50d0c51952982f6c0db0e3573b167aa17e6d1dfc48"}, + {file = "Levenshtein-0.25.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:3b684675a3bd35efa6997856e73f36c8a41ef62519e0267dcbeefd15e26cae71"}, + {file = "Levenshtein-0.25.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:cc707ef7edb71f6bf8339198b929ead87c022c78040e41668a4db68360129cef"}, + {file = "Levenshtein-0.25.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:41512c436b8c691326e2d07786d906cba0e92b5e3f455bf338befb302a0ca76d"}, + {file = "Levenshtein-0.25.1-cp311-cp311-win32.whl", hash = "sha256:2a3830175c01ade832ba0736091283f14a6506a06ffe8c846f66d9fbca91562f"}, + {file = "Levenshtein-0.25.1-cp311-cp311-win_amd64.whl", hash = "sha256:9e0af4e6e023e0c8f79af1d1ca5f289094eb91201f08ad90f426d71e4ae84052"}, + {file = "Levenshtein-0.25.1-cp311-cp311-win_arm64.whl", hash = "sha256:38e5d9a1d737d7b49fa17d6a4c03a0359288154bf46dc93b29403a9dd0cd1a7d"}, + {file = "Levenshtein-0.25.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4a40fa16ecd0bf9e557db67131aabeea957f82fe3e8df342aa413994c710c34e"}, + {file = "Levenshtein-0.25.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4f7d2045d5927cffa65a0ac671c263edbfb17d880fdce2d358cd0bda9bcf2b6d"}, + {file = "Levenshtein-0.25.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40f96590539f9815be70e330b4d2efcce0219db31db5a22fffe99565192f5662"}, + {file = "Levenshtein-0.25.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d78512dd25b572046ff86d8903bec283c373063349f8243430866b6a9946425"}, + {file = "Levenshtein-0.25.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c161f24a1b216e8555c874c7dd70c1a0d98f783f252a16c9face920a8b8a6f3e"}, + {file = "Levenshtein-0.25.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:06ebbfd010a00490795f478d18d7fa2ffc79c9c03fc03b678081f31764d16bab"}, + {file = "Levenshtein-0.25.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaa9ec0a4489ebfb25a9ec2cba064ed68d0d2485b8bc8b7203f84a7874755e0f"}, + {file = "Levenshtein-0.25.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:26408938a6db7b252824a701545d50dc9cdd7a3e4c7ee70834cca17953b76ad8"}, + {file = "Levenshtein-0.25.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:330ec2faff957281f4e6a1a8c88286d1453e1d73ee273ea0f937e0c9281c2156"}, + {file = "Levenshtein-0.25.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9115d1b08626dfdea6f3955cb49ba5a578f7223205f80ead0038d6fc0442ce13"}, + {file = "Levenshtein-0.25.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:bbd602edab758e93a5c67bf0d8322f374a47765f1cdb6babaf593a64dc9633ad"}, + {file = "Levenshtein-0.25.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = 
"sha256:b930b4df32cd3aabbed0e9f0c4fdd1ea4090a5c022ba9f1ae4ab70ccf1cf897a"}, + {file = "Levenshtein-0.25.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:dd66fb51f88a3f73a802e1ff19a14978ddc9fbcb7ce3a667ca34f95ef54e0e44"}, + {file = "Levenshtein-0.25.1-cp312-cp312-win32.whl", hash = "sha256:386de94bd1937a16ae3c8f8b7dd2eff1b733994ecf56ce4d05dfdd0e776d0261"}, + {file = "Levenshtein-0.25.1-cp312-cp312-win_amd64.whl", hash = "sha256:9ee1902153d47886c9787598a4a5c324ce7fde44d44daa34fcf3652ac0de21bc"}, + {file = "Levenshtein-0.25.1-cp312-cp312-win_arm64.whl", hash = "sha256:b56a7e7676093c3aee50402226f4079b15bd21b5b8f1820f9d6d63fe99dc4927"}, + {file = "Levenshtein-0.25.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6b5dfdf6a0e2f35fd155d4c26b03398499c24aba7bc5db40245789c46ad35c04"}, + {file = "Levenshtein-0.25.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:355ff797f704459ddd8b95354d699d0d0642348636c92d5e67b49be4b0e6112b"}, + {file = "Levenshtein-0.25.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:933b827a3b721210fff522f3dca9572f9f374a0e88fa3a6c7ee3164406ae7794"}, + {file = "Levenshtein-0.25.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be1da669a240f272d904ab452ad0a1603452e190f4e03e886e6b3a9904152b89"}, + {file = "Levenshtein-0.25.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:265cbd78962503a26f2bea096258a3b70b279bb1a74a525c671d3ee43a190f9c"}, + {file = "Levenshtein-0.25.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:63cc4d53a35e673b12b721a58b197b4a65734688fb72aa1987ce63ed612dca96"}, + {file = "Levenshtein-0.25.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75fee0c471b8799c70dad9d0d5b70f1f820249257f9617601c71b6c1b37bee92"}, + {file = "Levenshtein-0.25.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:045d6b0db124fbd37379b2b91f6d0786c2d9220e7a848e2dd31b99509a321240"}, + {file = "Levenshtein-0.25.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:db7a2e9c51ac9cc2fd5679484f1eac6e0ab2085cb181240445f7fbf10df73230"}, + {file = "Levenshtein-0.25.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c379c588aa0d93d4607db7eb225fd683263d49669b1bbe49e28c978aa6a4305d"}, + {file = "Levenshtein-0.25.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:966dd00424df7f69b78da02a29b530fbb6c1728e9002a2925ed7edf26b231924"}, + {file = "Levenshtein-0.25.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:09daa6b068709cc1e68b670a706d928ed8f0b179a26161dd04b3911d9f757525"}, + {file = "Levenshtein-0.25.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d6bed0792635081accf70a7e11cfece986f744fddf46ce26808cd8bfc067e430"}, + {file = "Levenshtein-0.25.1-cp38-cp38-win32.whl", hash = "sha256:28e7b7faf5a745a690d1b1706ab82a76bbe9fa6b729d826f0cfdd24fd7c19740"}, + {file = "Levenshtein-0.25.1-cp38-cp38-win_amd64.whl", hash = "sha256:8ca0cc9b9e07316b5904f158d5cfa340d55b4a3566ac98eaac9f087c6efb9a1a"}, + {file = "Levenshtein-0.25.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:45682cdb3ac4a5465c01b2dce483bdaa1d5dcd1a1359fab37d26165b027d3de2"}, + {file = "Levenshtein-0.25.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f8dc3e63c4cd746ec162a4cd744c6dde857e84aaf8c397daa46359c3d54e6219"}, + {file = "Levenshtein-0.25.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:01ad1eb09933a499a49923e74e05b1428ca4ef37fed32965fef23f1334a11563"}, + {file = "Levenshtein-0.25.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:cbb4e8c4b8b7bbe0e1aa64710b806b6c3f31d93cb14969ae2c0eff0f3a592db8"}, + {file = "Levenshtein-0.25.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b48d1fe224b365975002e3e2ea947cbb91d2936a16297859b71c4abe8a39932c"}, + {file = "Levenshtein-0.25.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a164df16d876aab0a400f72aeac870ea97947ea44777c89330e9a16c7dc5cc0e"}, + {file = "Levenshtein-0.25.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:995d3bcedcf64be6ceca423f6cfe29184a36d7c4cbac199fdc9a0a5ec7196cf5"}, + {file = "Levenshtein-0.25.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdaf62d637bef6711d6f3457e2684faab53b2db2ed53c05bc0dc856464c74742"}, + {file = "Levenshtein-0.25.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:af9de3b5f8f5f3530cfd97daab9ab480d1b121ef34d8c0aa5bab0c645eae219e"}, + {file = "Levenshtein-0.25.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:78fba73c352383b356a30c4674e39f086ffef7122fa625e7550b98be2392d387"}, + {file = "Levenshtein-0.25.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:9e0df0dcea3943321398f72e330c089b5d5447318310db6f17f5421642f3ade6"}, + {file = "Levenshtein-0.25.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:387f768bb201b9bc45f0f49557e2fb9a3774d9d087457bab972162dcd4fd352b"}, + {file = "Levenshtein-0.25.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5dcf931b64311039b43495715e9b795fbd97ab44ba3dd6bf24360b15e4e87649"}, + {file = "Levenshtein-0.25.1-cp39-cp39-win32.whl", hash = "sha256:2449f8668c0bd62a2b305a5e797348984c06ac20903b38b3bab74e55671ddd51"}, + {file = "Levenshtein-0.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:28803fd6ec7b58065621f5ec0d24e44e2a7dc4842b64dcab690cb0a7ea545210"}, + {file = "Levenshtein-0.25.1-cp39-cp39-win_arm64.whl", hash = "sha256:0b074d452dff8ee86b5bdb6031aa32bb2ed3c8469a56718af5e010b9bb5124dc"}, + {file = "Levenshtein-0.25.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e9e060ef3925a68aeb12276f0e524fb1264592803d562ec0306c7c3f5c68eae0"}, + {file = "Levenshtein-0.25.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f84b84049318d44722db307c448f9dcb8d27c73525a378e901189a94889ba61"}, + {file = "Levenshtein-0.25.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07e23fdf330cb185a0c7913ca5bd73a189dfd1742eae3a82e31ed8688b191800"}, + {file = "Levenshtein-0.25.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d06958e4a81ea0f0b2b7768a2ad05bcd50a9ad04c4d521dd37d5730ff12decdc"}, + {file = "Levenshtein-0.25.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:2ea7c34ec22b2fce21299b0caa6dde6bdebafcc2970e265853c9cfea8d1186da"}, + {file = "Levenshtein-0.25.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fddc0ccbdd94f57aa32e2eb3ac8310d08df2e175943dc20b3e1fc7a115850af4"}, + {file = "Levenshtein-0.25.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d52249cb3448bfe661d3d7db3a6673e835c7f37b30b0aeac499a1601bae873d"}, + {file = "Levenshtein-0.25.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8dd4c201b15f8c1e612f9074335392c8208ac147acbce09aff04e3974bf9b16"}, + {file = "Levenshtein-0.25.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23a4d95ce9d44161c7aa87ab76ad6056bc1093c461c60c097054a46dc957991f"}, + 
{file = "Levenshtein-0.25.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:65eea8a9c33037b23069dca4b3bc310e3c28ca53f60ec0c958d15c0952ba39fa"}, + {file = "Levenshtein-0.25.1.tar.gz", hash = "sha256:2df14471c778c75ffbd59cb64bbecfd4b0ef320ef9f80e4804764be7d5678980"}, +] + +[package.dependencies] +rapidfuzz = ">=3.8.0,<4.0.0" + +[[package]] +name = "loguru" +version = "0.7.2" +description = "Python logging made (stupidly) simple" +optional = false +python-versions = ">=3.5" +files = [ + {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"}, + {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} +win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} + +[package.extras] +dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"] + +[[package]] +name = "lxml" +version = "5.3.0" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." +optional = false +python-versions = ">=3.6" +files = [ + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"}, + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:501d0d7e26b4d261fca8132854d845e4988097611ba2531408ec91cf3fd9d20a"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66442c2546446944437df74379e9cf9e9db353e61301d1a0e26482f43f0dd8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e41506fec7a7f9405b14aa2d5c8abbb4dbbd09d88f9496958b6d00cb4d45330"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7d4a670107d75dfe5ad080bed6c341d18c4442f9378c9f58e5851e86eb79965"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41ce1f1e2c7755abfc7e759dc34d7d05fd221723ff822947132dc934d122fe22"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:44264ecae91b30e5633013fb66f6ddd05c006d3e0e884f75ce0b4755b3e3847b"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:3c174dc350d3ec52deb77f2faf05c439331d6ed5e702fc247ccb4e6b62d884b7"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:2dfab5fa6a28a0b60a20638dc48e6343c02ea9933e3279ccb132f555a62323d8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b1c8c20847b9f34e98080da785bb2336ea982e7f913eed5809e5a3c872900f32"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2c86bf781b12ba417f64f3422cfc302523ac9cd1d8ae8c0f92a1c66e56ef2e86"}, + {file = 
"lxml-5.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c162b216070f280fa7da844531169be0baf9ccb17263cf5a8bf876fcd3117fa5"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:36aef61a1678cb778097b4a6eeae96a69875d51d1e8f4d4b491ab3cfb54b5a03"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f65e5120863c2b266dbcc927b306c5b78e502c71edf3295dfcb9501ec96e5fc7"}, + {file = "lxml-5.3.0-cp310-cp310-win32.whl", hash = "sha256:ef0c1fe22171dd7c7c27147f2e9c3e86f8bdf473fed75f16b0c2e84a5030ce80"}, + {file = "lxml-5.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:052d99051e77a4f3e8482c65014cf6372e61b0a6f4fe9edb98503bb5364cfee3"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:74bcb423462233bc5d6066e4e98b0264e7c1bed7541fff2f4e34fe6b21563c8b"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3d819eb6f9b8677f57f9664265d0a10dd6551d227afb4af2b9cd7bdc2ccbf18"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b8f5db71b28b8c404956ddf79575ea77aa8b1538e8b2ef9ec877945b3f46442"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3406b63232fc7e9b8783ab0b765d7c59e7c59ff96759d8ef9632fca27c7ee4"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ecdd78ab768f844c7a1d4a03595038c166b609f6395e25af9b0f3f26ae1230f"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168f2dfcfdedf611eb285efac1516c8454c8c99caf271dccda8943576b67552e"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa617107a410245b8660028a7483b68e7914304a6d4882b5ff3d2d3eb5948d8c"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:69959bd3167b993e6e710b99051265654133a98f20cec1d9b493b931942e9c16"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:bd96517ef76c8654446fc3db9242d019a1bb5fe8b751ba414765d59f99210b79"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ab6dd83b970dc97c2d10bc71aa925b84788c7c05de30241b9e96f9b6d9ea3080"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eec1bb8cdbba2925bedc887bc0609a80e599c75b12d87ae42ac23fd199445654"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a7095eeec6f89111d03dabfe5883a1fd54da319c94e0fb104ee8f23616b572d"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f651ebd0b21ec65dfca93aa629610a0dbc13dbc13554f19b0113da2e61a4763"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f422a209d2455c56849442ae42f25dbaaba1c6c3f501d58761c619c7836642ec"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:62f7fdb0d1ed2065451f086519865b4c90aa19aed51081979ecd05a21eb4d1be"}, + {file = "lxml-5.3.0-cp311-cp311-win32.whl", hash = "sha256:c6379f35350b655fd817cd0d6cbeef7f265f3ae5fedb1caae2eb442bbeae9ab9"}, + {file = "lxml-5.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c52100e2c2dbb0649b90467935c4b0de5528833c76a35ea1a2691ec9f1ee7a1"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e99f5507401436fdcc85036a2e7dc2e28d962550afe1cbfc07c40e454256a859"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:384aacddf2e5813a36495233b64cb96b1949da72bef933918ba5c84e06af8f0e"}, + {file 
= "lxml-5.3.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a216bf6afaf97c263b56371434e47e2c652d215788396f60477540298218f"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65ab5685d56914b9a2a34d67dd5488b83213d680b0c5d10b47f81da5a16b0b0e"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aac0bbd3e8dd2d9c45ceb82249e8bdd3ac99131a32b4d35c8af3cc9db1657179"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b369d3db3c22ed14c75ccd5af429086f166a19627e84a8fdade3f8f31426e52a"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24037349665434f375645fa9d1f5304800cec574d0310f618490c871fd902b3"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:62d172f358f33a26d6b41b28c170c63886742f5b6772a42b59b4f0fa10526cb1"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:c1f794c02903c2824fccce5b20c339a1a14b114e83b306ff11b597c5f71a1c8d"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:5d6a6972b93c426ace71e0be9a6f4b2cfae9b1baed2eed2006076a746692288c"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3879cc6ce938ff4eb4900d901ed63555c778731a96365e53fadb36437a131a99"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:74068c601baff6ff021c70f0935b0c7bc528baa8ea210c202e03757c68c5a4ff"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ecd4ad8453ac17bc7ba3868371bffb46f628161ad0eefbd0a855d2c8c32dd81a"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7e2f58095acc211eb9d8b5771bf04df9ff37d6b87618d1cbf85f92399c98dae8"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e63601ad5cd8f860aa99d109889b5ac34de571c7ee902d6812d5d9ddcc77fa7d"}, + {file = "lxml-5.3.0-cp312-cp312-win32.whl", hash = "sha256:17e8d968d04a37c50ad9c456a286b525d78c4a1c15dd53aa46c1d8e06bf6fa30"}, + {file = "lxml-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:c1a69e58a6bb2de65902051d57fde951febad631a20a64572677a1052690482f"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c72e9563347c7395910de6a3100a4840a75a6f60e05af5e58566868d5eb2d6a"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e92ce66cd919d18d14b3856906a61d3f6b6a8500e0794142338da644260595cd"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d04f064bebdfef9240478f7a779e8c5dc32b8b7b0b2fc6a62e39b928d428e51"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c2fb570d7823c2bbaf8b419ba6e5662137f8166e364a8b2b91051a1fb40ab8b"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c120f43553ec759f8de1fee2f4794452b0946773299d44c36bfe18e83caf002"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:562e7494778a69086f0312ec9689f6b6ac1c6b65670ed7d0267e49f57ffa08c4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:423b121f7e6fa514ba0c7918e56955a1d4470ed35faa03e3d9f0e3baa4c7e492"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = 
"sha256:c00f323cc00576df6165cc9d21a4c21285fa6b9989c5c39830c3903dc4303ef3"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:1fdc9fae8dd4c763e8a31e7630afef517eab9f5d5d31a278df087f307bf601f4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:658f2aa69d31e09699705949b5fc4719cbecbd4a97f9656a232e7d6c7be1a367"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1473427aff3d66a3fa2199004c3e601e6c4500ab86696edffdbc84954c72d832"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a87de7dd873bf9a792bf1e58b1c3887b9264036629a5bf2d2e6579fe8e73edff"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0d7b36afa46c97875303a94e8f3ad932bf78bace9e18e603f2085b652422edcd"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cf120cce539453ae086eacc0130a324e7026113510efa83ab42ef3fcfccac7fb"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:df5c7333167b9674aa8ae1d4008fa4bc17a313cc490b2cca27838bbdcc6bb15b"}, + {file = "lxml-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c802e1c2ed9f0c06a65bc4ed0189d000ada8049312cfeab6ca635e39c9608957"}, + {file = "lxml-5.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:406246b96d552e0503e17a1006fd27edac678b3fcc9f1be71a2f94b4ff61528d"}, + {file = "lxml-5.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8f0de2d390af441fe8b2c12626d103540b5d850d585b18fcada58d972b74a74e"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1afe0a8c353746e610bd9031a630a95bcfb1a720684c3f2b36c4710a0a96528f"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56b9861a71575f5795bde89256e7467ece3d339c9b43141dbdd54544566b3b94"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:9fb81d2824dff4f2e297a276297e9031f46d2682cafc484f49de182aa5e5df99"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2c226a06ecb8cdef28845ae976da407917542c5e6e75dcac7cc33eb04aaeb237"}, + {file = "lxml-5.3.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:7d3d1ca42870cdb6d0d29939630dbe48fa511c203724820fc0fd507b2fb46577"}, + {file = "lxml-5.3.0-cp36-cp36m-win32.whl", hash = "sha256:094cb601ba9f55296774c2d57ad68730daa0b13dc260e1f941b4d13678239e70"}, + {file = "lxml-5.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:eafa2c8658f4e560b098fe9fc54539f86528651f61849b22111a9b107d18910c"}, + {file = "lxml-5.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cb83f8a875b3d9b458cada4f880fa498646874ba4011dc974e071a0a84a1b033"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25f1b69d41656b05885aa185f5fdf822cb01a586d1b32739633679699f220391"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23e0553b8055600b3bf4a00b255ec5c92e1e4aebf8c2c09334f8368e8bd174d6"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ada35dd21dc6c039259596b358caab6b13f4db4d4a7f8665764d616daf9cc1d"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:81b4e48da4c69313192d8c8d4311e5d818b8be1afe68ee20f6385d0e96fc9512"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:2bc9fd5ca4729af796f9f59cd8ff160fe06a474da40aca03fcc79655ddee1a8b"}, + {file = 
"lxml-5.3.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07da23d7ee08577760f0a71d67a861019103e4812c87e2fab26b039054594cc5"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:ea2e2f6f801696ad7de8aec061044d6c8c0dd4037608c7cab38a9a4d316bfb11"}, + {file = "lxml-5.3.0-cp37-cp37m-win32.whl", hash = "sha256:5c54afdcbb0182d06836cc3d1be921e540be3ebdf8b8a51ee3ef987537455f84"}, + {file = "lxml-5.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f2901429da1e645ce548bf9171784c0f74f0718c3f6150ce166be39e4dd66c3e"}, + {file = "lxml-5.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c56a1d43b2f9ee4786e4658c7903f05da35b923fb53c11025712562d5cc02753"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ee8c39582d2652dcd516d1b879451500f8db3fe3607ce45d7c5957ab2596040"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdf3a3059611f7585a78ee10399a15566356116a4288380921a4b598d807a22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:146173654d79eb1fc97498b4280c1d3e1e5d58c398fa530905c9ea50ea849b22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0a7056921edbdd7560746f4221dca89bb7a3fe457d3d74267995253f46343f15"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:9e4b47ac0f5e749cfc618efdf4726269441014ae1d5583e047b452a32e221920"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f914c03e6a31deb632e2daa881fe198461f4d06e57ac3d0e05bbcab8eae01945"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:213261f168c5e1d9b7535a67e68b1f59f92398dd17a56d934550837143f79c42"}, + {file = "lxml-5.3.0-cp38-cp38-win32.whl", hash = "sha256:218c1b2e17a710e363855594230f44060e2025b05c80d1f0661258142b2add2e"}, + {file = "lxml-5.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:315f9542011b2c4e1d280e4a20ddcca1761993dda3afc7a73b01235f8641e903"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1ffc23010330c2ab67fac02781df60998ca8fe759e8efde6f8b756a20599c5de"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2b3778cb38212f52fac9fe913017deea2fdf4eb1a4f8e4cfc6b009a13a6d3fcc"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b0c7a688944891086ba192e21c5229dea54382f4836a209ff8d0a660fac06be"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:747a3d3e98e24597981ca0be0fd922aebd471fa99d0043a3842d00cdcad7ad6a"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86a6b24b19eaebc448dc56b87c4865527855145d851f9fc3891673ff97950540"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b11a5d918a6216e521c715b02749240fb07ae5a1fefd4b7bf12f833bc8b4fe70"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68b87753c784d6acb8a25b05cb526c3406913c9d988d51f80adecc2b0775d6aa"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:109fa6fede314cc50eed29e6e56c540075e63d922455346f11e4d7a036d2b8cf"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:02ced472497b8362c8e902ade23e3300479f4f43e45f4105c85ef43b8db85229"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_s390x.whl", hash = 
"sha256:6b038cc86b285e4f9fea2ba5ee76e89f21ed1ea898e287dc277a25884f3a7dfe"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:7437237c6a66b7ca341e868cda48be24b8701862757426852c9b3186de1da8a2"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7f41026c1d64043a36fda21d64c5026762d53a77043e73e94b71f0521939cc71"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:482c2f67761868f0108b1743098640fbb2a28a8e15bf3f47ada9fa59d9fe08c3"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1483fd3358963cc5c1c9b122c80606a3a79ee0875bcac0204149fa09d6ff2727"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dec2d1130a9cda5b904696cec33b2cfb451304ba9081eeda7f90f724097300a"}, + {file = "lxml-5.3.0-cp39-cp39-win32.whl", hash = "sha256:a0eabd0a81625049c5df745209dc7fcef6e2aea7793e5f003ba363610aa0a3ff"}, + {file = "lxml-5.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:89e043f1d9d341c52bf2af6d02e6adde62e0a46e6755d5eb60dc6e4f0b8aeca2"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7b1cd427cb0d5f7393c31b7496419da594fe600e6fdc4b105a54f82405e6626c"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51806cfe0279e06ed8500ce19479d757db42a30fd509940b1701be9c86a5ff9a"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee70d08fd60c9565ba8190f41a46a54096afa0eeb8f76bd66f2c25d3b1b83005"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8dc2c0395bea8254d8daebc76dcf8eb3a95ec2a46fa6fae5eaccee366bfe02ce"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6ba0d3dcac281aad8a0e5b14c7ed6f9fa89c8612b47939fc94f80b16e2e9bc83"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6e91cf736959057f7aac7adfc83481e03615a8e8dd5758aa1d95ea69e8931dba"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:94d6c3782907b5e40e21cadf94b13b0842ac421192f26b84c45f13f3c9d5dc27"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c300306673aa0f3ed5ed9372b21867690a17dba38c68c44b287437c362ce486b"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d9b952e07aed35fe2e1a7ad26e929595412db48535921c5013edc8aa4a35ce"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:01220dca0d066d1349bd6a1726856a78f7929f3878f7e2ee83c296c69495309e"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2d9b8d9177afaef80c53c0a9e30fa252ff3036fb1c6494d427c066a4ce6a282f"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:20094fc3f21ea0a8669dc4c61ed7fa8263bd37d97d93b90f28fc613371e7a875"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ace2c2326a319a0bb8a8b0e5b570c764962e95818de9f259ce814ee666603f19"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e67a0be1639c251d21e35fe74df6bcc40cba445c2cda7c4a967656733249e2"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5350b55f9fecddc51385463a4f67a5da829bc741e38cf689f38ec9023f54ab"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = 
"sha256:4c1fefd7e3d00921c44dc9ca80a775af49698bbfd92ea84498e56acffd4c5469"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:71a8dd38fbd2f2319136d4ae855a7078c69c9a38ae06e0c17c73fd70fc6caad8"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:97acf1e1fd66ab53dacd2c35b319d7e548380c2e9e8c54525c6e76d21b1ae3b1"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:68934b242c51eb02907c5b81d138cb977b2129a0a75a8f8b60b01cb8586c7b21"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b710bc2b8292966b23a6a0121f7a6c51d45d2347edcc75f016ac123b8054d3f2"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18feb4b93302091b1541221196a2155aa296c363fd233814fa11e181adebc52f"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3eb44520c4724c2e1a57c0af33a379eee41792595023f367ba3952a2d96c2aab"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:609251a0ca4770e5a8768ff902aa02bf636339c5a93f9349b48eb1f606f7f3e9"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:516f491c834eb320d6c843156440fe7fc0d50b33e44387fcec5b02f0bc118a4c"}, + {file = "lxml-5.3.0.tar.gz", hash = "sha256:4e109ca30d1edec1ac60cdbe341905dc3b8f55b16855e03a54aaf59e51ec8c6f"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html-clean = ["lxml-html-clean"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=3.0.11)"] + +[[package]] +name = "mako" +version = "1.3.6" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Mako-1.3.6-py3-none-any.whl", hash = "sha256:a91198468092a2f1a0de86ca92690fb0cfc43ca90ee17e15d93662b4c04b241a"}, + {file = "mako-1.3.6.tar.gz", hash = "sha256:9ec3a1583713479fae654f83ed9fa8c9a4c16b7bb0daba0e6bbebff50c0d983d"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + +[[package]] +name = "markdown" +version = "3.7" +description = "Python implementation of John Gruber's Markdown." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803"}, + {file = "markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2"}, +] + +[package.extras] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] +testing = ["coverage", "pyyaml"] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "markupsafe" +version = "3.0.2" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.9" +files = [ + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = 
"MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = 
"sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mypy" +version = "1.13.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, + {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, + {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, + {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, + {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, + {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, + {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, + {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, + {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, + {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, + {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, + {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, + {file = 
"mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, + {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, + {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, + {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, + {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, + {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, + {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, + {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, + {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +typing-extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +description = "Node.js virtual environment builder" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] + +[[package]] +name = "oauthlib" +version = "3.2.2" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +optional = false +python-versions = ">=3.6" +files = [ + {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, + {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, +] + +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + +[[package]] +name = "opentelemetry-api" +version = "1.25.0" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_api-1.25.0-py3-none-any.whl", hash = "sha256:757fa1aa020a0f8fa139f8959e53dec2051cc26b832e76fa839a6d76ecefd737"}, + {file = "opentelemetry_api-1.25.0.tar.gz", hash = "sha256:77c4985f62f2614e42ce77ee4c9da5fa5f0bc1e1821085e9a47533a9323ae869"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +importlib-metadata = ">=6.0,<=7.1" + +[[package]] +name = "opentelemetry-exporter-prometheus" +version = "0.46b0" +description = "Prometheus Metric Exporter for OpenTelemetry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_exporter_prometheus-0.46b0-py3-none-any.whl", hash = "sha256:caefdeea5c4d52b72479710d22cc4c469d42fa1dba2f4a2e46ae0ebeaf51cd96"}, + {file = "opentelemetry_exporter_prometheus-0.46b0.tar.gz", hash = "sha256:28cc6456a5d5bf49c34be2f1d22bbc761c36af9b32d909ea5b4c13fe6deac47b"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-sdk = ">=1.25.0,<1.26.0" +prometheus-client = ">=0.5.0,<1.0.0" + +[[package]] +name = "opentelemetry-sdk" +version = "1.25.0" +description = "OpenTelemetry Python SDK" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_sdk-1.25.0-py3-none-any.whl", hash = "sha256:d97ff7ec4b351692e9d5a15af570c693b8715ad78b8aafbec5c7100fe966b4c9"}, + {file = "opentelemetry_sdk-1.25.0.tar.gz", hash = "sha256:ce7fc319c57707ef5bf8b74fb9f8ebdb8bfafbe11898410e0d2a761d08a98ec7"}, +] + +[package.dependencies] +opentelemetry-api = "1.25.0" +opentelemetry-semantic-conventions = "0.46b0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.46b0" +description = "OpenTelemetry Semantic Conventions" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_semantic_conventions-0.46b0-py3-none-any.whl", hash = "sha256:6daef4ef9fa51d51855d9f8e0ccd3a1bd59e0e545abe99ac6203804e36ab3e07"}, + {file = "opentelemetry_semantic_conventions-0.46b0.tar.gz", hash = 
"sha256:fbc982ecbb6a6e90869b15c1673be90bd18c8a56ff1cffc0864e38e2edffaefa"}, +] + +[package.dependencies] +opentelemetry-api = "1.25.0" + +[[package]] +name = "packaging" +version = "24.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, +] + +[[package]] +name = "parsett" +version = "1.5.1" +description = "PTT" +optional = false +python-versions = "<4.0,>=3.11" +files = [ + {file = "parsett-1.5.1-py3-none-any.whl", hash = "sha256:79c7a13e27e28a7975b1a5f3b2da551f86232e5227d424ef43aa12f782c5aab6"}, + {file = "parsett-1.5.1.tar.gz", hash = "sha256:f9775a536f806d8a6b4fe719b0ee2356e110b629aedb1dc628a151e890168da5"}, +] + +[package.dependencies] +arrow = ">=1.3.0,<2.0.0" +regex = ">=2023.12.25,<2024.0.0" + +[[package]] +name = "pbr" +version = "6.1.0" +description = "Python Build Reasonableness" +optional = false +python-versions = ">=2.6" +files = [ + {file = "pbr-6.1.0-py2.py3-none-any.whl", hash = "sha256:a776ae228892d8013649c0aeccbb3d5f99ee15e005a4cbb7e61d55a067b28a2a"}, + {file = "pbr-6.1.0.tar.gz", hash = "sha256:788183e382e3d1d7707db08978239965e8b9e4e5ed42669bf4758186734d5f24"}, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] + +[[package]] +name = "plexapi" +version = "4.16.0" +description = "Python bindings for the Plex API." +optional = false +python-versions = ">=3.9" +files = [ + {file = "PlexAPI-4.16.0-py3-none-any.whl", hash = "sha256:0e96f9fbfdae55a0d707517f2843067a2c8e05e38f23e4fd30beb3a9ba6f0712"}, + {file = "plexapi-4.16.0.tar.gz", hash = "sha256:d2798e4f234e139675ac8413bb30657425b3c74d8ff3b2efaff18bfe1644c2e9"}, +] + +[package.dependencies] +requests = "*" + +[package.extras] +alert = ["websocket-client (>=1.3.3)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "prometheus-client" +version = "0.20.0" +description = "Python client for the Prometheus monitoring system." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "prometheus_client-0.20.0-py3-none-any.whl", hash = "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7"}, + {file = "prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89"}, +] + +[package.extras] +twisted = ["twisted"] + +[[package]] +name = "psutil" +version = "6.1.0" +description = "Cross-platform lib for process and system monitoring in Python." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "psutil-6.1.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ff34df86226c0227c52f38b919213157588a678d049688eded74c76c8ba4a5d0"}, + {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:c0e0c00aa18ca2d3b2b991643b799a15fc8f0563d2ebb6040f64ce8dc027b942"}, + {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:000d1d1ebd634b4efb383f4034437384e44a6d455260aaee2eca1e9c1b55f047"}, + {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5cd2bcdc75b452ba2e10f0e8ecc0b57b827dd5d7aaffbc6821b2a9a242823a76"}, + {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:045f00a43c737f960d273a83973b2511430d61f283a44c96bf13a6e829ba8fdc"}, + {file = "psutil-6.1.0-cp27-none-win32.whl", hash = "sha256:9118f27452b70bb1d9ab3198c1f626c2499384935aaf55388211ad982611407e"}, + {file = "psutil-6.1.0-cp27-none-win_amd64.whl", hash = "sha256:a8506f6119cff7015678e2bce904a4da21025cc70ad283a53b099e7620061d85"}, + {file = "psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688"}, + {file = "psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a"}, + {file = "psutil-6.1.0-cp36-cp36m-win32.whl", hash = "sha256:6d3fbbc8d23fcdcb500d2c9f94e07b1342df8ed71b948a2649b5cb060a7c94ca"}, + {file = "psutil-6.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1209036fbd0421afde505a4879dee3b2fd7b1e14fee81c0069807adcbbcca747"}, + {file = "psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e"}, + {file = "psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be"}, + {file = "psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a"}, +] + +[package.extras] +dev = ["black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "wheel"] +test = ["pytest", "pytest-xdist", "setuptools"] + +[[package]] +name = "psycopg2-binary" +version = "2.9.10" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = false +python-versions = ">=3.8" +files = [ 
+ {file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:3e9c76f0ac6f92ecfc79516a8034a544926430f7b080ec5a0537bca389ee0906"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad26b467a405c798aaa1458ba09d7e2b6e5f96b1ce0ac15d82fd9f95dc38a92"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:270934a475a0e4b6925b5f804e3809dd5f90f8613621d062848dd82f9cd62007"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48b338f08d93e7be4ab2b5f1dbe69dc5e9ef07170fe1f86514422076d9c010d0"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4152f8f76d2023aac16285576a9ecd2b11a9895373a1f10fd9db54b3ff06b4"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32581b3020c72d7a421009ee1c6bf4a131ef5f0a968fab2e2de0c9d2bb4577f1"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2ce3e21dc3437b1d960521eca599d57408a695a0d3c26797ea0f72e834c7ffe5"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e984839e75e0b60cfe75e351db53d6db750b00de45644c5d1f7ee5d1f34a1ce5"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c4745a90b78e51d9ba06e2088a2fe0c693ae19cc8cb051ccda44e8df8a6eb53"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-win32.whl", hash = "sha256:e5720a5d25e3b99cd0dc5c8a440570469ff82659bb09431c1439b92caf184d3b"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:3c18f74eb4386bf35e92ab2354a12c17e5eb4d9798e4c0ad3a00783eae7cd9f1"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e"}, + {file = 
"psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f"}, + {file = 
"psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:056470c3dc57904bbf63d6f534988bafc4e970ffd50f6271fc4ee7daad9498a5"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aa0e31fa4bb82578f3a6c74a73c273367727de397a7a0f07bd83cbea696baa"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8de718c0e1c4b982a54b41779667242bc630b2197948405b7bd8ce16bcecac92"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5c370b1e4975df846b0277b4deba86419ca77dbc25047f535b0bb03d1a544d44"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ffe8ed017e4ed70f68b7b371d84b7d4a790368db9203dfc2d222febd3a9c8863"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8aecc5e80c63f7459a1a2ab2c64df952051df196294d9f739933a9f6687e86b3"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:7a813c8bdbaaaab1f078014b9b0b13f5de757e2b5d9be6403639b298a04d218b"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00924255d7fc916ef66e4bf22f354a940c67179ad3fd7067d7a0a9c84d2fbfc"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7559bce4b505762d737172556a4e6ea8a9998ecac1e39b5233465093e8cee697"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8b58f0a96e7a1e341fc894f62c1177a7c83febebb5ff9123b579418fdc8a481"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b269105e59ac96aba877c1707c600ae55711d9dcd3fc4b5012e4af68e30c648"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:79625966e176dc97ddabc142351e0409e28acf4660b88d1cf6adb876d20c490d"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8aabf1c1a04584c168984ac678a668094d831f152859d06e055288fa515e4d30"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:19721ac03892001ee8fdd11507e6a2e01f4e37014def96379411ca99d78aeb2c"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7f5d859928e635fa3ce3477704acee0f667b3a3d3e4bb109f2b18d4005f38287"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-win32.whl", hash = "sha256:3216ccf953b3f267691c90c6fe742e45d890d8272326b4a8b20850a03d05b7b8"}, + {file = 
"psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5"}, +] + +[[package]] +name = "pydantic" +version = "2.10.0" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.10.0-py3-none-any.whl", hash = "sha256:5e7807ba9201bdf61b1b58aa6eb690916c40a47acfb114b1b4fef3e7fd5b30fc"}, + {file = "pydantic-2.10.0.tar.gz", hash = "sha256:0aca0f045ff6e2f097f1fe89521115335f15049eeb8a7bef3dafe4b19a74e289"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.27.0" +typing-extensions = ">=4.12.2" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] + +[[package]] +name = "pydantic-core" +version = "2.27.0" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.27.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:cd2ac6b919f7fed71b17fe0b4603c092a4c9b5bae414817c9c81d3c22d1e1bcc"}, + {file = "pydantic_core-2.27.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e015833384ca3e1a0565a79f5d953b0629d9138021c27ad37c92a9fa1af7623c"}, + {file = "pydantic_core-2.27.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db72e40628967f6dc572020d04b5f800d71264e0531c6da35097e73bdf38b003"}, + {file = "pydantic_core-2.27.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df45c4073bed486ea2f18757057953afed8dd77add7276ff01bccb79982cf46c"}, + {file = "pydantic_core-2.27.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:836a4bfe0cc6d36dc9a9cc1a7b391265bf6ce9d1eb1eac62ac5139f5d8d9a6fa"}, + {file = "pydantic_core-2.27.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4bf1340ae507f6da6360b24179c2083857c8ca7644aab65807023cf35404ea8d"}, + {file = "pydantic_core-2.27.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ab325fc86fbc077284c8d7f996d904d30e97904a87d6fb303dce6b3de7ebba9"}, + {file = "pydantic_core-2.27.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1da0c98a85a6c6ed702d5556db3b09c91f9b0b78de37b7593e2de8d03238807a"}, + {file = "pydantic_core-2.27.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7b0202ebf2268954090209a84f9897345719e46a57c5f2c9b7b250ca0a9d3e63"}, + {file = "pydantic_core-2.27.0-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:35380671c3c921fe8adf31ad349dc6f7588b7e928dbe44e1093789734f607399"}, + {file = "pydantic_core-2.27.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b4c19525c3538fbc0bbda6229f9682fb8199ce9ac37395880e6952798e00373"}, + {file = "pydantic_core-2.27.0-cp310-none-win32.whl", hash = "sha256:333c840a1303d1474f491e7be0b718226c730a39ead0f7dab2c7e6a2f3855555"}, + {file = "pydantic_core-2.27.0-cp310-none-win_amd64.whl", hash = "sha256:99b2863c1365f43f74199c980a3d40f18a218fbe683dd64e470199db426c4d6a"}, + {file = "pydantic_core-2.27.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4523c4009c3f39d948e01962223c9f5538602e7087a628479b723c939fab262d"}, + {file = "pydantic_core-2.27.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:84af1cf7bfdcbc6fcf5a5f70cc9896205e0350306e4dd73d54b6a18894f79386"}, + {file = "pydantic_core-2.27.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e65466b31be1070b4a5b7dbfbd14b247884cb8e8b79c64fb0f36b472912dbaea"}, + {file = 
"pydantic_core-2.27.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a5c022bb0d453192426221605efc865373dde43b17822a264671c53b068ac20c"}, + {file = "pydantic_core-2.27.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6bb69bf3b6500f195c3deb69c1205ba8fc3cb21d1915f1f158a10d6b1ef29b6a"}, + {file = "pydantic_core-2.27.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0aa4d1b2eba9a325897308b3124014a142cdccb9f3e016f31d3ebee6b5ea5e75"}, + {file = "pydantic_core-2.27.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e96ca781e0c01e32115912ebdf7b3fb0780ce748b80d7d28a0802fa9fbaf44e"}, + {file = "pydantic_core-2.27.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b872c86d8d71827235c7077461c502feb2db3f87d9d6d5a9daa64287d75e4fa0"}, + {file = "pydantic_core-2.27.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:82e1ad4ca170e8af4c928b67cff731b6296e6a0a0981b97b2eb7c275cc4e15bd"}, + {file = "pydantic_core-2.27.0-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:eb40f828bc2f73f777d1eb8fee2e86cd9692a4518b63b6b5aa8af915dfd3207b"}, + {file = "pydantic_core-2.27.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9a8fbf506fde1529a1e3698198fe64bfbe2e0c09557bc6a7dcf872e7c01fec40"}, + {file = "pydantic_core-2.27.0-cp311-none-win32.whl", hash = "sha256:24f984fc7762ed5f806d9e8c4c77ea69fdb2afd987b4fd319ef06c87595a8c55"}, + {file = "pydantic_core-2.27.0-cp311-none-win_amd64.whl", hash = "sha256:68950bc08f9735306322bfc16a18391fcaac99ded2509e1cc41d03ccb6013cfe"}, + {file = "pydantic_core-2.27.0-cp311-none-win_arm64.whl", hash = "sha256:3eb8849445c26b41c5a474061032c53e14fe92a11a5db969f722a2716cd12206"}, + {file = "pydantic_core-2.27.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8117839a9bdbba86e7f9df57018fe3b96cec934c3940b591b0fd3fbfb485864a"}, + {file = "pydantic_core-2.27.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a291d0b4243a259c8ea7e2b84eb9ccb76370e569298875a7c5e3e71baf49057a"}, + {file = "pydantic_core-2.27.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84e35afd9e10b2698e6f2f32256678cb23ca6c1568d02628033a837638b3ed12"}, + {file = "pydantic_core-2.27.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:58ab0d979c969983cdb97374698d847a4acffb217d543e172838864636ef10d9"}, + {file = "pydantic_core-2.27.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0d06b667e53320332be2bf6f9461f4a9b78092a079b8ce8634c9afaa7e10cd9f"}, + {file = "pydantic_core-2.27.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78f841523729e43e3928a364ec46e2e3f80e6625a4f62aca5c345f3f626c6e8a"}, + {file = "pydantic_core-2.27.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:400bf470e4327e920883b51e255617dfe4496d4e80c3fea0b5a5d0bf2c404dd4"}, + {file = "pydantic_core-2.27.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:951e71da6c89d354572098bada5ba5b5dc3a9390c933af8a614e37755d3d1840"}, + {file = "pydantic_core-2.27.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2a51ce96224eadd1845150b204389623c8e129fde5a67a84b972bd83a85c6c40"}, + {file = "pydantic_core-2.27.0-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:483c2213a609e7db2c592bbc015da58b6c75af7360ca3c981f178110d9787bcf"}, + {file = "pydantic_core-2.27.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:359e7951f04ad35111b5ddce184db3391442345d0ab073aa63a95eb8af25a5ef"}, + {file = "pydantic_core-2.27.0-cp312-none-win32.whl", hash = "sha256:ee7d9d5537daf6d5c74a83b38a638cc001b648096c1cae8ef695b0c919d9d379"}, + {file = "pydantic_core-2.27.0-cp312-none-win_amd64.whl", hash = "sha256:2be0ad541bb9f059954ccf8877a49ed73877f862529575ff3d54bf4223e4dd61"}, + {file = "pydantic_core-2.27.0-cp312-none-win_arm64.whl", hash = "sha256:6e19401742ed7b69e51d8e4df3c03ad5ec65a83b36244479fd70edde2828a5d9"}, + {file = "pydantic_core-2.27.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:5f2b19b8d6fca432cb3acf48cf5243a7bf512988029b6e6fd27e9e8c0a204d85"}, + {file = "pydantic_core-2.27.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c86679f443e7085ea55a7376462553996c688395d18ef3f0d3dbad7838f857a2"}, + {file = "pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:510b11e9c3b1a852876d1ccd8d5903684336d635214148637ceb27366c75a467"}, + {file = "pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb704155e73b833801c247f39d562229c0303f54770ca14fb1c053acb376cf10"}, + {file = "pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ce048deb1e033e7a865ca384770bccc11d44179cf09e5193a535c4c2f497bdc"}, + {file = "pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:58560828ee0951bb125c6f2862fbc37f039996d19ceb6d8ff1905abf7da0bf3d"}, + {file = "pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abb4785894936d7682635726613c44578c420a096729f1978cd061a7e72d5275"}, + {file = "pydantic_core-2.27.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2883b260f7a93235488699d39cbbd94fa7b175d3a8063fbfddd3e81ad9988cb2"}, + {file = "pydantic_core-2.27.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c6fcb3fa3855d583aa57b94cf146f7781d5d5bc06cb95cb3afece33d31aac39b"}, + {file = "pydantic_core-2.27.0-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:e851a051f7260e6d688267eb039c81f05f23a19431bd7dfa4bf5e3cb34c108cd"}, + {file = "pydantic_core-2.27.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:edb1bfd45227dec8d50bc7c7d86463cd8728bcc574f9b07de7369880de4626a3"}, + {file = "pydantic_core-2.27.0-cp313-none-win32.whl", hash = "sha256:678f66462058dd978702db17eb6a3633d634f7aa0deaea61e0a674152766d3fc"}, + {file = "pydantic_core-2.27.0-cp313-none-win_amd64.whl", hash = "sha256:d28ca7066d6cdd347a50d8b725dc10d9a1d6a1cce09836cf071ea6a2d4908be0"}, + {file = "pydantic_core-2.27.0-cp313-none-win_arm64.whl", hash = "sha256:6f4a53af9e81d757756508b57cae1cf28293f0f31b9fa2bfcb416cc7fb230f9d"}, + {file = "pydantic_core-2.27.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:e9f9feee7f334b72ceae46313333d002b56f325b5f04271b4ae2aadd9e993ae4"}, + {file = "pydantic_core-2.27.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:225bfff5d425c34e1fd562cef52d673579d59b967d9de06178850c4802af9039"}, + {file = "pydantic_core-2.27.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c921ad596ff1a82f9c692b0758c944355abc9f0de97a4c13ca60ffc6d8dc15d4"}, + {file = "pydantic_core-2.27.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6354e18a9be37bfa124d6b288a87fb30c673745806c92956f1a25e3ae6e76b96"}, + {file = "pydantic_core-2.27.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:8ee4c2a75af9fe21269a4a0898c5425afb01af1f5d276063f57e2ae1bc64e191"}, + {file = "pydantic_core-2.27.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c91e3c04f5191fd3fb68764bddeaf02025492d5d9f23343b283870f6ace69708"}, + {file = "pydantic_core-2.27.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a6ebfac28fd51890a61df36ef202adbd77d00ee5aca4a3dadb3d9ed49cfb929"}, + {file = "pydantic_core-2.27.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:36aa167f69d8807ba7e341d67ea93e50fcaaf6bc433bb04939430fa3dab06f31"}, + {file = "pydantic_core-2.27.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e8d89c276234579cd3d095d5fa2a44eb10db9a218664a17b56363cddf226ff3"}, + {file = "pydantic_core-2.27.0-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:5cc822ab90a70ea3a91e6aed3afac570b276b1278c6909b1d384f745bd09c714"}, + {file = "pydantic_core-2.27.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e15315691fe2253eb447503153acef4d7223dfe7e7702f9ed66539fcd0c43801"}, + {file = "pydantic_core-2.27.0-cp38-none-win32.whl", hash = "sha256:dfa5f5c0a4c8fced1422dc2ca7eefd872d5d13eb33cf324361dbf1dbfba0a9fe"}, + {file = "pydantic_core-2.27.0-cp38-none-win_amd64.whl", hash = "sha256:513cb14c0cc31a4dfd849a4674b20c46d87b364f997bbcb02282306f5e187abf"}, + {file = "pydantic_core-2.27.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:4148dc9184ab79e356dc00a4199dc0ee8647973332cb385fc29a7cced49b9f9c"}, + {file = "pydantic_core-2.27.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5fc72fbfebbf42c0856a824b8b0dc2b5cd2e4a896050281a21cfa6fed8879cb1"}, + {file = "pydantic_core-2.27.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:185ef205256cd8b38431205698531026979db89a79587725c1e55c59101d64e9"}, + {file = "pydantic_core-2.27.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:395e3e1148fa7809016231f8065f30bb0dc285a97b4dc4360cd86e17bab58af7"}, + {file = "pydantic_core-2.27.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33d14369739c5d07e2e7102cdb0081a1fa46ed03215e07f097b34e020b83b1ae"}, + {file = "pydantic_core-2.27.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7820bb0d65e3ce1e3e70b6708c2f66143f55912fa02f4b618d0f08b61575f12"}, + {file = "pydantic_core-2.27.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43b61989068de9ce62296cde02beffabcadb65672207fc51e7af76dca75e6636"}, + {file = "pydantic_core-2.27.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15e350efb67b855cd014c218716feea4986a149ed1f42a539edd271ee074a196"}, + {file = "pydantic_core-2.27.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:433689845288f9a1ee5714444e65957be26d30915f7745091ede4a83cfb2d7bb"}, + {file = "pydantic_core-2.27.0-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:3fd8bc2690e7c39eecdf9071b6a889ce7b22b72073863940edc2a0a23750ca90"}, + {file = "pydantic_core-2.27.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:884f1806609c2c66564082540cffc96868c5571c7c3cf3a783f63f2fb49bd3cd"}, + {file = "pydantic_core-2.27.0-cp39-none-win32.whl", hash = "sha256:bf37b72834e7239cf84d4a0b2c050e7f9e48bced97bad9bdf98d26b8eb72e846"}, + {file = "pydantic_core-2.27.0-cp39-none-win_amd64.whl", hash = "sha256:31a2cae5f059329f9cfe3d8d266d3da1543b60b60130d186d9b6a3c20a346361"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:4fb49cfdb53af5041aba909be00cccfb2c0d0a2e09281bf542371c5fd36ad04c"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:49633583eb7dc5cba61aaf7cdb2e9e662323ad394e543ee77af265736bcd3eaa"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:153017e3d6cd3ce979de06d84343ca424bb6092727375eba1968c8b4693c6ecb"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff63a92f6e249514ef35bc795de10745be0226eaea06eb48b4bbeaa0c8850a4a"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5982048129f40b082c2654de10c0f37c67a14f5ff9d37cf35be028ae982f26df"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:91bc66f878557313c2a6bcf396e7befcffe5ab4354cfe4427318968af31143c3"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:68ef5377eb582fa4343c9d0b57a5b094046d447b4c73dd9fbd9ffb216f829e7d"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c5726eec789ee38f2c53b10b1821457b82274f81f4f746bb1e666d8741fcfadb"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c0c431e4be5c1a0c6654e0c31c661cd89e0ca956ef65305c3c3fd96f4e72ca39"}, + {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:8e21d927469d04b39386255bf00d0feedead16f6253dcc85e9e10ddebc334084"}, + {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4b51f964fcbb02949fc546022e56cdb16cda457af485e9a3e8b78ac2ecf5d77e"}, + {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25a7fd4de38f7ff99a37e18fa0098c3140286451bc823d1746ba80cec5b433a1"}, + {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fda87808429c520a002a85d6e7cdadbf58231d60e96260976c5b8f9a12a8e13"}, + {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8a150392102c402c538190730fda06f3bce654fc498865579a9f2c1d2b425833"}, + {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c9ed88b398ba7e3bad7bd64d66cc01dcde9cfcb7ec629a6fd78a82fa0b559d78"}, + {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:9fe94d9d2a2b4edd7a4b22adcd45814b1b59b03feb00e56deb2e89747aec7bfe"}, + {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d8b5ee4ae9170e2775d495b81f414cc20268041c42571530513496ba61e94ba3"}, + {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d29e235ce13c91902ef3efc3d883a677655b3908b1cbc73dee816e5e1f8f7739"}, + {file = "pydantic_core-2.27.0.tar.gz", hash = "sha256:f57783fbaf648205ac50ae7d646f27582fc706be3977e87c3c124e7a92407b10"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pyfakefs" +version = "5.7.1" +description = "pyfakefs implements a fake file system that mocks the Python file system modules." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pyfakefs-5.7.1-py3-none-any.whl", hash = "sha256:6503ffe7f401701cf974b502311f926da2b0657a72244a6ba36e985ceb3dd783"}, + {file = "pyfakefs-5.7.1.tar.gz", hash = "sha256:24774c632f3b67ea26fd56b08115ba7c339d5cd65655410bca8572d73a1ae9a4"}, +] + +[[package]] +name = "pygments" +version = "2.18.0" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pyperf" +version = "2.8.1" +description = "Python module to run and analyze benchmarks" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pyperf-2.8.1-py3-none-any.whl", hash = "sha256:12a974a800a96568575be51d229b88e6b14197d02440afd98e908d80a42a1a44"}, + {file = "pyperf-2.8.1.tar.gz", hash = "sha256:ef103e21a4d04999315003026a2d659c48a7cfce5e1440f03d6e72591400713a"}, +] + +[package.dependencies] +psutil = ">=5.9.0" + +[package.extras] +dev = ["tox"] + +[[package]] +name = "pyrate-limiter" +version = "2.10.0" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "pyrate_limiter-2.10.0-py3-none-any.whl", hash = "sha256:a99e52159f5ed5eb58118bed8c645e30818e7c0e0d127a0585c8277c776b0f7f"}, + {file = "pyrate_limiter-2.10.0.tar.gz", hash = "sha256:98cc52cdbe058458e945ae87d4fd5a73186497ffa545ee6e98372f8599a5bd34"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=3.3,<4.0)", "redis-py-cluster (>=2.1.3,<3.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyright" +version = "1.1.389" +description = "Command line wrapper for pyright" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyright-1.1.389-py3-none-any.whl", hash = "sha256:41e9620bba9254406dc1f621a88ceab5a88af4c826feb4f614d95691ed243a60"}, + {file = "pyright-1.1.389.tar.gz", hash = "sha256:716bf8cc174ab8b4dcf6828c3298cac05c5ed775dda9910106a5dcfe4c7fe220"}, +] + +[package.dependencies] +nodeenv = ">=1.6.0" +typing-extensions = ">=4.1" + +[package.extras] +all = ["nodejs-wheel-binaries", "twine (>=3.4.1)"] +dev = ["twine (>=3.4.1)"] +nodejs = ["nodejs-wheel-binaries"] + +[[package]] +name = "pysubs2" +version = "1.7.3" +description = "A library for editing subtitle files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pysubs2-1.7.3-py3-none-any.whl", hash = "sha256:de438c868d2c656781c4a78f220ec3a6fd6d52be49266c81fe912d2527002d44"}, + {file = "pysubs2-1.7.3.tar.gz", hash = "sha256:b0130f373390736754531be4e68a0fa521e825fa15cc8ff506e4f8ca2c17459a"}, +] + +[[package]] +name = "pytest" +version = "8.3.3" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, +] + +[package.dependencies] +colorama = {version = "*", 
markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pytz" +version = "2024.2" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, +] + +[[package]] +name = "pywin32" +version = "308" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, + {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, + {file = "pywin32-308-cp310-cp310-win_arm64.whl", hash = "sha256:a5ab5381813b40f264fa3495b98af850098f814a25a63589a8e9eb12560f450c"}, + {file = "pywin32-308-cp311-cp311-win32.whl", hash = "sha256:5d8c8015b24a7d6855b1550d8e660d8daa09983c80e5daf89a273e5c6fb5095a"}, + {file = "pywin32-308-cp311-cp311-win_amd64.whl", hash = "sha256:575621b90f0dc2695fec346b2d6302faebd4f0f45c05ea29404cefe35d89442b"}, + {file = "pywin32-308-cp311-cp311-win_arm64.whl", hash = "sha256:100a5442b7332070983c4cd03f2e906a5648a5104b8a7f50175f7906efd16bb6"}, + {file = "pywin32-308-cp312-cp312-win32.whl", hash = "sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897"}, + {file = "pywin32-308-cp312-cp312-win_amd64.whl", hash = "sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47"}, + {file = "pywin32-308-cp312-cp312-win_arm64.whl", hash = 
"sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091"}, + {file = "pywin32-308-cp313-cp313-win32.whl", hash = "sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed"}, + {file = "pywin32-308-cp313-cp313-win_amd64.whl", hash = "sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4"}, + {file = "pywin32-308-cp313-cp313-win_arm64.whl", hash = "sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd"}, + {file = "pywin32-308-cp37-cp37m-win32.whl", hash = "sha256:1f696ab352a2ddd63bd07430080dd598e6369152ea13a25ebcdd2f503a38f1ff"}, + {file = "pywin32-308-cp37-cp37m-win_amd64.whl", hash = "sha256:13dcb914ed4347019fbec6697a01a0aec61019c1046c2b905410d197856326a6"}, + {file = "pywin32-308-cp38-cp38-win32.whl", hash = "sha256:5794e764ebcabf4ff08c555b31bd348c9025929371763b2183172ff4708152f0"}, + {file = "pywin32-308-cp38-cp38-win_amd64.whl", hash = "sha256:3b92622e29d651c6b783e368ba7d6722b1634b8e70bd376fd7610fe1992e19de"}, + {file = "pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"}, + {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = 
"PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "rank-torrent-name" +version = "1.5.4" +description = "Rank filenames based on user-defined rules!" 
+optional = false +python-versions = "<4.0,>=3.11" +files = [ + {file = "rank_torrent_name-1.5.4-py3-none-any.whl", hash = "sha256:6b97c3ddb9eca5418df026530e03ed3de47cfe2463cc33164e47b0c3aa823805"}, + {file = "rank_torrent_name-1.5.4.tar.gz", hash = "sha256:fe8575347a33a3aa03b0c115fe28ee563ca39deea0572bde208d934de2d19e96"}, +] + +[package.dependencies] +levenshtein = ">=0.25.0,<0.26.0" +parsett = ">=1.5.1,<2.0.0" +pydantic = ">=2.6.3,<3.0.0" +regex = ">=2023.12.25,<2024.0.0" + +[[package]] +name = "rapidfuzz" +version = "3.10.1" +description = "rapid fuzzy string matching" +optional = false +python-versions = ">=3.9" +files = [ + {file = "rapidfuzz-3.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f17d9f21bf2f2f785d74f7b0d407805468b4c173fa3e52c86ec94436b338e74a"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b31f358a70efc143909fb3d75ac6cd3c139cd41339aa8f2a3a0ead8315731f2b"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f4f43f2204b56a61448ec2dd061e26fd344c404da99fb19f3458200c5874ba2"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d81bf186a453a2757472133b24915768abc7c3964194406ed93e170e16c21cb"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3611c8f45379a12063d70075c75134f2a8bd2e4e9b8a7995112ddae95ca1c982"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3c3b537b97ac30da4b73930fa8a4fe2f79c6d1c10ad535c5c09726612cd6bed9"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:231ef1ec9cf7b59809ce3301006500b9d564ddb324635f4ea8f16b3e2a1780da"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ed4f3adc1294834955b7e74edd3c6bd1aad5831c007f2d91ea839e76461a5879"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:7b6015da2e707bf632a71772a2dbf0703cff6525732c005ad24987fe86e8ec32"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1b35a118d61d6f008e8e3fb3a77674d10806a8972c7b8be433d6598df4d60b01"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:bc308d79a7e877226f36bdf4e149e3ed398d8277c140be5c1fd892ec41739e6d"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f017dbfecc172e2d0c37cf9e3d519179d71a7f16094b57430dffc496a098aa17"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-win32.whl", hash = "sha256:36c0e1483e21f918d0f2f26799fe5ac91c7b0c34220b73007301c4f831a9c4c7"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:10746c1d4c8cd8881c28a87fd7ba0c9c102346dfe7ff1b0d021cdf093e9adbff"}, + {file = "rapidfuzz-3.10.1-cp310-cp310-win_arm64.whl", hash = "sha256:dfa64b89dcb906835e275187569e51aa9d546a444489e97aaf2cc84011565fbe"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:92958ae075c87fef393f835ed02d4fe8d5ee2059a0934c6c447ea3417dfbf0e8"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ba7521e072c53e33c384e78615d0718e645cab3c366ecd3cc8cb732befd94967"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d02cbd75d283c287471b5b3738b3e05c9096150f93f2d2dfa10b3d700f2db9"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:efa1582a397da038e2f2576c9cd49b842f56fde37d84a6b0200ffebc08d82350"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f12912acee1f506f974f58de9fdc2e62eea5667377a7e9156de53241c05fdba8"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666d5d8b17becc3f53447bcb2b6b33ce6c2df78792495d1fa82b2924cd48701a"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26f71582c0d62445067ee338ddad99b655a8f4e4ed517a90dcbfbb7d19310474"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8a2ef08b27167bcff230ffbfeedd4c4fa6353563d6aaa015d725dd3632fc3de7"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:365e4fc1a2b95082c890f5e98489b894e6bf8c338c6ac89bb6523c2ca6e9f086"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1996feb7a61609fa842e6b5e0c549983222ffdedaf29644cc67e479902846dfe"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:cf654702f144beaa093103841a2ea6910d617d0bb3fccb1d1fd63c54dde2cd49"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ec108bf25de674781d0a9a935030ba090c78d49def3d60f8724f3fc1e8e75024"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-win32.whl", hash = "sha256:031f8b367e5d92f7a1e27f7322012f3c321c3110137b43cc3bf678505583ef48"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:f98f36c6a1bb9a6c8bbec99ad87c8c0e364f34761739b5ea9adf7b48129ae8cf"}, + {file = "rapidfuzz-3.10.1-cp311-cp311-win_arm64.whl", hash = "sha256:f1da2028cb4e41be55ee797a82d6c1cf589442504244249dfeb32efc608edee7"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1340b56340896bede246f612b6ecf685f661a56aabef3d2512481bfe23ac5835"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2316515169b7b5a453f0ce3adbc46c42aa332cae9f2edb668e24d1fc92b2f2bb"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e06fe6a12241ec1b72c0566c6b28cda714d61965d86569595ad24793d1ab259"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d99c1cd9443b19164ec185a7d752f4b4db19c066c136f028991a480720472e23"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1d9aa156ed52d3446388ba4c2f335e312191d1ca9d1f5762ee983cf23e4ecf6"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:54bcf4efaaee8e015822be0c2c28214815f4f6b4f70d8362cfecbd58a71188ac"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0c955e32afdbfdf6e9ee663d24afb25210152d98c26d22d399712d29a9b976b"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:191633722203f5b7717efcb73a14f76f3b124877d0608c070b827c5226d0b972"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:195baad28057ec9609e40385991004e470af9ef87401e24ebe72c064431524ab"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0fff4a6b87c07366662b62ae994ffbeadc472e72f725923f94b72a3db49f4671"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4ffed25f9fdc0b287f30a98467493d1e1ce5b583f6317f70ec0263b3c97dbba6"}, + {file = 
"rapidfuzz-3.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d02cf8e5af89a9ac8f53c438ddff6d773f62c25c6619b29db96f4aae248177c0"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-win32.whl", hash = "sha256:f3bb81d4fe6a5d20650f8c0afcc8f6e1941f6fecdb434f11b874c42467baded0"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:aaf83e9170cb1338922ae42d320699dccbbdca8ffed07faeb0b9257822c26e24"}, + {file = "rapidfuzz-3.10.1-cp312-cp312-win_arm64.whl", hash = "sha256:c5da802a0d085ad81b0f62828fb55557996c497b2d0b551bbdfeafd6d447892f"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fc22d69a1c9cccd560a5c434c0371b2df0f47c309c635a01a913e03bbf183710"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38b0dac2c8e057562b8f0d8ae5b663d2d6a28c5ab624de5b73cef9abb6129a24"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fde3bbb14e92ce8fcb5c2edfff72e474d0080cadda1c97785bf4822f037a309"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9141fb0592e55f98fe9ac0f3ce883199b9c13e262e0bf40c5b18cdf926109d16"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:237bec5dd1bfc9b40bbd786cd27949ef0c0eb5fab5eb491904c6b5df59d39d3c"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18123168cba156ab5794ea6de66db50f21bb3c66ae748d03316e71b27d907b95"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b75fe506c8e02769cc47f5ab21ce3e09b6211d3edaa8f8f27331cb6988779be"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9da82aa4b46973aaf9e03bb4c3d6977004648c8638febfc0f9d237e865761270"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c34c022d5ad564f1a5a57a4a89793bd70d7bad428150fb8ff2760b223407cdcf"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1e96c84d6c2a0ca94e15acb5399118fff669f4306beb98a6d8ec6f5dccab4412"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e8e154b84a311263e1aca86818c962e1fa9eefdd643d1d5d197fcd2738f88cb9"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:335fee93188f8cd585552bb8057228ce0111bd227fa81bfd40b7df6b75def8ab"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-win32.whl", hash = "sha256:6729b856166a9e95c278410f73683957ea6100c8a9d0a8dbe434c49663689255"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-win_amd64.whl", hash = "sha256:0e06d99ad1ad97cb2ef7f51ec6b1fedd74a3a700e4949353871cf331d07b382a"}, + {file = "rapidfuzz-3.10.1-cp313-cp313-win_arm64.whl", hash = "sha256:8d1b7082104d596a3eb012e0549b2634ed15015b569f48879701e9d8db959dbb"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:779027d3307e1a2b1dc0c03c34df87a470a368a1a0840a9d2908baf2d4067956"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:440b5608ab12650d0390128d6858bc839ae77ffe5edf0b33a1551f2fa9860651"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82cac41a411e07a6f3dc80dfbd33f6be70ea0abd72e99c59310819d09f07d945"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:958473c9f0bca250590200fd520b75be0dbdbc4a7327dc87a55b6d7dc8d68552"}, + {file = 
"rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ef60dfa73749ef91cb6073be1a3e135f4846ec809cc115f3cbfc6fe283a5584"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7fbac18f2c19fc983838a60611e67e3262e36859994c26f2ee85bb268de2355"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a0d519ff39db887cd73f4e297922786d548f5c05d6b51f4e6754f452a7f4296"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bebb7bc6aeb91cc57e4881b222484c26759ca865794187217c9dcea6c33adae6"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fe07f8b9c3bb5c5ad1d2c66884253e03800f4189a60eb6acd6119ebaf3eb9894"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:bfa48a4a2d45a41457f0840c48e579db157a927f4e97acf6e20df8fc521c79de"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2cf44d01bfe8ee605b7eaeecbc2b9ca64fc55765f17b304b40ed8995f69d7716"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e6bbca9246d9eedaa1c84e04a7f555493ba324d52ae4d9f3d9ddd1b740dcd87"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-win32.whl", hash = "sha256:567f88180f2c1423b4fe3f3ad6e6310fc97b85bdba574801548597287fc07028"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:6b2cd7c29d6ecdf0b780deb587198f13213ac01c430ada6913452fd0c40190fc"}, + {file = "rapidfuzz-3.10.1-cp39-cp39-win_arm64.whl", hash = "sha256:9f912d459e46607ce276128f52bea21ebc3e9a5ccf4cccfef30dd5bddcf47be8"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ac4452f182243cfab30ba4668ef2de101effaedc30f9faabb06a095a8c90fd16"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:565c2bd4f7d23c32834652b27b51dd711814ab614b4e12add8476be4e20d1cf5"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:187d9747149321607be4ccd6f9f366730078bed806178ec3eeb31d05545e9e8f"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:616290fb9a8fa87e48cb0326d26f98d4e29f17c3b762c2d586f2b35c1fd2034b"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:073a5b107e17ebd264198b78614c0206fa438cce749692af5bc5f8f484883f50"}, + {file = "rapidfuzz-3.10.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:39c4983e2e2ccb9732f3ac7d81617088822f4a12291d416b09b8a1eadebb3e29"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ac7adee6bcf0c6fee495d877edad1540a7e0f5fc208da03ccb64734b43522d7a"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:425f4ac80b22153d391ee3f94bc854668a0c6c129f05cf2eaf5ee74474ddb69e"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65a2fa13e8a219f9b5dcb9e74abe3ced5838a7327e629f426d333dfc8c5a6e66"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:75561f3df9a906aaa23787e9992b228b1ab69007932dc42070f747103e177ba8"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:edd062490537e97ca125bc6c7f2b7331c2b73d21dc304615afe61ad1691e15d5"}, + {file = "rapidfuzz-3.10.1-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:cfcc8feccf63245a22dfdd16e222f1a39771a44b870beb748117a0e09cbb4a62"}, + {file = "rapidfuzz-3.10.1.tar.gz", hash = "sha256:5a15546d847a915b3f42dc79ef9b0c78b998b4e2c53b252e7166284066585979"}, +] + +[package.extras] +all = ["numpy"] + +[[package]] +name = "rarfile" +version = "4.2" +description = "RAR archive reader for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "rarfile-4.2-py3-none-any.whl", hash = "sha256:8757e1e3757e32962e229cab2432efc1f15f210823cc96ccba0f6a39d17370c9"}, + {file = "rarfile-4.2.tar.gz", hash = "sha256:8e1c8e72d0845ad2b32a47ab11a719bc2e41165ec101fd4d3fe9e92aa3f469ef"}, +] + +[[package]] +name = "rebulk" +version = "3.2.0" +description = "Rebulk - Define simple search patterns in bulk to perform advanced matching on any string." +optional = false +python-versions = "*" +files = [ + {file = "rebulk-3.2.0-py3-none-any.whl", hash = "sha256:6bc31ae4b37200623c5827d2f539f9ec3e52b50431322dad8154642a39b0a53e"}, + {file = "rebulk-3.2.0.tar.gz", hash = "sha256:0d30bf80fca00fa9c697185ac475daac9bde5f646ce3338c9ff5d5dc1ebdfebc"}, +] + +[package.extras] +dev = ["pylint", "pytest", "tox"] +native = ["regex"] +test = ["pylint", "pytest"] + +[[package]] +name = "referencing" +version = "0.35.1" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, + {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + +[[package]] +name = "regex" +version = "2023.12.25" +description = "Alternative regular expression module, to replace re." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = 
"regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, +] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.1" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.1-py3-none-any.whl", hash = "sha256:1285151cddf5331067baa82598afe2d47c7495a1334bfe7a7d329b43e9fd3603"}, + {file = "requests_cache-1.2.1.tar.gz", hash = "sha256:68abc986fdc5b8d0911318fbb5f7c80eebcd4d01bfacc6685ecf8876052511d1"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-oauthlib" +version = "2.0.0" +description = "OAuthlib authentication support for Requests." 
+optional = false +python-versions = ">=3.4" +files = [ + {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, + {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, +] + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + +[[package]] +name = "requests-ratelimiter" +version = "0.7.0" +description = "Rate-limiting for the requests library" +optional = false +python-versions = "<4.0,>=3.7" +files = [ + {file = "requests_ratelimiter-0.7.0-py3-none-any.whl", hash = "sha256:1a7ef2faaa790272722db8539728690046237766fcc479f85b9591e5356a8185"}, + {file = "requests_ratelimiter-0.7.0.tar.gz", hash = "sha256:a070c8a359a6f3a001b0ccb08f17228b7ae0a6e21d8df5b6f6bd58389cddde45"}, +] + +[package.dependencies] +pyrate-limiter = "<3.0" +requests = ">=2.20" + +[package.extras] +docs = ["furo (>=2023.3,<2024.0)", "myst-parser (>=1.0)", "sphinx (>=5.2,<6.0)", "sphinx-autodoc-typehints (>=1.22,<2.0)", "sphinx-copybutton (>=0.5)"] + +[[package]] +name = "responses" +version = "0.25.3" +description = "A utility library for mocking out the `requests` Python library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "responses-0.25.3-py3-none-any.whl", hash = "sha256:521efcbc82081ab8daa588e08f7e8a64ce79b91c39f6e62199b19159bea7dbcb"}, + {file = "responses-0.25.3.tar.gz", hash = "sha256:617b9247abd9ae28313d57a75880422d55ec63c29d33d629697590a034358dba"}, +] + +[package.dependencies] +pyyaml = "*" +requests = ">=2.30.0,<3.0" +urllib3 = ">=1.25.10,<3.0" + +[package.extras] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] + +[[package]] +name = "rich" +version = "13.9.4" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, + {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "rpds-py" +version = "0.21.0" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.9" +files = [ + {file = "rpds_py-0.21.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a017f813f24b9df929674d0332a374d40d7f0162b326562daae8066b502d0590"}, + {file = "rpds_py-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:20cc1ed0bcc86d8e1a7e968cce15be45178fd16e2ff656a243145e0b439bd250"}, + {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad116dda078d0bc4886cb7840e19811562acdc7a8e296ea6ec37e70326c1b41c"}, + {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:808f1ac7cf3b44f81c9475475ceb221f982ef548e44e024ad5f9e7060649540e"}, + {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de552f4a1916e520f2703ec474d2b4d3f86d41f353e7680b597512ffe7eac5d0"}, + {file = 
"rpds_py-0.21.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:efec946f331349dfc4ae9d0e034c263ddde19414fe5128580f512619abed05f1"}, + {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b80b4690bbff51a034bfde9c9f6bf9357f0a8c61f548942b80f7b66356508bf5"}, + {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:085ed25baac88953d4283e5b5bd094b155075bb40d07c29c4f073e10623f9f2e"}, + {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:daa8efac2a1273eed2354397a51216ae1e198ecbce9036fba4e7610b308b6153"}, + {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:95a5bad1ac8a5c77b4e658671642e4af3707f095d2b78a1fdd08af0dfb647624"}, + {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3e53861b29a13d5b70116ea4230b5f0f3547b2c222c5daa090eb7c9c82d7f664"}, + {file = "rpds_py-0.21.0-cp310-none-win32.whl", hash = "sha256:ea3a6ac4d74820c98fcc9da4a57847ad2cc36475a8bd9683f32ab6d47a2bd682"}, + {file = "rpds_py-0.21.0-cp310-none-win_amd64.whl", hash = "sha256:b8f107395f2f1d151181880b69a2869c69e87ec079c49c0016ab96860b6acbe5"}, + {file = "rpds_py-0.21.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5555db3e618a77034954b9dc547eae94166391a98eb867905ec8fcbce1308d95"}, + {file = "rpds_py-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:97ef67d9bbc3e15584c2f3c74bcf064af36336c10d2e21a2131e123ce0f924c9"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ab2c2a26d2f69cdf833174f4d9d86118edc781ad9a8fa13970b527bf8236027"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4e8921a259f54bfbc755c5bbd60c82bb2339ae0324163f32868f63f0ebb873d9"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a7ff941004d74d55a47f916afc38494bd1cfd4b53c482b77c03147c91ac0ac3"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5145282a7cd2ac16ea0dc46b82167754d5e103a05614b724457cffe614f25bd8"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de609a6f1b682f70bb7163da745ee815d8f230d97276db049ab447767466a09d"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40c91c6e34cf016fa8e6b59d75e3dbe354830777fcfd74c58b279dceb7975b75"}, + {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d2132377f9deef0c4db89e65e8bb28644ff75a18df5293e132a8d67748397b9f"}, + {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0a9e0759e7be10109645a9fddaaad0619d58c9bf30a3f248a2ea57a7c417173a"}, + {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e20da3957bdf7824afdd4b6eeb29510e83e026473e04952dca565170cd1ecc8"}, + {file = "rpds_py-0.21.0-cp311-none-win32.whl", hash = "sha256:f71009b0d5e94c0e86533c0b27ed7cacc1239cb51c178fd239c3cfefefb0400a"}, + {file = "rpds_py-0.21.0-cp311-none-win_amd64.whl", hash = "sha256:e168afe6bf6ab7ab46c8c375606298784ecbe3ba31c0980b7dcbb9631dcba97e"}, + {file = "rpds_py-0.21.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:30b912c965b2aa76ba5168fd610087bad7fcde47f0a8367ee8f1876086ee6d1d"}, + {file = "rpds_py-0.21.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca9989d5d9b1b300bc18e1801c67b9f6d2c66b8fd9621b36072ed1df2c977f72"}, + {file = 
"rpds_py-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f54e7106f0001244a5f4cf810ba8d3f9c542e2730821b16e969d6887b664266"}, + {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fed5dfefdf384d6fe975cc026886aece4f292feaf69d0eeb716cfd3c5a4dd8be"}, + {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:590ef88db231c9c1eece44dcfefd7515d8bf0d986d64d0caf06a81998a9e8cab"}, + {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f983e4c2f603c95dde63df633eec42955508eefd8d0f0e6d236d31a044c882d7"}, + {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b229ce052ddf1a01c67d68166c19cb004fb3612424921b81c46e7ea7ccf7c3bf"}, + {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ebf64e281a06c904a7636781d2e973d1f0926a5b8b480ac658dc0f556e7779f4"}, + {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:998a8080c4495e4f72132f3d66ff91f5997d799e86cec6ee05342f8f3cda7dca"}, + {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:98486337f7b4f3c324ab402e83453e25bb844f44418c066623db88e4c56b7c7b"}, + {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a78d8b634c9df7f8d175451cfeac3810a702ccb85f98ec95797fa98b942cea11"}, + {file = "rpds_py-0.21.0-cp312-none-win32.whl", hash = "sha256:a58ce66847711c4aa2ecfcfaff04cb0327f907fead8945ffc47d9407f41ff952"}, + {file = "rpds_py-0.21.0-cp312-none-win_amd64.whl", hash = "sha256:e860f065cc4ea6f256d6f411aba4b1251255366e48e972f8a347cf88077b24fd"}, + {file = "rpds_py-0.21.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ee4eafd77cc98d355a0d02f263efc0d3ae3ce4a7c24740010a8b4012bbb24937"}, + {file = "rpds_py-0.21.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:688c93b77e468d72579351a84b95f976bd7b3e84aa6686be6497045ba84be560"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c38dbf31c57032667dd5a2f0568ccde66e868e8f78d5a0d27dcc56d70f3fcd3b"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2d6129137f43f7fa02d41542ffff4871d4aefa724a5fe38e2c31a4e0fd343fb0"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520ed8b99b0bf86a176271f6fe23024323862ac674b1ce5b02a72bfeff3fff44"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaeb25ccfb9b9014a10eaf70904ebf3f79faaa8e60e99e19eef9f478651b9b74"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af04ac89c738e0f0f1b913918024c3eab6e3ace989518ea838807177d38a2e94"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9b76e2afd585803c53c5b29e992ecd183f68285b62fe2668383a18e74abe7a3"}, + {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5afb5efde74c54724e1a01118c6e5c15e54e642c42a1ba588ab1f03544ac8c7a"}, + {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:52c041802a6efa625ea18027a0723676a778869481d16803481ef6cc02ea8cb3"}, + {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee1e4fc267b437bb89990b2f2abf6c25765b89b72dd4a11e21934df449e0c976"}, + {file = "rpds_py-0.21.0-cp313-none-win32.whl", hash = 
"sha256:0c025820b78817db6a76413fff6866790786c38f95ea3f3d3c93dbb73b632202"}, + {file = "rpds_py-0.21.0-cp313-none-win_amd64.whl", hash = "sha256:320c808df533695326610a1b6a0a6e98f033e49de55d7dc36a13c8a30cfa756e"}, + {file = "rpds_py-0.21.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:2c51d99c30091f72a3c5d126fad26236c3f75716b8b5e5cf8effb18889ced928"}, + {file = "rpds_py-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cbd7504a10b0955ea287114f003b7ad62330c9e65ba012c6223dba646f6ffd05"}, + {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6dcc4949be728ede49e6244eabd04064336012b37f5c2200e8ec8eb2988b209c"}, + {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f414da5c51bf350e4b7960644617c130140423882305f7574b6cf65a3081cecb"}, + {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9afe42102b40007f588666bc7de82451e10c6788f6f70984629db193849dced1"}, + {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b929c2bb6e29ab31f12a1117c39f7e6d6450419ab7464a4ea9b0b417174f044"}, + {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8404b3717da03cbf773a1d275d01fec84ea007754ed380f63dfc24fb76ce4592"}, + {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e12bb09678f38b7597b8346983d2323a6482dcd59e423d9448108c1be37cac9d"}, + {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:58a0e345be4b18e6b8501d3b0aa540dad90caeed814c515e5206bb2ec26736fd"}, + {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c3761f62fcfccf0864cc4665b6e7c3f0c626f0380b41b8bd1ce322103fa3ef87"}, + {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c2b2f71c6ad6c2e4fc9ed9401080badd1469fa9889657ec3abea42a3d6b2e1ed"}, + {file = "rpds_py-0.21.0-cp39-none-win32.whl", hash = "sha256:b21747f79f360e790525e6f6438c7569ddbfb1b3197b9e65043f25c3c9b489d8"}, + {file = "rpds_py-0.21.0-cp39-none-win_amd64.whl", hash = "sha256:0626238a43152918f9e72ede9a3b6ccc9e299adc8ade0d67c5e142d564c9a83d"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6b4ef7725386dc0762857097f6b7266a6cdd62bfd209664da6712cb26acef035"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6bc0e697d4d79ab1aacbf20ee5f0df80359ecf55db33ff41481cf3e24f206919"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da52d62a96e61c1c444f3998c434e8b263c384f6d68aca8274d2e08d1906325c"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:98e4fe5db40db87ce1c65031463a760ec7906ab230ad2249b4572c2fc3ef1f9f"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30bdc973f10d28e0337f71d202ff29345320f8bc49a31c90e6c257e1ccef4333"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:faa5e8496c530f9c71f2b4e1c49758b06e5f4055e17144906245c99fa6d45356"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32eb88c30b6a4f0605508023b7141d043a79b14acb3b969aa0b4f99b25bc7d4a"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:a89a8ce9e4e75aeb7fa5d8ad0f3fecdee813802592f4f46a15754dcb2fd6b061"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:241e6c125568493f553c3d0fdbb38c74babf54b45cef86439d4cd97ff8feb34d"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:3b766a9f57663396e4f34f5140b3595b233a7b146e94777b97a8413a1da1be18"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:af4a644bf890f56e41e74be7d34e9511e4954894d544ec6b8efe1e21a1a8da6c"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3e30a69a706e8ea20444b98a49f386c17b26f860aa9245329bab0851ed100677"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:031819f906bb146561af051c7cef4ba2003d28cff07efacef59da973ff7969ba"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b876f2bc27ab5954e2fd88890c071bd0ed18b9c50f6ec3de3c50a5ece612f7a6"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc5695c321e518d9f03b7ea6abb5ea3af4567766f9852ad1560f501b17588c7b"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b4de1da871b5c0fd5537b26a6fc6814c3cc05cabe0c941db6e9044ffbb12f04a"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:878f6fea96621fda5303a2867887686d7a198d9e0f8a40be100a63f5d60c88c9"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8eeec67590e94189f434c6d11c426892e396ae59e4801d17a93ac96b8c02a6c"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ff2eba7f6c0cb523d7e9cff0903f2fe1feff8f0b2ceb6bd71c0e20a4dcee271"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a429b99337062877d7875e4ff1a51fe788424d522bd64a8c0a20ef3021fdb6ed"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d167e4dbbdac48bd58893c7e446684ad5d425b407f9336e04ab52e8b9194e2ed"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:4eb2de8a147ffe0626bfdc275fc6563aa7bf4b6db59cf0d44f0ccd6ca625a24e"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e78868e98f34f34a88e23ee9ccaeeec460e4eaf6db16d51d7a9b883e5e785a5e"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4991ca61656e3160cdaca4851151fd3f4a92e9eba5c7a530ab030d6aee96ec89"}, + {file = "rpds_py-0.21.0.tar.gz", hash = "sha256:ed6378c9d66d0de903763e7706383d60c33829581f0adff47b6535f1802fa6db"}, +] + +[[package]] +name = "ruff" +version = "0.7.4" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.7.4-py3-none-linux_armv6l.whl", hash = "sha256:a4919925e7684a3f18e18243cd6bea7cfb8e968a6eaa8437971f681b7ec51478"}, + {file = "ruff-0.7.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cfb365c135b830778dda8c04fb7d4280ed0b984e1aec27f574445231e20d6c63"}, + {file = "ruff-0.7.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:63a569b36bc66fbadec5beaa539dd81e0527cb258b94e29e0531ce41bacc1f20"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d06218747d361d06fd2fdac734e7fa92df36df93035db3dc2ad7aa9852cb109"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0cea28d0944f74ebc33e9f934238f15c758841f9f5edd180b5315c203293452"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80094ecd4793c68b2571b128f91754d60f692d64bc0d7272ec9197fdd09bf9ea"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:997512325c6620d1c4c2b15db49ef59543ef9cd0f4aa8065ec2ae5103cedc7e7"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00b4cf3a6b5fad6d1a66e7574d78956bbd09abfd6c8a997798f01f5da3d46a05"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7dbdc7d8274e1422722933d1edddfdc65b4336abf0b16dfcb9dedd6e6a517d06"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e92dfb5f00eaedb1501b2f906ccabfd67b2355bdf117fea9719fc99ac2145bc"}, + {file = "ruff-0.7.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3bd726099f277d735dc38900b6a8d6cf070f80828877941983a57bca1cd92172"}, + {file = "ruff-0.7.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2e32829c429dd081ee5ba39aef436603e5b22335c3d3fff013cd585806a6486a"}, + {file = "ruff-0.7.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:662a63b4971807623f6f90c1fb664613f67cc182dc4d991471c23c541fee62dd"}, + {file = "ruff-0.7.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:876f5e09eaae3eb76814c1d3b68879891d6fde4824c015d48e7a7da4cf066a3a"}, + {file = "ruff-0.7.4-py3-none-win32.whl", hash = "sha256:75c53f54904be42dd52a548728a5b572344b50d9b2873d13a3f8c5e3b91f5cac"}, + {file = "ruff-0.7.4-py3-none-win_amd64.whl", hash = "sha256:745775c7b39f914238ed1f1b0bebed0b9155a17cd8bc0b08d3c87e4703b990d6"}, + {file = "ruff-0.7.4-py3-none-win_arm64.whl", hash = "sha256:11bff065102c3ae9d3ea4dc9ecdfe5a5171349cdd0787c1fc64761212fc9cf1f"}, + {file = "ruff-0.7.4.tar.gz", hash = "sha256:cd12e35031f5af6b9b93715d8c4f40360070b2041f81273d0527683d5708fce2"}, +] + +[[package]] +name = "scalar-fastapi" +version = "1.0.3" +description = "This plugin provides an easy way to render a beautiful API reference based on a OpenAPI/Swagger file with FastAPI." 
+optional = false +python-versions = "*" +files = [ + {file = "scalar_fastapi-1.0.3-py3-none-any.whl", hash = "sha256:4a47a140795097ad034518ce0e32940f2c54f0f4bc60e4c3289ca30a7e6f954d"}, + {file = "scalar_fastapi-1.0.3.tar.gz", hash = "sha256:9e9cb8398e298cd435a0171eebe1675b8899eb21e47c238db0d48783143f0ffb"}, +] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "soupsieve" +version = "2.6" +description = "A modern CSS selector implementation for Beautiful Soup." +optional = false +python-versions = ">=3.8" +files = [ + {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, + {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, +] + +[[package]] +name = "sqla-wrapper" +version = "6.0.0" +description = "A framework-independent modern wrapper for SQLAlchemy & Alembic" +optional = false +python-versions = ">=3.9,<4.0" +files = [ + {file = "sqla_wrapper-6.0.0-py3-none-any.whl", hash = "sha256:39665c54c310a40832bf79db36c6c4230d5654a73b109847203cd6b98c93af95"}, + {file = "sqla_wrapper-6.0.0.tar.gz", hash = "sha256:4159aed7aa391d5a664574230fc2c9f6185eb7f1d10852495aadecff55864ac7"}, +] + +[package.dependencies] +alembic = ">=1.9,<2.0" +sqlalchemy = ">=2.0,<3.0" + +[[package]] +name = "sqlalchemy" +version = "2.0.36" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, + {file = 
"SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"}, + {file = 
"SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:be9812b766cad94a25bc63bec11f88c4ad3629a0cec1cd5d4ba48dc23860486b"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50aae840ebbd6cdd41af1c14590e5741665e5272d2fee999306673a1bb1fdb4d"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4557e1f11c5f653ebfdd924f3f9d5ebfc718283b0b9beebaa5dd6b77ec290971"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07b441f7d03b9a66299ce7ccf3ef2900abc81c0db434f42a5694a37bd73870f2"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:28120ef39c92c2dd60f2721af9328479516844c6b550b077ca450c7d7dc68575"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-win32.whl", hash = "sha256:b81ee3d84803fd42d0b154cb6892ae57ea6b7c55d8359a02379965706c7efe6c"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-win_amd64.whl", hash = "sha256:f942a799516184c855e1a32fbc7b29d7e571b52612647866d4ec1c3242578fcb"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3d6718667da04294d7df1670d70eeddd414f313738d20a6f1d1f379e3139a545"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:72c28b84b174ce8af8504ca28ae9347d317f9dba3999e5981a3cd441f3712e24"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b11d0cfdd2b095e7b0686cf5fabeb9c67fae5b06d265d8180715b8cfa86522e3"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e32092c47011d113dc01ab3e1d3ce9f006a47223b18422c5c0d150af13a00687"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6a440293d802d3011028e14e4226da1434b373cbaf4a4bbb63f845761a708346"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c54a1e53a0c308a8e8a7dffb59097bff7facda27c70c286f005327f21b2bd6b1"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-win32.whl", hash = "sha256:1e0d612a17581b6616ff03c8e3d5eff7452f34655c901f75d62bd86449d9750e"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-win_amd64.whl", hash = "sha256:8958b10490125124463095bbdadda5aa22ec799f91958e410438ad6c97a7b793"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = 
"sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, + {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, + {file = "sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "srt" +version = "3.5.3" +description = "A tiny library for parsing, modifying, and composing SRT files." +optional = false +python-versions = ">=2.7" +files = [ + {file = "srt-3.5.3.tar.gz", hash = "sha256:4884315043a4f0740fd1f878ed6caa376ac06d70e135f306a6dc44632eed0cc0"}, +] + +[[package]] +name = "starlette" +version = "0.37.2" +description = "The little ASGI library that shines." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"}, + {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"}, +] + +[package.dependencies] +anyio = ">=3.4.0,<5" + +[package.extras] +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] + +[[package]] +name = "stevedore" +version = "5.4.0" +description = "Manage dynamic plugins for Python applications" +optional = false +python-versions = ">=3.9" +files = [ + {file = "stevedore-5.4.0-py3-none-any.whl", hash = "sha256:b0be3c4748b3ea7b854b265dcb4caa891015e442416422be16f8b31756107857"}, + {file = "stevedore-5.4.0.tar.gz", hash = "sha256:79e92235ecb828fe952b6b8b0c6c87863248631922c8e8e0fa5b17b232c4514d"}, +] + +[package.dependencies] +pbr = ">=2.0.0" + +[[package]] +name = "subliminal" +version = "2.2.1" +description = "Subtitles, faster than your thoughts" +optional = false +python-versions = ">=3.8" +files = [ + {file = "subliminal-2.2.1-py3-none-any.whl", hash = "sha256:421a71f2e3f604e5dffb551b2a51d14500c7615d7eaf16c23e713d7ad295504c"}, + {file = "subliminal-2.2.1.tar.gz", hash = "sha256:2ed6024a07bbb3c68fe3db76374244ad91adfca9d93fc24d3ddb9ef61825756e"}, +] + +[package.dependencies] +babelfish = ">=0.6.1" +beautifulsoup4 = ">=4.4.0" +chardet = ">=5.0" +click = ">=8.0" +click-option-group = ">=0.5.6" +"dogpile.cache" = ">=1.0" +enzyme = ">=0.5.0" +guessit = ">=3.0.0" +platformdirs = ">=3" +pysubs2 = ">=1.7" +rarfile = ">=2.7" +requests = ">=2.0" +srt = ">=3.5" +stevedore = ">=3.0" +tomli = ">=2" + +[package.extras] +dev = ["doc8", "mypy", "ruff", "tox", "typos", "validate-pyproject"] +docs = ["sphinx", "sphinx-rtd-theme", "sphinxcontrib-programoutput"] +test = ["importlib-metadata (>=4.6)", "lxml", "mypy", "pytest (>=6.0)", "pytest-cov", "pytest-flakes", "sympy", "vcrpy (>=1.6.1)"] + +[[package]] +name = "testcontainers" +version = "4.8.2" +description = "Python library for throwaway instances of anything that can run in a Docker container" +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "testcontainers-4.8.2-py3-none-any.whl", hash = "sha256:9e19af077cd96e1957c13ee466f1f32905bc6c5bc1bc98643eb18be1a989bfb0"}, + {file = "testcontainers-4.8.2.tar.gz", hash = "sha256:dd4a6a2ea09e3c3ecd39e180b6548105929d0bb78d665ce9919cb3f8c98f9853"}, +] + +[package.dependencies] +docker = "*" +typing-extensions = "*" +urllib3 = "*" +wrapt = "*" + +[package.extras] +arangodb = ["python-arango (>=7.8,<8.0)"] +aws = ["boto3", "httpx"] +azurite = ["azure-storage-blob (>=12.19,<13.0)"] +chroma = ["chromadb-client"] +clickhouse = ["clickhouse-driver"] +cosmosdb = ["azure-cosmos"] +db2 = ["ibm_db_sa", "sqlalchemy"] +generic = ["httpx", "redis"] +google = ["google-cloud-datastore (>=2)", "google-cloud-pubsub (>=2)"] +influxdb = ["influxdb", "influxdb-client"] +k3s = ["kubernetes", "pyyaml"] +keycloak = ["python-keycloak"] +localstack = ["boto3"] +mailpit = ["cryptography"] +minio = ["minio"] +mongodb = ["pymongo"] +mssql = ["pymssql", "sqlalchemy"] +mysql = ["pymysql[rsa]", "sqlalchemy"] +nats = ["nats-py"] +neo4j = ["neo4j"] +opensearch = ["opensearch-py"] +oracle = ["oracledb", "sqlalchemy"] +oracle-free = ["oracledb", "sqlalchemy"] +qdrant = ["qdrant-client"] +rabbitmq = ["pika"] +redis = ["redis"] +registry = ["bcrypt"] +scylla = ["cassandra-driver (==3.29.1)"] +selenium = ["selenium"] +sftp = ["cryptography"] 
+test-module-import = ["httpx"] +trino = ["trino"] +weaviate = ["weaviate-client (>=4.5.4,<5.0.0)"] + +[[package]] +name = "tomli" +version = "2.1.0" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391"}, + {file = "tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8"}, +] + +[[package]] +name = "types-python-dateutil" +version = "2.9.0.20241003" +description = "Typing stubs for python-dateutil" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-python-dateutil-2.9.0.20241003.tar.gz", hash = "sha256:58cb85449b2a56d6684e41aeefb4c4280631246a0da1a719bdbe6f3fb0317446"}, + {file = "types_python_dateutil-2.9.0.20241003-py3-none-any.whl", hash = "sha256:250e1d8e80e7bbc3a6c99b907762711d1a1cdd00e978ad39cb5940f6f0a87f3d"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "tzdata" +version = "2024.2" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, + {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, +] + +[[package]] +name = "tzlocal" +version = "5.2" +description = "tzinfo object for the local timezone" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"}, + {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"}, +] + +[package.dependencies] +tzdata = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "uvicorn" +version = "0.30.6" +description = "The lightning-fast ASGI server." +optional = false +python-versions = ">=3.8" +files = [ + {file = "uvicorn-0.30.6-py3-none-any.whl", hash = "sha256:65fd46fe3fda5bdc1b03b94eb634923ff18cd35b2f084813ea79d1f103f711b5"}, + {file = "uvicorn-0.30.6.tar.gz", hash = "sha256:4b15decdda1e72be08209e860a1e10e92439ad5b97cf44cc945fcbee66fc5788"}, +] + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""} +h11 = ">=0.8" +httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} +python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} + +[package.extras] +standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "uvloop" +version = "0.21.0" +description = "Fast implementation of asyncio event loop on top of libuv" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, + {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, + {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26"}, + {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb"}, + {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f"}, + {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c"}, + {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8"}, + {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0"}, + {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e"}, + {file = 
"uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb"}, + {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6"}, + {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d"}, + {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c"}, + {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2"}, + {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d"}, + {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc"}, + {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb"}, + {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f"}, + {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281"}, + {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af"}, + {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6"}, + {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816"}, + {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc"}, + {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553"}, + {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:17df489689befc72c39a08359efac29bbee8eee5209650d4b9f34df73d22e414"}, + {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc09f0ff191e61c2d592a752423c767b4ebb2986daa9ed62908e2b1b9a9ae206"}, + {file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0ce1b49560b1d2d8a2977e3ba4afb2414fb46b86a1b64056bc4ab929efdafbe"}, + {file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e678ad6fe52af2c58d2ae3c73dc85524ba8abe637f134bf3564ed07f555c5e79"}, + {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:460def4412e473896ef179a1671b40c039c7012184b627898eea5072ef6f017a"}, + {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:10da8046cc4a8f12c91a1c39d1dd1585c41162a15caaef165c2174db9ef18bdc"}, + {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c097078b8031190c934ed0ebfee8cc5f9ba9642e6eb88322b9958b649750f72b"}, + {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:46923b0b5ee7fc0020bef24afe7836cb068f5050ca04caf6b487c513dc1a20b2"}, + {file = 
"uvloop-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53e420a3afe22cdcf2a0f4846e377d16e718bc70103d7088a4f7623567ba5fb0"}, + {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb67cdbc0e483da00af0b2c3cdad4b7c61ceb1ee0f33fe00e09c81e3a6cb75"}, + {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:221f4f2a1f46032b403bf3be628011caf75428ee3cc204a22addf96f586b19fd"}, + {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2d1f581393673ce119355d56da84fe1dd9d2bb8b3d13ce792524e1607139feff"}, + {file = "uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3"}, +] + +[package.extras] +dev = ["Cython (>=3.0,<4.0)", "setuptools (>=60)"] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["aiohttp (>=3.10.5)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] + +[[package]] +name = "watchfiles" +version = "0.24.0" +description = "Simple, modern and high performance file watching and code reload in python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "watchfiles-0.24.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:083dc77dbdeef09fa44bb0f4d1df571d2e12d8a8f985dccde71ac3ac9ac067a0"}, + {file = "watchfiles-0.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e94e98c7cb94cfa6e071d401ea3342767f28eb5a06a58fafdc0d2a4974f4f35c"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82ae557a8c037c42a6ef26c494d0631cacca040934b101d001100ed93d43f361"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:acbfa31e315a8f14fe33e3542cbcafc55703b8f5dcbb7c1eecd30f141df50db3"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b74fdffce9dfcf2dc296dec8743e5b0332d15df19ae464f0e249aa871fc1c571"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:449f43f49c8ddca87c6b3980c9284cab6bd1f5c9d9a2b00012adaaccd5e7decd"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4abf4ad269856618f82dee296ac66b0cd1d71450fc3c98532d93798e73399b7a"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f895d785eb6164678ff4bb5cc60c5996b3ee6df3edb28dcdeba86a13ea0465e"}, + {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ae3e208b31be8ce7f4c2c0034f33406dd24fbce3467f77223d10cd86778471c"}, + {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2efec17819b0046dde35d13fb8ac7a3ad877af41ae4640f4109d9154ed30a188"}, + {file = "watchfiles-0.24.0-cp310-none-win32.whl", hash = "sha256:6bdcfa3cd6fdbdd1a068a52820f46a815401cbc2cb187dd006cb076675e7b735"}, + {file = "watchfiles-0.24.0-cp310-none-win_amd64.whl", hash = "sha256:54ca90a9ae6597ae6dc00e7ed0a040ef723f84ec517d3e7ce13e63e4bc82fa04"}, + {file = "watchfiles-0.24.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:bdcd5538e27f188dd3c804b4a8d5f52a7fc7f87e7fd6b374b8e36a4ca03db428"}, + {file = "watchfiles-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2dadf8a8014fde6addfd3c379e6ed1a981c8f0a48292d662e27cabfe4239c83c"}, + {file = 
"watchfiles-0.24.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6509ed3f467b79d95fc62a98229f79b1a60d1b93f101e1c61d10c95a46a84f43"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8360f7314a070c30e4c976b183d1d8d1585a4a50c5cb603f431cebcbb4f66327"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:316449aefacf40147a9efaf3bd7c9bdd35aaba9ac5d708bd1eb5763c9a02bef5"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73bde715f940bea845a95247ea3e5eb17769ba1010efdc938ffcb967c634fa61"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3770e260b18e7f4e576edca4c0a639f704088602e0bc921c5c2e721e3acb8d15"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa0fd7248cf533c259e59dc593a60973a73e881162b1a2f73360547132742823"}, + {file = "watchfiles-0.24.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d7a2e3b7f5703ffbd500dabdefcbc9eafeff4b9444bbdd5d83d79eedf8428fab"}, + {file = "watchfiles-0.24.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d831ee0a50946d24a53821819b2327d5751b0c938b12c0653ea5be7dea9c82ec"}, + {file = "watchfiles-0.24.0-cp311-none-win32.whl", hash = "sha256:49d617df841a63b4445790a254013aea2120357ccacbed00253f9c2b5dc24e2d"}, + {file = "watchfiles-0.24.0-cp311-none-win_amd64.whl", hash = "sha256:d3dcb774e3568477275cc76554b5a565024b8ba3a0322f77c246bc7111c5bb9c"}, + {file = "watchfiles-0.24.0-cp311-none-win_arm64.whl", hash = "sha256:9301c689051a4857d5b10777da23fafb8e8e921bcf3abe6448a058d27fb67633"}, + {file = "watchfiles-0.24.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7211b463695d1e995ca3feb38b69227e46dbd03947172585ecb0588f19b0d87a"}, + {file = "watchfiles-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4b8693502d1967b00f2fb82fc1e744df128ba22f530e15b763c8d82baee15370"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdab9555053399318b953a1fe1f586e945bc8d635ce9d05e617fd9fe3a4687d6"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:34e19e56d68b0dad5cff62273107cf5d9fbaf9d75c46277aa5d803b3ef8a9e9b"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41face41f036fee09eba33a5b53a73e9a43d5cb2c53dad8e61fa6c9f91b5a51e"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5148c2f1ea043db13ce9b0c28456e18ecc8f14f41325aa624314095b6aa2e9ea"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e4bd963a935aaf40b625c2499f3f4f6bbd0c3776f6d3bc7c853d04824ff1c9f"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c79d7719d027b7a42817c5d96461a99b6a49979c143839fc37aa5748c322f234"}, + {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:32aa53a9a63b7f01ed32e316e354e81e9da0e6267435c7243bf8ae0f10b428ef"}, + {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce72dba6a20e39a0c628258b5c308779b8697f7676c254a845715e2a1039b968"}, + {file = "watchfiles-0.24.0-cp312-none-win32.whl", hash = "sha256:d9018153cf57fc302a2a34cb7564870b859ed9a732d16b41a9b5cb2ebed2d444"}, + {file = 
"watchfiles-0.24.0-cp312-none-win_amd64.whl", hash = "sha256:551ec3ee2a3ac9cbcf48a4ec76e42c2ef938a7e905a35b42a1267fa4b1645896"}, + {file = "watchfiles-0.24.0-cp312-none-win_arm64.whl", hash = "sha256:b52a65e4ea43c6d149c5f8ddb0bef8d4a1e779b77591a458a893eb416624a418"}, + {file = "watchfiles-0.24.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:3d2e3ab79a1771c530233cadfd277fcc762656d50836c77abb2e5e72b88e3a48"}, + {file = "watchfiles-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327763da824817b38ad125dcd97595f942d720d32d879f6c4ddf843e3da3fe90"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd82010f8ab451dabe36054a1622870166a67cf3fce894f68895db6f74bbdc94"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d64ba08db72e5dfd5c33be1e1e687d5e4fcce09219e8aee893a4862034081d4e"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1cf1f6dd7825053f3d98f6d33f6464ebdd9ee95acd74ba2c34e183086900a827"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43e3e37c15a8b6fe00c1bce2473cfa8eb3484bbeecf3aefbf259227e487a03df"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88bcd4d0fe1d8ff43675360a72def210ebad3f3f72cabfeac08d825d2639b4ab"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:999928c6434372fde16c8f27143d3e97201160b48a614071261701615a2a156f"}, + {file = "watchfiles-0.24.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:30bbd525c3262fd9f4b1865cb8d88e21161366561cd7c9e1194819e0a33ea86b"}, + {file = "watchfiles-0.24.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:edf71b01dec9f766fb285b73930f95f730bb0943500ba0566ae234b5c1618c18"}, + {file = "watchfiles-0.24.0-cp313-none-win32.whl", hash = "sha256:f4c96283fca3ee09fb044f02156d9570d156698bc3734252175a38f0e8975f07"}, + {file = "watchfiles-0.24.0-cp313-none-win_amd64.whl", hash = "sha256:a974231b4fdd1bb7f62064a0565a6b107d27d21d9acb50c484d2cdba515b9366"}, + {file = "watchfiles-0.24.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ee82c98bed9d97cd2f53bdb035e619309a098ea53ce525833e26b93f673bc318"}, + {file = "watchfiles-0.24.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fd92bbaa2ecdb7864b7600dcdb6f2f1db6e0346ed425fbd01085be04c63f0b05"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f83df90191d67af5a831da3a33dd7628b02a95450e168785586ed51e6d28943c"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fca9433a45f18b7c779d2bae7beeec4f740d28b788b117a48368d95a3233ed83"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b995bfa6bf01a9e09b884077a6d37070464b529d8682d7691c2d3b540d357a0c"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed9aba6e01ff6f2e8285e5aa4154e2970068fe0fc0998c4380d0e6278222269b"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5171ef898299c657685306d8e1478a45e9303ddcd8ac5fed5bd52ad4ae0b69b"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4933a508d2f78099162da473841c652ad0de892719043d3f07cc83b33dfd9d91"}, + {file = 
"watchfiles-0.24.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95cf3b95ea665ab03f5a54765fa41abf0529dbaf372c3b83d91ad2cfa695779b"}, + {file = "watchfiles-0.24.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:01def80eb62bd5db99a798d5e1f5f940ca0a05986dcfae21d833af7a46f7ee22"}, + {file = "watchfiles-0.24.0-cp38-none-win32.whl", hash = "sha256:4d28cea3c976499475f5b7a2fec6b3a36208656963c1a856d328aeae056fc5c1"}, + {file = "watchfiles-0.24.0-cp38-none-win_amd64.whl", hash = "sha256:21ab23fdc1208086d99ad3f69c231ba265628014d4aed31d4e8746bd59e88cd1"}, + {file = "watchfiles-0.24.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b665caeeda58625c3946ad7308fbd88a086ee51ccb706307e5b1fa91556ac886"}, + {file = "watchfiles-0.24.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5c51749f3e4e269231510da426ce4a44beb98db2dce9097225c338f815b05d4f"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b2509f08761f29a0fdad35f7e1638b8ab1adfa2666d41b794090361fb8b855"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a60e2bf9dc6afe7f743e7c9b149d1fdd6dbf35153c78fe3a14ae1a9aee3d98b"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7d9b87c4c55e3ea8881dfcbf6d61ea6775fffed1fedffaa60bd047d3c08c430"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:78470906a6be5199524641f538bd2c56bb809cd4bf29a566a75051610bc982c3"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07cdef0c84c03375f4e24642ef8d8178e533596b229d32d2bbd69e5128ede02a"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d337193bbf3e45171c8025e291530fb7548a93c45253897cd764a6a71c937ed9"}, + {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ec39698c45b11d9694a1b635a70946a5bad066b593af863460a8e600f0dff1ca"}, + {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e28d91ef48eab0afb939fa446d8ebe77e2f7593f5f463fd2bb2b14132f95b6e"}, + {file = "watchfiles-0.24.0-cp39-none-win32.whl", hash = "sha256:7138eff8baa883aeaa074359daabb8b6c1e73ffe69d5accdc907d62e50b1c0da"}, + {file = "watchfiles-0.24.0-cp39-none-win_amd64.whl", hash = "sha256:b3ef2c69c655db63deb96b3c3e587084612f9b1fa983df5e0c3379d41307467f"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:632676574429bee8c26be8af52af20e0c718cc7f5f67f3fb658c71928ccd4f7f"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a2a9891723a735d3e2540651184be6fd5b96880c08ffe1a98bae5017e65b544b"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7fa2bc0efef3e209a8199fd111b8969fe9db9c711acc46636686331eda7dd4"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01550ccf1d0aed6ea375ef259706af76ad009ef5b0203a3a4cce0f6024f9b68a"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:96619302d4374de5e2345b2b622dc481257a99431277662c30f606f3e22f42be"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:85d5f0c7771dcc7a26c7a27145059b6bb0ce06e4e751ed76cdf123d7039b60b5"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:951088d12d339690a92cef2ec5d3cfd957692834c72ffd570ea76a6790222777"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49fb58bcaa343fedc6a9e91f90195b20ccb3135447dc9e4e2570c3a39565853e"}, + {file = "watchfiles-0.24.0.tar.gz", hash = "sha256:afb72325b74fa7a428c009c1b8be4b4d7c2afedafb2982827ef2156646df2fe1"}, +] + +[package.dependencies] +anyio = ">=3.0.0" + +[[package]] +name = "websockets" +version = "14.1" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +optional = false +python-versions = ">=3.9" +files = [ + {file = "websockets-14.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a0adf84bc2e7c86e8a202537b4fd50e6f7f0e4a6b6bf64d7ccb96c4cd3330b29"}, + {file = "websockets-14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90b5d9dfbb6d07a84ed3e696012610b6da074d97453bd01e0e30744b472c8179"}, + {file = "websockets-14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2177ee3901075167f01c5e335a6685e71b162a54a89a56001f1c3e9e3d2ad250"}, + {file = "websockets-14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f14a96a0034a27f9d47fd9788913924c89612225878f8078bb9d55f859272b0"}, + {file = "websockets-14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f874ba705deea77bcf64a9da42c1f5fc2466d8f14daf410bc7d4ceae0a9fcb0"}, + {file = "websockets-14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9607b9a442392e690a57909c362811184ea429585a71061cd5d3c2b98065c199"}, + {file = "websockets-14.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bea45f19b7ca000380fbd4e02552be86343080120d074b87f25593ce1700ad58"}, + {file = "websockets-14.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:219c8187b3ceeadbf2afcf0f25a4918d02da7b944d703b97d12fb01510869078"}, + {file = "websockets-14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ad2ab2547761d79926effe63de21479dfaf29834c50f98c4bf5b5480b5838434"}, + {file = "websockets-14.1-cp310-cp310-win32.whl", hash = "sha256:1288369a6a84e81b90da5dbed48610cd7e5d60af62df9851ed1d1d23a9069f10"}, + {file = "websockets-14.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0744623852f1497d825a49a99bfbec9bea4f3f946df6eb9d8a2f0c37a2fec2e"}, + {file = "websockets-14.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:449d77d636f8d9c17952628cc7e3b8faf6e92a17ec581ec0c0256300717e1512"}, + {file = "websockets-14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a35f704be14768cea9790d921c2c1cc4fc52700410b1c10948511039be824aac"}, + {file = "websockets-14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b1f3628a0510bd58968c0f60447e7a692933589b791a6b572fcef374053ca280"}, + {file = "websockets-14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c3deac3748ec73ef24fc7be0b68220d14d47d6647d2f85b2771cb35ea847aa1"}, + {file = "websockets-14.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7048eb4415d46368ef29d32133134c513f507fff7d953c18c91104738a68c3b3"}, + {file = "websockets-14.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6cf0ad281c979306a6a34242b371e90e891bce504509fb6bb5246bbbf31e7b6"}, + {file = "websockets-14.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cc1fc87428c1d18b643479caa7b15db7d544652e5bf610513d4a3478dbe823d0"}, + {file = 
"websockets-14.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f95ba34d71e2fa0c5d225bde3b3bdb152e957150100e75c86bc7f3964c450d89"}, + {file = "websockets-14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9481a6de29105d73cf4515f2bef8eb71e17ac184c19d0b9918a3701c6c9c4f23"}, + {file = "websockets-14.1-cp311-cp311-win32.whl", hash = "sha256:368a05465f49c5949e27afd6fbe0a77ce53082185bbb2ac096a3a8afaf4de52e"}, + {file = "websockets-14.1-cp311-cp311-win_amd64.whl", hash = "sha256:6d24fc337fc055c9e83414c94e1ee0dee902a486d19d2a7f0929e49d7d604b09"}, + {file = "websockets-14.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ed907449fe5e021933e46a3e65d651f641975a768d0649fee59f10c2985529ed"}, + {file = "websockets-14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:87e31011b5c14a33b29f17eb48932e63e1dcd3fa31d72209848652310d3d1f0d"}, + {file = "websockets-14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bc6ccf7d54c02ae47a48ddf9414c54d48af9c01076a2e1023e3b486b6e72c707"}, + {file = "websockets-14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9777564c0a72a1d457f0848977a1cbe15cfa75fa2f67ce267441e465717dcf1a"}, + {file = "websockets-14.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a655bde548ca98f55b43711b0ceefd2a88a71af6350b0c168aa77562104f3f45"}, + {file = "websockets-14.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3dfff83ca578cada2d19e665e9c8368e1598d4e787422a460ec70e531dbdd58"}, + {file = "websockets-14.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6a6c9bcf7cdc0fd41cc7b7944447982e8acfd9f0d560ea6d6845428ed0562058"}, + {file = "websockets-14.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4b6caec8576e760f2c7dd878ba817653144d5f369200b6ddf9771d64385b84d4"}, + {file = "websockets-14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eb6d38971c800ff02e4a6afd791bbe3b923a9a57ca9aeab7314c21c84bf9ff05"}, + {file = "websockets-14.1-cp312-cp312-win32.whl", hash = "sha256:1d045cbe1358d76b24d5e20e7b1878efe578d9897a25c24e6006eef788c0fdf0"}, + {file = "websockets-14.1-cp312-cp312-win_amd64.whl", hash = "sha256:90f4c7a069c733d95c308380aae314f2cb45bd8a904fb03eb36d1a4983a4993f"}, + {file = "websockets-14.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:3630b670d5057cd9e08b9c4dab6493670e8e762a24c2c94ef312783870736ab9"}, + {file = "websockets-14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:36ebd71db3b89e1f7b1a5deaa341a654852c3518ea7a8ddfdf69cc66acc2db1b"}, + {file = "websockets-14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5b918d288958dc3fa1c5a0b9aa3256cb2b2b84c54407f4813c45d52267600cd3"}, + {file = "websockets-14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00fe5da3f037041da1ee0cf8e308374e236883f9842c7c465aa65098b1c9af59"}, + {file = "websockets-14.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8149a0f5a72ca36720981418eeffeb5c2729ea55fa179091c81a0910a114a5d2"}, + {file = "websockets-14.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77569d19a13015e840b81550922056acabc25e3f52782625bc6843cfa034e1da"}, + {file = "websockets-14.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cf5201a04550136ef870aa60ad3d29d2a59e452a7f96b94193bee6d73b8ad9a9"}, + {file = "websockets-14.1-cp313-cp313-musllinux_1_2_i686.whl", 
hash = "sha256:88cf9163ef674b5be5736a584c999e98daf3aabac6e536e43286eb74c126b9c7"}, + {file = "websockets-14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:836bef7ae338a072e9d1863502026f01b14027250a4545672673057997d5c05a"}, + {file = "websockets-14.1-cp313-cp313-win32.whl", hash = "sha256:0d4290d559d68288da9f444089fd82490c8d2744309113fc26e2da6e48b65da6"}, + {file = "websockets-14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8621a07991add373c3c5c2cf89e1d277e49dc82ed72c75e3afc74bd0acc446f0"}, + {file = "websockets-14.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:01bb2d4f0a6d04538d3c5dfd27c0643269656c28045a53439cbf1c004f90897a"}, + {file = "websockets-14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:414ffe86f4d6f434a8c3b7913655a1a5383b617f9bf38720e7c0799fac3ab1c6"}, + {file = "websockets-14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8fda642151d5affdee8a430bd85496f2e2517be3a2b9d2484d633d5712b15c56"}, + {file = "websockets-14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd7c11968bc3860d5c78577f0dbc535257ccec41750675d58d8dc66aa47fe52c"}, + {file = "websockets-14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a032855dc7db987dff813583d04f4950d14326665d7e714d584560b140ae6b8b"}, + {file = "websockets-14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7e7ea2f782408c32d86b87a0d2c1fd8871b0399dd762364c731d86c86069a78"}, + {file = "websockets-14.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:39450e6215f7d9f6f7bc2a6da21d79374729f5d052333da4d5825af8a97e6735"}, + {file = "websockets-14.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ceada5be22fa5a5a4cdeec74e761c2ee7db287208f54c718f2df4b7e200b8d4a"}, + {file = "websockets-14.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3fc753451d471cff90b8f467a1fc0ae64031cf2d81b7b34e1811b7e2691bc4bc"}, + {file = "websockets-14.1-cp39-cp39-win32.whl", hash = "sha256:14839f54786987ccd9d03ed7f334baec0f02272e7ec4f6e9d427ff584aeea8b4"}, + {file = "websockets-14.1-cp39-cp39-win_amd64.whl", hash = "sha256:d9fd19ecc3a4d5ae82ddbfb30962cf6d874ff943e56e0c81f5169be2fda62979"}, + {file = "websockets-14.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e5dc25a9dbd1a7f61eca4b7cb04e74ae4b963d658f9e4f9aad9cd00b688692c8"}, + {file = "websockets-14.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:04a97aca96ca2acedf0d1f332c861c5a4486fdcba7bcef35873820f940c4231e"}, + {file = "websockets-14.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df174ece723b228d3e8734a6f2a6febbd413ddec39b3dc592f5a4aa0aff28098"}, + {file = "websockets-14.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:034feb9f4286476f273b9a245fb15f02c34d9586a5bc936aff108c3ba1b21beb"}, + {file = "websockets-14.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c308dabd2b380807ab64b62985eaccf923a78ebc572bd485375b9ca2b7dc7"}, + {file = "websockets-14.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5a42d3ecbb2db5080fc578314439b1d79eef71d323dc661aa616fb492436af5d"}, + {file = "websockets-14.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ddaa4a390af911da6f680be8be4ff5aaf31c4c834c1a9147bc21cbcbca2d4370"}, + {file = "websockets-14.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:a4c805c6034206143fbabd2d259ec5e757f8b29d0a2f0bf3d2fe5d1f60147a4a"}, + {file = "websockets-14.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:205f672a6c2c671a86d33f6d47c9b35781a998728d2c7c2a3e1cf3333fcb62b7"}, + {file = "websockets-14.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef440054124728cc49b01c33469de06755e5a7a4e83ef61934ad95fc327fbb0"}, + {file = "websockets-14.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7591d6f440af7f73c4bd9404f3772bfee064e639d2b6cc8c94076e71b2471c1"}, + {file = "websockets-14.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:25225cc79cfebc95ba1d24cd3ab86aaa35bcd315d12fa4358939bd55e9bd74a5"}, + {file = "websockets-14.1-py3-none-any.whl", hash = "sha256:4d4fc827a20abe6d544a119896f6b78ee13fe81cbfef416f3f2ddf09a03f0e2e"}, + {file = "websockets-14.1.tar.gz", hash = "sha256:398b10c77d471c0aab20a845e7a60076b6390bfdaac7a6d2edb0d2c59d75e8d8"}, +] + +[[package]] +name = "win32-setctime" +version = "1.1.0" +description = "A small Python utility to set file creation time on Windows" +optional = false +python-versions = ">=3.5" +files = [ + {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"}, + {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"}, +] + +[package.extras] +dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = 
"sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[[package]] +name = "xmltodict" +version = "0.13.0" +description = "Makes working with XML feel like you are working with JSON" +optional = false +python-versions = ">=3.4" +files = [ + {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, + {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, +] + +[[package]] +name = "zipp" +version = "3.21.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.9" +files = [ + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.11" +content-hash = "e1b0c1da9e0f1c2c345b10509efdfc709ecde73ad946714013c590f71d8051a1" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 
0000000..a8c1b39 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,120 @@ +[tool.poetry] +name = "riven" +version = "0.20.0" +description = "Plex torrent streaming through Real Debrid and 3rd party services like Overseerr, Mdblist, etc." +authors = ["Riven Developers"] +license = "GPL-3.0" +readme = "README.md" +package-mode = false + +[tool.poetry.dependencies] +python = "^3.11" +dill = "^0.3.8" +plexapi = "^4.15.10" +requests = "^2.31.0" +xmltodict = "^0.13.0" +lxml = "^5.1.0" +pydantic = "^2.6.3" +fastapi = "^0.110.0" +uvicorn = {extras = ["standard"], version = "^0.30.6"} +apscheduler = "^3.10.4" +regex = "^2023.12.25" +coverage = "^7.5.4" +cachetools = "^5.3.3" +loguru = "^0.7.2" +rich = "^13.7.1" +opentelemetry-api = "^1.25.0" +opentelemetry-sdk = "^1.25.0" +opentelemetry-exporter-prometheus = "^0.46b0" +prometheus-client = "^0.20.0" +sqlalchemy = "^2.0.31" +sqla-wrapper = "^6.0.0" +alembic = "^1.13.2" +psycopg2-binary = "^2.9.9" +apprise = "^1.8.1" +subliminal = "^2.2.1" +rank-torrent-name = ">=1.5,<2.0" +jsonschema = "^4.23.0" +scalar-fastapi = "^1.0.3" +psutil = "^6.0.0" +python-dotenv = "^1.0.1" +requests-ratelimiter = "^0.7.0" +requests-cache = "^1.2.1" +kink = "^0.8.1" + +[tool.poetry.group.dev.dependencies] +pyright = "^1.1.352" +pyperf = "^2.2.0" +pytest = "^8.3.2" +pytest-mock = "^3.14.0" +responses = "0.25.3" +pyfakefs = "^5.4.1" +ruff = "^0.7.2" +isort = "^5.10.1" +codecov = "^2.1.13" +httpx = "^0.27.0" +# memray = "^1.13.4" +testcontainers = "^4.8.0" +mypy = "^1.11.2" + +[tool.poetry.group.test] +optional = true + +[tool.poetry.group.test.dependencies] +pytest = "^8.3.2" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + +[tool.isort] +profile = "black" + +[tool.black] +line-length = 88 +include = '\.pyi?$' +exclude = ''' +/( + \.git + | \.hg + | \.mypy_cache + | \.tox + | \.venv + | _build + | buck-out + | build + | dist +)/ +''' + +[tool.ruff.lint] +# https://docs.astral.sh/ruff/rules/ +ignore = [ + "PLR0913", # flask8-bugbear: Too many arguments for a method or function + "PLR0911", # flask8-bugbear: Too many return statements + "PLR2004", # flake8-bugbear: Magic value used in comparison + "S104", # flake8-bandit: Possible binding to all interfaces + "S108", # flake8-bandit: Probable insecure usage of temp file/directory + "S311", # flake8-bandit: Standard pseudo-random generators are not suitable for security/cryptographic purposes + "S101", # ruff: Ignore assert warnings on tests + "RET505", # + "RET503", # ruff: Ignore required explicit returns (is this desired?) 
+ "SLF001", # private member accessing from pickle + "B904" # ruff: ignore raising exceptions from except for the API +] +extend-select = [ + "I", # isort + "C90", # mccabe complexity + "B", # flake8-bugbear + "PL", # pycodestyle + "S", # flake8-bandit + "T10", # flake8-debugger + "PIE", # flake8-pie + "T20", # flake8-print + "Q", # flake8-quotes + "RSE", # flake8-raise + "RET", # flake8-return + "SLF", # flake8-self + "SIM", # flake8-simplify + "ARG", # flake8-unused-arguments +] \ No newline at end of file diff --git a/src/.gitignore b/src/.gitignore new file mode 100644 index 0000000..72868b8 --- /dev/null +++ b/src/.gitignore @@ -0,0 +1,203 @@ +# Created by https://www.toptal.com/developers/gitignore/api/python,pythonvanilla,visualstudiocode,pydev,pycharm +# Edit at https://www.toptal.com/developers/gitignore?templates=python,pythonvanilla,visualstudiocode,pydev,pycharm + +### PyCharm ### +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# Gaisberg PD Project specific stuff +.vscode +__pycache__ +settings.json +*.log +data + +# User-specific stuff +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/**/usage.statistics.xml +.idea/**/dictionaries +.idea/**/shelf + +# AWS User-specific +.idea/**/aws.xml + +# CMake +cmake-build-*/ + +# IntelliJ +out/ + +# JIRA plugin +atlassian-ide-plugin.xml + + +# Editor-based Rest Client +.idea/httpRequests + +### Python ### +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# PyCharm +# JetBrains specific template is maintainted in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + +### PythonVanilla ### +# Byte-compiled / optimized / DLL files + +# C extensions + +# Distribution / packaging + +# Installer logs + +# Unit test / coverage reports + +# Translations + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow + + +### VisualStudioCode ### +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +!.vscode/*.code-snippets + +# Local History for Visual Studio Code +.history/ + +# Built Visual Studio Code Extensions +*.vsix + +### VisualStudioCode Patch ### +# Ignore all local history of files +.history +.ionide diff --git a/src/__init__.py b/src/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/alembic.ini b/src/alembic.ini new file mode 100644 index 0000000..13afa1d --- /dev/null +++ b/src/alembic.ini @@ -0,0 +1,12 @@ +# A generic, single database configuration. + +[alembic] + +script_location = %(here)s/alembic +file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s +prepend_sys_path = . 
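# The doubled percent signs in file_template above are configparser escapes:
# Alembic's own placeholders (%(year)d, %(rev)s, %(slug)s, ...) must be written
# as %%(year)d etc. so that ini interpolation leaves them for Alembic to expand.
# For the baseline revision in this patch (rev c99709e3648f, message
# "baseline_schema", created 2024-11-05 13:00) the template expands to
# 20241105_1300_c99709e3648f_baseline_schema.py, matching the migration file
# added below.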
+truncate_slug_length = 40 +version_locations = %(here)s/alembic/versions +version_path_separator = os +output_encoding = utf-8 +sqlalchemy.url = driver://user:pass@localhost/dbname \ No newline at end of file diff --git a/src/alembic/env.py b/src/alembic/env.py new file mode 100644 index 0000000..95492a5 --- /dev/null +++ b/src/alembic/env.py @@ -0,0 +1,124 @@ +import logging + +from loguru import logger +from sqlalchemy import engine_from_config, pool, text +from sqlalchemy.exc import OperationalError, ProgrammingError + +from alembic import context +from program.db.db import db +from program.settings.manager import settings_manager + + +# Loguru handler for alembic logs +class LoguruHandler(logging.Handler): + def emit(self, record): + logger.opt(depth=1, exception=record.exc_info).log("DATABASE", record.getMessage()) + +if settings_manager.settings.debug_database: + # Configure only alembic and SQLAlchemy loggers + logging.getLogger("alembic").handlers = [LoguruHandler()] + logging.getLogger("alembic").propagate = False + logging.getLogger("sqlalchemy").handlers = [LoguruHandler()] + logging.getLogger("sqlalchemy").propagate = False + + # Set log levels + logging.getLogger("alembic").setLevel(logging.DEBUG if settings_manager.settings.debug else logging.FATAL) + logging.getLogger("sqlalchemy").setLevel(logging.DEBUG if settings_manager.settings.debug else logging.FATAL) + +# Alembic configuration +config = context.config +config.set_main_option("sqlalchemy.url", settings_manager.settings.database.host) + +# Set MetaData object for autogenerate support +target_metadata = db.Model.metadata + +def reset_database(connection) -> bool: + """Reset database if needed""" + try: + # Drop and recreate schema + if db.engine.name == "postgresql": + connection.execute(text(""" + SELECT pg_terminate_backend(pid) + FROM pg_stat_activity + WHERE datname = current_database() + AND pid <> pg_backend_pid() + """)) + connection.execute(text("DROP SCHEMA public CASCADE")) + connection.execute(text("CREATE SCHEMA public")) + connection.execute(text("GRANT ALL ON SCHEMA public TO public")) + + logger.log("DATABASE", "Database reset complete") + return True + except Exception as e: + logger.error(f"Database reset failed: {e}") + return False + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode.""" + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + +def run_migrations_online() -> None: + """Run migrations in 'online' mode.""" + connectable = engine_from_config( + config.get_section(config.config_ini_section), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + connection = connection.execution_options(isolation_level="AUTOCOMMIT") + try: + context.configure( + connection=connection, + target_metadata=target_metadata, + compare_type=True, # Compare column types + compare_server_default=True, # Compare default values + include_schemas=True, # Include schema in migrations + render_as_batch=True, # Enable batch operations + ) + + with context.begin_transaction(): + logger.debug("Starting migrations...") + context.run_migrations() + logger.debug("Migrations completed successfully") + + except (OperationalError, ProgrammingError) as e: + logger.error(f"Database error during migration: {e}") + logger.warning("Attempting database reset...") + + if 
reset_database(connection): + # Configure alembic again after reset + context.configure( + connection=connection, + target_metadata=target_metadata, + compare_type=True, + compare_server_default=True, + include_schemas=True, + render_as_batch=True, + ) + + # Try migrations again + with context.begin_transaction(): + logger.debug("Rerunning migrations after reset...") + context.run_migrations() + logger.debug("Migrations completed successfully") + else: + raise Exception("Migration recovery failed") + + except Exception as e: + logger.error(f"Unexpected error during migration: {e}") + raise + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() \ No newline at end of file diff --git a/src/alembic/script.py.mako b/src/alembic/script.py.mako new file mode 100644 index 0000000..fbc4b07 --- /dev/null +++ b/src/alembic/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/src/alembic/versions/20241105_1300_c99709e3648f_baseline_schema.py b/src/alembic/versions/20241105_1300_c99709e3648f_baseline_schema.py new file mode 100644 index 0000000..701c793 --- /dev/null +++ b/src/alembic/versions/20241105_1300_c99709e3648f_baseline_schema.py @@ -0,0 +1,179 @@ +"""baseline_schema + +Revision ID: c99709e3648f +Revises: +Create Date: 2024-11-05 13:00:06.356164 + +""" +from typing import Sequence, Union + +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. +revision: str = 'c99709e3648f' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('MediaItem', + sa.Column('id', sa.String(), nullable=False), + sa.Column('imdb_id', sa.String(), nullable=True), + sa.Column('tvdb_id', sa.String(), nullable=True), + sa.Column('tmdb_id', sa.String(), nullable=True), + sa.Column('number', sa.Integer(), nullable=True), + sa.Column('type', sa.String(), nullable=False), + sa.Column('requested_at', sa.DateTime(), nullable=True), + sa.Column('requested_by', sa.String(), nullable=True), + sa.Column('requested_id', sa.Integer(), nullable=True), + sa.Column('indexed_at', sa.DateTime(), nullable=True), + sa.Column('scraped_at', sa.DateTime(), nullable=True), + sa.Column('scraped_times', sa.Integer(), nullable=True), + sa.Column('active_stream', sa.JSON(), nullable=True), + sa.Column('symlinked', sa.Boolean(), nullable=True), + sa.Column('symlinked_at', sa.DateTime(), nullable=True), + sa.Column('symlinked_times', sa.Integer(), nullable=True), + sa.Column('symlink_path', sa.String(), nullable=True), + sa.Column('file', sa.String(), nullable=True), + sa.Column('folder', sa.String(), nullable=True), + sa.Column('alternative_folder', sa.String(), nullable=True), + sa.Column('aliases', sa.JSON(), nullable=True), + sa.Column('is_anime', sa.Boolean(), nullable=True), + sa.Column('title', sa.String(), nullable=True), + sa.Column('network', sa.String(), nullable=True), + sa.Column('country', sa.String(), nullable=True), + sa.Column('language', sa.String(), nullable=True), + sa.Column('aired_at', sa.DateTime(), nullable=True), + sa.Column('year', sa.Integer(), nullable=True), + sa.Column('genres', sa.JSON(), nullable=True), + sa.Column('key', sa.String(), nullable=True), + sa.Column('guid', sa.String(), nullable=True), + sa.Column('update_folder', sa.String(), nullable=True), + sa.Column('overseerr_id', sa.Integer(), nullable=True), + sa.Column('last_state', sa.Enum('Unknown', 'Unreleased', 'Ongoing', 'Requested', 'Indexed', 'Scraped', 'Downloaded', 'Symlinked', 'Completed', 'PartiallyCompleted', 'Failed', name='states'), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_mediaitem_aired_at', 'MediaItem', ['aired_at'], unique=False) + op.create_index('ix_mediaitem_country', 'MediaItem', ['country'], unique=False) + op.create_index('ix_mediaitem_imdb_id', 'MediaItem', ['imdb_id'], unique=False) + op.create_index('ix_mediaitem_language', 'MediaItem', ['language'], unique=False) + op.create_index('ix_mediaitem_network', 'MediaItem', ['network'], unique=False) + op.create_index('ix_mediaitem_overseerr_id', 'MediaItem', ['overseerr_id'], unique=False) + op.create_index('ix_mediaitem_requested_by', 'MediaItem', ['requested_by'], unique=False) + op.create_index('ix_mediaitem_title', 'MediaItem', ['title'], unique=False) + op.create_index('ix_mediaitem_tmdb_id', 'MediaItem', ['tmdb_id'], unique=False) + op.create_index('ix_mediaitem_tvdb_id', 'MediaItem', ['tvdb_id'], unique=False) + op.create_index('ix_mediaitem_type', 'MediaItem', ['type'], unique=False) + op.create_index('ix_mediaitem_type_aired_at', 'MediaItem', ['type', 'aired_at'], unique=False) + op.create_index('ix_mediaitem_year', 'MediaItem', ['year'], unique=False) + op.create_table('Stream', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('infohash', sa.String(), nullable=False), + sa.Column('raw_title', sa.String(), nullable=False), + sa.Column('parsed_title', sa.String(), nullable=False), + sa.Column('rank', sa.Integer(), nullable=False), + sa.Column('lev_ratio', sa.Float(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + 
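    # The remaining tables hang off MediaItem and Stream: Movie, Show, Season
    # and Episode reuse the MediaItem primary key via a foreign key to
    # MediaItem.id (the joined-table inheritance pattern), while StreamRelation
    # and StreamBlacklistRelation are association tables linking MediaItem to
    # Stream with ON DELETE CASCADE, so deleting an item also removes its
    # stream links and blacklist entries.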
op.create_index('ix_stream_infohash', 'Stream', ['infohash'], unique=False) + op.create_index('ix_stream_parsed_title', 'Stream', ['parsed_title'], unique=False) + op.create_index('ix_stream_rank', 'Stream', ['rank'], unique=False) + op.create_index('ix_stream_raw_title', 'Stream', ['raw_title'], unique=False) + op.create_table('Movie', + sa.Column('id', sa.String(), nullable=False), + sa.ForeignKeyConstraint(['id'], ['MediaItem.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('Show', + sa.Column('id', sa.String(), nullable=False), + sa.ForeignKeyConstraint(['id'], ['MediaItem.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('StreamBlacklistRelation', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('media_item_id', sa.String(), nullable=False), + sa.Column('stream_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['media_item_id'], ['MediaItem.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['stream_id'], ['Stream.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_streamblacklistrelation_media_item_id', 'StreamBlacklistRelation', ['media_item_id'], unique=False) + op.create_index('ix_streamblacklistrelation_stream_id', 'StreamBlacklistRelation', ['stream_id'], unique=False) + op.create_table('StreamRelation', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('parent_id', sa.String(), nullable=False), + sa.Column('child_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['child_id'], ['Stream.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['parent_id'], ['MediaItem.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_streamrelation_child_id', 'StreamRelation', ['child_id'], unique=False) + op.create_index('ix_streamrelation_parent_id', 'StreamRelation', ['parent_id'], unique=False) + op.create_table('Subtitle', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('language', sa.String(), nullable=False), + sa.Column('file', sa.String(), nullable=True), + sa.Column('parent_id', sa.String(), nullable=False), + sa.ForeignKeyConstraint(['parent_id'], ['MediaItem.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_subtitle_file', 'Subtitle', ['file'], unique=False) + op.create_index('ix_subtitle_language', 'Subtitle', ['language'], unique=False) + op.create_index('ix_subtitle_parent_id', 'Subtitle', ['parent_id'], unique=False) + op.create_table('Season', + sa.Column('id', sa.String(), nullable=False), + sa.Column('parent_id', sa.String(), nullable=False), + sa.ForeignKeyConstraint(['id'], ['MediaItem.id'], ), + sa.ForeignKeyConstraint(['parent_id'], ['Show.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('Episode', + sa.Column('id', sa.String(), nullable=False), + sa.Column('parent_id', sa.String(), nullable=False), + sa.ForeignKeyConstraint(['id'], ['MediaItem.id'], ), + sa.ForeignKeyConstraint(['parent_id'], ['Season.id'], ), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_table('Episode') + op.drop_table('Season') + op.drop_index('ix_subtitle_parent_id', table_name='Subtitle') + op.drop_index('ix_subtitle_language', table_name='Subtitle') + op.drop_index('ix_subtitle_file', table_name='Subtitle') + op.drop_table('Subtitle') + op.drop_index('ix_streamrelation_parent_id', table_name='StreamRelation') + op.drop_index('ix_streamrelation_child_id', table_name='StreamRelation') + op.drop_table('StreamRelation') + op.drop_index('ix_streamblacklistrelation_stream_id', table_name='StreamBlacklistRelation') + op.drop_index('ix_streamblacklistrelation_media_item_id', table_name='StreamBlacklistRelation') + op.drop_table('StreamBlacklistRelation') + op.drop_table('Show') + op.drop_table('Movie') + op.drop_index('ix_stream_raw_title', table_name='Stream') + op.drop_index('ix_stream_rank', table_name='Stream') + op.drop_index('ix_stream_parsed_title', table_name='Stream') + op.drop_index('ix_stream_infohash', table_name='Stream') + op.drop_table('Stream') + op.drop_index('ix_mediaitem_year', table_name='MediaItem') + op.drop_index('ix_mediaitem_type_aired_at', table_name='MediaItem') + op.drop_index('ix_mediaitem_type', table_name='MediaItem') + op.drop_index('ix_mediaitem_tvdb_id', table_name='MediaItem') + op.drop_index('ix_mediaitem_tmdb_id', table_name='MediaItem') + op.drop_index('ix_mediaitem_title', table_name='MediaItem') + op.drop_index('ix_mediaitem_requested_by', table_name='MediaItem') + op.drop_index('ix_mediaitem_overseerr_id', table_name='MediaItem') + op.drop_index('ix_mediaitem_network', table_name='MediaItem') + op.drop_index('ix_mediaitem_language', table_name='MediaItem') + op.drop_index('ix_mediaitem_imdb_id', table_name='MediaItem') + op.drop_index('ix_mediaitem_country', table_name='MediaItem') + op.drop_index('ix_mediaitem_aired_at', table_name='MediaItem') + op.drop_table('MediaItem') + # ### end Alembic commands ### diff --git a/src/auth.py b/src/auth.py new file mode 100644 index 0000000..8e82192 --- /dev/null +++ b/src/auth.py @@ -0,0 +1,23 @@ +from typing import Optional + +from fastapi import HTTPException, Security, status +from fastapi.security import APIKeyHeader, HTTPAuthorizationCredentials, HTTPBearer + +from program.settings.manager import settings_manager + + +def header_auth(header = Security(APIKeyHeader(name="x-api-key", auto_error=False))): + return header == settings_manager.settings.api_key + +def bearer_auth(bearer: HTTPAuthorizationCredentials = Security(HTTPBearer(auto_error=False))): + return bearer and bearer.credentials == settings_manager.settings.api_key + +def resolve_api_key( + header: Optional[str] = Security(header_auth), + bearer: Optional[HTTPAuthorizationCredentials] = Security(bearer_auth) +): + if not (header or bearer): + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid authentication credentials" + ) \ No newline at end of file diff --git a/src/main.py b/src/main.py new file mode 100644 index 0000000..ad755b7 --- /dev/null +++ b/src/main.py @@ -0,0 +1,113 @@ +import contextlib +import signal +import sys +import threading +import time +import traceback + +import uvicorn +from dotenv import load_dotenv +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from loguru import logger +from scalar_fastapi import get_scalar_api_reference +from starlette.middleware.base import BaseHTTPMiddleware +from starlette.requests import Request + +from program import Program +from program.settings.models import get_version +from program.utils.cli 
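The auth module above exposes resolve_api_key as a FastAPI dependency. A minimal sketch of attaching it to a router; the router and endpoint names are illustrative and not part of this patch:

    from fastapi import APIRouter, Depends
    from auth import resolve_api_key

    protected = APIRouter(dependencies=[Depends(resolve_api_key)])

    @protected.get("/example")          # hypothetical endpoint
    async def example():
        # Reached only when the x-api-key header or Bearer token matches settings.api_key.
        return {"ok": True}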
import handle_args +from routers import app_router + +load_dotenv() + + +class LoguruMiddleware(BaseHTTPMiddleware): + async def dispatch(self, request: Request, call_next): + start_time = time.time() + try: + response = await call_next(request) + except Exception as e: + logger.exception(f"Exception during request processing: {e}") + raise + finally: + process_time = time.time() - start_time + logger.log( + "API", + f"{request.method} {request.url.path} - {response.status_code if 'response' in locals() else '500'} - {process_time:.2f}s", + ) + return response + +args = handle_args() + +app = FastAPI( + title="Riven", + summary="A media management system.", + version=get_version(), + redoc_url=None, + license_info={ + "name": "GPL-3.0", + "url": "https://www.gnu.org/licenses/gpl-3.0.en.html", + }, +) + +@app.get("/scalar", include_in_schema=False) +async def scalar_html(): + return get_scalar_api_reference( + openapi_url=app.openapi_url, + title=app.title, + ) + +app.program = Program() +app.add_middleware(LoguruMiddleware) +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +app.include_router(app_router) + +class Server(uvicorn.Server): + def install_signal_handlers(self): + pass + + @contextlib.contextmanager + def run_in_thread(self): + thread = threading.Thread(target=self.run, name="Riven") + thread.start() + try: + while not self.started: + time.sleep(1e-3) + yield + except Exception as e: + logger.error(f"Error in server thread: {e}") + logger.exception(traceback.format_exc()) + raise e + finally: + self.should_exit = True + sys.exit(0) + +def signal_handler(signum, frame): + logger.log("PROGRAM","Exiting Gracefully.") + app.program.stop() + sys.exit(0) + +signal.signal(signal.SIGINT, signal_handler) +signal.signal(signal.SIGTERM, signal_handler) + +config = uvicorn.Config(app, host="0.0.0.0", port=args.port, log_config=None) +server = Server(config=config) + +with server.run_in_thread(): + try: + app.program.start() + app.program.run() + except Exception as e: + logger.error(f"Error in main thread: {e}") + logger.exception(traceback.format_exc()) + finally: + logger.critical("Server has been stopped") + sys.exit(0) \ No newline at end of file diff --git a/src/program/__init__.py b/src/program/__init__.py new file mode 100644 index 0000000..8621dc2 --- /dev/null +++ b/src/program/__init__.py @@ -0,0 +1,4 @@ +"""Program main module""" + +from program.media.item import MediaItem # noqa: F401 +from program.program import Event, Program # noqa: F401 diff --git a/src/program/apis/__init__.py b/src/program/apis/__init__.py new file mode 100644 index 0000000..5fd463b --- /dev/null +++ b/src/program/apis/__init__.py @@ -0,0 +1,45 @@ +from kink import di + +from program.settings.manager import settings_manager + +from .listrr_api import ListrrAPI, ListrrAPIError +from .mdblist_api import MdblistAPI, MdblistAPIError +from .overseerr_api import OverseerrAPI, OverseerrAPIError +from .plex_api import PlexAPI, PlexAPIError +from .trakt_api import TraktAPI, TraktAPIError + + +def bootstrap_apis(): + __setup_trakt() + __setup_plex() + __setup_mdblist() + __setup_overseerr() + __setup_listrr() + +def __setup_trakt(): + traktApi = TraktAPI(settings_manager.settings.content.trakt) + di[TraktAPI] = traktApi + +def __setup_plex(): + if not settings_manager.settings.updaters.plex.enabled: + return + plexApi = PlexAPI(settings_manager.settings.updaters.plex.token, settings_manager.settings.updaters.plex.url) + 
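bootstrap_apis() above registers each API client in kink's dependency-injection container, keyed by its class. A minimal sketch of how a consumer retrieves one later (this mirrors the di[TraktAPI] lookup used by ListrrAPI below); the consumer function itself is hypothetical:

    from kink import di
    from program.apis.trakt_api import TraktAPI

    def current_trakt_client() -> TraktAPI:
        # di acts like a dict keyed by type; the lookup fails if bootstrap_apis() never registered it.
        return di[TraktAPI]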
di[PlexAPI] = plexApi + +def __setup_overseerr(): + if not settings_manager.settings.content.overseerr.enabled: + return + overseerrApi = OverseerrAPI(settings_manager.settings.content.overseerr.api_key, settings_manager.settings.content.overseerr.url) + di[OverseerrAPI] = overseerrApi + +def __setup_mdblist(): + if not settings_manager.settings.content.mdblist.enabled: + return + mdblistApi = MdblistAPI(settings_manager.settings.content.mdblist.api_key) + di[MdblistAPI] = mdblistApi + +def __setup_listrr(): + if not settings_manager.settings.content.listrr.enabled: + return + listrrApi = ListrrAPI(settings_manager.settings.content.listrr.api_key) + di[ListrrAPI] = listrrApi diff --git a/src/program/apis/listrr_api.py b/src/program/apis/listrr_api.py new file mode 100644 index 0000000..441f685 --- /dev/null +++ b/src/program/apis/listrr_api.py @@ -0,0 +1,74 @@ +from kink import di +from loguru import logger +from requests.exceptions import HTTPError + +from program.apis.trakt_api import TraktAPI +from program.media.item import MediaItem +from program.utils.request import ( + BaseRequestHandler, + HttpMethod, + ResponseObject, + ResponseType, + Session, + create_service_session, +) + + +class ListrrAPIError(Exception): + """Base exception for ListrrAPI related errors""" + +class ListrrRequestHandler(BaseRequestHandler): + def __init__(self, session: Session, base_url: str, request_logging: bool = False): + super().__init__(session, base_url=base_url, response_type=ResponseType.SIMPLE_NAMESPACE, custom_exception=ListrrAPIError, request_logging=request_logging) + + def execute(self, method: HttpMethod, endpoint: str, **kwargs) -> ResponseObject: + return super()._request(method, endpoint, **kwargs) + +class ListrrAPI: + """Handles Listrr API communication""" + + def __init__(self, api_key: str): + self.BASE_URL = "https://listrr.pro" + self.api_key = api_key + self.headers = {"X-Api-Key": self.api_key} + session = create_service_session() + session.headers.update(self.headers) + self.request_handler = ListrrRequestHandler(session, base_url=self.BASE_URL) + self.trakt_api = di[TraktAPI] + + def validate(self): + return self.request_handler.execute(HttpMethod.GET, self.BASE_URL) + + def get_items_from_Listrr(self, content_type, content_lists) -> list[MediaItem] | list[str]: # noqa: C901, PLR0912 + """Fetch unique IMDb IDs from Listrr for a given type and list of content.""" + unique_ids: set[str] = set() + if not content_lists: + return list(unique_ids) + + for list_id in content_lists: + if not list_id or len(list_id) != 24: + continue + + page, total_pages = 1, 1 + while page <= total_pages: + try: + url = f"api/List/{content_type}/{list_id}/ReleaseDate/Descending/{page}" + response = self.request_handler.execute(HttpMethod.GET, url) + data = response.data + total_pages = data.get("pages", 1) + for item in data.get("items", []): + imdb_id = item.get("imDbId") + if imdb_id: + unique_ids.add(imdb_id) + elif content_type == "Movies" and item.get("tmDbId"): + imdb_id = self.trakt_api.get_imdbid_from_tmdb(item["tmDbId"]) + if imdb_id: + unique_ids.add(imdb_id) + except HTTPError as e: + if e.response.status_code in [400, 404, 429, 500]: + break + except Exception as e: + logger.error(f"An error occurred: {e}") + break + page += 1 + return list(unique_ids) diff --git a/src/program/apis/mdblist_api.py b/src/program/apis/mdblist_api.py new file mode 100644 index 0000000..0157311 --- /dev/null +++ b/src/program/apis/mdblist_api.py @@ -0,0 +1,50 @@ +from program.utils.request import ( + 
BaseRequestHandler, + HttpMethod, + ResponseObject, + ResponseType, + Session, + create_service_session, + get_rate_limit_params, +) + + +class MdblistAPIError(Exception): + """Base exception for MdblistAPI related errors""" + +class MdblistRequestHandler(BaseRequestHandler): + def __init__(self, session: Session, base_url: str, api_key: str, request_logging: bool = False): + self.api_key = api_key + super().__init__(session, base_url=base_url, response_type=ResponseType.SIMPLE_NAMESPACE, custom_exception=MdblistAPIError, request_logging=request_logging) + + def execute(self, method: HttpMethod, endpoint: str, ignore_base_url: bool = False, **kwargs) -> ResponseObject: + return super()._request(method, endpoint, ignore_base_url=ignore_base_url, params={"apikey": self.api_key}, **kwargs) + + +class MdblistAPI: + """Handles Mdblist API communication""" + BASE_URL = "https://mdblist.com" + + def __init__(self, api_key: str): + rate_limit_params = get_rate_limit_params(per_minute=60) + session = create_service_session(rate_limit_params=rate_limit_params) + self.request_handler = MdblistRequestHandler(session, base_url=self.BASE_URL, api_key=api_key) + + def validate(self): + return self.request_handler.execute(HttpMethod.GET, f"api/user") + + def my_limits(self): + """Wrapper for mdblist api method 'My limits'""" + response = self.request_handler.execute(HttpMethod.GET,f"api/user") + return response.data + + def list_items_by_id(self, list_id: int): + """Wrapper for mdblist api method 'List items'""" + response = self.request_handler.execute(HttpMethod.GET,f"api/lists/{str(list_id)}/items") + return response.data + + def list_items_by_url(self, url: str): + url = url if url.endswith("/") else f"{url}/" + url = url if url.endswith("json/") else f"{url}json/" + response = self.request_handler.execute(HttpMethod.GET, url, ignore_base_url=True) + return response.data \ No newline at end of file diff --git a/src/program/apis/overseerr_api.py b/src/program/apis/overseerr_api.py new file mode 100644 index 0000000..e41afe1 --- /dev/null +++ b/src/program/apis/overseerr_api.py @@ -0,0 +1,186 @@ +from typing import Union + +from kink import di +from loguru import logger +from requests.exceptions import ConnectionError, RetryError +from urllib3.exceptions import MaxRetryError + +from program.apis.trakt_api import TraktAPI +from program.media.item import MediaItem +from program.settings.manager import settings_manager +from program.utils.request import ( + BaseRequestHandler, + HttpMethod, + ResponseObject, + ResponseType, + Session, + create_service_session, + get_rate_limit_params, +) + + +class OverseerrAPIError(Exception): + """Base exception for OverseerrAPI related errors""" + +class OverseerrRequestHandler(BaseRequestHandler): + def __init__(self, session: Session, base_url: str, request_logging: bool = False): + super().__init__(session, base_url=base_url, response_type=ResponseType.SIMPLE_NAMESPACE, custom_exception=OverseerrAPIError, request_logging=request_logging) + + def execute(self, method: HttpMethod, endpoint: str, **kwargs) -> ResponseObject: + return super()._request(method, endpoint, **kwargs) + + +class OverseerrAPI: + """Handles Overseerr API communication""" + + def __init__(self, api_key: str, base_url: str): + self.api_key = api_key + rate_limit_params = get_rate_limit_params(max_calls=1000, period=300) + session = create_service_session(rate_limit_params=rate_limit_params) + self.trakt_api = di[TraktAPI] + self.headers = {"X-Api-Key": self.api_key} + 
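A short usage sketch for the MdblistAPI wrapper above; the key and list id are placeholders:

    from program.apis.mdblist_api import MdblistAPI

    api = MdblistAPI(api_key="YOUR_MDBLIST_KEY")      # placeholder key
    limits = api.my_limits()                          # GET https://mdblist.com/api/user?apikey=...
    items = api.list_items_by_id(12345)               # GET https://mdblist.com/api/lists/12345/items
    # list_items_by_url() normalises a public list URL to its .../json/ form before fetching.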
session.headers.update(self.headers) + self.request_handler = OverseerrRequestHandler(session, base_url=base_url) + + def validate(self): + return self.request_handler.execute(HttpMethod.GET, "api/v1/auth/me", timeout=30) + + def get_media_requests(self, service_key: str) -> list[MediaItem]: + """Get media requests from `Overseerr`""" + try: + response = self.request_handler.execute(HttpMethod.GET, f"api/v1/request?take={10000}&filter=approved&sort=added") + if not response.is_ok: + logger.error(f"Failed to fetch requests from overseerr: {response.data}") + return [] + except (ConnectionError, RetryError, MaxRetryError) as e: + logger.error(f"Failed to fetch requests from overseerr: {str(e)}") + return [] + except Exception as e: + logger.error(f"Unexpected error during fetching requests: {str(e)}") + return [] + + if not hasattr(response.data, "pageInfo") or getattr(response.data.pageInfo, "results", 0) == 0: + return [] + + # Lets look at approved items only that are only in the pending state + pending_items = [ + item for item in response.data.results + if item.status == 2 and item.media.status == 3 + ] + + media_items = [] + for item in pending_items: + imdb_id = self.get_imdb_id(item.media) + if imdb_id: + media_items.append( + MediaItem({ + "imdb_id": imdb_id, + "requested_by": service_key, + "overseerr_id": item.media.id + }) + ) + elif item.media.tmdbId: + logger.debug(f"Skipping {item.type} with TMDb ID {item.media.tmdbId} due to missing IMDb ID") + elif item.media.tvdbId: + logger.debug(f"Skipping {item.type} with TVDb ID {item.media.tvdbId} due to missing IMDb ID") + else: + logger.debug(f"Skipping {item.type} with Overseerr ID {item.media.id} due to missing IMDb ID") + return media_items + + + def get_imdb_id(self, data) -> str | None: + """Get imdbId for item from overseerr""" + if data.mediaType == "show": + external_id = data.tvdbId + data.mediaType = "tv" + else: + external_id = data.tmdbId + + try: + response = self.request_handler.execute(HttpMethod.GET, f"api/v1/{data.mediaType}/{external_id}?language=en") + except (ConnectionError, RetryError, MaxRetryError) as e: + logger.error(f"Failed to fetch media details from overseerr: {str(e)}") + return None + except Exception as e: + logger.error(f"Unexpected error during fetching media details: {str(e)}") + return None + + if not response.is_ok or not hasattr(response.data, "externalIds"): + return None + + imdb_id = getattr(response.data.externalIds, "imdbId", None) + if imdb_id: + return imdb_id + + # Try alternate IDs if IMDb ID is not available + alternate_ids = [("tmdbId", self.trakt_api.get_imdbid_from_tmdb)] + for id_attr, fetcher in alternate_ids: + external_id_value = getattr(response.data.externalIds, id_attr, None) + if external_id_value: + _type = data.media_type + if _type == "tv": + _type = "show" + try: + new_imdb_id: Union[str, None] = fetcher(external_id_value, type=_type) + if not new_imdb_id: + continue + return new_imdb_id + except Exception as e: + logger.error(f"Error fetching alternate ID: {str(e)}") + continue + + def delete_request(self, mediaId: int) -> bool: + """Delete request from `Overseerr`""" + settings = settings_manager.settings.content.overseerr + headers = {"X-Api-Key": settings.api_key} + try: + response = self.request_handler.execute(HttpMethod.DELETE, f"api/v1/request/{mediaId}", headers=headers) + logger.debug(f"Deleted request {mediaId} from overseerr") + return response.is_ok + except Exception as e: + logger.error(f"Failed to delete request from overseerr: {str(e)}") + return False + 
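A hedged sketch of how OverseerrAPI.get_media_requests() above might be consumed; the service_key value and the DI lookup are illustrative rather than taken from this patch:

    from kink import di
    from program.apis.overseerr_api import OverseerrAPI

    api = di[OverseerrAPI]                             # registered by bootstrap_apis() when enabled
    for media_item in api.get_media_requests(service_key="overseerr"):
        # Each returned MediaItem carries imdb_id, requested_by and overseerr_id, ready for indexing.
        print(media_item.imdb_id, media_item.overseerr_id)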
+ def mark_processing(self, mediaId: int) -> bool: + """Mark item as processing in overseerr""" + try: + response = self.request_handler.execute(HttpMethod.POST, f"api/v1/media/{mediaId}/pending", data={"is4k": False}) + logger.info(f"Marked media {mediaId} as processing in overseerr") + return response.is_ok + except Exception as e: + logger.error(f"Failed to mark media as processing in overseerr with id {mediaId}: {str(e)}") + return False + + def mark_partially_available(self, mediaId: int) -> bool: + """Mark item as partially available in overseerr""" + try: + response = self.request_handler.execute(HttpMethod.POST, f"api/v1/media/{mediaId}/partial", data={"is4k": False}) + logger.info(f"Marked media {mediaId} as partially available in overseerr") + return response.is_ok + except Exception as e: + logger.error(f"Failed to mark media as partially available in overseerr with id {mediaId}: {str(e)}") + return False + + def mark_completed(self, mediaId: int) -> bool: + """Mark item as completed in overseerr""" + try: + response = self.request_handler.execute(HttpMethod.POST, f"api/v1/media/{mediaId}/available", data={"is4k": False}) + logger.info(f"Marked media {mediaId} as completed in overseerr") + return response.is_ok + except Exception as e: + logger.error(f"Failed to mark media as completed in overseerr with id {mediaId}: {str(e)}") + return False + +# Statuses for Media Requests endpoint /api/v1/request: +# item.status: +# 1 = PENDING APPROVAL, +# 2 = APPROVED, +# 3 = DECLINED + +# Statuses for Media Info endpoint /api/v1/media: +# item.media.status: +# 1 = UNKNOWN, +# 2 = PENDING, +# 3 = PROCESSING, +# 4 = PARTIALLY_AVAILABLE, +# 5 = AVAILABLE \ No newline at end of file diff --git a/src/program/apis/plex_api.py b/src/program/apis/plex_api.py new file mode 100644 index 0000000..ca11732 --- /dev/null +++ b/src/program/apis/plex_api.py @@ -0,0 +1,135 @@ +from typing import Dict, List, Optional, Union + +from loguru import logger +from plexapi.library import LibrarySection +from plexapi.myplex import MyPlexAccount +from plexapi.server import PlexServer +from requests import Session + +from program.media import Episode, Movie +from program.settings.manager import settings_manager +from program.utils.request import ( + BaseRequestHandler, + HttpMethod, + ResponseObject, + ResponseType, + create_service_session, +) + + +class PlexAPIError(Exception): + """Base exception for PlexApi related errors""" + +class PlexRequestHandler(BaseRequestHandler): + def __init__(self, session: Session, request_logging: bool = False): + super().__init__(session, response_type=ResponseType.SIMPLE_NAMESPACE, custom_exception=PlexAPIError, request_logging=request_logging) + + def execute(self, method: HttpMethod, endpoint: str, overriden_response_type: ResponseType = None, **kwargs) -> ResponseObject: + return super()._request(method, endpoint, overriden_response_type=overriden_response_type, **kwargs) + +class PlexAPI: + """Handles Plex API communication""" + + def __init__(self, token: str, base_url: str): + self.rss_urls: Optional[List[str]] = None + self.token = token + self.BASE_URL = base_url + session = create_service_session() + self.request_handler = PlexRequestHandler(session) + self.account = None + self.plex_server = None + self.rss_enabled = False + + def validate_account(self): + try: + self.account = MyPlexAccount(session=self.request_handler.session, token=self.token) + except Exception as e: + logger.error(f"Failed to authenticate Plex account: {e}") + return False + return True + + def 
validate_server(self): + self.plex_server = PlexServer(self.BASE_URL, token=self.token, session=self.request_handler.session, timeout=60) + + def set_rss_urls(self, rss_urls: List[str]): + self.rss_urls = rss_urls + + def clear_rss_urls(self): + self.rss_urls = None + self.rss_enabled = False + + def validate_rss(self, url: str): + return self.request_handler.execute(HttpMethod.GET, url) + + def ratingkey_to_imdbid(self, ratingKey: str) -> str | None: + """Convert Plex rating key to IMDb ID""" + token = settings_manager.settings.updaters.plex.token + filter_params = "includeGuids=1&includeFields=guid,title,year&includeElements=Guid" + url = f"https://metadata.provider.plex.tv/library/metadata/{ratingKey}?X-Plex-Token={token}&{filter_params}" + response = self.request_handler.execute(HttpMethod.GET, url) + if response.is_ok and hasattr(response.data, "MediaContainer"): + metadata = response.data.MediaContainer.Metadata[0] + return next((guid.id.split("//")[-1] for guid in metadata.Guid if "imdb://" in guid.id), None) + logger.debug(f"Failed to fetch IMDb ID for ratingKey: {ratingKey}") + return None + + def get_items_from_rss(self) -> list[str]: + """Fetch media from Plex RSS Feeds.""" + rss_items: list[str] = [] + for rss_url in self.rss_urls: + try: + response = self.request_handler.execute(HttpMethod.GET, rss_url + "?format=json", overriden_response_type=ResponseType.DICT, timeout=60) + for _item in response.data.get("items", []): + imdb_id = self.extract_imdb_ids(_item.get("guids", [])) + if imdb_id and imdb_id.startswith("tt"): + rss_items.append(imdb_id) + else: + logger.log("NOT_FOUND", f"Failed to extract IMDb ID from {_item['title']}") + except Exception as e: + logger.error(f"An unexpected error occurred while fetching Plex RSS feed from {rss_url}: {e}") + return rss_items + + + def get_items_from_watchlist(self) -> list[str]: + """Fetch media from Plex watchlist""" + items = self.account.watchlist() + watchlist_items: list[str] = [] + for item in items: + try: + if hasattr(item, "guids") and item.guids: + imdb_id: str = next((guid.id.split("//")[-1] for guid in item.guids if guid.id.startswith("imdb://")), "") + if imdb_id and imdb_id.startswith("tt"): + watchlist_items.append(imdb_id) + else: + logger.log("NOT_FOUND", f"Unable to extract IMDb ID from {item.title} ({item.year}) with data id: {imdb_id}") + else: + logger.log("NOT_FOUND", f"{item.title} ({item.year}) is missing guids attribute from Plex") + except Exception as e: + logger.error(f"An unexpected error occurred while fetching Plex watchlist item {item.title}: {e}") + return watchlist_items + + + def extract_imdb_ids(self, guids: list) -> str | None: + """Helper method to extract IMDb IDs from guids""" + for guid in guids: + if guid and guid.startswith("imdb://"): + imdb_id = guid.split("//")[-1] + if imdb_id: + return imdb_id + return None + + + def update_section(self, section, item: Union[Movie, Episode]) -> bool: + """Update the Plex section for the given item""" + if item.symlinked and item.get("update_folder") != "updated": + update_folder = item.update_folder + section.update(str(update_folder)) + return True + return False + + def map_sections_with_paths(self) -> Dict[LibrarySection, List[str]]: + """Map Plex sections with their paths""" + # Skip sections without locations and non-movie/show sections + sections = [section for section in self.plex_server.library.sections() if section.type in ["show", "movie"] and section.locations] + # Map sections with their locations with the section obj as key and the 
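A hedged sketch combining map_sections_with_paths() and update_section() from the PlexAPI above; the real updater in this patch may wire these differently:

    from kink import di
    from program.apis.plex_api import PlexAPI

    plex = di[PlexAPI]
    plex.validate_server()                             # builds plex.plex_server from BASE_URL + token
    for section, paths in plex.map_sections_with_paths().items():
        # An item whose update_folder lives under one of `paths` can be refreshed with:
        #   plex.update_section(section, item)
        print(section.title, paths)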
location strings as values + return {section: section.locations for section in sections} \ No newline at end of file diff --git a/src/program/apis/trakt_api.py b/src/program/apis/trakt_api.py new file mode 100644 index 0000000..31ffafe --- /dev/null +++ b/src/program/apis/trakt_api.py @@ -0,0 +1,366 @@ +import re +from datetime import datetime +from types import SimpleNamespace +from typing import List, Optional, Union +from urllib.parse import urlencode + +from requests import RequestException, Session + +from program import MediaItem +from program.media import Episode, Movie, Season, Show +from program.settings.manager import settings_manager +from program.settings.models import TraktModel +from program.utils.request import ( + BaseRequestHandler, + HttpMethod, + ResponseObject, + ResponseType, + create_service_session, + get_cache_params, + get_rate_limit_params, + logger, +) + + +class TraktAPIError(Exception): + """Base exception for TraktApi related errors""" + +class TraktRequestHandler(BaseRequestHandler): + def __init__(self, session: Session, response_type=ResponseType.SIMPLE_NAMESPACE, request_logging: bool = False): + super().__init__(session, response_type=response_type, custom_exception=TraktAPIError, request_logging=request_logging) + + def execute(self, method: HttpMethod, endpoint: str, **kwargs) -> ResponseObject: + return super()._request(method, endpoint, **kwargs) + + +class TraktAPI: + """Handles Trakt API communication""" + BASE_URL = "https://api.trakt.tv" + CLIENT_ID = "0183a05ad97098d87287fe46da4ae286f434f32e8e951caad4cc147c947d79a3" + + patterns: dict[str, re.Pattern] = { + "user_list": re.compile(r"https://trakt.tv/users/([^/]+)/lists/([^/]+)"), + "short_list": re.compile(r"https://trakt.tv/lists/\d+") + } + + def __init__(self, settings: TraktModel): + self.settings = settings + self.oauth_client_id = self.settings.oauth.oauth_client_id + self.oauth_client_secret = self.settings.oauth.oauth_client_secret + self.oauth_redirect_uri = self.settings.oauth.oauth_redirect_uri + rate_limit_params = get_rate_limit_params(max_calls=1000, period=300) + trakt_cache = get_cache_params("trakt", 86400) + session = create_service_session(rate_limit_params=rate_limit_params, use_cache=True, cache_params=trakt_cache) + self.headers = { + "Content-type": "application/json", + "trakt-api-key": self.CLIENT_ID, + "trakt-api-version": "2" + } + session.headers.update(self.headers) + self.request_handler = TraktRequestHandler(session) + + def validate(self): + return self.request_handler.execute(HttpMethod.GET, f"{self.BASE_URL}/lists/2") + + def _fetch_data(self, url, params): + """Fetch paginated data from Trakt API with rate limiting.""" + all_data = [] + page = 1 + + while True: + try: + response = self.request_handler.execute(HttpMethod.GET, url, params={**params, "page": page}) + if response.is_ok: + data = response.data + if not data: + break + all_data.extend(data) + if "X-Pagination-Page-Count" not in response.response.headers: + break + if params.get("limit") and len(all_data) >= params["limit"]: + break + page += 1 + elif response.status_code == 429: + logger.warning("Rate limit exceeded. 
Retrying after rate limit period.") + break + else: + logger.error(f"Failed to fetch data: {response.status_code}") + break + except Exception as e: + logger.error(f"Error fetching data: {str(e)}") + break + return all_data + + def get_watchlist_items(self, user): + """Get watchlist items from Trakt with pagination support.""" + url = f"{self.BASE_URL}/users/{user}/watchlist" + return self._fetch_data(url,{}) + + def get_user_list(self, user, list_name): + """Get user list items from Trakt with pagination support.""" + url = f"{self.BASE_URL}/users/{user}/lists/{list_name}/items" + return self._fetch_data(url, {}) + + def get_collection_items(self, user, media_type): + """Get collections from Trakt with pagination support.""" + url = f"{self.BASE_URL}/users/{user}/collection/{media_type}" + return self._fetch_data(url, {}) + + # UNUSED + def get_liked_lists(self): + """Get liked lists from Trakt with pagination support.""" + url = f"{self.BASE_URL}/users/likes/lists" + return self._fetch_data(url, {}) + + def get_trending_items(self, media_type, limit=10): + """Get trending items from Trakt with pagination support.""" + url = f"{self.BASE_URL}/{media_type}/trending" + return self._fetch_data(url, {"limit": limit}) + + def get_popular_items(self, media_type, limit=10): + """Get popular items from Trakt with pagination support.""" + url = f"{self.BASE_URL}/{media_type}/popular" + return self._fetch_data(url, {"limit": limit}) + + def get_most_watched_items(self, media_type, period="weekly", limit=10): + """Get popular items from Trakt with pagination support.""" + url = f"{self.BASE_URL}/{media_type}/watched/{period}" + return self._fetch_data(url, {"limit": limit}) + + # UNUSED + def get_favorited_items(self, user, limit=10): + """Get favorited items from Trakt with pagination support.""" + url = f"{self.BASE_URL}/users/{user}/favorites" + return self._fetch_data(url, {"limit": limit}) + + def extract_user_list_from_url(self, url) -> tuple: + """Extract user and list name from Trakt URL""" + + def match_full_url(url: str) -> tuple: + """Helper function to match full URL format""" + match = self.patterns["user_list"].match(url) + if match: + return match.groups() + return None, None + + # First try to match the original URL + user, list_name = match_full_url(url) + if user and list_name: + return user, list_name + + # If it's a short URL, resolve it and try to match again + match = self.patterns["short_list"].match(url) + if match: + full_url = self.resolve_short_url(url) + if full_url: + user, list_name = match_full_url(full_url) + if user and list_name: + return user, list_name + + return None, None + + def get_show(self, imdb_id: str) -> dict: + """Wrapper for trakt.tv API show method.""" + if not imdb_id: + return {} + url = f"{self.BASE_URL}/shows/{imdb_id}/seasons?extended=episodes,full" + response = self.request_handler.execute(HttpMethod.GET, url, timeout=30) + return response.data if response.is_ok and response.data else {} + + def get_show_aliases(self, imdb_id: str, item_type: str) -> List[dict]: + """Wrapper for trakt.tv API show method.""" + if not imdb_id: + return [] + url = f"{self.BASE_URL}/{item_type}/{imdb_id}/aliases" + try: + response = self.request_handler.execute(HttpMethod.GET, url, timeout=30) + if response.is_ok and response.data: + aliases = {} + for ns in response.data: + country = ns.country + title = ns.title + if title and title.startswith("Anime-"): + title = title[len("Anime-"):] + if country not in aliases: + aliases[country] = [] + if title not in 
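Illustrative calls against the TraktAPI list helpers above; the user and list names are made up, and the client is assumed to have been bootstrapped into kink:

    from kink import di
    from program.apis.trakt_api import TraktAPI

    trakt = di[TraktAPI]
    trending = trakt.get_trending_items("movies", limit=5)         # paginated via _fetch_data
    user, list_name = trakt.extract_user_list_from_url(
        "https://trakt.tv/users/example-user/lists/example-list"   # hypothetical URL
    )
    items = trakt.get_user_list(user, list_name) if user else []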
aliases[country]: + aliases[country].append(title) + return aliases + except Exception: + logger.error(f"Failed to get show aliases for {imdb_id}") + return {} + + + def create_item_from_imdb_id(self, imdb_id: str, type: str = None) -> Optional[MediaItem]: + """Wrapper for trakt.tv API search method.""" + url = f"{self.BASE_URL}/search/imdb/{imdb_id}?extended=full" + response = self.request_handler.execute(HttpMethod.GET, url, timeout=30) + if not response.is_ok or not response.data: + logger.error( + f"Failed to create item using imdb id: {imdb_id}") # This returns an empty list for response.data + return None + + data = next((d for d in response.data if d.type == type), None) + if not data: + clause = lambda x: x.type == type if type else x in ["show", "movie", "season", "episode"] + data = next((d for d in response.data if clause), None) + + return self.map_item_from_data(getattr(data, data.type), data.type) if data else None + + def get_imdbid_from_tmdb(self, tmdb_id: str, type: str = "movie") -> Optional[str]: + """Wrapper for trakt.tv API search method.""" + url = f"{self.BASE_URL}/search/tmdb/{tmdb_id}" # ?extended=full + response = self.request_handler.execute(HttpMethod.GET, url, timeout=30) + if not response.is_ok or not response.data: + return None + imdb_id = self._get_imdb_id_from_list(response.data, id_type="tmdb", _id=tmdb_id, type=type) + if imdb_id and imdb_id.startswith("tt"): + return imdb_id + logger.error(f"Failed to fetch imdb_id for tmdb_id: {tmdb_id}") + return None + + def get_imdbid_from_tvdb(self, tvdb_id: str, type: str = "show") -> Optional[str]: + """Wrapper for trakt.tv API search method.""" + url = f"{self.BASE_URL}/search/tvdb/{tvdb_id}" + response = self.request_handler.execute(HttpMethod.GET, url, timeout=30) + if not response.is_ok or not response.data: + return None + imdb_id = self._get_imdb_id_from_list(response.data, id_type="tvdb", _id=tvdb_id, type=type) + if imdb_id and imdb_id.startswith("tt"): + return imdb_id + logger.error(f"Failed to fetch imdb_id for tvdb_id: {tvdb_id}") + return None + + def resolve_short_url(self, short_url) -> Union[str, None]: + """Resolve short URL to full URL""" + try: + response = self.request_handler.execute(HttpMethod.GET, endpoint=short_url, headers={"Content-Type": "application/json", "Accept": "text/html"}) + if response.is_ok: + return response.response.url + else: + logger.error(f"Failed to resolve short URL: {short_url} (with status code: {response.status_code})") + return None + except RequestException as e: + logger.error(f"Error resolving short URL: {str(e)}") + return None + + def map_item_from_data(self, data, item_type: str, show_genres: List[str] = None) -> Optional[MediaItem]: + """Map trakt.tv API data to MediaItemContainer.""" + if item_type not in ["movie", "show", "season", "episode"]: + logger.debug(f"Unknown item type {item_type} for {data.title} not found in list of acceptable items") + return None + + formatted_aired_at = self._get_formatted_date(data, item_type) + genres = getattr(data, "genres", None) or show_genres + + item = { + "trakt_id": data.ids.trakt, + "title": getattr(data, "title", None), + "year": getattr(data, "year", None), + "status": getattr(data, "status", None), + "aired_at": formatted_aired_at, + "imdb_id": getattr(data.ids, "imdb", None), + "tvdb_id": getattr(data.ids, "tvdb", None), + "tmdb_id": getattr(data.ids, "tmdb", None), + "genres": genres, + "network": getattr(data, "network", None), + "country": getattr(data, "country", None), + "language": getattr(data, 
"language", None), + "requested_at": datetime.now(), + "type": item_type, + } + + item["is_anime"] = ( + ("anime" in genres) + or ("animation" in genres and (item["country"] in ("jp", "kr") or item["language"] == "ja")) + if genres + else False + ) + + match item_type: + case "movie": + item["aliases"] = self.get_show_aliases(item["imdb_id"], "movies") + return Movie(item) + case "show": + item["aliases"] = self.get_show_aliases(item["imdb_id"], "shows") + return Show(item) + case "season": + item["number"] = data.number + return Season(item) + case "episode": + item["number"] = data.number + return Episode(item) + case _: + logger.error(f"Unknown item type {item_type} for {data.title} not found in list of acceptable items") + return None + + def perform_oauth_flow(self) -> str: + """Initiate the OAuth flow and return the authorization URL.""" + if not self.oauth_client_id or not self.oauth_client_secret or not self.oauth_redirect_uri: + logger.error("OAuth settings not found in Trakt settings") + raise TraktAPIError("OAuth settings not found in Trakt settings") + + params = { + "response_type": "code", + "client_id": self.oauth_client_id, + "redirect_uri": self.oauth_redirect_uri, + } + return f"{self.BASE_URL}/oauth/authorize?{urlencode(params)}" + + def handle_oauth_callback(self, api_key:str, code: str) -> bool: + """Handle the OAuth callback and exchange the code for an access token.""" + if not self.oauth_client_id or not self.oauth_client_secret or not self.oauth_redirect_uri: + logger.error("OAuth settings not found in Trakt settings") + return False + + token_url = f"{self.BASE_URL}/oauth/token" + payload = { + "code": code, + "client_id": self.oauth_client_id, + "client_secret": self.oauth_client_secret, + "redirect_uri": self.oauth_redirect_uri, + "grant_type": "authorization_code", + } + headers = self.headers.copy() + headers["trakt-api-key"] = api_key + response = self.request_handler.execute(HttpMethod.POST, token_url, data=payload, additional_headers=headers) + if response.is_ok: + token_data = response.data + self.settings.access_token = token_data.get("access_token") + self.settings.refresh_token = token_data.get("refresh_token") + settings_manager.save() # Save the tokens to settings + return True + else: + logger.error(f"Failed to obtain OAuth token: {response.status_code}") + return False + + def _get_imdb_id_from_list(self, namespaces: List[SimpleNamespace], id_type: str = None, _id: str = None, + type: str = None) -> Optional[str]: + """Get the imdb_id from the list of namespaces.""" + if not any([id_type, _id, type]): + return None + + for ns in namespaces: + if type == "movie" and hasattr(ns, "movie") and hasattr(ns.movie, "ids") and hasattr(ns.movie.ids, "imdb"): + if str(getattr(ns.movie.ids, id_type)) == str(_id): + return ns.movie.ids.imdb + elif type == "show" and hasattr(ns, "show") and hasattr(ns.show, "ids") and hasattr(ns.show.ids, "imdb"): + if str(getattr(ns.show.ids, id_type)) == str(_id): + return ns.show.ids.imdb + elif type == "season" and hasattr(ns, "season") and hasattr(ns.season, "ids") and hasattr(ns.season.ids, + "imdb"): + if str(getattr(ns.season.ids, id_type)) == str(_id): + return ns.season.ids.imdb + elif type == "episode" and hasattr(ns, "episode") and hasattr(ns.episode, "ids") and hasattr(ns.episode.ids, + "imdb"): + if str(getattr(ns.episode.ids, id_type)) == str(_id): + return ns.episode.ids.imdb + return None + + def _get_formatted_date(self, data, item_type: str) -> Optional[datetime]: + """Get the formatted aired date from the 
data.""" + if item_type in ["show", "season", "episode"] and (first_aired := getattr(data, "first_aired", None)): + return datetime.strptime(first_aired, "%Y-%m-%dT%H:%M:%S.%fZ") + if item_type == "movie" and (released := getattr(data, "released", None)): + return datetime.strptime(released, "%Y-%m-%d") + return None \ No newline at end of file diff --git a/src/program/db/__init__.py b/src/program/db/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/program/db/db.py b/src/program/db/db.py new file mode 100644 index 0000000..a2cc302 --- /dev/null +++ b/src/program/db/db.py @@ -0,0 +1,66 @@ +from loguru import logger +from sqla_wrapper import SQLAlchemy +from sqlalchemy import text + +from alembic import command +from alembic.config import Config +from program.settings.manager import settings_manager +from program.utils import root_dir + +engine_options = { + "pool_size": 25, # Prom: Set to 1 when debugging sql queries + "max_overflow": 25, # Prom: Set to 0 when debugging sql queries + "pool_pre_ping": True, # Prom: Set to False when debugging sql queries + "pool_recycle": 1800, # Prom: Set to -1 when debugging sql queries + "echo": False, # Prom: Set to true when debugging sql queries +} + +# Prom: This is a good place to set the statement timeout for the database when debugging. +# @event.listens_for(Engine, "connect") +# def set_statement_timeout(dbapi_connection, connection_record): +# cursor = dbapi_connection.cursor() +# cursor.execute("SET statement_timeout = 300000") +# cursor.close() + +db_host = settings_manager.settings.database.host +db = SQLAlchemy(db_host, engine_options=engine_options) + +def get_db(): + _db = db.Session() + try: + yield _db + finally: + _db.close() + +def create_database_if_not_exists(): + """Create the database if it doesn't exist.""" + db_name = db_host.split("/")[-1] + db_base_host = "/".join(db_host.split("/")[:-1]) + try: + temp_db = SQLAlchemy(db_base_host, engine_options=engine_options) + with temp_db.engine.connect() as connection: + connection.execution_options(isolation_level="AUTOCOMMIT").execute(text(f"CREATE DATABASE {db_name}")) + return True + except Exception as e: + logger.error(f"Failed to create database {db_name}: {e}") + return False + +def vacuum_and_analyze_index_maintenance() -> None: + # PROM: Use the raw connection to execute VACUUM outside a transaction + try: + with db.engine.connect() as connection: + connection = connection.execution_options(isolation_level="AUTOCOMMIT") + connection.execute(text("VACUUM;")) + connection.execute(text("ANALYZE;")) + logger.log("DATABASE","VACUUM and ANALYZE completed successfully.") + except Exception as e: + logger.error(f"Error during VACUUM and ANALYZE: {e}") + +def run_migrations(): + """Run any pending migrations on startup""" + try: + alembic_cfg = Config(root_dir / "src" / "alembic.ini") + command.upgrade(alembic_cfg, "head") + except Exception as e: + logger.error(f"Migration failed: {e}") + raise \ No newline at end of file diff --git a/src/program/db/db_functions.py b/src/program/db/db_functions.py new file mode 100644 index 0000000..ed373a9 --- /dev/null +++ b/src/program/db/db_functions.py @@ -0,0 +1,506 @@ +import os +import shutil +from threading import Event +from typing import TYPE_CHECKING + +from loguru import logger +from sqlalchemy import delete, exists, insert, inspect, or_, select, text +from sqlalchemy.orm import Session, joinedload, selectinload + +import alembic +from program.media.stream import Stream, StreamBlacklistRelation, StreamRelation +from 
program.services.libraries.symlink import fix_broken_symlinks +from program.settings.manager import settings_manager +from program.utils import root_dir + +from .db import db + +if TYPE_CHECKING: + from program.media.item import MediaItem + +def get_item_by_id(item_id: str, item_types = None, session = None): + if not item_id: + return None + + from program.media.item import MediaItem, Season, Show + _session = session if session else db.Session() + + with _session: + query = (select(MediaItem) + .where(MediaItem.id == item_id) + .options( + selectinload(Show.seasons) + .selectinload(Season.episodes) + )) + if item_types: + query = query.where(MediaItem.type.in_(item_types)) + + item = _session.execute(query).unique().scalar_one_or_none() + if item: + _session.expunge(item) + return item + +def get_items_by_ids(ids: list, item_types = None, session = None): + items = [] + for id in ids: + items.append(get_item_by_id(id, item_types, session)) + return items + +def get_item_by_external_id(imdb_id: str = None, tvdb_id: int = None, tmdb_id: int = None, session = None): + from program.media.item import MediaItem, Season, Show + + _session = session if session else db.Session() + query = ( + select(MediaItem) + .options( + joinedload(Show.seasons) + .joinedload(Season.episodes), + joinedload(Season.episodes) + ) + ) + + if imdb_id: + query = query.where(MediaItem.imdb_id == imdb_id) + elif tvdb_id: + query = query.where(MediaItem.tvdb_id == tvdb_id) + elif tmdb_id: + query = query.where(MediaItem.tmdb_id == tmdb_id) + else: + raise ValueError("One of the external ids must be given") + + with _session: + item = _session.execute(query).unique().scalar_one_or_none() + if item: + _session.expunge(item) + return item + +def delete_media_item(item: "MediaItem"): + """Delete a MediaItem and all its associated relationships.""" + with db.Session() as session: + item = session.merge(item) + session.delete(item) + session.commit() + +def delete_media_item_by_id(media_item_id: str, batch_size: int = 30): + """Delete a Movie or Show by _id. 
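Hedged lookup examples for the query helpers above; the ids are placeholders, and session handling is left to the helpers (they open their own session when none is passed):

    from program.db import db_functions

    item = db_functions.get_item_by_external_id(imdb_id="tt0133093")      # example id
    if item:
        # Restrict a by-id lookup to particular types; show lookups eager-load seasons and episodes.
        same_item = db_functions.get_item_by_id(item.id, item_types=["movie", "show"])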
If it's a Show, delete its Seasons and Episodes in batches, committing after each batch.""" + from sqlalchemy.exc import IntegrityError + + from program.media.item import Episode, MediaItem, Movie, Season, Show + + if not media_item_id: + logger.error("Item ID can not be empty") + return False + + with db.Session() as session: + try: + # First, retrieve the media item's type + media_item_type = session.execute( + select(MediaItem.type) + .where(MediaItem.id == media_item_id) + ).scalar_one_or_none() + + if not media_item_type: + logger.error(f"No item found with ID {media_item_id}") + return False + + if media_item_type == "show": + season_ids = session.execute( + select(Season.id).where(Season.parent_id == media_item_id) + ).scalars().all() + + delete_seasons_and_episodes(session, season_ids, batch_size) + session.execute(delete(Show).where(Show.id == media_item_id)) + + if media_item_type == "movie": + session.execute(delete(Movie).where(Movie.id == media_item_id)) + + if media_item_type == "season": + delete_seasons_and_episodes(session, [media_item_id], batch_size) + session.execute(delete(Season).where(Season.id == media_item_id)) + + if media_item_type == "episode": + session.execute(delete(Episode).where(Episode.id == media_item_id)) + + session.execute(delete(MediaItem).where(MediaItem.id == media_item_id)) + session.commit() + return True + + except IntegrityError as e: + logger.error(f"Integrity error while deleting media item with ID {media_item_id}: {e}") + session.rollback() + return False + except Exception as e: + logger.error(f"Unexpected error while deleting media item with ID {media_item_id}: {e}") + session.rollback() + return False + +def delete_seasons_and_episodes(session, season_ids: list[str], batch_size: int = 30): + """Delete seasons and episodes of a show in batches, committing after each batch.""" + from program.media.item import Episode, Season + from program.media.stream import StreamBlacklistRelation, StreamRelation + from program.media.subtitle import Subtitle + + for season_id in season_ids: + # Load the season object + season = session.query(Season).get(season_id) + + # Bulk delete related streams and subtitles + session.execute(delete(StreamRelation).where(StreamRelation.parent_id == season_id)) + session.execute(delete(StreamBlacklistRelation).where(StreamBlacklistRelation.media_item_id == season_id)) + session.execute(delete(Subtitle).where(Subtitle.parent_id == season_id)) + session.commit() # Commit after bulk deletion + + # Delete episodes in batches for each season + while True: + episode_ids = session.execute( + select(Episode.id).where(Episode.parent_id == season_id).limit(batch_size) + ).scalars().all() + + if not episode_ids: + break + + session.execute(delete(Episode).where(Episode.id.in_(episode_ids))) + session.commit() # Commit after each batch of episodes + + session.delete(season) # Delete the season itself + session.commit() # Commit after deleting the season + +def reset_media_item(item: "MediaItem"): + """Reset a MediaItem.""" + with db.Session() as session: + item = session.merge(item) + item.reset() + session.commit() + +def reset_streams(item: "MediaItem"): + """Reset streams associated with a MediaItem.""" + with db.Session() as session: + + session.execute( + delete(StreamRelation).where(StreamRelation.parent_id == item.id) + ) + + session.execute( + delete(StreamBlacklistRelation).where(StreamBlacklistRelation.media_item_id == item.id) + ) + session.commit() + +def clear_streams(item: "MediaItem"): + """Clear all streams for a 
media item.""" + reset_streams(item) + +def clear_streams_by_id(media_item_id: str): + """Clear all streams for a media item by the MediaItem id.""" + with db.Session() as session: + session.execute( + delete(StreamRelation).where(StreamRelation.parent_id == media_item_id) + ) + session.execute( + delete(StreamBlacklistRelation).where(StreamBlacklistRelation.media_item_id == media_item_id) + ) + session.commit() + +def blacklist_stream(item: "MediaItem", stream: Stream, session: Session = None) -> bool: + """Blacklist a stream for a media item.""" + close_session = False + if session is None: + session = db.Session() + item = session.execute(select(type(item)).where(type(item).id == item.id)).unique().scalar_one() + close_session = True + + try: + item = session.merge(item) + association_exists = session.query( + session.query(StreamRelation) + .filter(StreamRelation.parent_id == item.id) + .filter(StreamRelation.child_id == stream.id) + .exists() + ).scalar() + + if association_exists: + session.execute( + delete(StreamRelation) + .where(StreamRelation.parent_id == item.id) + .where(StreamRelation.child_id == stream.id) + ) + session.execute( + insert(StreamBlacklistRelation) + .values(media_item_id=item.id, stream_id=stream.id) + ) + item.store_state() + session.commit() + return True + return False + finally: + if close_session: + session.close() + +def unblacklist_stream(item: "MediaItem", stream: Stream, session: Session = None) -> bool: + close_session = False + if session is None: + session = db.Session() + item = session.execute(select(type(item)).where(type(item).id == item.id)).unique().scalar_one() + close_session = True + + try: + item = session.merge(item) + association_exists = session.query( + session.query(StreamBlacklistRelation) + .filter(StreamBlacklistRelation.media_item_id == item.id) + .filter(StreamBlacklistRelation.stream_id == stream.id) + .exists() + ).scalar() + + if association_exists: + session.execute( + delete(StreamBlacklistRelation) + .where(StreamBlacklistRelation.media_item_id == item.id) + .where(StreamBlacklistRelation.stream_id == stream.id) + ) + session.execute( + insert(StreamRelation) + .values(parent_id=item.id, child_id=stream.id) + ) + item.store_state() + session.commit() + return True + return False + finally: + if close_session: + session.close() + +def get_item_ids(session, item_id: str) -> tuple[str, list[str]]: + """Get the item ID and all related item IDs for a given MediaItem.""" + from program.media.item import Episode, MediaItem, Season + + item_type = session.query(MediaItem.type).filter(MediaItem.id == item_id).scalar() + related_ids = [] + + if item_type == "show": + season_ids = session.execute( + select(Season.id).where(Season.parent_id == item_id) + ).scalars().all() + + for season_id in season_ids: + episode_ids = session.execute( + select(Episode.id).where(Episode.parent_id == season_id) + ).scalars().all() + related_ids.extend(episode_ids) + related_ids.extend(season_ids) + + elif item_type == "season": + episode_ids = session.execute( + select(Episode.id).where(Episode.parent_id == item_id) + ).scalars().all() + related_ids.extend(episode_ids) + + return item_id, related_ids + +def run_thread_with_db_item(fn, service, program, event: Event, cancellation_event: Event): + from program.media.item import MediaItem + if event: + with db.Session() as session: + if event.item_id: + input_item = get_item_by_id(event.item_id, session=session) + if input_item: + input_item = session.merge(input_item) + res = next(fn(input_item), None) 
+ if res: + if isinstance(res, tuple): + item, run_at = res + res = item.id, run_at + else: + item = res + res = item.id + if not isinstance(item, MediaItem): + logger.log("PROGRAM", f"Service {service.__name__} emitted {item} from input item {input_item} of type {type(item).__name__}, backing off.") + program.em.remove_id_from_queues(input_item.id) + + if not cancellation_event.is_set(): + # Update parent item + if input_item.type == "episode": + input_item.parent.parent.store_state() + elif input_item.type == "season": + input_item.parent.store_state() + else: + input_item.store_state() + session.commit() + return res + # This is in bad need of indexing... + if event.content_item: + indexed_item = next(fn(event.content_item), None) + if indexed_item is None: + logger.debug(f"Unable to index {event.content_item.log_string}") + return None + indexed_item.store_state() + session.add(indexed_item) + item_id = indexed_item.id + if not cancellation_event.is_set(): + session.commit() + return item_id + # Content services dont pass events, get ready for a ride! + else: + for i in fn(): + if isinstance(i, MediaItem): + i = [i] + if isinstance(i, list): + for item in i: + if isinstance(item, MediaItem): + program.em.add_item(item, service) + return None + +def hard_reset_database(): + """Resets the database to a fresh state while maintaining migration capability.""" + logger.log("DATABASE", "Starting Hard Reset of Database") + + # Store current alembic version before reset + current_version = None + try: + with db.engine.connect() as connection: + result = connection.execute(text("SELECT version_num FROM alembic_version")) + current_version = result.scalar() + except Exception: + pass + + with db.engine.connect() as connection: + # Ensure we're in AUTOCOMMIT mode for PostgreSQL schema operations + connection = connection.execution_options(isolation_level="AUTOCOMMIT") + + try: + # Terminate existing connections for PostgreSQL + if db.engine.name == "postgresql": + connection.execute(text(""" + SELECT pg_terminate_backend(pid) + FROM pg_stat_activity + WHERE datname = current_database() + AND pid <> pg_backend_pid() + """)) + + # Drop and recreate schema + connection.execute(text("DROP SCHEMA public CASCADE")) + connection.execute(text("CREATE SCHEMA public")) + connection.execute(text("GRANT ALL ON SCHEMA public TO public")) + logger.log("DATABASE", "Schema reset complete") + + # For SQLite, drop all tables + elif db.engine.name == "sqlite": + connection.execute(text("PRAGMA foreign_keys = OFF")) + + # Get all tables + tables = connection.execute(text( + "SELECT name FROM sqlite_master WHERE type='table'" + )).scalars().all() + + # Drop each table + for table in tables: + connection.execute(text(f"DROP TABLE IF EXISTS {table}")) + + connection.execute(text("PRAGMA foreign_keys = ON")) + logger.log("DATABASE", "All tables dropped") + + # Recreate all tables + db.Model.metadata.create_all(connection) + logger.log("DATABASE", "All tables recreated") + + # If we had a previous version, restore it + if current_version: + connection.execute(text("CREATE TABLE IF NOT EXISTS alembic_version (version_num VARCHAR(32) NOT NULL)")) + connection.execute(text("INSERT INTO alembic_version (version_num) VALUES (:version)"), + {"version": current_version}) + logger.log("DATABASE", f"Restored alembic version to: {current_version}") + else: + # Stamp with head version if no previous version + alembic.stamp("head") + logger.log("DATABASE", "Database stamped with head revision") + + except Exception as e: + 
logger.error(f"Error during database reset: {str(e)}") + raise + + logger.log("DATABASE", "Hard Reset Complete") + + # Verify database state + try: + with db.engine.connect() as connection: + # Check if all tables exist + inspector = inspect(db.engine) + all_tables = inspector.get_table_names() + logger.log("DATABASE", f"Verified tables: {', '.join(all_tables)}") + + # Verify alembic version + result = connection.execute(text("SELECT version_num FROM alembic_version")) + version = result.scalar() + logger.log("DATABASE", f"Verified alembic version: {version}") + + except Exception as e: + logger.error(f"Error verifying database state: {str(e)}") + raise + +def hard_reset_database_pre_migration(): + """Resets the database to a fresh state.""" + logger.log("DATABASE", "Starting Hard Reset of Database") + + # Disable foreign key checks temporarily + with db.engine.connect() as connection: + if db.engine.name == "sqlite": + connection.execute(text("PRAGMA foreign_keys = OFF")) + elif db.engine.name == "postgresql": + connection.execute(text("SET CONSTRAINTS ALL DEFERRED")) + + try: + for table in reversed(db.Model.metadata.sorted_tables): + try: + table.drop(connection, checkfirst=True) + logger.log("DATABASE", f"Dropped table: {table.name}") + except Exception as e: + logger.log("DATABASE", f"Error dropping table {table.name}: {str(e)}") + + try: + connection.execute(text("DROP TABLE IF EXISTS alembic_version")) + logger.log("DATABASE", "Alembic version table dropped") + except Exception as e: + logger.log("DATABASE", f"Error dropping alembic_version table: {str(e)}") + + db.Model.metadata.create_all(connection) + logger.log("DATABASE", "All tables recreated") + + # Re-enable foreign key checks + if db.engine.name == "sqlite": + connection.execute(text("PRAGMA foreign_keys = ON")) + elif db.engine.name == "postgresql": + connection.execute(text("SET CONSTRAINTS ALL IMMEDIATE")) + + connection.commit() + except Exception as e: + connection.rollback() + logger.log("DATABASE", f"Error during database reset: {str(e)}") + raise + + try: + alembic_dir = root_dir / "data" / "alembic" + logger.log("DATABASE", "Removing Alembic Directory") + shutil.rmtree(alembic_dir, ignore_errors=True) + os.makedirs(alembic_dir, exist_ok=True) + alembic.init(alembic_dir) + logger.log("DATABASE", "Alembic reinitialized") + except Exception as e: + logger.log("DATABASE", f"Error reinitializing Alembic: {str(e)}") + + logger.log("DATABASE", "Pre Migration - Hard Reset Complete") + +# Hard Reset Database +reset = os.getenv("HARD_RESET", None) +if reset is not None and reset.lower() in ["true","1"]: + hard_reset_database() + exit(0) + +# Hard Reset Database +reset = os.getenv("HARD_RESET_PRE_MIGRATION", None) +if reset is not None and reset.lower() in ["true","1"]: + hard_reset_database_pre_migration() + exit(0) + +# Repair Symlinks +if os.getenv("REPAIR_SYMLINKS", None) is not None and os.getenv("REPAIR_SYMLINKS").lower() in ["true","1"]: + fix_broken_symlinks(settings_manager.settings.symlink.library_path, settings_manager.settings.symlink.rclone_path) + exit(0) diff --git a/src/program/managers/event_manager.py b/src/program/managers/event_manager.py new file mode 100644 index 0000000..9c932de --- /dev/null +++ b/src/program/managers/event_manager.py @@ -0,0 +1,327 @@ +import os +import threading +import traceback +from concurrent.futures import Future, ThreadPoolExecutor +from datetime import datetime +from queue import Empty +from threading import Lock +from typing import Dict, List + +from loguru import logger 
+from pydantic import BaseModel + +from program.db import db_functions +from program.db.db import db +from program.managers.sse_manager import sse_manager +from program.types import Event + + +class EventUpdate(BaseModel): + item_id: int + emitted_by: str + run_at: str + + +class EventManager: + """ + Manages the execution of services and the handling of events. + """ + def __init__(self): + self._executors: list[ThreadPoolExecutor] = [] + self._futures: list[Future] = [] + self._queued_events: list[Event] = [] + self._running_events: list[Event] = [] + self._canceled_futures: list[Future] = [] + self._content_queue: list[Event] = [] + self.mutex = Lock() + + def _find_or_create_executor(self, service_cls) -> ThreadPoolExecutor: + """ + Finds or creates a ThreadPoolExecutor for the given service class. + + Args: + service_cls (type): The service class for which to find or create an executor. + + Returns: + concurrent.futures.ThreadPoolExecutor: The executor for the service class. + """ + service_name = service_cls.__name__ + env_var_name = f"{service_name.upper()}_MAX_WORKERS" + max_workers = int(os.environ.get(env_var_name, 1)) + for executor in self._executors: + if executor["_name_prefix"] == service_name: + logger.debug(f"Executor for {service_name} found.") + return executor["_executor"] + _executor = ThreadPoolExecutor(thread_name_prefix=service_name, max_workers=max_workers) + self._executors.append({ "_name_prefix": service_name, "_executor": _executor }) + logger.debug(f"Created executor for {service_name} with {max_workers} max workers.") + return _executor + + def _process_future(self, future, service): + """ + Processes the result of a future once it is completed. + + Args: + future (concurrent.futures.Future): The future to process. + service (type): The service class associated with the future. + """ + + if future.cancelled(): + logger.debug(f"Future for {future} was cancelled.") + return # Skip processing if the future was cancelled + + try: + result = future.result() + if future in self._futures: + self._futures.remove(future) + sse_manager.publish_event("event_update", self.get_event_updates()) + if isinstance(result, tuple): + item_id, timestamp = result + else: + item_id, timestamp = result, datetime.now() + if item_id: + self.remove_event_from_running(future.event) + logger.debug(f"Removed {future.event.log_message} from running events.") + if future.cancellation_event.is_set(): + logger.debug(f"Future with Item ID: {item_id} was cancelled discarding results...") + return + self.add_event(Event(emitted_by=service, item_id=item_id, run_at=timestamp)) + except Exception as e: + logger.error(f"Error in future for {future}: {e}") + logger.exception(traceback.format_exc()) + log_message = f"Service {service.__name__} executed" + if hasattr(future, "event"): + log_message += f" with {future.event.log_message}" + logger.debug(log_message) + + def add_event_to_queue(self, event: Event, log_message=True): + """ + Adds an event to the queue. + + Args: + event (Event): The event to add to the queue. 
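+ log_message (bool, optional): If False, suppress the debug log entry. Defaults to True.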
+ """ + with self.mutex: + self._queued_events.append(event) + if log_message: + logger.debug(f"Added {event.log_message} to the queue.") + + def remove_event_from_queue(self, event: Event): + with self.mutex: + self._queued_events.remove(event) + logger.debug(f"Removed {event.log_message} from the queue.") + + def remove_event_from_running(self, event: Event): + with self.mutex: + if event in self._running_events: + self._running_events.remove(event) + logger.debug(f"Removed {event.log_message} from running events.") + + def remove_id_from_queue(self, item_id: str): + """ + Removes an item from the queue. + + Args: + item (MediaItem): The event item to remove from the queue. + """ + for event in self._queued_events: + if event.item_id == item_id: + self.remove_event_from_queue(event) + + def add_event_to_running(self, event: Event): + """ + Adds an event to the running events. + + Args: + event (Event): The event to add to the running events. + """ + with self.mutex: + self._running_events.append(event) + logger.debug(f"Added {event.log_message} to running events.") + + def remove_id_from_running(self, item_id: str): + """ + Removes an item from the running events. + + Args: + item (MediaItem): The event item to remove from the running events. + """ + for event in self._running_events: + if event.item_id == item_id: + self.remove_event_from_running(event) + + def remove_id_from_queues(self, item_id: str): + """ + Removes an item from both the queue and the running events. + + Args: + item_id: The event item to remove from both the queue and the running events. + """ + self.remove_id_from_queue(item_id) + self.remove_id_from_running(item_id) + + def submit_job(self, service, program, event=None): + """ + Submits a job to be executed by the service. + + Args: + service (type): The service class to execute. + program (Program): The program containing the service. + item (Event, optional): The event item to process. Defaults to None. + """ + log_message = f"Submitting service {service.__name__} to be executed" + item_id = None + # Content services dont provide an event. + if event: + log_message += f" with {event.log_message}" + logger.debug(log_message) + + cancellation_event = threading.Event() + executor = self._find_or_create_executor(service) + future = executor.submit(db_functions.run_thread_with_db_item, program.all_services[service].run, service, program, event, cancellation_event) + future.cancellation_event = cancellation_event + if event: + future.event = event + self._futures.append(future) + sse_manager.publish_event("event_update", self.get_event_updates()) + future.add_done_callback(lambda f:self._process_future(f, service)) + + def cancel_job(self, item_id: str, suppress_logs=False): + """ + Cancels a job associated with the given item. + + Args: + item_id (int): The event item whose job needs to be canceled. + suppress_logs (bool): If True, suppresses debug logging for this operation. 
+ """ + with db.Session() as session: + item_id, related_ids = db_functions.get_item_ids(session, item_id) + ids_to_cancel = set([item_id] + related_ids) + + for future in self._futures: + future_item_id = None + future_related_ids = [] + + if hasattr(future, "event") and hasattr(future.event, "item_id"): + future_item = future.event.item_id + future_item_id, future_related_ids = db_functions.get_item_ids(session, future_item) + + if future_item_id in ids_to_cancel or any(rid in ids_to_cancel for rid in future_related_ids): + self.remove_id_from_queues(future_item) + if not future.done() and not future.cancelled(): + try: + future.cancellation_event.set() + future.cancel() + self._canceled_futures.append(future) + except Exception as e: + if not suppress_logs: + logger.error(f"Error cancelling future for {future_item.log_string}: {str(e)}") + + + logger.debug(f"Canceled jobs for Item ID {item_id} and its children.") + + def next(self): + """ + Get the next event in the queue with an optional timeout. + + Raises: + Empty: If the queue is empty. + + Returns: + Event: The next event in the queue. + """ + while True: + if self._queued_events: + with self.mutex: + self._queued_events.sort(key=lambda event: event.run_at) + if datetime.now() >= self._queued_events[0].run_at: + event = self._queued_events.pop(0) + return event + raise Empty + + def _id_in_queue(self, _id): + """ + Checks if an item with the given ID is in the queue. + + Args: + _id (str): The ID of the item to check. + + Returns: + bool: True if the item is in the queue, False otherwise. + """ + return any(event.item_id == _id for event in self._queued_events) + + def _id_in_running_events(self, _id): + """ + Checks if an item with the given ID is in the running events. + + Args: + _id (str): The ID of the item to check. + + Returns: + bool: True if the item is in the running events, False otherwise. + """ + return any(event.item_id == _id for event in self._running_events) + + def add_event(self, event: Event): + """ + Adds an event to the queue if it is not already present in the queue or running events. + + Args: + event (Event): The event to add to the queue. + + Returns: + bool: True if the event was added to the queue, False if it was already present. + """ + # Check if the event's item is a show and its seasons or episodes are in the queue or running + with db.Session() as session: + item_id, related_ids = db_functions.get_item_ids(session, event.item_id) + if item_id: + if self._id_in_queue(item_id): + logger.debug(f"Item ID {item_id} is already in the queue, skipping.") + return False + if self._id_in_running_events(item_id): + logger.debug(f"Item ID {item_id} is already running, skipping.") + return False + for related_id in related_ids: + if self._id_in_queue(related_id) or self._id_in_running_events(related_id): + logger.debug(f"Child of Item ID {item_id} is already in the queue or running, skipping.") + return False + else: + imdb_id = event.content_item.imdb_id + if any(event.content_item and event.content_item.imdb_id == imdb_id for event in self._queued_events): + logger.debug(f"Content Item with IMDB ID {imdb_id} is already in queue, skipping.") + return False + if any( + event.content_item and event.content_item.imdb_id == imdb_id for event in self._running_events + ): + logger.debug(f"Content Item with IMDB ID {imdb_id} is already running, skipping.") + return False + + self.add_event_to_queue(event) + return True + + def add_item(self, item, service="Manual"): + """ + Adds an item to the queue as an event. 
+ + Args: + item (MediaItem): The item to add to the queue as an event. + """ + # For now lets just support imdb_ids... + if not db_functions.get_item_by_external_id(imdb_id=item.imdb_id): + if self.add_event(Event(service, content_item=item)): + logger.debug(f"Added item with IMDB ID {item.imdb_id} to the queue.") + + + def get_event_updates(self) -> Dict[str, List[str]]: + events = [future.event for future in self._futures if hasattr(future, "event")] + event_types = ["Scraping", "Downloader", "Symlinker", "Updater", "PostProcessing"] + + updates = {event_type: [] for event_type in event_types} + for event in events: + table = updates.get(event.emitted_by.__name__, None) + if table is not None: + table.append(event.item_id) + + return updates \ No newline at end of file diff --git a/src/program/managers/sse_manager.py b/src/program/managers/sse_manager.py new file mode 100644 index 0000000..4e757ff --- /dev/null +++ b/src/program/managers/sse_manager.py @@ -0,0 +1,27 @@ +import asyncio +from typing import Any, Dict + + +class ServerSentEventManager: + def __init__(self): + self.event_queues: Dict[str, asyncio.Queue] = {} + + def publish_event(self, event_type: str, data: Any): + if not data: + return + if event_type not in self.event_queues: + self.event_queues[event_type] = asyncio.Queue() + self.event_queues[event_type].put_nowait(data) + + async def subscribe(self, event_type: str): + if event_type not in self.event_queues: + self.event_queues[event_type] = asyncio.Queue() + + while True: + try: + data = await asyncio.wait_for(self.event_queues[event_type].get(), timeout=1.0) + yield f"{data}\n" + except asyncio.TimeoutError: + pass + +sse_manager = ServerSentEventManager() \ No newline at end of file diff --git a/src/program/media/__init__.py b/src/program/media/__init__.py new file mode 100644 index 0000000..1bf3c79 --- /dev/null +++ b/src/program/media/__init__.py @@ -0,0 +1,2 @@ +from .item import Episode, MediaItem, Movie, Season, Show, ShowMediaType, MovieMediaType, MediaType # noqa +from .state import States # noqa diff --git a/src/program/media/item.py b/src/program/media/item.py new file mode 100644 index 0000000..5b08196 --- /dev/null +++ b/src/program/media/item.py @@ -0,0 +1,733 @@ +"""MediaItem class""" +from datetime import datetime +from enum import Enum +from pathlib import Path +from typing import List, Optional, Self + +import sqlalchemy +from loguru import logger +from RTN import parse +from sqlalchemy import Index +from sqlalchemy.orm import Mapped, mapped_column, object_session, relationship + +from program.db.db import db +from program.managers.sse_manager import sse_manager +from program.media.state import States +from program.media.subtitle import Subtitle + +from ..db.db_functions import blacklist_stream, reset_streams +from .stream import Stream + +class ShowMediaType(Enum): + """Show media types""" + Show = "show" + Season = "season" + Episode = "episode" + +class MovieMediaType(Enum): + """Media types""" + Movie = "movie" + +class MediaType(Enum): + """Combined media types""" + Show = ShowMediaType.Show.value + Season = ShowMediaType.Season.value + Episode = ShowMediaType.Episode.value + Movie = MovieMediaType.Movie.value + +class MediaItem(db.Model): + """MediaItem class""" + __tablename__ = "MediaItem" + id: Mapped[str] = mapped_column(sqlalchemy.String, primary_key=True) + imdb_id: Mapped[Optional[str]] = mapped_column(sqlalchemy.String, nullable=True) + tvdb_id: Mapped[Optional[str]] = mapped_column(sqlalchemy.String, nullable=True) + tmdb_id: 
Mapped[Optional[str]] = mapped_column(sqlalchemy.String, nullable=True) + number: Mapped[Optional[int]] = mapped_column(sqlalchemy.Integer, nullable=True) + type: Mapped[str] = mapped_column(sqlalchemy.String, nullable=False) + requested_at: Mapped[Optional[datetime]] = mapped_column(sqlalchemy.DateTime, default=datetime.now()) + requested_by: Mapped[Optional[str]] = mapped_column(sqlalchemy.String, nullable=True) + requested_id: Mapped[Optional[int]] = mapped_column(sqlalchemy.Integer, nullable=True) + indexed_at: Mapped[Optional[datetime]] = mapped_column(sqlalchemy.DateTime, nullable=True) + scraped_at: Mapped[Optional[datetime]] = mapped_column(sqlalchemy.DateTime, nullable=True) + scraped_times: Mapped[Optional[int]] = mapped_column(sqlalchemy.Integer, default=0) + active_stream: Mapped[Optional[dict]] = mapped_column(sqlalchemy.JSON, nullable=True) + streams: Mapped[list[Stream]] = relationship(secondary="StreamRelation", back_populates="parents", lazy="selectin", cascade="all") + blacklisted_streams: Mapped[list[Stream]] = relationship(secondary="StreamBlacklistRelation", back_populates="blacklisted_parents", lazy="selectin", cascade="all") + symlinked: Mapped[Optional[bool]] = mapped_column(sqlalchemy.Boolean, default=False) + symlinked_at: Mapped[Optional[datetime]] = mapped_column(sqlalchemy.DateTime, nullable=True) + symlinked_times: Mapped[Optional[int]] = mapped_column(sqlalchemy.Integer, default=0) + symlink_path: Mapped[Optional[str]] = mapped_column(sqlalchemy.String, nullable=True) + file: Mapped[Optional[str]] = mapped_column(sqlalchemy.String, nullable=True) + folder: Mapped[Optional[str]] = mapped_column(sqlalchemy.String, nullable=True) + alternative_folder: Mapped[Optional[str]] = mapped_column(sqlalchemy.String, nullable=True) + aliases: Mapped[Optional[dict]] = mapped_column(sqlalchemy.JSON, default={}) + is_anime: Mapped[Optional[bool]] = mapped_column(sqlalchemy.Boolean, default=False) + title: Mapped[Optional[str]] = mapped_column(sqlalchemy.String, nullable=True) + + network: Mapped[Optional[str]] = mapped_column(sqlalchemy.String, nullable=True) + country: Mapped[Optional[str]] = mapped_column(sqlalchemy.String, nullable=True) + language: Mapped[Optional[str]] = mapped_column(sqlalchemy.String, nullable=True) + aired_at: Mapped[Optional[datetime]] = mapped_column(sqlalchemy.DateTime, nullable=True) + year: Mapped[Optional[int]] = mapped_column(sqlalchemy.Integer, nullable=True) + genres: Mapped[Optional[List[str]]] = mapped_column(sqlalchemy.JSON, nullable=True) + key: Mapped[Optional[str]] = mapped_column(sqlalchemy.String, nullable=True) + guid: Mapped[Optional[str]] = mapped_column(sqlalchemy.String, nullable=True) + update_folder: Mapped[Optional[str]] = mapped_column(sqlalchemy.String, nullable=True) + overseerr_id: Mapped[Optional[int]] = mapped_column(sqlalchemy.Integer, nullable=True) + last_state: Mapped[Optional[States]] = mapped_column(sqlalchemy.Enum(States), default=States.Unknown) + subtitles: Mapped[list[Subtitle]] = relationship(Subtitle, back_populates="parent", lazy="selectin", cascade="all, delete-orphan") + + __mapper_args__ = { + "polymorphic_identity": "mediaitem", + "polymorphic_on":"type", + "with_polymorphic":"*", + } + + __table_args__ = ( + Index("ix_mediaitem_type", "type"), + Index("ix_mediaitem_requested_by", "requested_by"), + Index("ix_mediaitem_title", "title"), + Index("ix_mediaitem_imdb_id", "imdb_id"), + Index("ix_mediaitem_tvdb_id", "tvdb_id"), + Index("ix_mediaitem_tmdb_id", "tmdb_id"), + Index("ix_mediaitem_network", 
"network"), + Index("ix_mediaitem_country", "country"), + Index("ix_mediaitem_language", "language"), + Index("ix_mediaitem_aired_at", "aired_at"), + Index("ix_mediaitem_year", "year"), + Index("ix_mediaitem_overseerr_id", "overseerr_id"), + Index("ix_mediaitem_type_aired_at", "type", "aired_at"), # Composite index + ) + + def __init__(self, item: dict | None) -> None: + if item is None: + return + self.id = self.__generate_composite_key(item) + self.requested_at = item.get("requested_at", datetime.now()) + self.requested_by = item.get("requested_by") + self.requested_id = item.get("requested_id") + + self.indexed_at = None + + self.scraped_at = None + self.scraped_times = 0 + self.active_stream = item.get("active_stream", {}) + self.streams: List[Stream] = [] + self.blacklisted_streams: List[Stream] = [] + + self.symlinked = False + self.symlinked_at = None + self.symlinked_times = 0 + + self.file = None + self.folder = None + self.is_anime = item.get("is_anime", False) + + # Media related + self.title = item.get("title") + self.imdb_id = item.get("imdb_id") + if self.imdb_id: + self.imdb_link = f"https://www.imdb.com/title/{self.imdb_id}/" + self.tvdb_id = item.get("tvdb_id") + self.tmdb_id = item.get("tmdb_id") + self.network = item.get("network") + self.country = item.get("country") + self.language = item.get("language") + self.aired_at = item.get("aired_at") + self.year = item.get("year") + self.genres = item.get("genres", []) + self.aliases = item.get("aliases", {}) + + # Plex related + self.key = item.get("key") + self.guid = item.get("guid") + self.update_folder = item.get("update_folder") + + # Overseerr related + self.overseerr_id = item.get("overseerr_id") + + # Post-processing + self.subtitles = item.get("subtitles", []) + + @staticmethod + def __generate_composite_key(item: dict) -> str | None: + """Generate a composite key for the item.""" + trakt_id = item.get("trakt_id", None) + if not trakt_id: + return None + item_type = item.get("type", "unknown") + return f"{item_type}_{trakt_id}" + + def store_state(self, given_state=None) -> tuple[States, States]: + """Store the state of the item.""" + previous_state = self.last_state + new_state = given_state if given_state else self._determine_state() + if previous_state and previous_state != new_state: + sse_manager.publish_event("item_update", {"last_state": previous_state, "new_state": new_state, "item_id": self.id}) + self.last_state = new_state + return (previous_state, new_state) + + def is_stream_blacklisted(self, stream: Stream): + """Check if a stream is blacklisted for this item.""" + session = object_session(self) + if session: + session.refresh(self, attribute_names=["blacklisted_streams"]) + return stream in self.blacklisted_streams + + def blacklist_active_stream(self): + stream = next((stream for stream in self.streams if stream.infohash == self.active_stream.get("infohash", None)), None) + if stream: + self.blacklist_stream(stream) + else: + logger.debug(f"No active stream for {self.log_string}, will not blacklist") + + def blacklist_stream(self, stream: Stream): + value = blacklist_stream(self, stream) + if value: + logger.debug(f"Blacklisted stream {stream.infohash} for {self.log_string}") + return value + + @property + def is_released(self) -> bool: + """Check if an item has been released.""" + if self.aired_at and self.aired_at <= datetime.now(): + return True + return False + + @property + def state(self): + return self._determine_state() + + def _determine_state(self): + if self.key or self.update_folder == 
"updated": + return States.Completed + elif self.symlinked: + return States.Symlinked + elif self.file and self.folder: + return States.Downloaded + elif self.is_scraped(): + return States.Scraped + elif self.title and self.is_released: + return States.Indexed + elif self.title: + return States.Unreleased + elif self.imdb_id and self.requested_by: + return States.Requested + return States.Unknown + + def copy_other_media_attr(self, other): + """Copy attributes from another media item.""" + self.title = getattr(other, "title", None) + self.tvdb_id = getattr(other, "tvdb_id", None) + self.tmdb_id = getattr(other, "tmdb_id", None) + self.network = getattr(other, "network", None) + self.country = getattr(other, "country", None) + self.language = getattr(other, "language", None) + self.aired_at = getattr(other, "aired_at", None) + self.genres = getattr(other, "genres", []) + self.is_anime = getattr(other, "is_anime", False) + self.overseerr_id = getattr(other, "overseerr_id", None) + + def is_scraped(self): + session = object_session(self) + if session and session.is_active: + try: + session.refresh(self, attribute_names=["blacklisted_streams"]) + return (len(self.streams) > 0 and any(stream not in self.blacklisted_streams for stream in self.streams)) + except (sqlalchemy.exc.InvalidRequestError, sqlalchemy.orm.exc.DetachedInstanceError): + return False + return False + + def to_dict(self): + """Convert item to dictionary (API response)""" + return { + "id": str(self.id), + "title": self.title, + "type": self.__class__.__name__, + "imdb_id": self.imdb_id if hasattr(self, "imdb_id") else None, + "tvdb_id": self.tvdb_id if hasattr(self, "tvdb_id") else None, + "tmdb_id": self.tmdb_id if hasattr(self, "tmdb_id") else None, + "state": self.last_state.name, + "imdb_link": self.imdb_link if hasattr(self, "imdb_link") else None, + "aired_at": str(self.aired_at), + "genres": self.genres if hasattr(self, "genres") else None, + "is_anime": self.is_anime if hasattr(self, "is_anime") else False, + "guid": self.guid, + "requested_at": str(self.requested_at), + "requested_by": self.requested_by, + "scraped_at": str(self.scraped_at), + "scraped_times": self.scraped_times, + } + + def to_extended_dict(self, abbreviated_children=False, with_streams=True): + """Convert item to extended dictionary (API response)""" + dict = self.to_dict() + match self: + case Show(): + dict["seasons"] = ( + [season.to_extended_dict(with_streams=with_streams) for season in self.seasons] + if not abbreviated_children + else self.represent_children + ) + case Season(): + dict["episodes"] = ( + [episode.to_extended_dict(with_streams=with_streams) for episode in self.episodes] + if not abbreviated_children + else self.represent_children + ) + dict["language"] = self.language if hasattr(self, "language") else None + dict["country"] = self.country if hasattr(self, "country") else None + dict["network"] = self.network if hasattr(self, "network") else None + if with_streams: + dict["streams"] = getattr(self, "streams", []) + dict["blacklisted_streams"] = getattr(self, "blacklisted_streams", []) + dict["active_stream"] = ( + self.active_stream if hasattr(self, "active_stream") else None + ) + dict["number"] = self.number if hasattr(self, "number") else None + dict["symlinked"] = self.symlinked if hasattr(self, "symlinked") else None + dict["symlinked_at"] = ( + self.symlinked_at if hasattr(self, "symlinked_at") else None + ) + dict["symlinked_times"] = ( + self.symlinked_times if hasattr(self, "symlinked_times") else None + ) + 
dict["is_anime"] = self.is_anime if hasattr(self, "is_anime") else None + dict["update_folder"] = ( + self.update_folder if hasattr(self, "update_folder") else None + ) + dict["file"] = self.file if hasattr(self, "file") else None + dict["folder"] = self.folder if hasattr(self, "folder") else None + dict["symlink_path"] = self.symlink_path if hasattr(self, "symlink_path") else None + dict["subtitles"] = [subtitle.to_dict() for subtitle in self.subtitles] if hasattr(self, "subtitles") else [] + return dict + + def __iter__(self): + for attr, _ in vars(self).items(): + yield attr + + def __eq__(self, other): + if type(other) == type(self): + return self.id == other.id + return False + + def copy(self, other): + if other is None: + return None + self.id = getattr(other, "id", None) + if hasattr(self, "number"): + self.number = getattr(other, "number", None) + return self + + def get(self, key, default=None): + """Get item attribute""" + return getattr(self, key, default) + + def set(self, key, value): + """Set item attribute""" + _set_nested_attr(self, key, value) + + def get_top_title(self) -> str: + """Get the top title of the item.""" + if self.type == "season": + return self.parent.title + elif self.type == "episode": + return self.parent.parent.title + else: + return self.title + + def get_top_imdb_id(self) -> str: + """Get the imdb_id of the item at the top of the hierarchy.""" + if self.type == "season": + return self.parent.imdb_id + elif self.type == "episode": + return self.parent.parent.imdb_id + return self.imdb_id + + def get_aliases(self) -> dict: + """Get the aliases of the item.""" + if self.type == "season": + return self.parent.aliases + elif self.type == "episode": + return self.parent.parent.aliases + else: + return self.aliases + + def __hash__(self): + return hash(self.id) + + def reset(self): + """Reset item attributes.""" + if self.type == "show": + for season in self.seasons: + for episode in season.episodes: + episode._reset() + season._reset() + elif self.type == "season": + for episode in self.episodes: + episode._reset() + self._reset() + if self.title: + self.store_state(States.Indexed) + else: + self.store_state(States.Requested) + + def _reset(self): + """Reset item attributes for rescraping.""" + if self.symlink_path: + if Path(self.symlink_path).exists(): + Path(self.symlink_path).unlink() + self.set("symlink_path", None) + + try: + for subtitle in self.subtitles: + subtitle.remove() + except Exception as e: + logger.warning(f"Failed to remove subtitles for {self.log_string}: {str(e)}") + + self.set("file", None) + self.set("folder", None) + self.set("alternative_folder", None) + + reset_streams(self) + self.active_stream = {} + + self.set("active_stream", {}) + self.set("symlinked", False) + self.set("symlinked_at", None) + self.set("update_folder", None) + self.set("scraped_at", None) + + self.set("symlinked_times", 0) + self.set("scraped_times", 0) + + logger.debug(f"Item {self.log_string} has been reset") + + @property + def log_string(self): + return self.title or self.id + + @property + def collection(self): + return self.parent.collection if self.parent else self.id + + +class Movie(MediaItem): + """Movie class""" + __tablename__ = "Movie" + id: Mapped[str] = mapped_column(sqlalchemy.ForeignKey("MediaItem.id"), primary_key=True) + __mapper_args__ = { + "polymorphic_identity": "movie", + "polymorphic_load": "inline", + } + + def copy(self, other): + super().copy(other) + return self + + def __init__(self, item): + self.type = MovieMediaType.Movie.value 
+ self.file = item.get("file", None) + super().__init__(item) + + def __repr__(self): + return f"Movie:{self.log_string}:{self.state.name}" + + def __hash__(self): + return super().__hash__() + +class Show(MediaItem): + """Show class""" + __tablename__ = "Show" + id: Mapped[str] = mapped_column(sqlalchemy.ForeignKey("MediaItem.id"), primary_key=True) + seasons: Mapped[List["Season"]] = relationship(back_populates="parent", foreign_keys="Season.parent_id", lazy="joined", cascade="all, delete-orphan", order_by="Season.number") + + __mapper_args__ = { + "polymorphic_identity": "show", + "polymorphic_load": "inline", + } + + def __init__(self, item): + self.type = ShowMediaType.Show.value + self.locations = item.get("locations", []) + self.seasons: list[Season] = item.get("seasons", []) + self.propagate_attributes_to_childs() + super().__init__(item) + + def get_season_index_by_id(self, item_id): + """Find the index of an season by its _id.""" + for i, season in enumerate(self.seasons): + if season.id == item_id: + return i + return None + + def _determine_state(self): + if all(season.state == States.Completed for season in self.seasons): + return States.Completed + if any(season.state in [States.Ongoing, States.Unreleased] for season in self.seasons): + return States.Ongoing + if any( + season.state in (States.Completed, States.PartiallyCompleted) + for season in self.seasons + ): + return States.PartiallyCompleted + if any(season.state == States.Symlinked for season in self.seasons): + return States.Symlinked + if any(season.state == States.Downloaded for season in self.seasons): + return States.Downloaded + if self.is_scraped(): + return States.Scraped + if any(season.state == States.Indexed for season in self.seasons): + return States.Indexed + + if all(not season.is_released for season in self.seasons): + return States.Unreleased + if any(season.state == States.Requested for season in self.seasons): + return States.Requested + return States.Unknown + + def store_state(self, given_state: States =None) -> None: + for season in self.seasons: + season.store_state(given_state) + super().store_state(given_state) + + def __repr__(self): + return f"Show:{self.log_string}:{self.state.name}" + + def __hash__(self): + return super().__hash__() + + def copy(self, other): + super(Show, self).copy(other) + self.seasons = [] + for season in other.seasons: + new_season = Season(item={}).copy(season, False) + new_season.parent = self + self.seasons.append(new_season) + return self + + def fill_in_missing_children(self, other: Self): + existing_seasons = [s.number for s in self.seasons] + for s in other.seasons: + if s.number not in existing_seasons: + self.add_season(s) + else: + existing_season = next( + es for es in self.seasons if s.number == es.number + ) + existing_season.fill_in_missing_children(s) + + def add_season(self, season): + """Add season to show""" + if season.number not in [s.number for s in self.seasons]: + season.is_anime = self.is_anime + self.seasons.append(season) + season.parent = self + self.seasons = sorted(self.seasons, key=lambda s: s.number) + + def propagate_attributes_to_childs(self): + """Propagate show attributes to seasons and episodes if they are empty or do not match.""" + # Important attributes that need to be connected. 
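+ # A value is only copied down when the child's value is falsy and the show's value is not None; existing child values are never overwritten.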
+ attributes = ["genres", "country", "network", "language", "is_anime"] + + def propagate(target, source): + for attr in attributes: + source_value = getattr(source, attr, None) + target_value = getattr(target, attr, None) + # Check if the attribute source is not falsy (none, false, 0, []) + # and if the target is not None we set the source to the target + if (not target_value) and source_value is not None: + setattr(target, attr, source_value) + + for season in self.seasons: + propagate(season, self) + for episode in season.episodes: + propagate(episode, self) + + +class Season(MediaItem): + """Season class""" + __tablename__ = "Season" + id: Mapped[str] = mapped_column(sqlalchemy.ForeignKey("MediaItem.id"), primary_key=True) + parent_id: Mapped[str] = mapped_column(sqlalchemy.ForeignKey("Show.id"), use_existing_column=True) + parent: Mapped["Show"] = relationship(lazy=False, back_populates="seasons", foreign_keys="Season.parent_id") + episodes: Mapped[List["Episode"]] = relationship(back_populates="parent", foreign_keys="Episode.parent_id", lazy="joined", cascade="all, delete-orphan", order_by="Episode.number") + __mapper_args__ = { + "polymorphic_identity": "season", + "polymorphic_load": "inline", + } + + def store_state(self, given_state: States = None) -> None: + for episode in self.episodes: + episode.store_state(given_state) + super().store_state(given_state) + + def __init__(self, item): + self.type = ShowMediaType.Season.value + self.number = item.get("number", None) + self.episodes: list[Episode] = item.get("episodes", []) + super().__init__(item) + if self.parent and isinstance(self.parent, Show): + self.is_anime = self.parent.is_anime + + def _determine_state(self): + if len(self.episodes) > 0: + if all(episode.state == States.Completed for episode in self.episodes): + return States.Completed + if any(episode.state == States.Unreleased for episode in self.episodes): + if any(episode.state != States.Unreleased for episode in self.episodes): + return States.Ongoing + if any(episode.state == States.Completed for episode in self.episodes): + return States.PartiallyCompleted + if any(episode.state == States.Symlinked for episode in self.episodes): + return States.Symlinked + if any(episode.file and episode.folder for episode in self.episodes): + return States.Downloaded + if self.is_scraped(): + return States.Scraped + if any(episode.state == States.Indexed for episode in self.episodes): + return States.Indexed + if any(episode.state == States.Unreleased for episode in self.episodes): + return States.Unreleased + if any(episode.state == States.Requested for episode in self.episodes): + return States.Requested + return States.Unknown + else: + return States.Unreleased + + @property + def is_released(self) -> bool: + return any(episode.is_released for episode in self.episodes) + + def __repr__(self): + return f"Season:{self.number}:{self.state.name}" + + def __hash__(self): + return super().__hash__() + + def copy(self, other, copy_parent=True): + super(Season, self).copy(other) + for episode in other.episodes: + new_episode = Episode(item={}).copy(episode, False) + new_episode.parent = self + self.episodes.append(new_episode) + if copy_parent and other.parent: + self.parent = Show(item={}).copy(other.parent) + return self + + def fill_in_missing_children(self, other: Self): + existing_episodes = [s.number for s in self.episodes] + for e in other.episodes: + if e.number not in existing_episodes: + self.add_episode(e) + + def get_episode_index_by_id(self, item_id: int): + """Find the 
index of an episode by its _id.""" + for i, episode in enumerate(self.episodes): + if episode.id == item_id: + return i + return None + + def represent_children(self): + return [e.log_string for e in self.episodes] + + def add_episode(self, episode): + """Add episode to season""" + if episode.number in [e.number for e in self.episodes]: + return + + episode.is_anime = self.is_anime + self.episodes.append(episode) + episode.parent = self + self.episodes = sorted(self.episodes, key=lambda e: e.number) + + @property + def log_string(self): + return self.parent.log_string + " S" + str(self.number).zfill(2) + + def get_top_title(self) -> str: + return self.parent.title + + +class Episode(MediaItem): + """Episode class""" + __tablename__ = "Episode" + id: Mapped[str] = mapped_column(sqlalchemy.ForeignKey("MediaItem.id"), primary_key=True) + parent_id: Mapped[str] = mapped_column(sqlalchemy.ForeignKey("Season.id"), use_existing_column=True) + parent: Mapped["Season"] = relationship(back_populates="episodes", foreign_keys="Episode.parent_id", lazy="joined") + + __mapper_args__ = { + "polymorphic_identity": "episode", + "polymorphic_load": "inline", + } + + def __init__(self, item): + self.type = ShowMediaType.Episode.value + self.number = item.get("number", None) + self.file = item.get("file", None) + super().__init__(item) + if self.parent and isinstance(self.parent, Season): + self.is_anime = self.parent.parent.is_anime + + def __repr__(self): + return f"Episode:{self.number}:{self.state.name}" + + def __hash__(self): + return super().__hash__() + + def copy(self, other, copy_parent=True): + super(Episode, self).copy(other) + if copy_parent and other.parent: + self.parent = Season(item={}).copy(other.parent) + return self + + def get_file_episodes(self) -> List[int]: + if not self.file or not isinstance(self.file, str): + raise ValueError("The file attribute must be a non-empty string.") + # return list of episodes + return parse(self.file).episodes + + @property + def log_string(self): + return f"{self.parent.log_string}E{self.number:02}" + + def get_top_title(self) -> str: + return self.parent.parent.title + + def get_top_year(self) -> Optional[int]: + return self.parent.parent.year + + def get_season_year(self) -> Optional[int]: + return self.parent.year + + +def _set_nested_attr(obj, key, value): + if "." 
in key: + parts = key.split(".", 1) + current_key, rest_of_keys = parts[0], parts[1] + + if not hasattr(obj, current_key): + raise AttributeError(f"Object does not have the attribute '{current_key}'.") + + current_obj = getattr(obj, current_key) + _set_nested_attr(current_obj, rest_of_keys, value) + elif isinstance(obj, dict): + obj[key] = value + else: + setattr(obj, key, value) + + +def copy_item(item): + """Copy an item""" + if isinstance(item, Movie): + return Movie(item={}).copy(item) + elif isinstance(item, Show): + return Show(item={}).copy(item) + elif isinstance(item, Season): + return Season(item={}).copy(item) + elif isinstance(item, Episode): + return Episode(item={}).copy(item) + elif isinstance(item, MediaItem): + return MediaItem(item={}).copy(item) + else: + raise ValueError(f"Cannot copy item of type {type(item)}") \ No newline at end of file diff --git a/src/program/media/state.py b/src/program/media/state.py new file mode 100644 index 0000000..d0652e6 --- /dev/null +++ b/src/program/media/state.py @@ -0,0 +1,15 @@ +from enum import Enum + + +class States(Enum): + Unknown = "Unknown" + Unreleased = "Unreleased" + Ongoing = "Ongoing" + Requested = "Requested" + Indexed = "Indexed" + Scraped = "Scraped" + Downloaded = "Downloaded" + Symlinked = "Symlinked" + Completed = "Completed" + PartiallyCompleted = "PartiallyCompleted" + Failed = "Failed" diff --git a/src/program/media/stream.py b/src/program/media/stream.py new file mode 100644 index 0000000..19964f5 --- /dev/null +++ b/src/program/media/stream.py @@ -0,0 +1,70 @@ +from typing import TYPE_CHECKING + +import sqlalchemy +from RTN import Torrent +from sqlalchemy import Index +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from program.db.db import db + +if TYPE_CHECKING: + from program.media.item import MediaItem + + +class StreamRelation(db.Model): + __tablename__ = "StreamRelation" + + id: Mapped[int] = mapped_column(sqlalchemy.Integer, primary_key=True) + parent_id: Mapped[str] = mapped_column(sqlalchemy.ForeignKey("MediaItem.id", ondelete="CASCADE")) + child_id: Mapped[int] = mapped_column(sqlalchemy.ForeignKey("Stream.id", ondelete="CASCADE")) + + __table_args__ = ( + Index("ix_streamrelation_parent_id", "parent_id"), + Index("ix_streamrelation_child_id", "child_id"), + ) + +class StreamBlacklistRelation(db.Model): + __tablename__ = "StreamBlacklistRelation" + + id: Mapped[int] = mapped_column(sqlalchemy.Integer, primary_key=True) + media_item_id: Mapped[str] = mapped_column(sqlalchemy.ForeignKey("MediaItem.id", ondelete="CASCADE")) + stream_id: Mapped[int] = mapped_column(sqlalchemy.ForeignKey("Stream.id", ondelete="CASCADE")) + + __table_args__ = ( + Index("ix_streamblacklistrelation_media_item_id", "media_item_id"), + Index("ix_streamblacklistrelation_stream_id", "stream_id"), + ) + +class Stream(db.Model): + __tablename__ = "Stream" + + id: Mapped[int] = mapped_column(sqlalchemy.Integer, primary_key=True) + infohash: Mapped[str] = mapped_column(sqlalchemy.String, nullable=False) + raw_title: Mapped[str] = mapped_column(sqlalchemy.String, nullable=False) + parsed_title: Mapped[str] = mapped_column(sqlalchemy.String, nullable=False) + rank: Mapped[int] = mapped_column(sqlalchemy.Integer, nullable=False) + lev_ratio: Mapped[float] = mapped_column(sqlalchemy.Float, nullable=False) + + parents: Mapped[list["MediaItem"]] = relationship(secondary="StreamRelation", back_populates="streams", lazy="selectin") + blacklisted_parents: Mapped[list["MediaItem"]] = 
relationship(secondary="StreamBlacklistRelation", back_populates="blacklisted_streams", lazy="selectin") + + __table_args__ = ( + Index("ix_stream_infohash", "infohash"), + Index("ix_stream_raw_title", "raw_title"), + Index("ix_stream_parsed_title", "parsed_title"), + Index("ix_stream_rank", "rank"), + ) + + def __init__(self, torrent: Torrent): + self.raw_title = torrent.raw_title + self.infohash = torrent.infohash + self.parsed_title = torrent.data.parsed_title + self.parsed_data = torrent.data + self.rank = torrent.rank + self.lev_ratio = torrent.lev_ratio + + def __hash__(self): + return self.infohash + + def __eq__(self, other): + return isinstance(other, Stream) and self.infohash == other.infohash \ No newline at end of file diff --git a/src/program/media/subtitle.py b/src/program/media/subtitle.py new file mode 100644 index 0000000..b09bb88 --- /dev/null +++ b/src/program/media/subtitle.py @@ -0,0 +1,46 @@ +from pathlib import Path +from typing import TYPE_CHECKING + +from sqlalchemy import ForeignKey, Index, Integer, String +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from program.db.db import db + +if TYPE_CHECKING: + from program.media.item import MediaItem + + +class Subtitle(db.Model): + __tablename__ = "Subtitle" + + id: Mapped[int] = mapped_column(Integer, primary_key=True) + language: Mapped[str] = mapped_column(String) + file: Mapped[str] = mapped_column(String, nullable=True) + + parent_id: Mapped[str] = mapped_column(ForeignKey("MediaItem.id", ondelete="CASCADE")) + parent: Mapped["MediaItem"] = relationship("MediaItem", back_populates="subtitles") + + __table_args__ = ( + Index("ix_subtitle_language", "language"), + Index("ix_subtitle_file", "file"), + Index("ix_subtitle_parent_id", "parent_id"), + ) + + def __init__(self, optional={}): + for key in optional.keys(): + self.language = key + self.file = optional[key] + + def remove(self): + if self.file and Path(self.file).exists(): + Path(self.file).unlink() + self.file = None + return self + + def to_dict(self): + return { + "id": str(self.id), + "language": self.language, + "file": self.file, + "parent_id": self.parent_id + } \ No newline at end of file diff --git a/src/program/program.py b/src/program/program.py new file mode 100644 index 0000000..eb92ada --- /dev/null +++ b/src/program/program.py @@ -0,0 +1,478 @@ +import linecache +import os +import threading +import time +from concurrent.futures import ThreadPoolExecutor, as_completed +from datetime import datetime +from queue import Empty + +from apscheduler.schedulers.background import BackgroundScheduler +from rich.live import Live + +from program.apis import bootstrap_apis +from program.managers.event_manager import EventManager +from program.media.item import Episode, MediaItem, Movie, Season, Show +from program.media.state import States +from program.services.content import ( + Listrr, + Mdblist, + Overseerr, + PlexWatchlist, + TraktContent, +) +from program.services.downloaders import Downloader +from program.services.indexers.trakt import TraktIndexer +from program.services.libraries import SymlinkLibrary +from program.services.libraries.symlink import fix_broken_symlinks +from program.services.post_processing import PostProcessing +from program.services.scrapers import Scraping +from program.services.updaters import Updater +from program.settings.manager import settings_manager +from program.settings.models import get_version +from program.utils import data_dir_path +from program.utils.logging import create_progress_bar, log_cleaner, 
logger + +from .state_transition import process_event +from .symlink import Symlinker +from .types import Event, ProcessedEvent, Service + +if settings_manager.settings.tracemalloc: + import tracemalloc + +from sqlalchemy import func, select, text + +from program.db import db_functions +from program.db.db import ( + create_database_if_not_exists, + db, + run_migrations, + vacuum_and_analyze_index_maintenance, +) + + +class Program(threading.Thread): + """Program class""" + + def __init__(self): + super().__init__(name="Riven") + self.initialized = False + self.running = False + self.services = {} + self.enable_trace = settings_manager.settings.tracemalloc + self.em = EventManager() + if self.enable_trace: + tracemalloc.start() + self.malloc_time = time.monotonic()-50 + self.last_snapshot = None + + def initialize_apis(self): + bootstrap_apis() + + def initialize_services(self): + """Initialize all services.""" + self.requesting_services = { + Overseerr: Overseerr(), + PlexWatchlist: PlexWatchlist(), + Listrr: Listrr(), + Mdblist: Mdblist(), + TraktContent: TraktContent(), + } + + self.services = { + TraktIndexer: TraktIndexer(), + Scraping: Scraping(), + Symlinker: Symlinker(), + Updater: Updater(), + Downloader: Downloader(), + # Depends on Symlinker having created the file structure so needs + # to run after it + SymlinkLibrary: SymlinkLibrary(), + PostProcessing: PostProcessing(), + } + + self.all_services = { + **self.requesting_services, + **self.services + } + + if len([service for service in self.requesting_services.values() if service.initialized]) == 0: + logger.warning("No content services initialized, items need to be added manually.") + if not self.services[Scraping].initialized: + logger.error("No Scraping service initialized, you must enable at least one.") + if not self.services[Downloader].initialized: + logger.error("No Downloader service initialized, you must enable at least one.") + if not self.services[Updater].initialized: + logger.error("No Updater service initialized, you must enable at least one.") + + if self.enable_trace: + self.last_snapshot = tracemalloc.take_snapshot() + + + def validate(self) -> bool: + """Validate that all required services are initialized.""" + return all(s.initialized for s in self.services.values()) + + def validate_database(self) -> bool: + """Validate that the database is accessible.""" + try: + with db.Session() as session: + session.execute(text("SELECT 1")) + return True + except Exception: + logger.error("Database connection failed. 
Is the database running?") + return False + + def start(self): + latest_version = get_version() + logger.log("PROGRAM", f"Riven v{latest_version} starting!") + + settings_manager.register_observer(self.initialize_apis) + settings_manager.register_observer(self.initialize_services) + os.makedirs(data_dir_path, exist_ok=True) + + if not settings_manager.settings_file.exists(): + logger.log("PROGRAM", "Settings file not found, creating default settings") + settings_manager.save() + + self.initialize_apis() + self.initialize_services() + + max_worker_env_vars = [var for var in os.environ if var.endswith("_MAX_WORKERS")] + if max_worker_env_vars: + for var in max_worker_env_vars: + logger.log("PROGRAM", f"{var} is set to {os.environ[var]} workers") + + if not self.validate(): + logger.log("PROGRAM", "----------------------------------------------") + logger.error("Riven is waiting for configuration to start!") + logger.log("PROGRAM", "----------------------------------------------") + + while not self.validate(): + time.sleep(1) + + if not self.validate_database(): + # We should really make this configurable via frontend... + logger.log("PROGRAM", "Database not found, trying to create database") + if not create_database_if_not_exists(): + logger.error("Failed to create database, exiting") + return + logger.success("Database created successfully") + + run_migrations() + self._init_db_from_symlinks() + + with db.Session() as session: + movies_symlinks = session.execute(select(func.count(Movie.id)).where(Movie.symlinked == True)).scalar_one() # noqa + episodes_symlinks = session.execute(select(func.count(Episode.id)).where(Episode.symlinked == True)).scalar_one() # noqa + total_symlinks = movies_symlinks + episodes_symlinks + total_movies = session.execute(select(func.count(Movie.id))).scalar_one() + total_shows = session.execute(select(func.count(Show.id))).scalar_one() + total_seasons = session.execute(select(func.count(Season.id))).scalar_one() + total_episodes = session.execute(select(func.count(Episode.id))).scalar_one() + total_items = session.execute(select(func.count(MediaItem.id))).scalar_one() + + logger.log("ITEM", f"Movies: {total_movies} (Symlinks: {movies_symlinks})") + logger.log("ITEM", f"Shows: {total_shows}") + logger.log("ITEM", f"Seasons: {total_seasons}") + logger.log("ITEM", f"Episodes: {total_episodes} (Symlinks: {episodes_symlinks})") + logger.log("ITEM", f"Total Items: {total_items} (Symlinks: {total_symlinks})") + + self.executors = [] + self.scheduler = BackgroundScheduler() + self._schedule_services() + self._schedule_functions() + + super().start() + self.scheduler.start() + logger.success("Riven is running!") + self.initialized = True + + def _retry_library(self) -> None: + """Retry items that failed to download.""" + with db.Session() as session: + count = session.execute( + select(func.count(MediaItem.id)) + .where(MediaItem.last_state.not_in([States.Completed, States.Unreleased])) + .where(MediaItem.type.in_(["movie", "show"])) + ).scalar_one() + + if count == 0: + return + + logger.log("PROGRAM", f"Starting retry process for {count} items.") + + items_query = ( + select(MediaItem.id) + .where(MediaItem.last_state.not_in([States.Completed, States.Unreleased])) + .where(MediaItem.type.in_(["movie", "show"])) + .order_by(MediaItem.requested_at.desc()) + ) + + result = session.execute(items_query) + for item_id in result.scalars(): + self.em.add_event(Event(emitted_by="RetryLibrary", item_id=item_id)) + + def _update_ongoing(self) -> None: + """Update state for 
ongoing and unreleased items.""" + with db.Session() as session: + item_ids = session.execute( + select(MediaItem.id) + .where(MediaItem.type.in_(["movie", "episode"])) + .where(MediaItem.last_state.in_([States.Ongoing, States.Unreleased])) + ).scalars().all() + + if not item_ids: + logger.debug("No ongoing or unreleased items to update.") + return + + logger.debug(f"Updating state for {len(item_ids)} ongoing and unreleased items.") + + counter = 0 + for item_id in item_ids: + try: + item = session.execute(select(MediaItem).filter_by(id=item_id)).unique().scalar_one_or_none() + if item: + previous_state, new_state = item.store_state() + if previous_state != new_state: + self.em.add_event(Event(emitted_by="UpdateOngoing", item_id=item_id)) + logger.debug(f"Updated state for {item.log_string} ({item.id}) from {previous_state.name} to {new_state.name}") + counter += 1 + session.merge(item) + session.commit() + except Exception as e: + logger.error(f"Failed to update state for item with ID {item_id}: {e}") + + logger.debug(f"Found {counter} items with updated state.") + + def _schedule_functions(self) -> None: + """Schedule each service based on its update interval.""" + scheduled_functions = { + self._update_ongoing: {"interval": 60 * 60 * 24}, + self._retry_library: {"interval": 60 * 60 * 24}, + log_cleaner: {"interval": 60 * 60}, + vacuum_and_analyze_index_maintenance: {"interval": 60 * 60 * 24}, + } + + if settings_manager.settings.symlink.repair_symlinks: + scheduled_functions[fix_broken_symlinks] = { + "interval": 60 * 60 * settings_manager.settings.symlink.repair_interval, + "args": [settings_manager.settings.symlink.library_path, settings_manager.settings.symlink.rclone_path] + } + + for func, config in scheduled_functions.items(): + self.scheduler.add_job( + func, + "interval", + seconds=config["interval"], + args=config.get("args"), + id=f"{func.__name__}", + max_instances=config.get("max_instances", 1), + replace_existing=True, + next_run_time=datetime.now(), + misfire_grace_time=30 + ) + logger.debug(f"Scheduled {func.__name__} to run every {config['interval']} seconds.") + + def _schedule_services(self) -> None: + """Schedule each service based on its update interval.""" + scheduled_services = {**self.requesting_services, SymlinkLibrary: self.services[SymlinkLibrary]} + for service_cls, service_instance in scheduled_services.items(): + if not service_instance.initialized: + continue + if not (update_interval := getattr(service_instance.settings, "update_interval", False)): + continue + + self.scheduler.add_job( + self.em.submit_job, + "interval", + seconds=update_interval, + args=[service_cls, self], + id=f"{service_cls.__name__}_update", + max_instances=1, + replace_existing=True, + next_run_time=datetime.now() if service_cls != SymlinkLibrary else None, + coalesce=False, + ) + logger.debug(f"Scheduled {service_cls.__name__} to run every {update_interval} seconds.") + + def display_top_allocators(self, snapshot, key_type="lineno", limit=10): + import psutil + process = psutil.Process(os.getpid()) + top_stats = snapshot.compare_to(self.last_snapshot, "lineno") + + logger.debug("Top %s lines" % limit) + for index, stat in enumerate(top_stats[:limit], 1): + frame = stat.traceback[0] + # replace "/path/to/module/file.py" with "module/file.py" + filename = os.sep.join(frame.filename.split(os.sep)[-2:]) + logger.debug("#%s: %s:%s: %.1f KiB" + % (index, filename, frame.lineno, stat.size / 1024)) + line = linecache.getline(frame.filename, frame.lineno).strip() + if line: + logger.debug(" 
%s" % line) + + other = top_stats[limit:] + if other: + size = sum(stat.size for stat in other) + logger.debug("%s other: %.1f MiB" % (len(other), size / (1024 * 1024))) + total = sum(stat.size for stat in top_stats) + logger.debug("Total allocated size: %.1f MiB" % (total / (1024 * 1024))) + logger.debug(f"Process memory: {process.memory_info().rss / (1024 * 1024):.2f} MiB") + + def dump_tracemalloc(self): + if time.monotonic() - self.malloc_time > 60: + self.malloc_time = time.monotonic() + snapshot = tracemalloc.take_snapshot() + self.display_top_allocators(snapshot) + + def run(self): + while self.initialized: + if not self.validate(): + time.sleep(1) + continue + + try: + event: Event = self.em.next() + self.em.add_event_to_running(event) + if self.enable_trace: + self.dump_tracemalloc() + except Empty: + if self.enable_trace: + self.dump_tracemalloc() + time.sleep(0.1) + continue + + existing_item: MediaItem = db_functions.get_item_by_id(event.item_id) + + next_service, items_to_submit = process_event( + event.emitted_by, existing_item, event.content_item + ) + + self.em.remove_event_from_running(event) + + for item_to_submit in items_to_submit: + if not next_service: + self.em.add_event_to_queue(Event("StateTransition", item_id=item_to_submit.id)) + else: + # We are in the database, pass on id. + if item_to_submit.id: + event = Event(next_service, item_id=item_to_submit.id) + # We are not, lets pass the MediaItem + else: + event = Event(next_service, content_item=item_to_submit) + + self.em.add_event_to_running(event) + self.em.submit_job(next_service, self, event) + + def stop(self): + if not self.initialized: + return + + if hasattr(self, "executors"): + for executor in self.executors: + if not executor["_executor"]._shutdown: + executor["_executor"].shutdown(wait=False) + if hasattr(self, "scheduler") and self.scheduler.running: + self.scheduler.shutdown(wait=False) + logger.log("PROGRAM", "Riven has been stopped.") + + def _enhance_item(self, item: MediaItem) -> MediaItem | None: + try: + enhanced_item = next(self.services[TraktIndexer].run(item, log_msg=False)) + return enhanced_item + except StopIteration: + return None + + def _init_db_from_symlinks(self): + """Initialize the database from symlinks.""" + start_time = datetime.now() + with db.Session() as session: + # Check if database is empty + if not session.execute(select(func.count(MediaItem.id))).scalar_one(): + if not settings_manager.settings.map_metadata: + return + + logger.log("PROGRAM", "Collecting items from symlinks, this may take a while depending on library size") + try: + items = self.services[SymlinkLibrary].run() + errors = [] + added_items = set() + + # Convert items to list and get total count + items_list = [item for item in items if isinstance(item, (Movie, Show))] + total_items = len(items_list) + + progress, console = create_progress_bar(total_items) + task = progress.add_task("Enriching items with metadata", total=total_items, log="") + + # Process in chunks of 100 items + chunk_size = 100 + with Live(progress, console=console, refresh_per_second=10): + workers = int(os.getenv("SYMLINK_MAX_WORKERS", 4)) + + for i in range(0, total_items, chunk_size): + chunk = items_list[i:i + chunk_size] + + try: + with ThreadPoolExecutor(thread_name_prefix="EnhanceSymlinks", max_workers=workers) as executor: + future_to_item = { + executor.submit(self._enhance_item, item): item + for item in chunk + } + + for future in as_completed(future_to_item): + item = future_to_item[future] + log_message = "" + + try: + if not 
item or item.imdb_id in added_items: + errors.append(f"Duplicate symlink directory found for {item.log_string}") + continue + + if db_functions.get_item_by_id(item.id, session=session): + errors.append(f"Duplicate item found in database for id: {item.id}") + continue + + enhanced_item = future.result() + if not enhanced_item: + errors.append(f"Failed to enhance {item.log_string} ({item.imdb_id}) with Trakt Indexer") + continue + + enhanced_item.store_state() + session.add(enhanced_item) + added_items.add(item.imdb_id) + + log_message = f"Indexed IMDb Id: {enhanced_item.id} as {enhanced_item.type.title()}: {enhanced_item.log_string}" + except NotADirectoryError: + errors.append(f"Skipping {item.log_string} as it is not a valid directory") + except Exception as e: + logger.exception(f"Error processing {item.log_string}: {e}") + raise # Re-raise to trigger rollback + finally: + progress.update(task, advance=1, log=log_message) + + # Only commit if the entire chunk was successful + session.commit() + + except Exception as e: + session.rollback() + logger.error(f"Failed to process chunk {i//chunk_size + 1}, rolling back all changes: {str(e)}") + raise # Re-raise to abort the entire process + + progress.update(task, log="Finished Indexing Symlinks!") + + if errors: + logger.error("Errors encountered during initialization") + for error in errors: + logger.error(error) + + except Exception as e: + session.rollback() + logger.error(f"Failed to initialize database from symlinks: {str(e)}") + return + + elapsed_time = datetime.now() - start_time + total_seconds = elapsed_time.total_seconds() + hours, remainder = divmod(total_seconds, 3600) + minutes, seconds = divmod(remainder, 60) + logger.success(f"Database initialized, time taken: h{int(hours):02d}:m{int(minutes):02d}:s{int(seconds):02d}") \ No newline at end of file diff --git a/src/program/services/content/__init__.py b/src/program/services/content/__init__.py new file mode 100644 index 0000000..0a2bd45 --- /dev/null +++ b/src/program/services/content/__init__.py @@ -0,0 +1,32 @@ +# from typing import Generator +# from program.media.item import MediaItem + +from .listrr import Listrr +from .mdblist import Mdblist +from .overseerr import Overseerr +from .plex_watchlist import PlexWatchlist +from .trakt import TraktContent + +__all__ = ["Listrr", "Mdblist", "Overseerr", "PlexWatchlist", "TraktContent"] + +# class Requester: +# def __init__(self): +# self.key = "content" +# self.initialized = False +# self.services = { +# Listrr: Listrr(), +# Mdblist: Mdblist(), +# Overseerr: Overseerr(), +# PlexWatchlist: PlexWatchlist(), +# TraktContent: TraktContent() +# } +# self.initialized = self.validate() +# if not self.initialized: +# return + +# def validate(self): +# return any(service.initialized for service in self.services.values()) + +# def run(self, item: MediaItem) -> Generator[MediaItem, None, None]: +# """Index newly requested items.""" +# yield item diff --git a/src/program/services/content/listrr.py b/src/program/services/content/listrr.py new file mode 100644 index 0000000..06b6e34 --- /dev/null +++ b/src/program/services/content/listrr.py @@ -0,0 +1,69 @@ +"""Listrr content module""" +from typing import Generator + +from kink import di + +from program.apis.listrr_api import ListrrAPI +from program.media.item import MediaItem +from program.settings.manager import settings_manager +from program.utils.request import logger + + +class Listrr: + """Content class for Listrr""" + + def __init__(self): + self.key = "listrr" + self.settings = 
settings_manager.settings.content.listrr + self.api = None + self.initialized = self.validate() + if not self.initialized: + return + logger.success("Listrr initialized!") + + def validate(self) -> bool: + """Validate Listrr settings.""" + if not self.settings.enabled: + return False + if self.settings.api_key == "" or len(self.settings.api_key) != 64: + logger.error("Listrr api key is not set or invalid.") + return False + valid_list_found = False + for _, content_list in [ + ("movie_lists", self.settings.movie_lists), + ("show_lists", self.settings.show_lists), + ]: + if content_list is None or not any(content_list): + continue + for item in content_list: + if item == "" or len(item) != 24: + return False + valid_list_found = True + if not valid_list_found: + logger.error("Both Movie and Show lists are empty or not set.") + return False + try: + self.api = di[ListrrAPI] + response = self.api.validate() + if not response.is_ok: + logger.error( + f"Listrr ping failed - Status Code: {response.status_code}, Reason: {response.response.reason}", + ) + return response.is_ok + except Exception as e: + logger.error(f"Listrr ping exception: {e}") + return False + + def run(self) -> Generator[MediaItem, None, None]: + """Fetch new media from `Listrr`""" + try: + movie_items = self.api.get_items_from_Listrr("Movies", self.settings.movie_lists) + show_items = self.api.get_items_from_Listrr("Shows", self.settings.show_lists) + except Exception as e: + logger.error(f"Failed to fetch items from Listrr: {e}") + return + + imdb_ids = movie_items + show_items + listrr_items = [MediaItem({"imdb_id": imdb_id, "requested_by": self.api.key}) for imdb_id in imdb_ids if imdb_id.startswith("tt")] + logger.info(f"Fetched {len(listrr_items)} items from Listrr") + yield listrr_items \ No newline at end of file diff --git a/src/program/services/content/mdblist.py b/src/program/services/content/mdblist.py new file mode 100644 index 0000000..d7f83a7 --- /dev/null +++ b/src/program/services/content/mdblist.py @@ -0,0 +1,71 @@ +"""Mdblist content module""" + +from typing import Generator + +from kink import di +from loguru import logger + +from program.apis.mdblist_api import MdblistAPI +from program.media.item import MediaItem +from program.settings.manager import settings_manager +from program.utils.request import RateLimitExceeded + + +class Mdblist: + """Content class for mdblist""" + def __init__(self): + self.key = "mdblist" + self.settings = settings_manager.settings.content.mdblist + self.api = None + self.initialized = self.validate() + if not self.initialized: + return + self.requests_per_2_minutes = self._calculate_request_time() + logger.success("mdblist initialized") + + def validate(self): + if not self.settings.enabled: + return False + if self.settings.api_key == "" or len(self.settings.api_key) != 25: + logger.error("Mdblist api key is not set.") + return False + if not self.settings.lists: + logger.error("Mdblist is enabled, but list is empty.") + return False + self.api = di[MdblistAPI] + response = self.api.validate() + if "Invalid API key!" 
in response.response.text: + logger.error("Mdblist api key is invalid.") + return False + return True + + def run(self) -> Generator[MediaItem, None, None]: + """Fetch media from mdblist and add them to media_items attribute + if they are not already there""" + items_to_yield = [] + try: + for list in self.settings.lists: + if not list: + continue + + if isinstance(list, int): + items = self.api.list_items_by_id(list) + else: + items = self.api.list_items_by_url(list) + for item in items: + if hasattr(item, "error") or not item or item.imdb_id is None: + continue + if item.imdb_id.startswith("tt"): + items_to_yield.append(MediaItem( + {"imdb_id": item.imdb_id, "requested_by": self.key} + )) + except RateLimitExceeded: + pass + + logger.info(f"Fetched {len(items_to_yield)} items from mdblist.com") + yield items_to_yield + + def _calculate_request_time(self): + limits = self.api.my_limits().limits + daily_requests = limits.api_requests + return daily_requests / 24 / 60 * 2 \ No newline at end of file diff --git a/src/program/services/content/overseerr.py b/src/program/services/content/overseerr.py new file mode 100644 index 0000000..f545ad9 --- /dev/null +++ b/src/program/services/content/overseerr.py @@ -0,0 +1,61 @@ +"""Overseerr content module""" + +from kink import di +from loguru import logger +from requests.exceptions import ConnectionError, RetryError +from urllib3.exceptions import MaxRetryError, NewConnectionError + +from program.apis.overseerr_api import OverseerrAPI +from program.media.item import MediaItem +from program.settings.manager import settings_manager + + +class Overseerr: + """Content class for overseerr""" + + def __init__(self): + self.key = "overseerr" + self.settings = settings_manager.settings.content.overseerr + self.api = None + self.initialized = self.validate() + self.run_once = False + if not self.initialized: + return + logger.success("Overseerr initialized!") + + def validate(self) -> bool: + if not self.settings.enabled: + return False + if self.settings.api_key == "" or len(self.settings.api_key) != 68: + logger.error("Overseerr api key is not set.") + return False + try: + self.api = di[OverseerrAPI] + response = self.api.validate() + if response.status_code >= 201: + logger.error( + f"Overseerr ping failed - Status Code: {response.status_code}, Reason: {response.response.reason}" + ) + return False + return response.is_ok + except (ConnectionError, RetryError, MaxRetryError, NewConnectionError): + logger.error("Overseerr URL is not reachable, or it timed out") + return False + except Exception as e: + logger.error(f"Unexpected error during Overseerr validation: {str(e)}") + return False + + def run(self): + """Fetch new media from `Overseerr`""" + if self.settings.use_webhook and self.run_once: + return + + overseerr_items: list[MediaItem] = self.api.get_media_requests(self.key) + + if self.settings.use_webhook: + logger.debug("Webhook is enabled. 
Running Overseerr once before switching to webhook only mode") + self.run_once = True + + logger.info(f"Fetched {len(overseerr_items)} items from overseerr") + + yield overseerr_items \ No newline at end of file diff --git a/src/program/services/content/plex_watchlist.py b/src/program/services/content/plex_watchlist.py new file mode 100644 index 0000000..2e312d0 --- /dev/null +++ b/src/program/services/content/plex_watchlist.py @@ -0,0 +1,71 @@ +"""Plex Watchlist Module""" +from typing import Generator + +from kink import di +from loguru import logger +from requests import HTTPError + +from program.apis.plex_api import PlexAPI +from program.media.item import MediaItem +from program.settings.manager import settings_manager + + +class PlexWatchlist: + """Class for managing Plex Watchlists""" + + def __init__(self): + self.key = "plex_watchlist" + self.settings = settings_manager.settings.content.plex_watchlist + self.api = None + self.initialized = self.validate() + if not self.initialized: + return + logger.success("Plex Watchlist initialized!") + + def validate(self): + if not self.settings.enabled: + return False + if not settings_manager.settings.updaters.plex.token: + logger.error("Plex token is not set!") + return False + try: + self.api = di[PlexAPI] + self.api.validate_account() + except Exception as e: + logger.error(f"Unable to authenticate Plex account: {e}") + return False + if self.settings.rss: + self.api.set_rss_urls(self.settings.rss) + for rss_url in self.settings.rss: + try: + response = self.api.validate_rss(rss_url) + response.response.raise_for_status() + self.api.rss_enabled = True + except HTTPError as e: + if e.response.status_code == 404: + logger.warning(f"Plex RSS URL {rss_url} is Not Found. Please check your RSS URL in settings.") + return False + else: + logger.warning( + f"Plex RSS URL {rss_url} is not reachable (HTTP status code: {e.response.status_code})." 
+ ) + return False + except Exception as e: + logger.error(f"Failed to validate Plex RSS URL {rss_url}: {e}", exc_info=True) + return False + return True + + def run(self) -> Generator[MediaItem, None, None]: + """Fetch new media from `Plex Watchlist` and RSS feed if enabled.""" + try: + watchlist_items: list[str] = self.api.get_items_from_watchlist() + rss_items: list[str] = self.api.get_items_from_rss() if self.api.rss_enabled else [] + except Exception as e: + logger.warning(f"Error fetching items: {e}") + return + + plex_items: set[str] = set(watchlist_items) | set(rss_items) + items_to_yield: list[MediaItem] = [MediaItem({"imdb_id": imdb_id, "requested_by": self.key}) for imdb_id in plex_items if imdb_id and imdb_id.startswith("tt")] + + logger.info(f"Fetched {len(items_to_yield)} items from plex watchlist") + yield items_to_yield \ No newline at end of file diff --git a/src/program/services/content/trakt.py b/src/program/services/content/trakt.py new file mode 100644 index 0000000..931c508 --- /dev/null +++ b/src/program/services/content/trakt.py @@ -0,0 +1,177 @@ +"""Trakt content module""" + +from datetime import datetime, timedelta + +from kink import di +from loguru import logger +from requests import RequestException + +from program.apis.trakt_api import TraktAPI +from program.media.item import MediaItem +from program.settings.manager import settings_manager + + +class TraktContent: + """Content class for Trakt""" + + def __init__(self): + self.key = "trakt" + self.settings = settings_manager.settings.content.trakt + self.api = di[TraktAPI] + self.initialized = self.validate() + if not self.initialized: + return + self.last_update = None + logger.success("Trakt initialized!") + + def validate(self) -> bool: + """Validate Trakt settings.""" + try: + if not self.settings.enabled: + return False + if not self.settings.api_key: + logger.error("Trakt API key is not set.") + return False + response = self.api.validate() + if not getattr(response.data, "name", None): + logger.error("Invalid user settings received from Trakt.") + return False + return True + except ConnectionError: + logger.error("Connection error during Trakt validation.") + return False + except TimeoutError: + logger.error("Timeout error during Trakt validation.") + return False + except RequestException as e: + logger.error(f"Request exception during Trakt validation: {str(e)}") + return False + except Exception as e: + logger.error(f"Exception during Trakt validation: {str(e)}") + return False + + def run(self): + """Fetch media from Trakt and yield Movie, Show, or MediaItem instances.""" + watchlist_ids = self._get_watchlist(self.settings.watchlist) if self.settings.watchlist else [] + collection_ids = self._get_collection(self.settings.collection) if self.settings.collection else [] + user_list_ids = self._get_list(self.settings.user_lists) if self.settings.user_lists else [] + + # Check if it's the first run or if a day has passed since the last update + current_time = datetime.now() + if self.last_update is None or (current_time - self.last_update) > timedelta(days=1): + trending_ids = self._get_trending_items() if self.settings.fetch_trending else [] + popular_ids = self._get_popular_items() if self.settings.fetch_popular else [] + most_watched_ids = self._get_most_watched_items() if self.settings.fetch_most_watched else [] + self.last_update = current_time + logger.log("TRAKT", "Updated trending, popular, and most watched items.") + else: + trending_ids = [] + popular_ids = [] + most_watched_ids = [] + 
logger.log("TRAKT", "Skipped updating trending, popular, and most watched items (last update was less than a day ago).") + + # Combine all IMDb IDs and types into a set to avoid duplicates + all_ids = set(watchlist_ids + collection_ids + user_list_ids + trending_ids + popular_ids + most_watched_ids) + + items_to_yield = [] + for imdb_id, _ in all_ids: + items_to_yield.append(MediaItem({"imdb_id": imdb_id, "requested_by": self.key})) + + if not items_to_yield: + return + + logger.info(f"Fetched {len(items_to_yield)} items from trakt") + yield items_to_yield + + def _get_watchlist(self, watchlist_users: list) -> list: + """Get IMDb IDs from Trakt watchlist""" + if not watchlist_users: + return [] + imdb_ids = [] + for user in watchlist_users: + items = self.api.get_watchlist_items(user) + imdb_ids.extend(self._extract_imdb_ids(items)) + return imdb_ids + + def _get_collection(self, collection_users: list) -> list: + """Get IMDb IDs from Trakt collection""" + if not collection_users: + return [] + imdb_ids = [] + for user in collection_users: + items = self.api.get_collection_items(user, "movies") + items.extend(self.api.get_collection_items(user, "shows")) + imdb_ids.extend(self._extract_imdb_ids(items)) + return imdb_ids + + + def _get_list(self, list_items: list) -> list: + """Get IMDb IDs from Trakt user list""" + if not list_items or not any(list_items): + return [] + imdb_ids = [] + for url in list_items: + user, list_name = self.api.extract_user_list_from_url(url) + if not user or not list_name: + logger.error(f"Invalid list URL: {url}") + continue + + items = self.api.get_user_list(user, list_name) + for item in items: + if hasattr(item, "movie"): + imdb_id = getattr(item.movie.ids, "imdb", None) + if imdb_id: + imdb_ids.append((imdb_id, "movie")) + elif hasattr(item, "show"): + imdb_id = getattr(item.show.ids, "imdb", None) + if imdb_id: + imdb_ids.append((imdb_id, "show")) + return imdb_ids + + def _get_trending_items(self) -> list: + """Get IMDb IDs from Trakt trending items""" + trending_movies = self.api.get_trending_items("movies", self.settings.trending_count) + trending_shows = self.api.get_trending_items("shows", self.settings.trending_count) + return self._extract_imdb_ids(trending_movies[:self.settings.trending_count] + trending_shows[:self.settings.trending_count]) + + def _get_popular_items(self) -> list: + """Get IMDb IDs from Trakt popular items""" + popular_movies = self.api.get_popular_items("movies", self.settings.popular_count) + popular_shows = self.api.get_popular_items( "shows", self.settings.popular_count) + return self._extract_imdb_ids_with_none_type(popular_movies[:self.settings.popular_count] + popular_shows[:self.settings.popular_count]) + + def _get_most_watched_items(self) -> list: + """Get IMDb IDs from Trakt popular items""" + most_watched_movies = self.api.get_most_watched_items( "movies", self.settings.most_watched_period, self.settings.most_watched_count) + most_watched_shows = self.api.get_most_watched_items( "shows", self.settings.most_watched_period, self.settings.most_watched_count) + return self._extract_imdb_ids(most_watched_movies[:self.settings.most_watched_count] + most_watched_shows[:self.settings.most_watched_count]) + + def _extract_imdb_ids(self, items: list) -> list: + """Extract IMDb IDs and types from a list of items""" + imdb_ids = [] + for item in items: + if hasattr(item, "show"): + ids = getattr(item.show, "ids", None) + if ids: + imdb_id = getattr(ids, "imdb", None) + if imdb_id: + imdb_ids.append((imdb_id, "show")) + elif 
hasattr(item, "movie"): + ids = getattr(item.movie, "ids", None) + if ids: + imdb_id = getattr(ids, "imdb", None) + if imdb_id: + imdb_ids.append((imdb_id, "movie")) + return imdb_ids + + @staticmethod + def _extract_imdb_ids_with_none_type(items: list) -> list: + """Extract IMDb IDs from a list of items, returning None for type""" + imdb_ids = [] + for item in items: + ids = getattr(item, "ids", None) + if ids: + imdb_id = getattr(ids, "imdb", None) + if imdb_id: + imdb_ids.append((imdb_id, None)) + return imdb_ids \ No newline at end of file diff --git a/src/program/services/downloaders/__init__.py b/src/program/services/downloaders/__init__.py new file mode 100644 index 0000000..b62f59c --- /dev/null +++ b/src/program/services/downloaders/__init__.py @@ -0,0 +1,232 @@ +from loguru import logger + +from program.media.item import MediaItem, MovieMediaType, ShowMediaType +from program.media.state import States +from program.media.stream import Stream +from program.settings.manager import settings_manager +from program.services.downloaders.shared import ( + DownloadCachedStreamResult, + filesize_is_acceptable, + get_invalid_filesize_log_string, +) + +from .alldebrid import AllDebridDownloader +from .realdebrid import RealDebridDownloader, TorrentNotFoundError, InvalidFileIDError +# from .torbox import TorBoxDownloader +import os + +class InvalidFileSizeException(Exception): + pass + +class Downloader: + def __init__(self): + self.key = "downloader" + self.initialized = False + self.speed_mode = ( + settings_manager.settings.downloaders.prefer_speed_over_quality + ) + self.services = { + RealDebridDownloader: RealDebridDownloader(), + AllDebridDownloader: AllDebridDownloader(), + # TorBoxDownloader: TorBoxDownloader() + } + self.service = next( + (service for service in self.services.values() if service.initialized), None + ) + + self.initialized = self.validate() + + def validate(self): + if self.service is None: + logger.error( + "No downloader service is initialized. Please initialize a downloader service." 
+ ) + return False + return True + + def run(self, item: MediaItem): + """Run downloader for media item with concurrent downloads""" + logger.debug(f"Running downloader for {item.log_string}") + + # Skip if item is already in a completed state + if item.state in [States.Downloaded, States.Symlinked, States.Completed]: + logger.debug(f"Skipping download for {item.log_string} - already in state: {item.state}") + return + + # If no streams available, try to scrape first + if not item.streams: + from program.services.scrapers import Scraping + scraper = Scraping() + if scraper.initialized and scraper.can_we_scrape(item): + logger.debug(f"No streams found for {item.log_string}, attempting to scrape first") + for updated_item in scraper.run(item): + item = updated_item + else: + logger.warning(f"No streams available for {item.log_string} and cannot scrape") + return + + # Sort streams by RTN rank (higher rank is better) + sorted_streams = sorted(item.streams, key=lambda x: x.rank, reverse=True) + if not sorted_streams: + logger.warning(f"No streams available for {item.log_string} after scraping") + return + + # Take only the top 6 streams to try + concurrent_streams = sorted_streams[:5] + successful_download = False + + for stream in concurrent_streams: + try: + result = self.service.download_cached_stream(item, stream) + + # Skip if no result or container + if not result or not result.container: + if not result: + logger.debug(f"No result returned for stream {stream.infohash}") + else: + logger.debug(f"No valid files found in torrent for stream {stream.infohash}") + item.blacklist_stream(stream) + continue + + # For episodes, check if the required episode is present + if item.type == ShowMediaType.Episode.value: + required_season = item.parent.number + required_episode = item.number + found_required_episode = False + + for file_data in result.container.values(): + season, episodes = self.service.file_finder.container_file_matches_episode(file_data) + if season == required_season and episodes and required_episode in episodes: + found_required_episode = True + break + + if not found_required_episode: + logger.debug(f"Required episode S{required_season:02d}E{required_episode:02d} not found in torrent {result.torrent_id}, trying next stream") + item.blacklist_stream(stream) + continue + + # Validate filesize + try: + self.validate_filesize(item, result) + except InvalidFileSizeException: + logger.debug(f"Invalid filesize for stream {stream.infohash}") + item.blacklist_stream(stream) + continue + + # Update item attributes if download was successful + if result.torrent_id and self.update_item_attributes(item, result): + successful_download = True + item.store_state() + yield item + break # Exit loop since we have a successful download + + except InvalidFileIDError as e: + # Don't blacklist for file ID errors as they may be temporary + logger.debug(f"File selection failed for stream {stream.infohash}: {str(e)}") + continue + except Exception as e: + logger.debug(f"Invalid stream: {stream.infohash} - reason: {str(e)}") + item.blacklist_stream(stream) + continue + + if not successful_download: + logger.warning(f"No successful download for {item.log_string} after trying {len(concurrent_streams)} streams") + + def download_cached_stream(self, item: MediaItem, stream: Stream) -> DownloadCachedStreamResult: + """Download a cached stream from the active debrid service""" + return self.service.download_cached_stream(item, stream) + + def get_instant_availability(self, infohashes: list[str]) -> dict[str, 
list[dict]]: + return self.service.get_instant_availability(infohashes) + + def add_torrent(self, infohash: str) -> int: + return self.service.add_torrent(infohash) + + def get_torrent_info(self, torrent_id: int): + return self.service.get_torrent_info(torrent_id) + + def select_files(self, torrent_id, container): + self.service.select_files(torrent_id, container) + + def delete_torrent(self, torrent_id): + self.service.delete_torrent(torrent_id) + + def update_item_attributes(self, item: MediaItem, download_result: DownloadCachedStreamResult) -> bool: + """Update the item attributes with the downloaded files and active stream""" + found = False + info_hash = download_result.info_hash + id = download_result.torrent_id + + # Get the original filename from the torrent info + original_filename = download_result.info.get("filename", "") + filename = original_filename + + # Process each file in the container + for file in download_result.container.values(): + if item.type == MovieMediaType.Movie.value: + if self.service.file_finder.container_file_matches_movie(file): + file_path = file[self.service.file_finder.filename_attr] + logger.debug(f"Found matching movie file: {file_path}") + # Get just the filename from the path + item.file = os.path.basename(file_path) + # Get the parent folder from the path, fallback to torrent name + item.folder = os.path.dirname(file_path) or filename + # Store the original torrent name for alternative matching + item.alternative_folder = original_filename + item.active_stream = {"infohash": info_hash, "id": id} + found = True + break + + if item.type in (ShowMediaType.Show.value, ShowMediaType.Season.value, ShowMediaType.Episode.value): + show = item + if item.type == ShowMediaType.Season.value: + show = item.parent + elif item.type == ShowMediaType.Episode.value: + show = item.parent.parent + + file_season, file_episodes = self.service.file_finder.container_file_matches_episode(file) + logger.debug(f"Episode match result - season: {file_season}, episodes: {file_episodes}") + + if file_season and file_episodes: + season = next((season for season in show.seasons if season.number == file_season), None) + if season: + logger.debug(f"Found matching season {file_season}") + for file_episode in file_episodes: + episode = next((episode for episode in season.episodes if episode.number == file_episode), None) + if episode and episode.state not in [States.Completed, States.Symlinked, States.Downloaded]: + logger.debug(f"Found matching episode {file_episode} in season {file_season}") + # Store the full file path for the episode + file_path = file[self.service.file_finder.filename_attr] + # Get just the filename from the path + episode.file = os.path.basename(file_path) + # Get the parent folder from the path, fallback to torrent name + episode.folder = os.path.dirname(file_path) or filename + # Store the original torrent name for alternative matching + episode.alternative_folder = original_filename + # Store stream info for future reference + episode.active_stream = {"infohash": info_hash, "id": id} + # Log the stored paths for debugging + logger.debug(f"Stored paths for {episode.log_string}:") + logger.debug(f" File: {episode.file}") + logger.debug(f" Folder: {episode.folder}") + logger.debug(f" Alt Folder: {episode.alternative_folder}") + # We have to make sure the episode is correct if item is an episode + if item.type != ShowMediaType.Episode.value or (item.type == ShowMediaType.Episode.value and episode.number == item.number): + found = True + else: + logger.debug(f"No 
matching season found for season {file_season}") + return found + + def validate_filesize(self, item: MediaItem, download_result: DownloadCachedStreamResult): + for file in download_result.container.values(): + item_media_type = self._get_item_media_type(item) + if not filesize_is_acceptable(file[self.service.file_finder.filesize_attr], item_media_type): + + raise InvalidFileSizeException(f"File '{file[self.service.file_finder.filename_attr]}' is invalid: {get_invalid_filesize_log_string(file[self.service.file_finder.filesize_attr], item_media_type)}") + logger.debug(f"All files for {download_result.info_hash} are of an acceptable size") + + @staticmethod + def _get_item_media_type(item): + if item.type in (media_type.value for media_type in ShowMediaType): + return ShowMediaType.Show.value + return MovieMediaType.Movie.value \ No newline at end of file diff --git a/src/program/services/downloaders/alldebrid.py b/src/program/services/downloaders/alldebrid.py new file mode 100644 index 0000000..b944ee9 --- /dev/null +++ b/src/program/services/downloaders/alldebrid.py @@ -0,0 +1,246 @@ +from datetime import datetime +from typing import Dict, Iterator, List, Optional, Tuple + +from loguru import logger +from requests import Session +from requests.exceptions import ConnectTimeout + +from program.settings.manager import settings_manager +from program.utils.request import ( + BaseRequestHandler, + BaseRequestParameters, + HttpMethod, + ResponseType, + create_service_session, + get_rate_limit_params, +) + +from .shared import VIDEO_EXTENSIONS, DownloaderBase, FileFinder, premium_days_left + + +class AllDebridError(Exception): + """Base exception for AllDebrid related errors""" + +class AllDebridBaseRequestParameters(BaseRequestParameters): + """AllDebrid base request parameters""" + agent: Optional[str] = None + +class AllDebridRequestHandler(BaseRequestHandler): + def __init__(self, session: Session, base_url: str, base_params: AllDebridBaseRequestParameters, request_logging: bool = False): + super().__init__(session, response_type=ResponseType.DICT, base_url=base_url, base_params=base_params, custom_exception=AllDebridError, request_logging=request_logging) + + def execute(self, method: HttpMethod, endpoint: str, **kwargs) -> dict: + response = super()._request(method, endpoint, **kwargs) + if not response.is_ok or not response.data or "data" not in response.data: + raise AllDebridError("Invalid response from AllDebrid") + return response.data["data"] + +class AllDebridAPI: + """Handles AllDebrid API communication""" + BASE_URL = "https://api.alldebrid.com/v4" + AGENT = "Riven" + + def __init__(self, api_key: str, proxy_url: Optional[str] = None): + self.api_key = api_key + rate_limit_params = get_rate_limit_params(per_minute=600, per_second=12) + self.session = create_service_session(rate_limit_params=rate_limit_params) + self.session.headers.update({ + "Authorization": f"Bearer {api_key}" + }) + if proxy_url: + self.session.proxies = {"http": proxy_url, "https": proxy_url} + base_params = AllDebridBaseRequestParameters() + base_params.agent = self.AGENT + self.request_handler = AllDebridRequestHandler(self.session, self.BASE_URL, base_params) + + +class AllDebridDownloader(DownloaderBase): + """Main AllDebrid downloader class implementing DownloaderBase""" + + def __init__(self): + self.key = "alldebrid" + self.settings = settings_manager.settings.downloaders.all_debrid + self.api = None + self.file_finder = None + self.initialized = self.validate() + + def validate(self) -> bool: + """ + 
Validate AllDebrid settings and premium status + Required by DownloaderBase + """ + if not self._validate_settings(): + return False + + self.api = AllDebridAPI( + api_key=self.settings.api_key, + proxy_url=self.settings.proxy_url if self.settings.proxy_enabled else None + ) + + if not self._validate_premium(): + return False + + self.file_finder = FileFinder("filename", "filesize") + logger.success("AllDebrid initialized!") + return True + + def _validate_settings(self) -> bool: + """Validate configuration settings""" + if not self.settings.enabled: + return False + if not self.settings.api_key: + logger.warning("AllDebrid API key is not set") + return False + if self.settings.proxy_enabled and not self.settings.proxy_url: + logger.error("Proxy is enabled but no proxy URL is provided") + return False + return True + + def _validate_premium(self) -> bool: + """Validate premium status""" + try: + user_info = self.api.request_handler.execute(HttpMethod.GET, "user") + user = user_info.get("user", {}) + + if not user.get("isPremium", False): + logger.error("Premium membership required") + return False + + expiration = datetime.utcfromtimestamp(user.get("premiumUntil", 0)) + logger.log("DEBRID", premium_days_left(expiration)) + return True + + except ConnectTimeout: + logger.error("Connection to AllDebrid timed out") + except Exception as e: + logger.error(f"Failed to validate premium status: {e}") + return False + + def get_instant_availability(self, infohashes: List[str]) -> Dict[str, list]: + """ + Get instant availability for multiple infohashes + Required by DownloaderBase + """ + if not self.initialized: + logger.error("Downloader not properly initialized") + return {} + + try: + params = {f"magnets[{i}]": infohash for i, infohash in enumerate(infohashes)} + response = self.api.request_handler.execute(HttpMethod.GET, "magnet/instant", **params) + magnets = response.get("magnets", []) + + availability = {} + for magnet in magnets: + if not isinstance(magnet, dict) or "files" not in magnet: + continue + + files = magnet.get("files", []) + valid_files = self._process_files(files) + + if valid_files: + availability[magnet["hash"]] = [valid_files] + + return availability + + except Exception as e: + logger.error(f"Failed to get instant availability: {e}") + return {} + + def _walk_files(self, files: List[dict]) -> Iterator[Tuple[str, int]]: + """Walks nested files structure and yields filename, size pairs""" + dirs = [] + for file in files: + try: + size = int(file.get("s", "")) + yield file.get("n", "UNKNOWN"), size + except ValueError: + dirs.append(file) + + for directory in dirs: + yield from self._walk_files(directory.get("e", [])) + + def _process_files(self, files: List[dict]) -> Dict[str, dict]: + """Process and filter valid video files""" + result = {} + for i, (name, size) in enumerate(self._walk_files(files)): + if ( + any(name.lower().endswith(ext) for ext in VIDEO_EXTENSIONS) + and "sample" not in name.lower() + ): + result[str(i)] = {"filename": name, "filesize": size} + return result + + def add_torrent(self, infohash: str) -> str: + """ + Add a torrent by infohash + Required by DownloaderBase + """ + if not self.initialized: + raise AllDebridError("Downloader not properly initialized") + + try: + response = self.api.request_handler.execute( + HttpMethod.GET, + "magnet/upload", + **{"magnets[]": infohash} + ) + magnet_info = response.get("magnets", [])[0] + torrent_id = magnet_info.get("id") + + if not torrent_id: + raise AllDebridError("No torrent ID in response") + + return 
str(torrent_id) + + except Exception as e: + logger.error(f"Failed to add torrent {infohash}: {e}") + raise + + def select_files(self, torrent_id: str, files: List[str]): + """ + Select files from a torrent + Required by DownloaderBase + """ + if not self.initialized: + raise AllDebridError("Downloader not properly initialized") + + try: + # AllDebrid doesn't have a separate file selection endpoint + # All files are automatically selected when adding the torrent + pass + except Exception as e: + logger.error(f"Failed to select files for torrent {torrent_id}: {e}") + raise + + def get_torrent_info(self, torrent_id: str) -> dict: + """ + Get information about a torrent + Required by DownloaderBase + """ + if not self.initialized: + raise AllDebridError("Downloader not properly initialized") + + try: + response = self.api.request_handler.execute(HttpMethod.GET, "magnet/status", id=torrent_id) + info = response.get("magnets", {}) + if "filename" not in info: + raise AllDebridError("Invalid torrent info response") + return info + except Exception as e: + logger.error(f"Failed to get torrent info for {torrent_id}: {e}") + raise + + def delete_torrent(self, torrent_id: str): + """ + Delete a torrent + Required by DownloaderBase + """ + if not self.initialized: + raise AllDebridError("Downloader not properly initialized") + + try: + self.api.request_handler.execute(HttpMethod.GET, "magnet/delete", id=torrent_id) + except Exception as e: + logger.error(f"Failed to delete torrent {torrent_id}: {e}") + raise \ No newline at end of file diff --git a/src/program/services/downloaders/realdebrid.py b/src/program/services/downloaders/realdebrid.py new file mode 100644 index 0000000..b94657a --- /dev/null +++ b/src/program/services/downloaders/realdebrid.py @@ -0,0 +1,1510 @@ +import time +from datetime import datetime +from enum import Enum +from typing import Dict, List, Optional, Set, Tuple, Union +from collections import defaultdict +from datetime import timedelta + +from loguru import logger +from pydantic import BaseModel +from requests import Session + +from program.media.item import MediaItem +from program.media.stream import Stream +from program.settings.manager import settings_manager +from program.utils.request import ( + BaseRequestHandler, + HttpMethod, + ResponseType, + create_service_session, + get_rate_limit_params, +) + +from .shared import ( + VIDEO_EXTENSIONS, + DownloadCachedStreamResult, + DownloaderBase, + FileFinder, + premium_days_left, +) + +class RDTorrentStatus(str, Enum): + """Real-Debrid torrent status enumeration""" + MAGNET_ERROR = "magnet_error" + MAGNET_CONVERSION = "magnet_conversion" + WAITING_FILES = "waiting_files_selection" + DOWNLOADING = "downloading" + DOWNLOADED = "downloaded" + ERROR = "error" + SEEDING = "seeding" + DEAD = "dead" + UPLOADING = "uploading" + COMPRESSING = "compressing" + QUEUED = "queued" + +class RDTorrent(BaseModel): + """Real-Debrid torrent model""" + id: str + hash: str + filename: str + bytes: int + status: RDTorrentStatus + added: datetime + links: List[str] + ended: Optional[datetime] = None + speed: Optional[int] = None + seeders: Optional[int] = None + +class RealDebridError(Exception): + """Base exception for Real-Debrid related errors""" + +class TorrentNotFoundError(RealDebridError): + """Raised when a torrent is not found on Real-Debrid servers""" + +class InvalidFileIDError(RealDebridError): + """Raised when invalid file IDs are provided""" + +class DownloadFailedError(RealDebridError): + """Raised when a torrent download fails""" + 
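+# How these exceptions are meant to be consumed: the Downloader orchestrator in
+# services/downloaders/__init__.py treats InvalidFileIDError as potentially
+# temporary (the stream is skipped without being blacklisted), while any other
+# failure blacklists the stream. A minimal sketch of that calling pattern,
+# simplified from the orchestrator's run() earlier in this patch:
+#
+#     try:
+#         result = self.service.download_cached_stream(item, stream)
+#     except InvalidFileIDError as e:
+#         logger.debug(f"File selection failed for stream {stream.infohash}: {e}")
+#         continue  # may succeed on a later attempt, so do not blacklist
+#     except Exception as e:
+#         logger.debug(f"Invalid stream: {stream.infohash} - reason: {e}")
+#         item.blacklist_stream(stream)
+#         continue
+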
+class QueuedTooManyTimesError(RealDebridError): + """Raised when a torrent is queued too many times""" + +class RealDebridActiveLimitError(RealDebridError): + """Raised when Real-Debrid's active torrent limit is exceeded""" + +class RealDebridRequestHandler(BaseRequestHandler): + def __init__(self, session: Session, base_url: str, request_logging: bool = False): + super().__init__(session, response_type=ResponseType.DICT, base_url=base_url, custom_exception=RealDebridError, request_logging=request_logging) + + def execute(self, method: HttpMethod, endpoint: str, **kwargs) -> Union[dict, list]: + response = super()._request(method, endpoint, **kwargs) + # Handle 202 (action already done) as success + if response.status_code in (204, 202): + return {} + if not response.data and not response.is_ok: + raise RealDebridError("Invalid JSON response from RealDebrid") + return response.data + +class RealDebridAPI: + """Handles Real-Debrid API communication""" + BASE_URL = "https://api.real-debrid.com/rest/1.0" + + def __init__(self, api_key: str, proxy_url: Optional[str] = None): + self.api_key = api_key + rate_limit_params = get_rate_limit_params(per_minute=60) + self.session = create_service_session(rate_limit_params=rate_limit_params) + self.session.headers.update({"Authorization": f"Bearer {api_key}"}) + if proxy_url: + self.session.proxies = {"http": proxy_url, "https": proxy_url} + self.request_handler = RealDebridRequestHandler(self.session, self.BASE_URL) + +class RealDebridDownloader(DownloaderBase): + """Main Real-Debrid downloader class implementing DownloaderBase""" + MAX_RETRIES = 3 + RETRY_DELAY = 1.0 + DOWNLOAD_POLL_INTERVAL = 5 # seconds + BASE_TIMEOUT = 300 # 5 minutes + MAX_TIMEOUT = 1800 # 30 minutes + TIMEOUT_PER_50MB = 10 # 10 seconds per 50MB + MAX_QUEUE_ATTEMPTS = 6 # Maximum number of queued torrents before retrying item later + CLEANUP_INTERVAL = 60 # Check every minute instead of 5 minutes + CLEANUP_MINIMAL_PROGRESS_TIME = 900 # 15 minutes instead of 30 + CLEANUP_MINIMAL_PROGRESS_THRESHOLD = 5 # 5% instead of 1% + CLEANUP_STUCK_UPLOAD_TIME = 1800 # 30 minutes instead of 1 hour + CLEANUP_STUCK_COMPRESSION_TIME = 900 # 15 minutes instead of 30 + CLEANUP_BATCH_SIZE = 10 # Process deletions in batches + CLEANUP_SPEED_THRESHOLD = 50000 # 50 KB/s minimum speed + CLEANUP_INACTIVE_TIME = 300 # 5 minutes of inactivity + MAX_CONCURRENT_TOTAL = 5 # Reduced from 10 to 5 + MAX_CONCURRENT_PER_CONTENT = 2 # Reduced from 3 to 2 + + def __init__(self, api_key: str, proxy_url: Optional[str] = None): + super().__init__() + self.api = RealDebridAPI(api_key, proxy_url) + self.initialized = True + self.download_complete = {} + self.active_downloads = defaultdict(set) + self.queue_attempts = {} + self.last_cleanup_time = datetime.now() + self.scraping_settings = settings_manager.get("scraping") + + def _cleanup(self) -> int: + """Clean up torrents that are no longer needed""" + try: + current_time = datetime.now() + if (current_time - self.last_cleanup_time).total_seconds() < self.CLEANUP_INTERVAL: + return 0 + + # Get current torrents + torrents = self.api.request_handler.execute(HttpMethod.GET, "torrents") + if not torrents: + return 0 + + # Get current downloads + downloads = self.api.request_handler.execute(HttpMethod.GET, "downloads") + + # Get active torrents by status + active_by_status = defaultdict(list) + for torrent in torrents: + status = torrent.get("status", "") + active_by_status[status].append(torrent) + + # Get active torrent count by status + active_count = defaultdict(int) + 
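+# For reference, the fallback scoring applied further down in this method (used
+# when no torrent matches the explicit deletion cases but the active count is
+# still over the limit); lower-scoring torrents are deleted first:
+#
+#     score = progress * 100                  # keep torrents with more progress
+#     score += min(speed / 1024, 1000)        # reward speed, capped at 1000
+#     score += seeders * 10                   # reward seeders
+#     score -= min(time_elapsed / 60, 60)     # small age penalty, capped at 60 minutes
+#     score -= 5000 if is_duplicate else 0    # duplicates are cleaned up first
+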
for status, torrents in active_by_status.items(): + active_count[status] = len(torrents) + + # Get total active torrents + total_active = sum(active_count.values()) + + # Get limit from settings + limit = self.MAX_CONCURRENT_TOTAL + + # Mark torrents for deletion + to_delete = [] + for status, torrents in active_by_status.items(): + for torrent in torrents: + torrent_id = torrent.get("id", "") + filename = torrent.get("filename", "") + status = torrent.get("status", "") + progress = torrent.get("progress", 0) + speed = torrent.get("speed", 0) + seeders = torrent.get("seeders", 0) + time_elapsed = torrent.get("time_elapsed", 0) + + # Case 1: Completed torrents + if status == RDTorrentStatus.DOWNLOADED: + reason = "download completed" + to_delete.append((0, torrent_id, reason, time_elapsed)) + + # Case 2: Stuck torrents + elif status == RDTorrentStatus.DOWNLOADING and speed == 0 and time_elapsed > self.CLEANUP_INACTIVE_TIME: + reason = "download is stuck (zero speed)" + to_delete.append((1, torrent_id, reason, time_elapsed)) + + # Case 3: Torrents with zero progress + elif status == RDTorrentStatus.DOWNLOADING and progress == 0 and time_elapsed > self.CLEANUP_MINIMAL_PROGRESS_TIME: + reason = "download has zero progress" + to_delete.append((2, torrent_id, reason, time_elapsed)) + + # Case 4: Torrents with minimal progress + elif status == RDTorrentStatus.DOWNLOADING and progress < self.CLEANUP_MINIMAL_PROGRESS_THRESHOLD and time_elapsed > self.CLEANUP_MINIMAL_PROGRESS_TIME: + reason = f"download has minimal progress ({progress}%)" + to_delete.append((3, torrent_id, reason, time_elapsed)) + + # Case 5: Stuck uploading torrents + elif status == RDTorrentStatus.UPLOADING and speed == 0 and time_elapsed > self.CLEANUP_STUCK_UPLOAD_TIME: + reason = "upload is stuck (zero speed)" + to_delete.append((4, torrent_id, reason, time_elapsed)) + + # Case 6: Stuck compressing torrents + elif status == RDTorrentStatus.COMPRESSING and speed == 0 and time_elapsed > self.CLEANUP_STUCK_COMPRESSION_TIME: + reason = "compression is stuck (zero speed)" + to_delete.append((5, torrent_id, reason, time_elapsed)) + + # Case 7: Torrents with no seeders + elif status == RDTorrentStatus.DOWNLOADING and seeders == 0 and time_elapsed > self.CLEANUP_INACTIVE_TIME: + reason = "download has no seeders" + to_delete.append((6, torrent_id, reason, time_elapsed)) + + # Case 8: Waiting files selection + elif status == RDTorrentStatus.WAITING_FILES: + reason = "waiting files selection" + to_delete.append((7, torrent_id, reason, time_elapsed)) + + # If no torrents were marked for deletion but we're still over limit, + # force delete the slowest/least progressed torrents + if not to_delete and total_active > active_count["limit"]: + logger.info("No torrents met deletion criteria but still over limit, using fallback cleanup") + + # First try to clean up just duplicates + duplicates_only = True + cleanup_attempts = 2 # Try duplicates first, then all torrents if needed + + while cleanup_attempts > 0: + # Collect all active torrents into a single list for sorting + all_active = [] + seen_filenames = set() + + for status, torrents in active_by_status.items(): + for t in torrents: + filename = t["filename"] + + # Skip non-duplicates on first pass + is_duplicate = filename in seen_filenames + if duplicates_only and not is_duplicate: + continue + + seen_filenames.add(filename) + + score = 0 + # Prioritize keeping torrents with more progress + score += t["progress"] * 100 + # And those with higher speeds + score += min(t["speed"] / 1024, 
1000) # Cap speed bonus at 1000 + # And those with more seeders + score += t["seeders"] * 10 + # Penalize older torrents slightly + score -= min(t["time_elapsed"] / 60, 60) # Cap age penalty at 60 minutes + # Heavy penalty for duplicates + if is_duplicate: + score -= 5000 # Ensure duplicates are cleaned up first + + all_active.append({ + "id": t["id"], + "score": score, + "stats": t, + "status": status, + "is_duplicate": is_duplicate + }) + + if all_active: + # Sort by score (lowest first - these will be deleted) + all_active.sort(key=lambda x: x["score"]) + + # Take enough torrents to get under the limit + to_remove = min( + len(all_active), # Don't try to remove more than we have + total_active - active_count["limit"] + 1 # +1 for safety margin + ) + + for torrent in all_active[:to_remove]: + stats = torrent["stats"] + reason = (f"fallback cleanup{' (duplicate)' if duplicates_only else ''} - {torrent['status']} " + f"(progress: {stats['progress']}%, " + f"speed: {stats['speed']/1024:.1f} KB/s, " + f"seeders: {stats['seeders']}, " + f"age: {stats['time_elapsed']/60:.1f}m)") + to_delete.append((0, torrent["id"], reason, stats["time_elapsed"])) + logger.info(f"Fallback cleanup marking: {stats['filename']} - {reason}") + + # If we found enough torrents to delete, we're done + if len(to_delete) >= (total_active - active_count["limit"]): + break + + # If we get here and duplicates_only is True, try again with all torrents + duplicates_only = False + cleanup_attempts -= 1 + + # Log what we're about to delete + if to_delete: + logger.info(f"Found {len(to_delete)} torrents to clean up, processing in batches of {self.CLEANUP_BATCH_SIZE}") + for _, _, reason, _ in to_delete[:5]: # Log first 5 for debugging + logger.debug(f"Will delete: {reason}") + + # Convert to final format + to_delete = [(t[1], t[2]) for t in to_delete] + + # Process deletion in batches + while to_delete: + batch = to_delete[:self.CLEANUP_BATCH_SIZE] + to_delete = to_delete[self.CLEANUP_BATCH_SIZE:] + cleaned += self._batch_delete_torrents(batch) + + # Update last cleanup time if any torrents were cleaned + if cleaned > 0: + self.last_cleanup_time = current_time + logger.info(f"Cleaned up {cleaned} torrents") + else: + logger.warning("No torrents were cleaned up despite being over the limit!") + + return cleaned + + except Exception as e: + logger.error(f"Error during cleanup: {e}") + return 0 + + def _batch_delete_torrents(self, torrents: List[Tuple[str, str]]) -> int: + """Delete a batch of torrents efficiently. 
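+        Example (illustrative only, hypothetical torrent IDs):
+            _batch_delete_torrents([("ABC123", "download completed"),
+                                    ("DEF456", "download is stuck (zero speed)")])
+        attempts both deletions (associated downloads first, then the torrent
+        itself) and returns how many were successfully removed.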
+ Args: + torrents: List of (torrent_id, reason) tuples + Returns: + Number of successfully deleted torrents + """ + deleted = 0 + for torrent_id, reason in torrents: + try: + # First try to delete associated downloads + try: + downloads = self.api.request_handler.execute(HttpMethod.GET, "downloads") + for download in downloads: + if download.get("torrent_id") == torrent_id: + try: + self.api.request_handler.execute(HttpMethod.DELETE, f"downloads/delete/{download['id']}") + logger.debug(f"Deleted download {download['id']} associated with torrent {torrent_id}") + except Exception as e: + logger.warning(f"Failed to delete download {download['id']}: {e}") + except Exception as e: + logger.warning(f"Failed to cleanup downloads for torrent {torrent_id}: {e}") + + # Then delete the torrent + self.api.request_handler.execute(HttpMethod.DELETE, f"torrents/delete/{torrent_id}") + logger.info(f"Deleted torrent {torrent_id}: {reason}") + deleted += 1 + except Exception as e: + if "404" in str(e): + # Torrent was already deleted, count it as success + logger.debug(f"Torrent {torrent_id} was already deleted") + deleted += 1 + elif "401" in str(e): + logger.error("API token expired or invalid") + break # Stop processing batch + elif "403" in str(e): + logger.error("Account locked or permission denied") + break # Stop processing batch + else: + logger.error(f"Failed to delete torrent {torrent_id}: {e}") + return deleted + + def _cleanup_downloads(self) -> int: + """Clean up old downloads that are no longer needed. + Returns number of downloads cleaned up.""" + try: + downloads = self.api.request_handler.execute(HttpMethod.GET, "downloads") + if not isinstance(downloads, list): + logger.error(f"Unexpected downloads response type: {type(downloads)}") + return 0 + + deleted = 0 + + # Get current torrents for reference + try: + torrents = {t["id"]: t for t in self.api.request_handler.execute(HttpMethod.GET, "torrents")} + except Exception as e: + logger.warning(f"Failed to get torrents list for reference: {e}") + torrents = {} + + # Track active downloads to update our counters + active_by_content = {} + + for download in downloads: + try: + if not isinstance(download, dict): + logger.warning(f"Unexpected download entry type: {type(download)}") + continue + + download_id = download.get("id") + torrent_id = download.get("torrent_id") + filename = download.get("filename", "unknown") + status = download.get("status", "unknown") + progress = download.get("progress", 0) + speed = download.get("speed", 0) + + # Find content ID for this download + content_id = None + for cid, downloads in self.active_downloads.items(): + if download_id in downloads: + content_id = cid + break + + # Track active downloads + if status in ("downloading", "queued"): + if content_id: + active_by_content.setdefault(content_id, set()).add(download_id) + + # Never delete successfully downloaded files + if status == "downloaded": + if content_id: + self.download_complete[content_id] = True + continue + + reason = None + + # Case 1: No associated torrent ID (but not if downloaded) + if not torrent_id and status != "downloaded": + reason = "orphaned download (no torrent ID)" + + # Case 2: Associated torrent no longer exists (but not if downloaded) + elif torrent_id and torrent_id not in torrents and status != "downloaded": + reason = f"orphaned download (torrent {torrent_id} no longer exists)" + + # Case 3: Download failed or errored + elif status in ("error", "magnet_error", "virus", "dead", "waiting_files_selection"): + reason = f"download 
in {status} state" + + # Case 4: Zero progress downloads (excluding queued and downloaded) + elif progress == 0 and status not in ("queued", "downloaded") and speed == 0: + reason = "download has zero progress and speed" + + # Case 5: Stuck downloads (but not if already downloaded) + elif status == "downloading" and speed == 0 and progress < 100 and status != "downloaded": + reason = "download is stuck (zero speed)" + + if reason: + # Double check status hasn't changed to downloaded + try: + current = self.api.request_handler.execute(HttpMethod.GET, f"downloads/info/{download_id}") + if isinstance(current, dict) and current.get("status") == "downloaded": + logger.debug(f"Skipping deletion of {download_id} ({filename}): status changed to downloaded") + if content_id: + self.download_complete[content_id] = True + continue + except Exception as e: + logger.debug(f"Failed to double-check download status for {download_id}: {e}") + + try: + self.api.request_handler.execute(HttpMethod.DELETE, f"downloads/delete/{download_id}") + deleted += 1 + logger.info(f"Deleted download {download_id} ({filename}): {reason}, status: {status}") + + # Update our tracking + if content_id: + if download_id in self.active_downloads[content_id]: + self.active_downloads[content_id].remove(download_id) + except Exception as e: + if "404" in str(e): + deleted += 1 # Already deleted + logger.debug(f"Download {download_id} was already deleted") + # Update our tracking + if content_id and download_id in self.active_downloads[content_id]: + self.active_downloads[content_id].remove(download_id) + elif "401" in str(e): + logger.error("API token expired or invalid") + break # Stop processing + elif "403" in str(e): + logger.error("Account locked or permission denied") + break # Stop processing + else: + logger.warning(f"Failed to delete download {download_id}: {e}") + + except Exception as e: + logger.warning(f"Failed to process download {download.get('id')}: {e}") + + # Update our active downloads tracking + for content_id in list(self.active_downloads.keys()): + actual_active = active_by_content.get(content_id, set()) + self.active_downloads[content_id] = actual_active + + if deleted: + logger.info(f"Cleaned up {deleted} downloads") + # Log current download counts + total = sum(len(downloads) for downloads in self.active_downloads.values()) + logger.debug(f"Current download counts - Total: {total}, By content: {dict((k, len(v)) for k, v in self.active_downloads.items())}") + return deleted + + except Exception as e: + logger.error(f"Failed to cleanup downloads: {e}") + return 0 + + def select_files(self, torrent_id: str, files: List[str]): + """ + Select files from a torrent + Required by DownloaderBase + """ + if not self.initialized: + raise RealDebridError("Downloader not properly initialized") + + MAX_RETRIES = 5 + RETRY_DELAY = 2.0 + MAX_WAIT_TIME = 30 + + last_error = None + start_time = time.time() + + for attempt in range(MAX_RETRIES): + try: + # First verify the torrent exists and is ready + try: + torrent_info = self.get_torrent_info(torrent_id) + status = torrent_info.get("status", "") + + # Wait for magnet conversion to complete + while status == "magnet_conversion": + if time.time() - start_time > MAX_WAIT_TIME: + raise RealDebridError("Magnet conversion timeout") + logger.debug(f"Waiting for magnet conversion... 
(status: {status})") + time.sleep(2) + torrent_info = self.get_torrent_info(torrent_id) + status = torrent_info.get("status", "") + + # Check if torrent is in a state where we can select files + if status not in ["waiting_files_selection", "downloaded"]: + logger.warning(f"Torrent in unexpected state: {status}, retrying...") + time.sleep(RETRY_DELAY) + continue + + except Exception as e: + if "404" in str(e): + logger.error(f"Torrent {torrent_id} no longer exists on Real-Debrid servers") + raise TorrentNotFoundError(f"Torrent {torrent_id} not found") from e + raise + + # Get available files + available_files = torrent_info.get("files", []) + if not available_files: + if time.time() - start_time > MAX_WAIT_TIME: + raise RealDebridError("Timeout waiting for files to become available") + logger.debug("No files available yet, waiting...") + time.sleep(RETRY_DELAY) + continue + + # Handle special "all" files case or no specific files requested + if not files or (files and "all" in files): + files = [str(f["id"]) for f in available_files] + logger.debug(f"Selecting all available files: {files}") + + # Verify file IDs are valid + valid_ids = {str(f["id"]) for f in available_files} + invalid_files = set(files) - valid_ids + if invalid_files: + logger.error(f"Invalid file IDs for torrent {torrent_id}: {invalid_files}") + logger.debug(f"Available file IDs: {valid_ids}") + raise InvalidFileIDError(f"Invalid file IDs: {invalid_files}") + + # Select the files + try: + data = {"files": ",".join(files)} + logger.debug(f"Selecting files with data: {data}") + self.api.request_handler.execute( + HttpMethod.POST, + f"torrents/selectFiles/{torrent_id}", + data=data + ) + logger.debug(f"Successfully selected files for torrent {torrent_id}") + return # Success, exit retry loop + except Exception as e: + if "404" in str(e): + logger.error(f"Torrent {torrent_id} was removed while selecting files") + raise TorrentNotFoundError(f"Torrent {torrent_id} was removed") from e + if "422" in str(e): + logger.error(f"Invalid file selection request: {data}") + logger.debug(f"Available files: {available_files}") + raise + + except (TorrentNotFoundError, InvalidFileIDError): + raise # Don't retry these errors + except Exception as e: + last_error = e + if attempt < MAX_RETRIES - 1: + logger.warning(f"Failed to select files (attempt {attempt + 1}/{MAX_RETRIES}): {str(e)}") + time.sleep(RETRY_DELAY) + continue + + logger.error(f"Failed to select files for torrent {torrent_id} after {MAX_RETRIES} attempts") + raise last_error if last_error else RealDebridError("Failed to select files") + + def get_torrent_info(self, torrent_id: str) -> dict: + """ + Get information about a torrent + Required by DownloaderBase + """ + if not self.initialized: + raise RealDebridError("Downloader not properly initialized") + + response = self.api.request_handler.execute( + HttpMethod.GET, + f"torrents/info/{torrent_id}" + ) + + # Log a cleaner version with just the important info + if response: + status = response.get('status', 'unknown') + progress = response.get('progress', 0) + speed = response.get('speed', 0) + seeders = response.get('seeders', 0) + filename = response.get('filename', 'unknown') + files = response.get('files', []) + + speed_mb = speed / 1000000 if speed else 0 # Convert to MB/s + + logger.debug( + f"Torrent: {filename}\n" + f"Status: \033[94m{status}\033[0m, " + f"Progress: \033[95m{progress}%\033[0m, " + f"Speed: \033[92m{speed_mb:.2f}MB/s\033[0m, " + f"Seeders: \033[93m{seeders}\033[0m\n" + f"Files: {len(files)} available" + 
) + + # Log file details if available + if files: + logger.debug("Available files:") + for f in files: + logger.debug(f"- {f.get('path', 'unknown')} ({f.get('bytes', 0)} bytes)") + + return response + + def delete_torrent(self, torrent_id: str): + """ + Delete a torrent + Required by DownloaderBase + """ + if not self.initialized: + raise RealDebridError("Downloader not properly initialized") + + try: + self.api.request_handler.execute( + HttpMethod.DELETE, + f"torrents/delete/{torrent_id}" + ) + except Exception as e: + error_str = str(e) + if "404" in error_str: + # Could mean: already deleted, invalid ID, or never existed + logger.warning(f"Could not delete torrent {torrent_id}: Unknown resource (404)") + return + elif "401" in str(e): + logger.error(f"Failed to delete torrent {torrent_id}: Bad token (expired/invalid)") + raise + elif "403" in str(e): + logger.error(f"Failed to delete torrent {torrent_id}: Permission denied (account locked)") + raise + else: + logger.error(f"Failed to delete torrent {torrent_id}: {error_str}") + raise + + def _process_files(self, files: List[dict]) -> Dict[str, dict]: + """Process and filter valid video files""" + logger.debug(f"Processing {len(files)} files from Real-Debrid") + result = {} + + # If no files yet, return empty result to trigger retry + if not files: + logger.debug("No files available yet, will retry") + return {} + + # Process all video files + valid_videos = [] + + for file in files: + path = file.get("path", "") + name = path.lower() + size = file.get("bytes", 0) + file_id = str(file.get("id", "")) + + # Skip if no valid ID + if not file_id: + logger.debug(f"✗ Skipped file with no ID: {path}") + continue + + # Skip sample files and unwanted files + if "/sample/" in name.lower() or "sample" in name.lower(): + logger.debug(f"✗ Skipped sample file: {name}") + continue + + if any(name.endswith(f".{ext}") for ext in VIDEO_EXTENSIONS): + valid_videos.append(file) + logger.debug(f"✓ Found valid video file: {name} (size: {size} bytes, id: {file_id})") + else: + # Log why file was rejected + logger.debug(f"✗ Skipped non-video file: {name}") + + # Sort videos by size (largest first) to ensure main episodes are prioritized + valid_videos.sort(key=lambda x: x.get("bytes", 0), reverse=True) + + # Add all valid video files + for video in valid_videos: + path = video.get("path", "") + file_id = str(video.get("id", "")) + size = video.get("bytes", 0) + + # Extract parent folder name from path + path_parts = path.split("/") + parent_path = path_parts[-2] if len(path_parts) > 1 else "" + + result[file_id] = { + "filename": path, + "filesize": size, + "parent_path": parent_path + } + logger.debug(f"✓ Selected file for download: {path} (size: {size} bytes, id: {file_id})") + + if not result: + # Log all files for debugging + logger.debug("No valid video files found. 
Available files:") + for file in files: + logger.debug(f"- {file.get('path', '')} ({file.get('bytes', 0)} bytes)") + else: + logger.debug(f"Selected {len(result)} video files for download") + + return result + + def _can_start_download(self, content_id: str) -> bool: + """Check if we can start a new download for this content.""" + # Get total active downloads across all content + total_downloads = sum(len(downloads) for downloads in self.active_downloads.values()) + current_content_downloads = len(self.active_downloads.get(content_id, set())) + + logger.debug(f"Download count check - Total: {total_downloads}/{self.MAX_CONCURRENT_TOTAL}, " + f"Content {content_id}: {current_content_downloads}/{self.MAX_CONCURRENT_PER_CONTENT}") + + # Check both total and per-content limits + if total_downloads >= self.MAX_CONCURRENT_TOTAL: + if not self._cleanup_if_needed(): + return False + # Recalculate after cleanup + total_downloads = sum(len(downloads) for downloads in self.active_downloads.values()) + current_content_downloads = len(self.active_downloads.get(content_id, set())) + + if current_content_downloads >= self.MAX_CONCURRENT_PER_CONTENT: + logger.warning(f"Too many concurrent downloads for content {content_id} " + f"({current_content_downloads}/{self.MAX_CONCURRENT_PER_CONTENT})") + return False + + return True + + def _cleanup_if_needed(self) -> bool: + """Check active count and cleanup if needed. + Returns True if cleanup was successful in reducing count below limit.""" + total_downloads = sum(len(downloads) for downloads in self.active_downloads.values()) + if total_downloads >= self.MAX_CONCURRENT_TOTAL: + logger.debug(f"At max concurrent downloads ({total_downloads}/{self.MAX_CONCURRENT_TOTAL}), attempting cleanup...") + + # First try to clean up any completed downloads that might still be tracked + try: + downloads = self.api.request_handler.execute(HttpMethod.GET, "downloads") + if isinstance(downloads, list): + for download in downloads: + if isinstance(download, dict): + download_id = download.get("id") + status = download.get("status") + if status == "downloaded": + # Find and remove from any content's active downloads + for content_id, active_set in self.active_downloads.items(): + if download_id in active_set: + active_set.remove(download_id) + logger.debug(f"Removed completed download {download_id} from content {content_id} tracking") + self.download_complete[content_id] = True + except Exception as e: + logger.warning(f"Failed to check for completed downloads: {e}") + + # Recalculate after removing completed downloads + total_downloads = sum(len(downloads) for downloads in self.active_downloads.values()) + if total_downloads < self.MAX_CONCURRENT_TOTAL: + logger.debug(f"Cleanup of completed downloads successful, now at {total_downloads}/{self.MAX_CONCURRENT_TOTAL}") + return True + + # If still at limit, try regular cleanup + for attempt in range(2): + cleaned = self._cleanup_downloads() + if cleaned: + # Recalculate total after cleanup + total_downloads = sum(len(downloads) for downloads in self.active_downloads.values()) + if total_downloads < self.MAX_CONCURRENT_TOTAL: + logger.debug(f"Cleanup successful, now at {total_downloads}/{self.MAX_CONCURRENT_TOTAL} downloads") + return True + else: + logger.debug(f"Cleanup removed {cleaned} downloads but still at limit ({total_downloads}/{self.MAX_CONCURRENT_TOTAL})") + else: + logger.debug(f"Cleanup attempt {attempt + 1} removed no downloads") + if attempt == 0: # Wait between attempts + time.sleep(2) + + # Do one final check of our 
tracking vs reality + try: + downloads = self.api.request_handler.execute(HttpMethod.GET, "downloads") + if isinstance(downloads, list): + actual_active = set() + for download in downloads: + if isinstance(download, dict): + download_id = download.get("id") + status = download.get("status") + if status not in ("downloaded", "error", "magnet_error", "virus", "dead"): + actual_active.add(download_id) + + # Update our tracking to match reality + for content_id in list(self.active_downloads.keys()): + self.active_downloads[content_id] = { + d_id for d_id in self.active_downloads[content_id] + if d_id in actual_active + } + if not self.active_downloads[content_id]: + del self.active_downloads[content_id] + + total_downloads = sum(len(downloads) for downloads in self.active_downloads.values()) + logger.debug(f"After final tracking sync: {total_downloads}/{self.MAX_CONCURRENT_TOTAL} downloads") + return total_downloads < self.MAX_CONCURRENT_TOTAL + except Exception as e: + logger.warning(f"Failed final tracking check: {e}") + + logger.warning(f"Could not reduce download count below limit ({total_downloads}/{self.MAX_CONCURRENT_TOTAL}) after cleanup") + return False + return True + + def download_cached_stream(self, item: MediaItem, stream: Stream) -> DownloadCachedStreamResult: + """Download a stream from Real-Debrid""" + if not self.initialized: + raise RealDebridError("Downloader not properly initialized") + + content_id = str(item.id) + torrent_id = None + + try: + # Check and cleanup if needed before adding magnet + if not self._cleanup_if_needed(): + logger.warning(f"Cannot start download for {content_id} - max concurrent downloads reached even after cleanup") + return DownloadCachedStreamResult(None, torrent_id, None, stream.infohash) + + # Add torrent and get initial info to check files + torrent_id = self.add_torrent(stream.infohash) + info = self.get_torrent_info(torrent_id) + + # Process files to find valid video files + files = info.get("files", []) + container = self._process_files(files) + if not container: + logger.debug(f"No valid video files found in torrent {torrent_id}") + return DownloadCachedStreamResult(None, torrent_id, info, stream.infohash) + + # Check if we can start a new download for this content + if not self._can_start_download(content_id): + logger.warning(f"Cannot start download for {item.log_string} - max concurrent downloads reached") + return DownloadCachedStreamResult(container, torrent_id, info, stream.infohash) + + # If content is complete but we found valid files, proceed with download + if self._is_content_complete(content_id): + logger.info(f"Content {item.log_string} marked as complete but valid files found - proceeding with download") + + self._add_active_download(content_id, torrent_id) + + # Select all files by default + self.select_files(torrent_id, list(container.keys())) + + # Wait for download to complete + info = self.wait_for_download(torrent_id, content_id, item) + + logger.log("DEBRID", f"Downloading {item.log_string} from '{stream.raw_title}' [{stream.infohash}]") + + # Mark content as complete since download succeeded + self._mark_content_complete(content_id) + # Reset queue attempts on successful download + self.queue_attempts[content_id] = 0 + + return DownloadCachedStreamResult(container, torrent_id, info, stream.infohash) + + except RealDebridActiveLimitError: + # Don't blacklist the stream, mark for retry after a short delay + retry_time = datetime.now() + timedelta(minutes=30) # Retry after 30 minutes + logger.warning(f"Real-Debrid 
active limit exceeded for {item.log_string}, will retry after 30 minutes") + item.set("retry_after", retry_time) + return DownloadCachedStreamResult(None, torrent_id, None, stream.infohash) + except QueuedTooManyTimesError: + # Don't blacklist the stream, but mark the item for retry later based on scrape count + retry_hours = self._get_retry_hours(item.scraped_times) + retry_time = datetime.now() + timedelta(hours=retry_hours) + logger.warning(f"Too many queued attempts for {item.log_string}, will retry after {retry_hours} hours") + item.set("retry_after", retry_time) + return DownloadCachedStreamResult(container if 'container' in locals() else None, torrent_id, None, stream.infohash) + except Exception as e: + # Clean up torrent if something goes wrong + if torrent_id: + try: + self.delete_torrent(torrent_id) + except Exception as delete_error: + logger.error(f"Failed to delete torrent {torrent_id} after error: {delete_error}") + raise + finally: + if torrent_id: + self._remove_active_download(content_id, torrent_id) + + def _get_retry_hours(self, scrape_times: int) -> float: + """Get retry hours based on number of scrape attempts.""" + if scrape_times >= 10: + return self.scraping_settings.after_10 + elif scrape_times >= 5: + return self.scraping_settings.after_5 + elif scrape_times >= 2: + return self.scraping_settings.after_2 + return 2.0 # Default to 2 hours + + def wait_for_download(self, torrent_id: str, content_id: str, item: MediaItem) -> dict: + """Wait for torrent to finish downloading""" + start_time = time.time() + last_check_time = time.time() + zero_seeder_count = 0 # Track consecutive zero seeder checks + + while True: + info = self.get_torrent_info(torrent_id) + status = RDTorrentStatus(info.get("status", "")) + seeders = info.get("seeders", 0) + filename = info.get("filename", "Unknown") + progress = info.get("progress", 0) + current_time = time.time() + + # Handle queued torrents + if status == RDTorrentStatus.QUEUED: + self.queue_attempts[content_id] += 1 + if self.queue_attempts[content_id] >= self.MAX_QUEUE_ATTEMPTS: + logger.warning(f"Hit maximum queue attempts ({self.MAX_QUEUE_ATTEMPTS}) for content {content_id}") + raise QueuedTooManyTimesError(f"Too many queued attempts for {filename}") + + logger.debug(f"{filename} is queued on Real-Debrid (attempt {self.queue_attempts[content_id]}/{self.MAX_QUEUE_ATTEMPTS}), blacklisting and trying next stream") + raise DownloadFailedError(f"{filename} is queued on Real-Debrid") + + # Use dynamic timeout based on file size and progress + file_size_mb = info.get("bytes", 0) / (1024 * 1024) # Convert to MB + size_based_timeout = (file_size_mb / 50) * self.TIMEOUT_PER_50MB # 10 seconds per 50MB + timeout = min( + self.BASE_TIMEOUT + size_based_timeout, + self.MAX_TIMEOUT + ) + + # Log timeout calculation on first check + if not hasattr(self, '_logged_timeout') and size_based_timeout > 0: + logger.debug( + f"Timeout calculation for {filename}:\n" + f" File size: {file_size_mb:.1f}MB\n" + f" Base timeout: {self.BASE_TIMEOUT}s\n" + f" Size-based addition: {size_based_timeout:.1f}s\n" + f" Total timeout: {timeout:.1f}s" + ) + self._logged_timeout = True + + if current_time - start_time > timeout: + logger.warning(f"{filename} download taking too long ({int(timeout)} seconds), skipping and trying next stream") + # Don't delete torrent, just break and let Real-Debrid continue in background + break + + # Check status and seeders every minute + if current_time - last_check_time >= 60: # Check every minute + logger.debug(f"{filename} 
status: {status}, seeders: {seeders}") + if "progress" in info: + logger.debug(f"{filename} progress: \033[95m{progress}%\033[0m") + + # Only check seeders if download is not complete + if progress < 100 and status == RDTorrentStatus.DOWNLOADING: + if seeders == 0: + zero_seeder_count += 1 + logger.debug(f"{filename} has no seeders ({zero_seeder_count}/2 checks)") + if zero_seeder_count >= 2: # Give up after 2 consecutive zero seeder checks + logger.warning(f"{filename} has no seeders available after 2 consecutive checks, skipping and trying next stream") + break + else: + zero_seeder_count = 0 # Reset counter if we find seeders + + last_check_time = current_time + + if status == RDTorrentStatus.DOWNLOADED: + return info + elif status in (RDTorrentStatus.ERROR, RDTorrentStatus.MAGNET_ERROR, RDTorrentStatus.DEAD): + logger.error(f"{filename} failed with status: {status}") + # Don't delete torrent, just skip and try next stream + break + + time.sleep(self.DOWNLOAD_POLL_INTERVAL) + + # If we broke out of loop due to timeout, no seeders, or error status + if current_time - start_time > timeout: + raise DownloadFailedError(f"{filename} download taking too long") + elif zero_seeder_count >= 2: + raise DownloadFailedError(f"{filename} has no seeders available") + elif status in (RDTorrentStatus.ERROR, RDTorrentStatus.MAGNET_ERROR, RDTorrentStatus.DEAD): + raise DownloadFailedError(f"{filename} failed with status: {status}") + + def _add_active_download(self, content_id: str, torrent_id: str): + """Add a download to active downloads tracking.""" + self.active_downloads[content_id].add(torrent_id) + logger.debug(f"Added download {torrent_id} to content {content_id} tracking") + + def _remove_active_download(self, content_id: str, torrent_id: str): + """Remove a download from active downloads tracking.""" + if content_id in self.active_downloads: + self.active_downloads[content_id].discard(torrent_id) + logger.debug(f"Removed download {torrent_id} from content {content_id} tracking") + if not self.active_downloads[content_id]: + del self.active_downloads[content_id] + logger.debug(f"Removed empty content {content_id} from tracking") + + def _mark_content_complete(self, content_id: str): + """Mark a content as having completed download.""" + self.download_complete[content_id] = True + logger.debug(f"Marked content {content_id} as complete") + + def _is_content_complete(self, content_id: str) -> bool: + """Check if content has completed download.""" + is_complete = content_id in self.download_complete and self.download_complete[content_id] + logger.debug(f"Content {content_id} complete status: {is_complete}") + return is_complete + + def __init__(self): + self.key = "realdebrid" + self.settings = settings_manager.settings.downloaders.real_debrid + self.scraping_settings = settings_manager.settings.scraping + self.api = None + self.file_finder = None + self.initialized = self.validate() + self.active_downloads = defaultdict(set) # {content_id: set(torrent_ids)} + self.download_complete = {} # Track if a content's download is complete + self.queue_attempts = defaultdict(int) # Track number of queued attempts per content; defaultdict avoids a KeyError on the first queued check + self.last_cleanup_time = datetime.now() + + def validate(self) -> bool: + """ + Validate Real-Debrid settings and premium status + Required by DownloaderBase + """ + if not self._validate_settings(): + return False + + self.api = RealDebridAPI( + api_key=self.settings.api_key, + proxy_url=self.settings.proxy_url if self.settings.proxy_enabled else None + ) + self.file_finder = FileFinder("filename", 
"filesize") + + return self._validate_premium() + + def _validate_settings(self) -> bool: + """Validate configuration settings""" + if not self.settings.enabled: + return False + if not self.settings.api_key: + logger.warning("Real-Debrid API key is not set") + return False + if self.settings.proxy_enabled and not self.settings.proxy_url: + logger.error("Proxy is enabled but no proxy URL is provided") + return False + return True + + def _validate_premium(self) -> bool: + """Validate premium status""" + try: + user_info = self.api.request_handler.execute(HttpMethod.GET, "user") + if not user_info.get("premium"): + logger.error("Premium membership required") + return False + + expiration = datetime.fromisoformat( + user_info["expiration"].replace("Z", "+00:00") + ).replace(tzinfo=None) + logger.info(premium_days_left(expiration)) + return True + except Exception as e: + logger.error(f"Failed to validate premium status: {e}") + return False + + def get_instant_availability(self, infohashes: List[str]) -> Dict[str, list]: + """ + Get instant availability for multiple infohashes + Required by DownloaderBase + Note: Returns all torrents as available to attempt download of everything + """ + # Return all infohashes as available with a dummy file entry + result = {} + for infohash in infohashes: + result[infohash] = [{ + "files": [{ + "id": 1, + "path": "pending.mkv", + "bytes": 1000000000 + }] + }] + return result + + def add_torrent(self, infohash: str) -> str: + """Add a torrent to Real-Debrid and return its ID.""" + # Check and cleanup if needed before adding torrent + if not self._cleanup_if_needed(): + raise Exception("Cannot add torrent - max concurrent downloads reached even after cleanup") + + attempts = 3 + last_error = None + + for attempt in range(attempts): + try: + # First try to add directly + try: + result = self.api.request_handler.execute( + HttpMethod.POST, + "torrents/addMagnet", + data={"magnet": f"magnet:?xt=urn:btih:{infohash}"} + ) + return result["id"] + except Exception as e: + error_str = str(e).lower() + if "404" in error_str: + # If 404, try adding raw hash + result = self.api.request_handler.execute( + HttpMethod.POST, + "torrents/addMagnet", + data={"magnet": infohash} + ) + return result["id"] + elif "403" in error_str or "forbidden" in error_str: + # Force cleanup on 403/Forbidden + logger.debug(f"Got 403/Forbidden error, forcing cleanup (attempt {attempt + 1}/{attempts})") + self._cleanup_downloads() + time.sleep(2) # Wait before retry + elif "509" in error_str or "active limit exceeded" in error_str.lower(): + # Force cleanup on active limit + logger.debug(f"Active limit exceeded, forcing cleanup (attempt {attempt + 1}/{attempts})") + self._cleanup_downloads() + time.sleep(2) # Wait before retry + elif "429" in error_str or "too many requests" in error_str.lower(): + # Rate limit - wait longer + wait_time = 5 if attempt == 0 else 10 + logger.debug(f"Rate limited, waiting {wait_time}s (attempt {attempt + 1}/{attempts})") + time.sleep(wait_time) + else: + raise + + except Exception as e: + last_error = e + if attempt < attempts - 1: # Don't log on last attempt + logger.warning(f"Failed to add torrent {infohash} (attempt {attempt + 1}/{attempts}): {e}") + time.sleep(2) # Wait before retry + continue + + # If we get here, all attempts failed + logger.error(f"Failed to add torrent {infohash} after {attempts} attempts: {last_error}") + raise last_error + + def _is_active_status(self, status: str) -> bool: + """Check if a torrent status counts as active.""" + return 
status in ("downloading", "uploading", "compressing", "magnet_conversion", "waiting_files_selection") + + def _cleanup_inactive_torrents(self) -> int: + """Clean up inactive, errored, or stalled torrents to free up slots. + Returns number of torrents cleaned up.""" + + # Check if enough time has passed since last cleanup + current_time = datetime.now() + if (current_time - self.last_cleanup_time).total_seconds() < self.CLEANUP_INTERVAL: + return 0 + + try: + # First check active torrent count + try: + active_count = self.api.request_handler.execute(HttpMethod.GET, "torrents/activeCount") + logger.debug(f"Active torrents: {active_count['nb']}/{active_count['limit']}") + if active_count["nb"] < active_count["limit"]: + return 0 + + # Calculate how aggressive we should be based on how far over the limit we are + overage = active_count["nb"] - active_count["limit"] + logger.warning(f"Over active torrent limit by {overage} torrents") + # If we're over by more than 5, be extremely aggressive + extremely_aggressive = overage >= 5 + # If we're over by any amount, be somewhat aggressive + aggressive_cleanup = overage > 0 + except Exception as e: + logger.warning(f"Failed to get active torrent count: {e}") + extremely_aggressive = True # Be extremely aggressive if we can't check + aggressive_cleanup = True + + # Get list of all torrents + torrents = self.api.request_handler.execute(HttpMethod.GET, "torrents") + to_delete = [] # List of (priority, torrent_id, reason) tuples + cleaned = 0 + + # Count active torrents by status and collect stats + active_by_status = defaultdict(list) + magnet_times = [] # Track magnet conversion times + downloading_stats = [] # Track download stats + total_active = 0 + + # Track duplicates by filename + filename_to_torrents = defaultdict(list) + + for torrent in torrents: + status = torrent.get("status", "") + if self._is_active_status(status): + # Calculate time_elapsed first + time_elapsed = 0 + try: + added = torrent.get("added", "") + if added: + # Convert to UTC, then to local time + added_time = datetime.fromisoformat(added.replace("Z", "+00:00")) + added_time = added_time.astimezone().replace(tzinfo=None) + time_elapsed = (current_time - added_time).total_seconds() + except (ValueError, TypeError): + logger.warning(f"Invalid timestamp format for torrent: {torrent.get('added')}") + + torrent_stats = { + "status": status, + "filename": torrent.get("filename", "unknown"), + "progress": torrent.get("progress", 0), + "speed": torrent.get("speed", 0), + "seeders": torrent.get("seeders", 0), + "time_elapsed": time_elapsed, + "id": torrent.get("id", "") + } + + active_by_status[status].append(torrent_stats) + filename_to_torrents[torrent_stats["filename"]].append(torrent_stats) + total_active += 1 + + if status == "magnet_conversion" and time_elapsed > 0: + magnet_times.append(time_elapsed) + elif status == "downloading": + downloading_stats.append(torrent_stats) + + # First handle duplicates - keep only the most progressed version of each file + for filename, dupes in filename_to_torrents.items(): + if len(dupes) > 1: + logger.info(f"Found {len(dupes)} duplicates of {filename}") + # Sort by progress (highest first), then by speed (highest first) + dupes.sort(key=lambda x: (x["progress"], x["speed"]), reverse=True) + + # Keep the best one, mark others for deletion + best = dupes[0] + logger.info(f"Keeping best duplicate: {best['progress']}% @ {best['speed']/1024:.1f} KB/s") + + for dupe in dupes[1:]: + reason = (f"duplicate of {filename} " + f"(keeping: 
{best['progress']}% @ {best['speed']/1024:.1f} KB/s, " + f"removing: {dupe['progress']}% @ {dupe['speed']/1024:.1f} KB/s)") + to_delete.append((150, dupe["id"], reason, dupe["time_elapsed"])) # Highest priority for duplicates + logger.info(f"Marking duplicate for deletion: {reason}") + + # Find stalled or problematic torrents + stalled_threshold = 60 # 1 minute without progress + near_complete_threshold = 95.0 # Protect torrents above this % + min_speed_threshold = 100 * 1024 # 100 KB/s minimum speed + + for status, torrents in active_by_status.items(): + for t in torrents: + # Skip nearly complete downloads unless they're completely stalled + if t["progress"] >= near_complete_threshold: + if t["speed"] == 0: + logger.warning(f"Nearly complete torrent stalled: {t['filename']} at {t['progress']}%") + reason = f"stalled at {t['progress']}% complete (no speed for {t['time_elapsed']/60:.1f}m)" + to_delete.append((90, t["id"], reason, t["time_elapsed"])) + continue + + # Check for stalled downloads + if status == "downloading": + if t["speed"] < min_speed_threshold: + time_stalled = t["time_elapsed"] + if time_stalled > stalled_threshold: + reason = (f"stalled download: {t['filename']} " + f"(progress: {t['progress']}%, " + f"speed: {t['speed']/1024:.1f} KB/s, " + f"stalled for: {time_stalled/60:.1f}m)") + priority = 120 if t["progress"] < 10 else 100 # Higher priority for early stalls + to_delete.append((priority, t["id"], reason, time_stalled)) + logger.info(f"Marking stalled download for deletion: {reason}") + + # Handle stuck magnet conversions more aggressively + elif status == "magnet_conversion": + if t["time_elapsed"] > 300: # 5 minutes + reason = f"stuck in magnet conversion for {t['time_elapsed']/60:.1f} minutes" + to_delete.append((130, t["id"], reason, t["time_elapsed"])) + logger.info(f"Marking stuck magnet for deletion: {reason}") + + # Log active torrent distribution and detailed stats + logger.info("=== Active Torrent Stats ===") + for status, active_torrents in active_by_status.items(): + count = len(active_torrents) + logger.info(f"\n{status.upper()} ({count} torrents):") + + # Sort by time elapsed + active_torrents.sort(key=lambda x: x["time_elapsed"], reverse=True) + + for t in active_torrents: + stats = [] + if t["progress"] > 0: + stats.append(f"progress: {t['progress']}%") + if t["speed"] > 0: + stats.append(f"speed: {t['speed']/1024:.1f} KB/s") + if t["seeders"] > 0: + stats.append(f"seeders: {t['seeders']}") + if t["time_elapsed"] > 0: + stats.append(f"age: {t['time_elapsed']/60:.1f}m") + + stats_str = ", ".join(stats) if stats else f"age: {t['time_elapsed']/60:.1f}m" + logger.info(f" - {t['filename']} ({stats_str})") + + # Calculate duplicate ratio and adjust aggressiveness + unique_filenames = set() + for status, torrents in active_by_status.items(): + for t in torrents: + unique_filenames.add(t["filename"]) + + duplicate_ratio = (total_active - len(unique_filenames)) / total_active if total_active > 0 else 0 + if duplicate_ratio > 0.5: # If more than 50% are duplicates + extremely_aggressive = True + logger.info(f"High duplicate ratio ({duplicate_ratio:.1%}), using extremely aggressive cleanup") + + # Set base thresholds + if extremely_aggressive: + magnet_threshold = 30 # 30 seconds + time_threshold = self.CLEANUP_INACTIVE_TIME / 4 + elif aggressive_cleanup: + magnet_threshold = 60 # 1 minute + time_threshold = self.CLEANUP_INACTIVE_TIME / 2 + else: + magnet_threshold = 300 # 5 minutes + time_threshold = self.CLEANUP_INACTIVE_TIME + + logger.debug(f"Using thresholds 
- Magnet: {magnet_threshold/60:.1f}m, General: {time_threshold/60:.1f}m") + + # Process all torrents for cleanup + for status, torrents in active_by_status.items(): + for torrent_stats in torrents: + should_delete = False + reason = "" + priority = 0 + time_elapsed = torrent_stats["time_elapsed"] + + # 1. Error states (highest priority) + if status in ("error", "magnet_error", "virus", "dead"): + should_delete = True + reason = f"error status: {status}" + priority = 100 + + # 2. Magnet conversion (high priority if taking too long) + elif status == "magnet_conversion": + if time_elapsed > magnet_threshold: + should_delete = True + reason = f"stuck in magnet conversion for {time_elapsed/60:.1f} minutes" + priority = 95 # Very high priority since we have so many + + # 3. Stalled or slow downloads + elif status == "downloading": + progress = torrent_stats["progress"] + speed = torrent_stats["speed"] + seeders = torrent_stats["seeders"] + + if progress == 0 and time_elapsed > time_threshold: + should_delete = True + reason = f"no progress after {time_elapsed/60:.1f} minutes" + priority = 85 + elif progress < self.CLEANUP_MINIMAL_PROGRESS_THRESHOLD and time_elapsed > time_threshold: + should_delete = True + reason = f"minimal progress ({progress}%) after {time_elapsed/60:.1f} minutes" + priority = 80 + elif speed < self.CLEANUP_SPEED_THRESHOLD: + should_delete = True + reason = f"slow speed ({speed/1024:.1f} KB/s)" + priority = 75 + elif seeders == 0: + should_delete = True + reason = f"no seeders" + priority = 85 + + # 4. Stuck uploads/compression + elif status in ("uploading", "compressing"): + speed = torrent_stats["speed"] + if time_elapsed > time_threshold or speed < self.CLEANUP_SPEED_THRESHOLD: + should_delete = True + reason = f"stuck in {status} for {time_elapsed/60:.1f} minutes" + priority = 60 + + # 5. 
Other states + elif status in ("waiting_files_selection", "queued"): + if time_elapsed > time_threshold: + should_delete = True + reason = f"stuck in {status} for {time_elapsed/60:.1f} minutes" + priority = 50 + + if should_delete: + filename = torrent_stats["filename"] + progress = torrent_stats["progress"] + speed = torrent_stats["speed"] + full_reason = f"{reason} (file: {filename}, progress: {progress}%, speed: {speed/1024:.1f} KB/s)" + to_delete.append((priority, torrent_stats["id"], full_reason, time_elapsed)) + + # Sort by priority (highest first) and extract torrent_id and reason + to_delete.sort(reverse=True) + + # If we're extremely aggressive, take more torrents + batch_size = self.CLEANUP_BATCH_SIZE * 2 if extremely_aggressive else self.CLEANUP_BATCH_SIZE + + # If no torrents were marked for deletion but we're still over limit, + # force delete the slowest/least progressed torrents + if not to_delete and total_active > active_count["limit"]: + logger.info("No torrents met deletion criteria but still over limit, using fallback cleanup") + + # First try to clean up just duplicates + duplicates_only = True + cleanup_attempts = 2 # Try duplicates first, then all torrents if needed + + while cleanup_attempts > 0: + # Collect all active torrents into a single list for sorting + all_active = [] + seen_filenames = set() + + for status, torrents in active_by_status.items(): + for t in torrents: + filename = t["filename"] + + # Skip non-duplicates on first pass + is_duplicate = filename in seen_filenames + if duplicates_only and not is_duplicate: + continue + + seen_filenames.add(filename) + + score = 0 + # Prioritize keeping torrents with more progress + score += t["progress"] * 100 + # And those with higher speeds + score += min(t["speed"] / 1024, 1000) # Cap speed bonus at 1000 + # And those with more seeders + score += t["seeders"] * 10 + # Penalize older torrents slightly + score -= min(t["time_elapsed"] / 60, 60) # Cap age penalty at 60 minutes + # Heavy penalty for duplicates + if is_duplicate: + score -= 5000 # Ensure duplicates are cleaned up first + + all_active.append({ + "id": t["id"], + "score": score, + "stats": t, + "status": status, + "is_duplicate": is_duplicate + }) + + if all_active: + # Sort by score (lowest first - these will be deleted) + all_active.sort(key=lambda x: x["score"]) + + # Take enough torrents to get under the limit + to_remove = min( + len(all_active), # Don't try to remove more than we have + total_active - active_count["limit"] + 1 # +1 for safety margin + ) + + for torrent in all_active[:to_remove]: + stats = torrent["stats"] + reason = (f"fallback cleanup{' (duplicate)' if duplicates_only else ''} - {torrent['status']} " + f"(progress: {stats['progress']}%, " + f"speed: {stats['speed']/1024:.1f} KB/s, " + f"seeders: {stats['seeders']}, " + f"age: {stats['time_elapsed']/60:.1f}m)") + to_delete.append((0, torrent["id"], reason, stats["time_elapsed"])) + logger.info(f"Fallback cleanup marking: {stats['filename']} - {reason}") + + # If we found enough torrents to delete, we're done + if len(to_delete) >= (total_active - active_count["limit"]): + break + + # If we get here and duplicates_only is True, try again with all torrents + duplicates_only = False + cleanup_attempts -= 1 + + # Log what we're about to delete + if to_delete: + logger.info(f"Found {len(to_delete)} torrents to clean up, processing in batches of {batch_size}") + for _, _, reason, _ in to_delete[:5]: # Log first 5 for debugging + logger.debug(f"Will delete: {reason}") + + # Convert to 
final format and process deletions + to_delete = [(t[1], t[2]) for t in to_delete] + + # Process deletion in batches + while to_delete: + batch = to_delete[:batch_size] + to_delete = to_delete[batch_size:] + + for torrent_id, reason in batch: + try: + self.api.request_handler.execute(HttpMethod.DELETE, f"torrents/delete/{torrent_id}") + cleaned += 1 + logger.info(f"Cleaned up torrent: {reason}") + except Exception as e: + logger.error(f"Failed to delete torrent {torrent_id}: {e}") + + if to_delete: # If we have more to process, wait briefly + time.sleep(0.5) + + self.last_cleanup_time = current_time + return cleaned + + except Exception as e: + logger.error(f"Error during cleanup: {e}") + return 0 \ No newline at end of file diff --git a/src/program/services/downloaders/shared.py b/src/program/services/downloaders/shared.py new file mode 100644 index 0000000..7669187 --- /dev/null +++ b/src/program/services/downloaders/shared.py @@ -0,0 +1,220 @@ +from abc import ABC, abstractmethod +from datetime import datetime +from typing import Tuple + +from loguru import logger +from RTN import parse + +from program.media import MovieMediaType, ShowMediaType +from program.settings.manager import settings_manager + +DEFAULT_VIDEO_EXTENSIONS = ["mp4", "mkv", "avi"] +ALLOWED_VIDEO_EXTENSIONS = [ + "mp4", + "mkv", + "avi", + "mov", + "wmv", + "flv", + "m4v", + "webm", + "mpg", + "mpeg", + "m2ts", + "ts", +] + +VIDEO_EXTENSIONS = ( + settings_manager.settings.downloaders.video_extensions or DEFAULT_VIDEO_EXTENSIONS +) +VIDEO_EXTENSIONS = [ext for ext in VIDEO_EXTENSIONS if ext in ALLOWED_VIDEO_EXTENSIONS] + +if not VIDEO_EXTENSIONS: + VIDEO_EXTENSIONS = DEFAULT_VIDEO_EXTENSIONS + +# Type aliases +InfoHash = str # A torrent hash +DebridTorrentId = ( + str # Identifier issued by the debrid service for a torrent in their cache +) + + +class DownloaderBase(ABC): + """ + The abstract base class for all Downloader implementations. + """ + + @abstractmethod + def validate(): + pass + @abstractmethod + def get_instant_availability(): + pass + + @abstractmethod + def add_torrent(): + pass + + @abstractmethod + def select_files(): + pass + + @abstractmethod + def get_torrent_info(): + pass + + @abstractmethod + def delete_torrent(): + pass + +class DownloadCachedStreamResult: + """Result object for cached stream downloads""" + def __init__(self, container=None, torrent_id=None, info=None, info_hash=None): + self.container = container + self.torrent_id = torrent_id + self.info = info + self.info_hash = info_hash + +class FileFinder: + """ + A class that helps you find files. + + Attributes: + filename_attr (str): The name of the file attribute. 
+ """ + + def __init__(self, name, size): + self.filename_attr = name + self.filesize_attr = size + + def container_file_matches_episode(self, file): + filename = file[self.filename_attr] + logger.debug(f"Attempting to parse filename for episode matching: {filename}") + try: + # First try parsing the full path + parsed_data = parse(filename) + if not parsed_data.seasons or not parsed_data.episodes: + # If full path doesn't work, try just the filename + filename_only = filename.split('/')[-1] + logger.debug(f"Full path parse failed, trying filename only: {filename_only}") + parsed_data = parse(filename_only) + + logger.debug(f"Successfully parsed '{filename}': seasons={parsed_data.seasons}, episodes={parsed_data.episodes}") + return parsed_data.seasons[0], parsed_data.episodes + except Exception as e: + logger.debug(f"Failed to parse '{filename}' for episode matching: {str(e)}") + return None, None + + def container_file_matches_movie(self, file): + filename = file[self.filename_attr] + logger.debug(f"Attempting to parse filename for movie matching: {filename}") + try: + # First try parsing the full path + parsed_data = parse(filename) + if parsed_data.type != "movie": + # If full path doesn't work, try just the filename + filename_only = filename.split('/')[-1] + logger.debug(f"Full path parse failed, trying filename only: {filename_only}") + parsed_data = parse(filename_only) + + is_movie = parsed_data.type == "movie" + logger.debug(f"Successfully parsed '{filename}': type={parsed_data.type}, is_movie={is_movie}") + return is_movie + except Exception as e: + logger.debug(f"Failed to parse '{filename}' for movie matching: {str(e)}") + return None + +def premium_days_left(expiration: datetime) -> str: + """Convert an expiration date into a message showing days remaining on the user's premium account""" + time_left = expiration - datetime.utcnow() + days_left = time_left.days + hours_left, minutes_left = divmod(time_left.seconds // 3600, 60) + expiration_message = "" + + if days_left > 0: + expiration_message = f"Your account expires in {days_left} days." + elif hours_left > 0: + expiration_message = ( + f"Your account expires in {hours_left} hours and {minutes_left} minutes." + ) + else: + expiration_message = "Your account expires soon." + return expiration_message + + +def hash_from_uri(magnet_uri: str) -> str: + if len(magnet_uri) == 40: + # Probably already a hash + return magnet_uri + start = magnet_uri.index("urn:btih:") + len("urn:btih:") + return magnet_uri[start : start + 40] + +min_movie_filesize = settings_manager.settings.downloaders.movie_filesize_mb_min +max_movie_filesize = settings_manager.settings.downloaders.movie_filesize_mb_max +min_episode_filesize = settings_manager.settings.downloaders.episode_filesize_mb_min +max_episode_filesize = settings_manager.settings.downloaders.episode_filesize_mb_max + +def _validate_filesize_setting(value: int, setting_name: str) -> bool: + """Validate a single filesize setting.""" + if not isinstance(value, int) or value < -1: + logger.error(f"{setting_name} is not valid. Got {value}, expected integer >= -1") + return False + return True + +def _validate_filesizes() -> bool: + """ + Validate all filesize settings from configuration. + Returns True if all settings are valid integers >= -1, False otherwise. 
+ """ + settings = settings_manager.settings.downloaders + return all([ + _validate_filesize_setting(settings.movie_filesize_mb_min, "Movie filesize min"), + _validate_filesize_setting(settings.movie_filesize_mb_max, "Movie filesize max"), + _validate_filesize_setting(settings.episode_filesize_mb_min, "Episode filesize min"), + _validate_filesize_setting(settings.episode_filesize_mb_max, "Episode filesize max") + ]) + +are_filesizes_valid = _validate_filesizes() + +BYTES_PER_MB = 1_000_000 + +def _convert_to_bytes(size_mb: int) -> int: + """Convert size from megabytes to bytes.""" + return size_mb * BYTES_PER_MB + +def _get_size_limits(media_type: str) -> Tuple[int, int]: + """Get min and max size limits in MB for given media type.""" + settings = settings_manager.settings.downloaders + if media_type == MovieMediaType.Movie.value: + return (settings.movie_filesize_mb_min, settings.movie_filesize_mb_max) + return (settings.episode_filesize_mb_min, settings.episode_filesize_mb_max) + +def _validate_filesize(filesize: int, media_type: str) -> bool: + """ + Validate file size against configured limits. + + Args: + filesize: Size in bytes to validate + media_type: Type of media being validated + + Returns: + bool: True if size is within configured range + """ + if not are_filesizes_valid: + logger.error(f"Filesize settings are invalid, {media_type} file sizes will not be checked.") + return True + + min_mb, max_mb = _get_size_limits(media_type) + min_size = 0 if min_mb == -1 else _convert_to_bytes(min_mb) + max_size = float("inf") if max_mb == -1 else _convert_to_bytes(max_mb) + + return min_size <= filesize <= max_size + + +def filesize_is_acceptable(filesize: int, media_type: str) -> bool: + return _validate_filesize(filesize, media_type) + +def get_invalid_filesize_log_string(filesize: int, media_type: str) -> str: + min_mb, max_mb = _get_size_limits(media_type) + friendly_filesize = round(filesize / BYTES_PER_MB, 2) + return f"{friendly_filesize} MB is not within acceptable range of [{min_mb}MB] to [{max_mb}MB]" \ No newline at end of file diff --git a/src/program/services/downloaders/torbox.py b/src/program/services/downloaders/torbox.py new file mode 100644 index 0000000..54bd9a5 --- /dev/null +++ b/src/program/services/downloaders/torbox.py @@ -0,0 +1,334 @@ +# import contextlib +# from datetime import datetime +# from pathlib import Path +# from posixpath import splitext +# from typing import Generator + +# from requests import ConnectTimeout +# from RTN import parse +# from RTN.exceptions import GarbageTorrent + +# from program.db.db import db +# from program.db.db_functions import get_stream_count, load_streams_in_pages +# from program.media.item import MediaItem +# from program.media.state import States +# from program.media.stream import Stream +# from program.settings.manager import settings_manager +# from loguru import logger +# from program.utils.request import get, post + +# API_URL = "https://api.torbox.app/v1/api" +# WANTED_FORMATS = {".mkv", ".mp4", ".avi"} + + +# class TorBoxDownloader: +# """TorBox Downloader""" + +# def __init__(self): +# self.key = "torbox_downloader" +# self.settings = settings_manager.settings.downloaders.torbox +# self.api_key = self.settings.api_key +# self.base_url = "https://api.torbox.app/v1/api" +# self.headers = {"Authorization": f"Bearer {self.api_key}"} +# self.initialized = self.validate() +# if not self.initialized: +# return +# logger.success("TorBox Downloader initialized!") + +# def validate(self) -> bool: +# """Validate the TorBox 
Downloader as a service""" +# if not self.settings.enabled: +# return False +# if not self.settings.api_key: +# logger.error("Torbox API key is not set") +# try: +# response = get(f"{self.base_url}/user/me", headers=self.headers) +# if response.is_ok: +# user_info = response.data.data +# expiration = user_info.premium_expires_at +# expiration_date_time = datetime.fromisoformat(expiration) +# expiration_date_time.replace(tzinfo=None) +# delta = expiration_date_time - datetime.now().replace( +# tzinfo=expiration_date_time.tzinfo +# ) + +# if delta.days > 0: +# expiration_message = f"Your account expires in {delta.days} days." +# else: +# expiration_message = "Your account expires soon." + +# if user_info.plan == 0: +# logger.error("You are not a premium member.") +# return False +# else: +# logger.log("DEBRID", expiration_message) + +# return user_info.plan != 0 +# except ConnectTimeout: +# logger.error("Connection to Torbox timed out.") +# except Exception as e: +# logger.exception(f"Failed to validate Torbox settings: {e}") +# return False + +# def run(self, item: MediaItem) -> bool: +# """Download media item from torbox.app""" +# return_value = False +# stream_count = get_stream_count(item._id) +# processed_stream_hashes = set() # Track processed stream hashes +# stream_hashes = {} + +# number_of_rows_per_page = 5 +# total_pages = (stream_count // number_of_rows_per_page) + 1 + +# for page_number in range(total_pages): +# with db.Session() as session: +# for stream_id, infohash, stream in load_streams_in_pages( +# session, item._id, page_number, page_size=number_of_rows_per_page +# ): +# stream_hash_lower = infohash.lower() + +# if stream_hash_lower in processed_stream_hashes: +# continue + +# processed_stream_hashes.add(stream_hash_lower) +# stream_hashes[stream_hash_lower] = stream + +# cached_hashes = self.get_torrent_cached(list(stream_hashes.keys())) +# if cached_hashes: +# for cache in cached_hashes.values(): +# item.active_stream = cache +# if self.find_required_files(item, cache["files"]): +# logger.log( +# "DEBRID", +# f"Item is cached, proceeding with: {item.log_string}", +# ) +# item.set( +# "active_stream", +# { +# "hash": cache["hash"], +# "files": cache["files"], +# "id": None, +# }, +# ) +# self.download(item) +# return_value = True +# break +# else: +# stream = stream_hashes.get(cache["hash"].lower()) +# if stream: +# stream.blacklisted = True +# else: +# logger.log("DEBRID", f"Item is not cached: {item.log_string}") +# for stream in stream_hashes.values(): +# logger.log( +# "DEBUG", +# f"Blacklisting uncached hash ({stream.infohash}) for item: {item.log_string}", +# ) +# stream.blacklisted = True + +# return return_value + +# def get_cached_hashes(self, item: MediaItem, streams: list[str]) -> list[str]: +# """Check if the item is cached in torbox.app""" +# cached_hashes = self.get_torrent_cached(streams) +# return { +# stream: cached_hashes[stream]["files"] +# for stream in streams +# if stream in cached_hashes +# } + +# def get_cached_hashes( +# self, item: MediaItem, streams: list[str:Stream] +# ) -> list[str]: +# """Check if the item is cached in torbox.app""" +# cached_hashes = self.get_torrent_cached(streams) +# return { +# stream: cached_hashes[stream]["files"] +# for stream in streams +# if stream in cached_hashes +# } + +# def download_cached(self, item: MediaItem, stream: str) -> None: +# """Download the cached item from torbox.app""" +# cache = self.get_torrent_cached([stream])[stream] +# item.active_stream = cache +# self.download(item) + +# def 
find_required_files(self, item, container): + +# files = [ +# file +# for file in container +# if file +# and file["size"] > 10000 +# and splitext(file["name"].lower())[1] in WANTED_FORMATS +# ] + +# parsed_file = parse(file["name"]) + +# if item.type == "movie": +# for file in files: +# if parsed_file.type == "movie": +# return [file] +# if item.type == "show": +# # Create a dictionary to map seasons and episodes needed +# needed_episodes = {} +# acceptable_states = [ +# States.Indexed, +# States.Scraped, +# States.Unknown, +# States.Failed, +# ] + +# for season in item.seasons: +# if season.state in acceptable_states and season.is_released: +# needed_episode_numbers = { +# episode.number +# for episode in season.episodes +# if episode.state in acceptable_states and episode.is_released +# } +# if needed_episode_numbers: +# needed_episodes[season.number] = needed_episode_numbers +# if not needed_episodes: +# return False + +# # Iterate over each file to check if it matches +# # the season and episode within the show +# matched_files = [] +# for file in files: +# if not parsed_file.seasons or parsed_file.seasons == [0]: +# continue + +# # Check each season and episode to find a match +# for season_number, episodes in needed_episodes.items(): +# if season_number in parsed_file.season: +# for episode_number in list(episodes): +# if episode_number in parsed_file.episode: +# # Store the matched file for this episode +# matched_files.append(file) +# episodes.remove(episode_number) +# if not matched_files: +# return False + +# if all(len(episodes) == 0 for episodes in needed_episodes.values()): +# return matched_files +# if item.type == "season": +# needed_episodes = { +# episode.number: episode +# for episode in item.episodes +# if episode.state +# in [States.Indexed, States.Scraped, States.Unknown, States.Failed] +# } +# one_season = len(item.parent.seasons) == 1 + +# # Dictionary to hold the matched files for each episode +# matched_files = [] +# season_num = item.number + +# # Parse files once and assign to episodes +# for file in files: +# if not file or not file.get("name"): +# continue +# if not parsed_file.seasons or parsed_file.seasons == [ +# 0 +# ]: # skip specials +# continue +# # Check if the file's season matches the item's season or if there's only one season +# if season_num in parsed_file.seasons or one_season: +# for ep_num in parsed_file.episodes: +# if ep_num in needed_episodes: +# matched_files.append(file) +# if not matched_files: +# return False + +# # Check if all needed episodes are captured (or atleast half) +# if len(needed_episodes) == len(matched_files): +# return matched_files +# if item.type == "episode": +# for file in files: +# if not file or not file.get("name"): +# continue +# if ( +# item.number in parsed_file.episodes +# and item.parent.number in parsed_file.seasons +# ): +# return [file] + +# return [] + +# def download(self, item: MediaItem): +# # Check if the torrent already exists +# exists = False +# torrent_list = self.get_torrent_list() +# for torrent in torrent_list: +# if item.active_stream["hash"] == torrent["hash"]: +# id = torrent["id"] +# exists = True +# break + +# # If it doesnt, lets download it and refresh the torrent_list +# if not exists: +# id = self.add_torrent(item.active_stream["hash"]) +# torrent_list = self.get_torrent_list() + +# # Find the torrent, correct file and we gucci +# for torrent in torrent_list: +# if torrent["id"] == id: +# if item.type == "movie": +# file = self.find_required_files(item, item.active_stream["files"])[ +# 0 
+# ] +# _file_path = Path(file["name"]) +# item.set("folder", _file_path.parent.name) +# item.set("alternative_folder", ".") +# item.set("file", _file_path.name) +# if item.type == "show": +# files = self.find_required_files(item, item.active_stream["files"]) +# for season in item.seasons: +# for episode in season.episodes: +# file = self.find_required_files(episode, files)[0] +# _file_path = Path(file["name"]) +# episode.set("folder", _file_path.parent.name) +# episode.set("alternative_folder", ".") +# episode.set("file", _file_path.name) +# if item.type == "season": +# files = self.find_required_files(item, item.active_stream["files"]) +# for episode in item.episodes: +# file = self.find_required_files(episode, files)[0] +# _file_path = Path(file["name"]) +# episode.set("folder", _file_path.parent.name) +# episode.set("alternative_folder", ".") +# episode.set("file", _file_path.name) +# if item.type == "episode": +# file = self.find_required_files(episode, files)[0] +# _file_path = Path(file["name"]) +# item.set("folder", _file_path.parent.name) +# item.set("alternative_folder", ".") +# item.set("file", _file_path.name) +# logger.log("DEBRID", f"Downloaded {item.log_string}") + +# def get_torrent_cached(self, hash_list): +# hash_string = ",".join(hash_list) +# response = get( +# f"{self.base_url}/torrents/checkcached?hash={hash_string}&list_files=True", +# headers=self.headers, +# response_type=dict, +# ) +# return response.data["data"] + +# def add_torrent(self, infohash) -> int: +# magnet_url = f"magnet:?xt=urn:btih:{infohash}&dn=&tr=" +# response = post( +# f"{self.base_url}/torrents/createtorrent", +# data={"magnet": magnet_url, "seed": 1, "allow_zip": False}, +# headers=self.headers, +# ) +# return response.data.data.torrent_id + +# def get_torrent_list(self) -> list: +# response = get( +# f"{self.base_url}/torrents/mylist?bypass_cache=true", +# headers=self.headers, +# response_type=dict, +# ) +# return response.data["data"] diff --git a/src/program/services/indexers/__init__.py b/src/program/services/indexers/__init__.py new file mode 100644 index 0000000..183f60c --- /dev/null +++ b/src/program/services/indexers/__init__.py @@ -0,0 +1 @@ +from .trakt import TraktIndexer # noqa diff --git a/src/program/services/indexers/tmdb.py b/src/program/services/indexers/tmdb.py new file mode 100644 index 0000000..0463138 --- /dev/null +++ b/src/program/services/indexers/tmdb.py @@ -0,0 +1,454 @@ +from datetime import date +from enum import Enum +from typing import Generic, Literal, Optional, TypeVar + +from loguru import logger +from pydantic import BaseModel + +from program.utils.request import create_service_session, get + +TMDB_READ_ACCESS_TOKEN = "eyJhbGciOiJIUzI1NiJ9.eyJhdWQiOiJlNTkxMmVmOWFhM2IxNzg2Zjk3ZTE1NWY1YmQ3ZjY1MSIsInN1YiI6IjY1M2NjNWUyZTg5NGE2MDBmZjE2N2FmYyIsInNjb3BlcyI6WyJhcGlfcmVhZCJdLCJ2ZXJzaW9uIjoxfQ.xrIXsMFJpI1o1j5g2QpQcFP1X3AfRjFA5FlBFO5Naw8" # noqa: S105 + +# TODO: Maybe remove the else condition ? 
It's not necessary since exception is raised 400-450, 500-511, 408, 460, 504, 520, 524, 522, 598 and 599 + +ItemT = TypeVar("ItemT") + +class TmdbMediaType(str, Enum): + movie = "movie" + tv = "tv" + episode = "tv_episode" + season = "tv_season" + + +class TmdbItem(BaseModel): + adult: bool + backdrop_path: Optional[str] + id: int + title: str + original_title: str + original_language: str + overview: str + poster_path: Optional[str] + media_type: Optional[TmdbMediaType] = None + genre_ids: list[int] + popularity: float + release_date: str + video: bool + vote_average: float + vote_count: int + +class TmdbEpisodeItem(BaseModel): + id: int + name: str + overview: str + media_type: Literal["tv_episode"] + vote_average: float + vote_count: int + air_date: date + episode_number: int + episode_type: str + production_code: str + runtime: int + season_number: int + show_id: int + still_path: str + +class TmdbSeasonItem(BaseModel): + id: int + name: str + overview: str + poster_path: str + media_type: Literal["tv_season"] + vote_average: float + air_date: date + season_number: int + show_id: int + episode_count: int + + +class TmdbPagedResults(BaseModel, Generic[ItemT]): + page: int + results: list[ItemT] + total_pages: int + total_results: int + +class TmdbPagedResultsWithDates(TmdbPagedResults[ItemT], Generic[ItemT]): + class Dates(BaseModel): + maximum: date + minimum: date + dates: Dates + +class TmdbFindResults(BaseModel): + movie_results: list[TmdbItem] + tv_results: list[TmdbItem] + tv_episode_results: list[TmdbEpisodeItem] + tv_season_results: list[TmdbSeasonItem] + +class Genre(BaseModel): + id: int + name: str + +class BelongsToCollection(BaseModel): + id: int + name: str + poster_path: Optional[str] + backdrop_path: Optional[str] + + +class ProductionCompany(BaseModel): + id: int + logo_path: Optional[str] + name: str + origin_country: str + + +class ProductionCountry(BaseModel): + iso_3166_1: str + name: str + + +class SpokenLanguage(BaseModel): + english_name: str + iso_639_1: str + name: str + +class Network(BaseModel): + id: int + logo_path: Optional[str] + name: str + origin_country: str + +class TmdbMovieDetails(BaseModel): + adult: bool + backdrop_path: Optional[str] + belongs_to_collection: Optional[BelongsToCollection] + budget: int + genres: list[Genre] + homepage: Optional[str] + id: int + imdb_id: Optional[str] + original_language: str + original_title: str + overview: Optional[str] + popularity: float + poster_path: Optional[str] + production_companies: list[ProductionCompany] + production_countries: list[ProductionCountry] + release_date: Optional[str] + revenue: int + runtime: Optional[int] + spoken_languages: list[SpokenLanguage] + status: Optional[str] + tagline: Optional[str] + title: str + video: bool + vote_average: float + vote_count: int + +class TmdbTVDetails(BaseModel): + adult: bool + backdrop_path: Optional[str] + episode_run_time: list[int] + first_air_date: str + genres: list[Genre] + homepage: Optional[str] + id: int + in_production: bool + languages: list[str] + last_air_date: Optional[str] + last_episode_to_air: Optional[TmdbEpisodeItem] + name: str + next_episode_to_air: Optional[str] + networks: list[Network] + number_of_episodes: int + number_of_seasons: int + origin_country: list[str] + original_language: str + original_name: str + overview: Optional[str] + popularity: float + poster_path: Optional[str] + production_companies: list[ProductionCompany] + production_countries: list[ProductionCountry] + seasons: list[TmdbSeasonItem] + spoken_languages: 
list[str] + status: Optional[str] + tagline: Optional[str] + type: Optional[str] + vote_average: float + vote_count: int + +class TmdbCollectionDetails(BaseModel): + adult: bool + backdrop_path: Optional[str] + id: int + name: str + overview: str + original_language: str + original_name: str + poster_path: Optional[str] + +class TmdbEpisodeDetails(TmdbEpisodeItem): + crew: list[dict] + guest_stars: list[dict] + +class TmdbSeasonDetails(BaseModel): + _id: str + air_date: str + episodes: list[TmdbEpisodeDetails] + +class TMDB: + def __init__(self): + self.API_URL = "https://api.themoviedb.org/3" + self.session = create_service_session() + self.HEADERS = { + "Authorization": f"Bearer {TMDB_READ_ACCESS_TOKEN}", + } + self.session.headers.update(self.HEADERS) + + def getMoviesNowPlaying(self, params: str) -> Optional[TmdbPagedResultsWithDates[TmdbItem]]: + url = f"{self.API_URL}/movie/now_playing?{params}" + try: + response = get(self.session, url) + if response.is_ok and response.data: + return response.data + else: + logger.error(f"Failed to get movies now playing: {response.data}") + return None + except Exception as e: + logger.error( + f"An error occurred while getting movies now playing: {str(e)}" + ) + return None + + def getMoviesPopular(self, params: str) -> Optional[TmdbPagedResults[TmdbItem]]: + url = f"{self.API_URL}/movie/popular?{params}" + try: + response = get(self.session, url) + if response.is_ok and response.data: + return response.data + else: + logger.error(f"Failed to get popular movies: {response.data}") + return None + except Exception as e: + logger.error(f"An error occurred while getting popular movies: {str(e)}") + return None + + def getMoviesTopRated(self, params: str) -> Optional[TmdbPagedResults[TmdbItem]]: + url = f"{self.API_URL}/movie/top_rated?{params}" + try: + response = get(self.session, url) + if response.is_ok and response.data: + return response.data + else: + logger.error(f"Failed to get top rated movies: {response.data}") + return None + except Exception as e: + logger.error(f"An error occurred while getting top rated movies: {str(e)}") + return None + + def getMoviesUpcoming(self, params: str) -> Optional[TmdbPagedResultsWithDates[TmdbItem]]: + url = f"{self.API_URL}/movie/upcoming?{params}" + try: + response = get(self.session, url) + if response.is_ok and response.data: + return response.data + else: + logger.error(f"Failed to get upcoming movies: {response.data}") + return None + except Exception as e: + logger.error(f"An error occurred while getting upcoming movies: {str(e)}") + return None + + def getTrending(self, params: str, type: str, window: str) -> Optional[TmdbPagedResults[TmdbItem]]: + url = f"{self.API_URL}/trending/{type}/{window}?{params}" + try: + response = get(self.session, url) + if response.is_ok and response.data: + return response.data + else: + logger.error(f"Failed to get trending {type}: {response.data}") + return None + except Exception as e: + logger.error(f"An error occurred while getting trending {type}: {str(e)}") + return None + + def getTVAiringToday(self, params: str) -> Optional[TmdbPagedResults[TmdbItem]]: + url = f"{self.API_URL}/tv/airing_today?{params}" + try: + response = get(self.session, url) + if response.is_ok and response.data: + return response.data + else: + logger.error(f"Failed to get TV airing today: {response.data}") + return None + except Exception as e: + logger.error(f"An error occurred while getting TV airing today: {str(e)}") + return None + + def getTVOnTheAir(self, params: str) -> 
Optional[TmdbPagedResults[TmdbItem]]: + url = f"{self.API_URL}/tv/on_the_air?{params}" + try: + response = get(self.session, url) + if response.is_ok and response.data: + return response.data + else: + logger.error(f"Failed to get TV on the air: {response.data}") + return None + except Exception as e: + logger.error(f"An error occurred while getting TV on the air: {str(e)}") + return None + + def getTVPopular(self, params: str) -> Optional[TmdbPagedResults[TmdbItem]]: + url = f"{self.API_URL}/tv/popular?{params}" + try: + response = get(self.session, url) + if response.is_ok and response.data: + return response.data + else: + logger.error(f"Failed to get popular TV shows: {response.data}") + return None + except Exception as e: + logger.error(f"An error occurred while getting popular TV shows: {str(e)}") + return None + + def getTVTopRated(self, params: str) -> Optional[TmdbPagedResults[TmdbItem]]: + url = f"{self.API_URL}/tv/top_rated?{params}" + try: + response = get(self.session, url) + if response.is_ok and response.data: + return response.data + else: + logger.error(f"Failed to get top rated TV shows: {response.data}") + return None + except Exception as e: + logger.error( + f"An error occurred while getting top rated TV shows: {str(e)}" + ) + return None + + def getFromExternalID(self, params: str, external_id: str) -> Optional[TmdbFindResults]: + url = f"{self.API_URL}/find/{external_id}?{params}" + try: + response = get(self.session, url) + if response.is_ok and response.data: + return response.data + else: + logger.error(f"Failed to get from external ID: {response.data}") + return None + except Exception as e: + logger.error(f"An error occurred while getting from external ID: {str(e)}") + return None + + def getMovieDetails(self, params: str, movie_id: str) -> Optional[TmdbMovieDetails]: + url = f"{self.API_URL}/movie/{movie_id}?{params}" + try: + response = get(self.session, url) + if response.is_ok and response.data: + return response.data + else: + logger.error(f"Failed to get movie details: {response.data}") + return None + except Exception as e: + logger.error(f"An error occurred while getting movie details: {str(e)}") + return None + + def getTVDetails(self, params: str, series_id: str) -> Optional[TmdbTVDetails]: + url = f"{self.API_URL}/tv/{series_id}?{params}" + try: + response = get(self.session, url) + if response.is_ok and response.data: + return response.data + else: + logger.error(f"Failed to get TV details: {response.data}") + return None + except Exception as e: + logger.error(f"An error occurred while getting TV details: {str(e)}") + return None + + def getCollectionSearch(self, params: str) -> Optional[TmdbPagedResults[TmdbCollectionDetails]]: + url = f"{self.API_URL}/search/collection?{params}" + try: + response = get(self.session, url) + if response.is_ok and response.data: + return response.data + else: + logger.error(f"Failed to search collections: {response.data}") + return None + except Exception as e: + logger.error(f"An error occurred while searching collections: {str(e)}") + return None + + def getMovieSearch(self, params: str) -> Optional[TmdbPagedResults[TmdbItem]]: + url = f"{self.API_URL}/search/movie?{params}" + try: + response = get(self.session, url) + if response.is_ok and response.data: + return response.data + else: + logger.error(f"Failed to search movies: {response.data}") + return None + except Exception as e: + logger.error(f"An error occurred while searching movies: {str(e)}") + return None + + def getMultiSearch(self, params: str) -> 
Optional[TmdbPagedResults[TmdbItem]]: + url = f"{self.API_URL}/search/multi?{params}" + try: + response = get(self.session, url) + if response.is_ok and response.data: + return response.data + else: + logger.error(f"Failed to search multi: {response.data}") + return None + except Exception as e: + logger.error(f"An error occurred while searching multi: {str(e)}") + return None + + def getTVSearch(self, params: str) -> Optional[TmdbPagedResults[TmdbItem]]: + url = f"{self.API_URL}/search/tv?{params}" + try: + response = get(self.session, url) + if response.is_ok and response.data: + return response.data + else: + logger.error(f"Failed to search TV shows: {response.data}") + return None + except Exception as e: + logger.error(f"An error occurred while searching TV shows: {str(e)}") + return None + + def getTVSeasonDetails(self, params: str, series_id: int, season_number: int) -> Optional[TmdbSeasonDetails]: + url = f"{self.API_URL}/tv/{series_id}/season/{season_number}?{params}" + try: + response = get(self.session, url) + if response.is_ok and response.data: + return response.data + else: + logger.error(f"Failed to get TV season details: {response.data}") + return None + except Exception as e: + logger.error(f"An error occurred while getting TV season details: {str(e)}") + return None + + def getTVSeasonEpisodeDetails( + self, params: str, series_id: int, season_number: int, episode_number: int + ) -> Optional[TmdbEpisodeDetails]: + url = f"{self.API_URL}/tv/{series_id}/season/{season_number}/episode/{episode_number}?{params}" + try: + response = get(self.session, url) + if response.is_ok and response.data: + return response.data + else: + logger.error( + f"Failed to get TV season episode details: {response.data}" + ) + return None + except Exception as e: + logger.error( + f"An error occurred while getting TV season episode details: {str(e)}" + ) + return None + + +tmdb = TMDB() \ No newline at end of file diff --git a/src/program/services/indexers/trakt.py b/src/program/services/indexers/trakt.py new file mode 100644 index 0000000..b6d1f54 --- /dev/null +++ b/src/program/services/indexers/trakt.py @@ -0,0 +1,127 @@ +"""Trakt updater module""" + +from datetime import datetime, timedelta +from typing import Generator, Union + +from kink import di +from loguru import logger + +from program.apis.trakt_api import TraktAPI +from program.media.item import Episode, MediaItem, Movie, Season, Show +from program.settings.manager import settings_manager + + +class TraktIndexer: + """Trakt updater class""" + key = "TraktIndexer" + + def __init__(self): + self.key = "traktindexer" + self.ids = [] + self.initialized = True + self.settings = settings_manager.settings.indexer + self.failed_ids = set() + self.api = di[TraktAPI] + + @staticmethod + def copy_attributes(source, target): + """Copy attributes from source to target.""" + attributes = ["file", "folder", "update_folder", "symlinked", "is_anime", "symlink_path", "subtitles", "requested_by", "requested_at", "overseerr_id", "active_stream", "requested_id", "streams"] + for attr in attributes: + target.set(attr, getattr(source, attr, None)) + + def copy_items(self, itema: MediaItem, itemb: MediaItem): + """Copy attributes from itema to itemb recursively.""" + is_anime = itema.is_anime or itemb.is_anime + if itema.type == "mediaitem" and itemb.type == "show": + itema.seasons = itemb.seasons + if itemb.type == "show" and itema.type != "movie": + for seasona in itema.seasons: + for seasonb in itemb.seasons: + if seasona.number == seasonb.number: # Check 
if seasons match
+                        for episodea in seasona.episodes:
+                            for episodeb in seasonb.episodes:
+                                if episodea.number == episodeb.number: # Check if episodes match
+                                    self.copy_attributes(episodea, episodeb)
+                        seasonb.set("is_anime", is_anime)
+            itemb.set("is_anime", is_anime)
+        elif itemb.type == "movie":
+            self.copy_attributes(itema, itemb)
+            itemb.set("is_anime", is_anime)
+        else:
+            logger.error(f"Item types {itema.type} and {itemb.type} do not match, cannot copy metadata")
+        return itemb
+
+    def run(self, in_item: MediaItem, log_msg: bool = True) -> Generator[Union[Movie, Show, Season, Episode], None, None]:
+        """Run the Trakt indexer for the given item."""
+        if not in_item:
+            logger.error("Item is None")
+            return
+        if not (imdb_id := in_item.imdb_id):
+            logger.error(f"Item {in_item.log_string} does not have an imdb_id, cannot index it")
+            return
+
+        if in_item.imdb_id in self.failed_ids:
+            return
+
+        item_type = in_item.type if in_item.type != "mediaitem" else None
+        item = self.api.create_item_from_imdb_id(imdb_id, item_type)
+
+        if item:
+            if item.type == "show":
+                self._add_seasons_to_show(item, imdb_id)
+            elif item.type == "movie":
+                pass
+            else:
+                logger.error(f"Indexed IMDb Id {item.imdb_id} returned the wrong item type: {item.type}")
+                self.failed_ids.add(in_item.imdb_id)
+                return
+        else:
+            logger.error(f"Failed to index item with imdb_id: {in_item.imdb_id}")
+            self.failed_ids.add(in_item.imdb_id)
+            return
+
+        item = self.copy_items(in_item, item)
+        item.indexed_at = datetime.now()
+
+        if log_msg: # used for mapping symlinks to database, need to hide this log message
+            logger.debug(f"Indexed IMDb id ({in_item.imdb_id}) as {item.type.title()}: {item.log_string}")
+        yield item
+
+    @staticmethod
+    def should_submit(item: MediaItem) -> bool:
+        if not item.indexed_at or not item.title:
+            return True
+
+        settings = settings_manager.settings.indexer
+
+        try:
+            interval = timedelta(seconds=settings.update_interval)
+            return datetime.now() - item.indexed_at > interval
+        except Exception:
+            logger.error(f"Failed to parse date: {item.indexed_at} with interval: {settings.update_interval}")
+            return False
+
+
+    def _add_seasons_to_show(self, show: Show, imdb_id: str):
+        """Add seasons to the given show using Trakt API."""
+        if not imdb_id or not imdb_id.startswith("tt"):
+            logger.error(f"Item {show.log_string} does not have an imdb_id, cannot index it")
+            return
+
+        seasons = self.api.get_show(imdb_id)
+        for season in seasons:
+            if season.number == 0:
+                continue
+            season_item = self.api.map_item_from_data(season, "season", show.genres)
+            if season_item:
+                for episode in season.episodes:
+                    episode_item = self.api.map_item_from_data(episode, "episode", show.genres)
+                    if episode_item:
+                        season_item.add_episode(episode_item)
+                show.add_season(season_item)
+
+
+
+
diff --git a/src/program/services/libraries/__init__.py b/src/program/services/libraries/__init__.py
new file mode 100644
index 0000000..0f6f7e9
--- /dev/null
+++ b/src/program/services/libraries/__init__.py
@@ -0,0 +1 @@
+from .symlink import SymlinkLibrary # noqa: F401
diff --git a/src/program/services/libraries/symlink.py b/src/program/services/libraries/symlink.py
new file mode 100644
index 0000000..643b736
--- /dev/null
+++ b/src/program/services/libraries/symlink.py
@@ -0,0 +1,384 @@
+import os
+import re
+import time
+from concurrent.futures import ThreadPoolExecutor, as_completed
+from pathlib import Path
+from typing import TYPE_CHECKING, Generator
+from sqlalchemy.orm import aliased
+
+from loguru import logger
+from sqla_wrapper import Session
+from
PTT import parse_title + +from program.db.db import db +from program.media.subtitle import Subtitle +from program.settings.manager import settings_manager + +if TYPE_CHECKING: + from program.media.item import Episode, MediaItem, Movie, Season, Show + +imdbid_pattern = re.compile(r"tt\d+") +season_pattern = re.compile(r"s(\d+)") +episode_pattern = re.compile(r"e(\d+)") + +ALLOWED_VIDEO_EXTENSIONS = [ + "mp4", + "mkv", + "avi", + "mov", + "wmv", + "flv", + "m4v", + "webm", + "mpg", + "mpeg", + "m2ts", + "ts", +] + +MEDIA_DIRS = ["shows", "movies", "anime_shows", "anime_movies"] +POSSIBLE_DIRS = [settings_manager.settings.symlink.library_path / d for d in MEDIA_DIRS] + + +class SymlinkLibrary: + def __init__(self): + self.key = "symlinklibrary" + self.settings = settings_manager.settings.symlink + self.initialized = self.validate() + if not self.initialized: + logger.error("SymlinkLibrary initialization failed due to invalid configuration.") + return + + def validate(self) -> bool: + """Validate the symlink library settings.""" + library_path = Path(self.settings.library_path).resolve() + if library_path == Path.cwd().resolve(): + logger.error("Library path not set or set to the current directory in SymlinkLibrary settings.") + return False + + required_dirs: list[str] = ["shows", "movies"] + if self.settings.separate_anime_dirs: + required_dirs.extend(["anime_shows", "anime_movies"]) + missing_dirs: list[str] = [d for d in required_dirs if not (library_path / d).exists()] + + if missing_dirs: + available_dirs: str = ", ".join(os.listdir(library_path)) + logger.error(f"Missing required directories in the library path: {', '.join(missing_dirs)}.") + logger.debug(f"Library directory contains: {available_dirs}") + return False + return True + + def run(self) -> list["MediaItem"]: + """ + Create a library from the symlink paths. Return stub items that should + be fed into an Indexer to have the rest of the metadata filled in. 
+ """ + items = [] + for directory, item_type, is_anime in [("shows", "show", False), ("anime_shows", "anime show", True)]: + if not self.settings.separate_anime_dirs and is_anime: + continue + items.extend(process_shows(self.settings.library_path / directory, item_type, is_anime)) + + for directory, item_type, is_anime in [("movies", "movie", False), ("anime_movies", "anime movie", True)]: + if not self.settings.separate_anime_dirs and is_anime: + continue + items.extend(process_items(self.settings.library_path / directory, MediaItem, item_type, is_anime)) + + return items + +def process_items(directory: Path, item_class, item_type: str, is_anime: bool = False): + """Process items in the given directory and yield MediaItem instances.""" + items = [ + (Path(root), file) + for root, _, files in os.walk(directory) + for file in files + if os.path.splitext(file)[1][1:] in ALLOWED_VIDEO_EXTENSIONS # Jellyfin/Emby creates extra files + and Path(root).parent in POSSIBLE_DIRS # MacOS creates extra dirs + ] + for path, filename in items: + if path.parent not in POSSIBLE_DIRS: + logger.debug(f"Skipping {path.parent} as it's not a valid media directory.") + continue + imdb_id = re.search(r"(tt\d+)", filename) + title = re.search(r"(.+)?( \()", filename) + if not imdb_id or not title: + logger.error(f"Can't extract {item_type} imdb_id or title at path {path / filename}") + continue + + item = item_class({"imdb_id": imdb_id.group(), "title": title.group(1)}) + resolve_symlink_and_set_attrs(item, path / filename) + find_subtitles(item, path / filename) + + if settings_manager.settings.force_refresh: + item.set("symlinked", True) + item.set("update_folder", str(path)) + else: + item.set("symlinked", True) + item.set("update_folder", "updated") + if is_anime: + item.is_anime = True + yield item + +def resolve_symlink_and_set_attrs(item, path: Path) -> Path: + # Resolve the symlink path + resolved_path = (path).resolve() + item.file = str(resolved_path.stem) + item.folder = str(resolved_path.parent.stem) + item.symlink_path = str(path) + +def find_subtitles(item, path: Path): + # Scan for subtitle files + for file in os.listdir(path.parent): + if file.startswith(Path(item.symlink_path).stem) and file.endswith(".srt"): + lang_code = file.split(".")[1] + item.subtitles.append(Subtitle({lang_code: (path.parent / file).__str__()})) + logger.debug(f"Found subtitle file {file}.") + +def process_shows(directory: Path, item_type: str, is_anime: bool = False) -> Generator["Show", None, None]: + """Process shows in the given directory and yield Show instances.""" + from program.media.item import Episode, Season, Show # Import inside function to avoid circular import + + for show in os.listdir(directory): + imdb_id = re.search(r"(tt\d+)", show) + title = re.search(r"(.+)?( \()", show) + if not imdb_id or not title: + logger.log("NOT_FOUND", f"Can't extract {item_type} imdb_id or title at path {directory / show}") + continue + show_item = Show({"imdb_id": imdb_id.group(), "title": title.group(1)}) + if is_anime: + show_item.is_anime = True + seasons = {} + for season in os.listdir(directory / show): + if directory not in POSSIBLE_DIRS: + logger.debug(f"Skipping {directory} as it's not a valid media directory.") + continue + if not (season_number := re.search(r"(\d+)", season)): + logger.log("NOT_FOUND", f"Can't extract season number at path {directory / show / season}") + continue + season_item = Season({"number": int(season_number.group())}) + episodes = {} + for episode in os.listdir(directory / show / season): + 
if os.path.splitext(episode)[1][1:] not in ALLOWED_VIDEO_EXTENSIONS: + continue + episode_numbers: list[int] = parse_title(episode).get("episodes", []) + if not episode_numbers: + logger.log("NOT_FOUND", f"Can't extract episode number at path {directory / show / season / episode}") + # Delete the episode since it can't be indexed + os.remove(directory / show / season / episode) + continue + + for episode_number in episode_numbers: + episode_item = Episode({"number": episode_number}) + resolve_symlink_and_set_attrs(episode_item, Path(directory) / show / season / episode) + find_subtitles(episode_item, Path(directory) / show / season / episode) + if settings_manager.settings.force_refresh: + episode_item.set("symlinked", True) + episode_item.set("update_folder", str(Path(directory) / show / season / episode)) + else: + episode_item.set("symlinked", True) + episode_item.set("update_folder", "updated") + if is_anime: + episode_item.is_anime = True + episodes[episode_number] = episode_item + if len(episodes) > 0: + for i in range(1, max(episodes.keys())+1): + season_item.add_episode(episodes.get(i, Episode({"number": i}))) + seasons[int(season_number.group())] = season_item + if len(seasons) > 0: + for i in range(1, max(seasons.keys())+1): + show_item.add_season(seasons.get(i, Season({"number": i}))) + yield show_item + + +def build_file_map(directory: str) -> dict[str, str]: + """Build a map of filenames to their full paths in the directory.""" + file_map = {} + + def scan_dir(path): + with os.scandir(path) as entries: + for entry in entries: + if entry.is_file(): + file_map[entry.name] = entry.path + elif entry.is_dir(): + scan_dir(entry.path) + + scan_dir(directory) + return file_map + +def find_broken_symlinks(directory: str) -> list[tuple[str, str]]: + """Find all broken symlinks in the directory.""" + broken_symlinks = [] + for root, dirs, files in os.walk(directory): + for name in files + dirs: + full_path = os.path.join(root, name) + if os.path.islink(full_path): + target = os.readlink(full_path) + if not os.path.exists(os.path.realpath(full_path)): + broken_symlinks.append((full_path, target)) + return broken_symlinks + +def fix_broken_symlinks(library_path, rclone_path, max_workers=4): + """Find and fix all broken symlinks in the library path using files from the rclone path.""" + missing_files = 0 + + def check_and_fix_symlink(symlink_path, file_map): + """Check and fix a single symlink.""" + nonlocal missing_files + + if isinstance(symlink_path, tuple): + symlink_path = symlink_path[0] + + target_path = os.readlink(symlink_path) + filename = os.path.basename(target_path) + dirname = os.path.dirname(target_path).split("/")[-1] + + delays = settings_manager.settings.symlink.retry_delays[:7] # Only use first 7 retry attempts + attempt = 0 + + while attempt < len(delays): + correct_path = file_map.get(filename) + if correct_path: + break + + delay = delays[attempt] + attempts_left = len(delays) - attempt - 1 + + if attempts_left > 0: + logger.debug(f"File {filename} not found in rclone_path, waiting {delay} seconds. 
{attempts_left} attempts left.") + time.sleep(delay) + file_map = build_file_map(rclone_path) # Refresh the file map + attempt += 1 + + failed = False + + with db.Session() as session: + items = get_items_from_filepath(session, symlink_path) + if not items: + logger.log("NOT_FOUND", f"Could not find item in database for path: {symlink_path}") + return + + if correct_path: + os.remove(symlink_path) + os.symlink(correct_path, symlink_path) + try: + for item in items: + item = session.merge(item) + item.file = filename + item.folder = dirname + item.symlinked = True + item.symlink_path = correct_path + item.update_folder = correct_path + item.store_state() + session.merge(item) + logger.log("FILES", f"Retargeted broken symlink for {item.log_string} with correct path: {correct_path}") + except Exception as e: + logger.error(f"Failed to fix {item.log_string} with path: {correct_path}: {str(e)}") + failed = True + else: + os.remove(symlink_path) + for item in items: + item = session.merge(item) + item.reset() + item.store_state() + session.merge(item) + missing_files += 1 + total_wait = sum(delays[:attempt]) + logger.log("NOT_FOUND", f"Could not find file {filename} in rclone_path after 7 attempts and {total_wait} seconds. Will not retry symlinking.") + + session.commit() + logger.log("FILES", "Saved items to the database.") + + if failed: + logger.warning("Failed to retarget some broken symlinks, recommended action: reset database.") + + def process_directory(directory, file_map): + """Process a single directory for broken symlinks.""" + local_broken_symlinks = find_broken_symlinks(directory) + logger.log("FILES", f"Found {len(local_broken_symlinks)} broken symlinks in {directory}") + if not local_broken_symlinks: + return + + with ThreadPoolExecutor(thread_name_prefix="FixSymlinks", max_workers=max_workers) as executor: + futures = [executor.submit(check_and_fix_symlink, symlink_path, file_map) for symlink_path in local_broken_symlinks] + for future in as_completed(futures): + future.result() + + start_time = time.time() + logger.log("FILES", f"Finding and fixing broken symlinks in {library_path} using files from {rclone_path}") + + file_map = build_file_map(rclone_path) + if not file_map: + logger.log("FILES", f"No files found in rclone_path: {rclone_path}. Aborting fix_broken_symlinks.") + return + + logger.log("FILES", f"Built file map for {rclone_path}") + + top_level_dirs = [os.path.join(library_path, d) for d in os.listdir(library_path) if os.path.isdir(os.path.join(library_path, d))] + logger.log("FILES", f"Found top-level directories: {top_level_dirs}") + + with ThreadPoolExecutor(max_workers=max_workers) as executor: + futures = [executor.submit(process_directory, directory, file_map) for directory in top_level_dirs] + if not futures: + logger.log("FILES", f"No directories found in {library_path}. Aborting fix_broken_symlinks.") + return + for future in as_completed(futures): + future.result() + + end_time = time.time() + elapsed_time = end_time - start_time + logger.log("FILES", f"Finished processing and retargeting broken symlinks. 
Time taken: {elapsed_time:.2f} seconds.")
+    logger.log("FILES", f"Reset {missing_files} items to be rescraped due to missing rclone files.")
+
+def get_items_from_filepath(session: Session, filepath: str) -> list["Movie"] | list["Episode"]:
+    """Get items that match the imdb_id or season and episode from a file in library_path"""
+    from program.media.item import Episode, Movie, Season, Show # Import inside function to avoid circular import
+
+    items = []
+    imdb_id = None
+
+    # Try to find IMDB ID in the path
+    match = re.search(r"tt\d{7,8}", filepath)
+    if match:
+        imdb_id = match.group()
+
+    if not imdb_id:
+        logger.debug(f"No IMDB ID found in path: {filepath}")
+        return []
+
+    # Check for season/episode numbers
+    season_match = re.search(r"s(\d+)", filepath, re.IGNORECASE)
+    if season_match:
+        season_number = int(season_match.group(1))
+        episode_numbers = [int(num) for num in re.findall(r"e(\d+)", filepath, re.IGNORECASE)]
+        for ep_num in episode_numbers:
+            # Create explicit aliases for Season and Show
+            SeasonAlias = aliased(Season, flat=True)
+            ShowAlias = aliased(Show, flat=True)
+
+            query = (
+                session.query(Episode)
+                .join(SeasonAlias, Episode.parent_id == SeasonAlias.imdb_id)
+                .join(ShowAlias, SeasonAlias.parent_id == ShowAlias.imdb_id)
+                .filter(
+                    ShowAlias.imdb_id == imdb_id,
+                    SeasonAlias.number == season_number,
+                    Episode.number == ep_num
+                )
+            )
+            episode_item = query.with_entities(Episode).first()
+            if episode_item:
+                items.append(episode_item)
+    else:
+        query = session.query(Movie).filter_by(imdb_id=imdb_id)
+        movie_item = query.first()
+        if movie_item:
+            items.append(movie_item)
+
+    if len(items) > 1:
+        logger.log("FILES", f"Found multiple items in database for path: {filepath}")
+        for item in items:
+            logger.log("FILES", f"Found item: {item.log_string}")
+    elif not items:
+        logger.debug(f"No items found in database for path: {filepath}")
+
+    return items
\ No newline at end of file
diff --git a/src/program/services/post_processing/__init__.py b/src/program/services/post_processing/__init__.py
new file mode 100644
index 0000000..9a4692f
--- /dev/null
+++ b/src/program/services/post_processing/__init__.py
@@ -0,0 +1,52 @@
+from datetime import datetime
+
+from loguru import logger
+from subliminal import Movie
+
+from program.db.db import db
+from program.db.db_functions import clear_streams
+from program.media.item import MediaItem, Movie, Show
+from program.media.state import States
+from program.services.post_processing.subliminal import Subliminal
+from program.settings.manager import settings_manager
+from program.utils.notifications import notify_on_complete
+
+
+class PostProcessing:
+    def __init__(self):
+        self.key = "post_processing"
+        self.initialized = False
+        self.settings = settings_manager.settings.post_processing
+        self.services = {
+            Subliminal: Subliminal()
+        }
+        self.initialized = True
+
+    def run(self, item: MediaItem):
+        if Subliminal.should_submit(item):
+            self.services[Subliminal].run(item)
+        if item.last_state == States.Completed:
+            clear_streams(item)
+        yield item
+
+def notify(item: MediaItem):
+    show = None
+    if item.type in ["show", "movie"]:
+        _notify(item)
+    elif item.type == "episode":
+        show = item.parent.parent
+    elif item.type == "season":
+        show = item.parent
+    if show:
+        with db.Session() as session:
+            show = session.merge(show)
+            show.store_state()
+            session.commit()
+            if show.last_state == States.Completed:
+                _notify(show)
+
+def _notify(_item: Show | Movie):
+    duration = round((datetime.now() - _item.requested_at).total_seconds())
+
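# _notify() reports how long an item took from request to completion; the
# duration above is plain wall-clock arithmetic. A rough illustration, assuming
# requested_at was stamped when the item entered the system (sample values are
# invented):
#
#     from datetime import datetime, timedelta
#     requested_at = datetime.now() - timedelta(minutes=12, seconds=30)
#     duration = round((datetime.now() - requested_at).total_seconds())  # ~750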
logger.success(f"{_item.log_string} has been completed in {duration} seconds.") + if settings_manager.settings.notifications.enabled: + notify_on_complete(_item) \ No newline at end of file diff --git a/src/program/services/post_processing/subliminal.py b/src/program/services/post_processing/subliminal.py new file mode 100644 index 0000000..3ce9f3a --- /dev/null +++ b/src/program/services/post_processing/subliminal.py @@ -0,0 +1,152 @@ +import os +import pathlib + +from babelfish import Language +from loguru import logger +from subliminal import ProviderPool, Video, region, save_subtitles +from subliminal.exceptions import AuthenticationError + +from program.media.subtitle import Subtitle +from program.settings.manager import settings_manager +from program.utils import root_dir + + +class Subliminal: + def __init__(self): + self.key = "subliminal" + self.settings = settings_manager.settings.post_processing.subliminal + if not self.settings.enabled: + self.initialized = False + return + if not region.is_configured: + region.configure("dogpile.cache.dbm", arguments={"filename": f"{root_dir}/data/subliminal.dbm"}) + providers = ["gestdown","opensubtitles","opensubtitlescom","podnapisi","tvsubtitles"] + provider_config = {} + for provider, value in self.settings.providers.items(): + if value["enabled"]: + provider_config[provider] = {"username": value["username"], "password": value["password"]} + self.pool = ProviderPool(providers=providers,provider_configs=provider_config) + for provider in providers: + try: + self.pool[provider].initialize() + if self.pool.provider_configs.get(provider, False): + if provider == "opensubtitlescom": + self.pool[provider].login() + if not self.pool[provider].check_token(): + raise AuthenticationError + except Exception: + logger.warning(f"Could not initialize provider: {provider}.") + if provider == "opensubtitlescom": + self.pool.initialized_providers.pop(provider) + self.pool.provider_configs.pop(provider) + self.pool[provider].initialize() + logger.warning("Using default opensubtitles.com provider.") + self.languages = set(create_language_from_string(lang) for lang in self.settings.languages) + self.initialized = self.enabled + + @property + def enabled(self): + return self.settings.enabled + + def scan_files_and_download(self): + # Do we want this? 
+ pass + # videos = _scan_videos(settings_manager.settings.symlink.library_path) + # subtitles = download_best_subtitles(videos, {Language("eng")}) + # for video, subtitle in subtitles.items(): + # original_name = video.name + # video.name = pathlib.Path(video.symlink) + # saved = save_subtitles(video, subtitle) + # video.name = original_name + # for subtitle in saved: + # logger.info(f"Downloaded ({subtitle.language}) subtitle for {pathlib.Path(video.symlink).stem}") + + def get_subtitles(self, item): + if item.type in ["movie", "episode"]: + real_name = pathlib.Path(item.symlink_path).resolve().name + try: + video = Video.fromname(real_name) + video.symlink_path = item.symlink_path + video.subtitle_languages = get_existing_subtitles(pathlib.Path(item.symlink_path).stem, pathlib.Path(item.symlink_path).parent) + return video, self.pool.download_best_subtitles(self.pool.list_subtitles(video, self.languages), video, self.languages) + except ValueError: + logger.error(f"Could not parse video name: {real_name}") + return {} + + def save_subtitles(self, video, subtitles, item): + for subtitle in subtitles: + original_name = video.name + video.name = pathlib.Path(video.symlink_path) + saved = save_subtitles(video, [subtitle]) + for subtitle in saved: + logger.info(f"Downloaded ({subtitle.language}) subtitle for {pathlib.Path(item.symlink_path).stem}") + video.name = original_name + + + def run(self, item): + for language in self.languages: + key = str(language) + item.subtitles.append(Subtitle({key: None})) + try: + video, subtitles = self.get_subtitles(item) + self.save_subtitles(video, subtitles, item) + self.update_item(item) + except Exception as e: + logger.error(f"Failed to download subtitles for {item.log_string}: {e}") + + + def update_item(self, item): + folder = pathlib.Path(item.symlink_path).parent + subs = get_existing_subtitles(pathlib.Path(item.symlink_path).stem, folder) + for lang in subs: + key = str(lang) + for subtitle in item.subtitles: + if subtitle.language == key: + subtitle.file = (folder / lang.file).__str__() + break + + def should_submit(item): + return item.type in ["movie", "episode"] and not any(subtitle.file is not None for subtitle in item.subtitles) + +def _scan_videos(directory): + """ + Scan the given directory recursively for video files. 
+ + :param directory: Path to the directory to scan + :return: List of Video objects + """ + videos = [] + for root, _, files in os.walk(directory): + for file in files: + if file.endswith((".mp4", ".mkv", ".avi", ".mov", ".wmv")): + video_path = os.path.join(root, file) + video_name = pathlib.Path(video_path).resolve().name + video = Video.fromname(video_name) + video.symlink = pathlib.Path(video_path) + + # Scan for subtitle files + video.subtitle_languages = get_existing_subtitles(video.symlink.stem, pathlib.Path(root)) + videos.append(video) + return videos + +def create_language_from_string(lang: str) -> Language: + try: + if len(lang) == 2: + return Language.fromcode(lang, "alpha2") + if len(lang) == 3: + return Language.fromcode(lang, "alpha3b") + except ValueError: + logger.error(f"Invalid language code: {lang}") + return None + +def get_existing_subtitles(filename: str, path: pathlib.Path) -> set[Language]: + subtitle_languages = set() + for file in path.iterdir(): + if file.stem.startswith(filename) and file.suffix == ".srt": + parts = file.name.split(".") + if len(parts) > 2: + lang_code = parts[-2] + language = create_language_from_string(lang_code) + language.file = file.name + subtitle_languages.add(language) + return subtitle_languages \ No newline at end of file diff --git a/src/program/services/scrapers/__init__.py b/src/program/services/scrapers/__init__.py new file mode 100644 index 0000000..986cf42 --- /dev/null +++ b/src/program/services/scrapers/__init__.py @@ -0,0 +1,161 @@ +import threading +from datetime import datetime +from typing import Dict, Generator, List + +from loguru import logger + +from program.media.item import MediaItem +from program.media.stream import Stream +from program.services.scrapers.comet import Comet +from program.services.scrapers.jackett import Jackett +from program.services.scrapers.knightcrawler import Knightcrawler +from program.services.scrapers.mediafusion import Mediafusion +from program.services.scrapers.orionoid import Orionoid +from program.services.scrapers.prowlarr import Prowlarr +from program.services.scrapers.shared import _parse_results +from program.services.scrapers.torbox import TorBoxScraper +from program.services.scrapers.torrentio import Torrentio +from program.services.scrapers.zilean import Zilean +from program.settings.manager import settings_manager + + +class Scraping: + _instance = None + _initialized = False + + def __new__(cls): + if cls._instance is None: + cls._instance = super().__new__(cls) + return cls._instance + + def __init__(self): + if not self._initialized: + self.key = "scraping" + self.initialized = False + self.settings = settings_manager.settings.scraping + self.imdb_services = { # If we are missing imdb_id then we cant scrape here + Torrentio: Torrentio(), + Knightcrawler: Knightcrawler(), + Orionoid: Orionoid(), + TorBoxScraper: TorBoxScraper(), + Mediafusion: Mediafusion(), + Comet: Comet() + } + self.keyword_services = { + Jackett: Jackett(), + Prowlarr: Prowlarr(), + Zilean: Zilean() + } + self.services = { + **self.imdb_services, + **self.keyword_services + } + self.initialized = self.validate() + self.__class__._initialized = True + + def validate(self): + return any(service.initialized for service in self.services.values()) + + def run(self, item: MediaItem) -> Generator[MediaItem, None, None]: + """Scrape an item.""" + if self.can_we_scrape(item): + sorted_streams = self.scrape(item) + for stream in sorted_streams.values(): + if stream not in item.streams: + item.streams.append(stream) + 
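# At this point run() has merged any newly scraped streams into item.streams
# (the raw scraper results are keyed by lowercased infohash, and only streams
# the item does not already hold are appended). The set() calls that follow
# stamp scraped_at/scraped_times so should_submit(), defined further below, can
# back off between attempts. A rough sketch of that schedule, assuming the
# scraping settings use after_2=2, after_5=6 and after_10=24 hours (example
# values, not defaults taken from this patch):
#
#     scraped_times 0-1  -> retry after 5 minutes
#     scraped_times 2-5  -> retry after 2 hours   (after_2)
#     scraped_times 6-10 -> retry after 6 hours   (after_5)
#     scraped_times >10  -> retry after 24 hours  (after_10)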
item.set("scraped_at", datetime.now()) + item.set("scraped_times", item.scraped_times + 1) + + if not item.get("streams", []): + logger.log("NOT_FOUND", f"Scraping returned no good results for {item.log_string}") + + yield item + + def scrape(self, item: MediaItem, log = True) -> Dict[str, Stream]: + """Scrape an item.""" + threads: List[threading.Thread] = [] + results: Dict[str, str] = {} + total_results = 0 + results_lock = threading.RLock() + + imdb_id = item.get_top_imdb_id() + available_services = self.services if imdb_id else self.keyword_services + + def run_service(service, item,): + nonlocal total_results + service_results = service.run(item) + + if not isinstance(service_results, dict): + logger.error(f"Service {service.__class__.__name__} returned invalid results: {service_results}") + return + + # ensure that info hash is lower case in each result + if isinstance(service_results, dict): + for infohash in list(service_results.keys()): + if infohash.lower() != infohash: + service_results[infohash.lower()] = service_results.pop(infohash) + + with results_lock: + results.update(service_results) + total_results += len(service_results) + + for service_name, service in available_services.items(): + if service.initialized: + thread = threading.Thread(target=run_service, args=(service, item), name=service_name.__name__) + threads.append(thread) + thread.start() + + for thread in threads: + thread.join() + + if total_results != len(results): + logger.debug(f"Scraped {item.log_string} with {total_results} results, removed {total_results - len(results)} duplicate hashes") + + sorted_streams: Dict[str, Stream] = {} + + if results: + sorted_streams = _parse_results(item, results, log) + + if sorted_streams and (log and settings_manager.settings.debug): + item_type = item.type.title() + top_results = list(sorted_streams.values())[:10] + for sorted_tor in top_results: + item_info = f"[{item_type}]" + if item.type == "season": + item_info = f"[{item_type} {item.number}]" + elif item.type == "episode": + item_info = f"[{item_type} {item.parent.number}:{item.number}]" + logger.debug(f"{item_info} Parsed '{sorted_tor.parsed_title}' with rank {sorted_tor.rank} ({sorted_tor.infohash}): '{sorted_tor.raw_title}'") + else: + logger.log("NOT_FOUND", f"No streams to process for {item.log_string}") + + return sorted_streams + + @classmethod + def can_we_scrape(cls, item: MediaItem) -> bool: + """Check if we can scrape an item.""" + if not item.is_released: + logger.debug(f"Cannot scrape {item.log_string}: Item is not released") + return False + if not cls.should_submit(item): + logger.debug(f"Cannot scrape {item.log_string}: Item has been scraped recently, backing off") + return False + return True + + @staticmethod + def should_submit(item: MediaItem) -> bool: + """Check if an item should be submitted for scraping.""" + settings = settings_manager.settings.scraping + scrape_time = 5 * 60 # 5 minutes by default + + if item.scraped_times >= 2 and item.scraped_times <= 5: + scrape_time = settings.after_2 * 60 * 60 + elif item.scraped_times > 5 and item.scraped_times <= 10: + scrape_time = settings.after_5 * 60 * 60 + elif item.scraped_times > 10: + scrape_time = settings.after_10 * 60 * 60 + + return ( + not item.scraped_at + or (datetime.now() - item.scraped_at).total_seconds() > scrape_time + ) \ No newline at end of file diff --git a/src/program/services/scrapers/comet.py b/src/program/services/scrapers/comet.py new file mode 100644 index 0000000..50e20d3 --- /dev/null +++ 
b/src/program/services/scrapers/comet.py @@ -0,0 +1,122 @@ +""" Comet scraper module """ +import base64 +import json +from typing import Dict + +import regex +from loguru import logger +from requests import ConnectTimeout, ReadTimeout +from requests.exceptions import RequestException + +from program.media.item import MediaItem, Show +from program.services.scrapers.shared import ( + ScraperRequestHandler, + _get_stremio_identifier, +) +from program.settings.manager import settings_manager +from program.utils.request import ( + HttpMethod, + RateLimitExceeded, + create_service_session, + get_rate_limit_params, +) + + +class Comet: + """Scraper for `Comet`""" + + def __init__(self): + self.key = "comet" + self.settings = settings_manager.settings.scraping.comet + self.timeout = self.settings.timeout + self.encoded_string = base64.b64encode(json.dumps({ + "indexers": self.settings.indexers, + "maxResults": 0, + "resolutions": ["All"], + "languages": ["All"], + "debridService": "realdebrid", + "debridApiKey": settings_manager.settings.downloaders.real_debrid.api_key, + "debridStreamProxyPassword": "" + }).encode("utf-8")).decode("utf-8") + rate_limit_params = get_rate_limit_params(per_hour=300) if self.settings.ratelimit else None + session = create_service_session(rate_limit_params=rate_limit_params) + self.request_handler = ScraperRequestHandler(session) + self.initialized = self.validate() + if not self.initialized: + return + logger.success("Comet initialized!") + + def validate(self) -> bool: + """Validate the Comet settings.""" + if not self.settings.enabled: + return False + if not self.settings.url: + logger.error("Comet URL is not configured and will not be used.") + return False + if not isinstance(self.timeout, int) or self.timeout <= 0: + logger.error("Comet timeout is not set or invalid.") + return False + if not isinstance(self.settings.ratelimit, bool): + logger.error("Comet ratelimit must be a valid boolean.") + return False + try: + url = f"{self.settings.url}/manifest.json" + response = self.request_handler.execute(HttpMethod.GET, url, timeout=self.timeout) + if response.is_ok: + return True + except Exception as e: + logger.error(f"Comet failed to initialize: {e}", ) + return False + + def run(self, item: MediaItem) -> Dict[str, str]: + """Scrape the comet site for the given media items + and update the object with scraped streams""" + if not item or isinstance(item, Show): + return {} + + try: + return self.scrape(item) + except RateLimitExceeded: + logger.debug(f"Comet ratelimit exceeded for item: {item.log_string}") + except ConnectTimeout: + logger.warning(f"Comet connection timeout for item: {item.log_string}") + except ReadTimeout: + logger.warning(f"Comet read timeout for item: {item.log_string}") + except RequestException as e: + logger.error(f"Comet request exception: {str(e)}") + except Exception as e: + logger.error(f"Comet exception thrown: {str(e)}") + return {} + + def scrape(self, item: MediaItem) -> tuple[Dict[str, str], int]: + """Wrapper for `Comet` scrape method""" + identifier, scrape_type, imdb_id = _get_stremio_identifier(item) + url = f"{self.settings.url}/{self.encoded_string}/stream/{scrape_type}/{imdb_id}{identifier or ''}.json" + + response = self.request_handler.execute(HttpMethod.GET, url, timeout=self.timeout) + if not response.is_ok or not getattr(response.data, "streams", None): + logger.log("NOT_FOUND", f"No streams found for {item.log_string}") + return {} + + torrents: Dict[str, str] = {} + for stream in response.data.streams: + if 
stream.title == "Invalid Comet config.": + logger.error("Invalid Comet config.") + return {} + + infohash_pattern = regex.compile(r"(?!.*playback\/)[a-zA-Z0-9]{40}") + infohash = infohash_pattern.search(stream.url).group() + title = stream.title.split("\n")[0] + + if not infohash: + logger.warning(f"Comet infohash not found for title: {title}") + continue + + torrents[infohash] = title + + if torrents: + logger.log("SCRAPER", f"Found {len(torrents)} streams for {item.log_string}") + else: + logger.log("NOT_FOUND", f"No streams found for {item.log_string}") + + return torrents diff --git a/src/program/services/scrapers/jackett.py b/src/program/services/scrapers/jackett.py new file mode 100644 index 0000000..9cfbeb4 --- /dev/null +++ b/src/program/services/scrapers/jackett.py @@ -0,0 +1,279 @@ +""" Jackett scraper module """ + +import queue +import threading +import time +import xml.etree.ElementTree as ET +from typing import Dict, Generator, List, Optional, Tuple + +import requests +from loguru import logger +from pydantic import BaseModel +from requests import HTTPError, ReadTimeout, RequestException, Timeout + +from program.media.item import Episode, MediaItem, Movie, Season, Show +from program.services.scrapers.shared import ScraperRequestHandler +from program.settings.manager import settings_manager +from program.utils.request import ( + HttpMethod, + RateLimitExceeded, + ResponseType, + create_service_session, + get_http_adapter, + get_rate_limit_params, +) + + +class JackettIndexer(BaseModel): + """Indexer model for Jackett""" + title: Optional[str] = None + id: Optional[str] = None + link: Optional[str] = None + type: Optional[str] = None + language: Optional[str] = None + tv_search_capabilities: Optional[List[str]] = None + movie_search_capabilities: Optional[List[str]] = None + + +class Jackett: + """Scraper for `Jackett`""" + + def __init__(self): + self.key = "jackett" + self.api_key = None + self.indexers = None + self.settings = settings_manager.settings.scraping.jackett + self.request_handler = None + self.initialized = self.validate() + if not self.initialized and not self.api_key: + return + logger.success("Jackett initialized!") + + def validate(self) -> bool: + """Validate Jackett settings.""" + if not self.settings.enabled: + return False + if self.settings.url and self.settings.api_key: + self.api_key = self.settings.api_key + try: + if not isinstance(self.settings.timeout, int) or self.settings.timeout <= 0: + logger.error("Jackett timeout is not set or invalid.") + return False + if not isinstance(self.settings.ratelimit, bool): + logger.error("Jackett ratelimit must be a valid boolean.") + return False + indexers = self._get_indexers() + if not indexers: + logger.error("No Jackett indexers configured.") + return False + self.indexers = indexers + rate_limit_params = get_rate_limit_params(max_calls=len(self.indexers), + period=2) if self.settings.ratelimit else None + http_adapter = get_http_adapter(pool_connections=len(self.indexers), pool_maxsize=len(self.indexers)) + session = create_service_session(rate_limit_params=rate_limit_params, session_adapter=http_adapter) + self.request_handler = ScraperRequestHandler(session) + self._log_indexers() + return True + except ReadTimeout: + logger.error("Jackett request timed out. 
Check your indexers, they may be too slow to respond.") + return False + except Exception as e: + logger.error(f"Jackett failed to initialize with API Key: {e}") + return False + logger.warning("Jackett is not configured and will not be used.") + return False + + def run(self, item: MediaItem) -> Generator[MediaItem, None, None]: + """Scrape the Jackett site for the given media items + and update the object with scraped streams""" + try: + return self.scrape(item) + except RateLimitExceeded: + logger.debug(f"Jackett ratelimit exceeded for item: {item.log_string}") + except RequestException as e: + logger.error(f"Jackett request exception: {e}") + except Exception as e: + logger.error(f"Jackett failed to scrape item with error: {e}") + return {} + + def scrape(self, item: MediaItem) -> Dict[str, str]: + """Scrape the given media item""" + results_queue = queue.Queue() + threads = [ + threading.Thread(target=self._thread_target, args=(item, indexer, results_queue)) + for indexer in self.indexers + ] + + for thread in threads: + thread.start() + for thread in threads: + thread.join() + + results = [] + while not results_queue.empty(): + results.extend(results_queue.get()) + + torrents: Dict[str, str] = {} + for result in results: + if result[1] is None: + continue + # infohash: raw_title + torrents[result[1]] = result[0] + + if torrents: + logger.log("SCRAPER", f"Found {len(torrents)} streams for {item.log_string}") + else: + logger.log("NOT_FOUND", f"No streams found for {item.log_string}") + return torrents + + def _thread_target(self, item: MediaItem, indexer: JackettIndexer, results_queue: queue.Queue): + """Thread target for searching indexers""" + try: + start_time = time.perf_counter() + result = self._search_indexer(item, indexer) + search_duration = time.perf_counter() - start_time + except TypeError as e: + logger.error(f"Invalid Type for {item.log_string}: {e}") + result = [] + search_duration = 0 + item_title = item.log_string + logger.debug(f"Scraped {item_title} from {indexer.title} in {search_duration:.2f} seconds with {len(result)} results") + results_queue.put(result) + + def _search_indexer(self, item: MediaItem, indexer: JackettIndexer) -> List[Tuple[str, str]]: + """Search for the given item on the given indexer""" + if isinstance(item, Movie): + return self._search_movie_indexer(item, indexer) + elif isinstance(item, (Show, Season, Episode)): + return self._search_series_indexer(item, indexer) + else: + raise TypeError("Only Movie and Series is allowed!") + + def _search_movie_indexer(self, item: MediaItem, indexer: JackettIndexer) -> List[Tuple[str, str]]: + """Search for movies on the given indexer""" + if indexer.movie_search_capabilities is None: + return [] + params = { + "apikey": self.api_key, + "t": "movie", + "cat": "2000", + "q": item.title, + } + if indexer.movie_search_capabilities and "year" in indexer.movie_search_capabilities: + if hasattr(item.aired_at, "year") and item.aired_at.year: params["year"] = item.aired_at.year + if indexer.movie_search_capabilities and "imdbid" in indexer.movie_search_capabilities: + params["imdbid"] = item.imdb_id + + url = f"{self.settings.url}/api/v2.0/indexers/{indexer.id}/results/torznab/api" + return self._fetch_results(url, params, indexer.title, "movie") + + def _search_series_indexer(self, item: MediaItem, indexer: JackettIndexer) -> List[Tuple[str, str]]: + """Search for series on the given indexer""" + if indexer.tv_search_capabilities is None: + return [] + q, season, ep = self._get_series_search_params(item) + + if 
not q: + logger.debug(f"No search query found for {item.log_string}") + return [] + + params = { + "apikey": self.api_key, + "t": "tvsearch", + "cat": "5000", + "q": q + } + if ep and indexer.tv_search_capabilities and "ep" in indexer.tv_search_capabilities: params["ep"] = ep + if season and indexer.tv_search_capabilities and "season" in indexer.tv_search_capabilities: params["season"] = season + if indexer.tv_search_capabilities and "imdbid" in indexer.tv_search_capabilities: + params["imdbid"] = item.imdb_id if isinstance(item, (Episode, Show)) else item.parent.imdb_id + + url = f"{self.settings.url}/api/v2.0/indexers/{indexer.id}/results/torznab/api" + return self._fetch_results(url, params, indexer.title, "series") + + def _get_series_search_params(self, item: MediaItem) -> Tuple[str, int, Optional[int]]: + """Get search parameters for series""" + title = item.get_top_title() + if isinstance(item, Show): + return title, None, None + elif isinstance(item, Season): + return title, item.number, None + elif isinstance(item, Episode): + return title, item.parent.number, item.number + return title, None, None + + def _get_indexers(self) -> List[JackettIndexer]: + """Get the indexers from Jackett""" + url = f"{self.settings.url}/api/v2.0/indexers/all/results/torznab/api?apikey={self.api_key}&t=indexers&configured=true" + try: + response = requests.get(url) + response.raise_for_status() + return self._get_indexer_from_xml(response.text) + except Exception as e: + logger.error(f"Exception while getting indexers from Jackett: {e}") + return [] + + def _get_indexer_from_xml(self, xml_content: str) -> list[JackettIndexer]: + """Parse the indexers from the XML content""" + xml_root = ET.fromstring(xml_content) + + indexer_list = [] + for item in xml_root.findall(".//indexer"): + indexer_data = { + "title": item.find("title").text, + "id": item.attrib["id"], + "link": item.find("link").text, + "type": item.find("type").text, + "language": item.find("language").text.split("-")[0], + "movie_search_capabilities": None, + "tv_search_capabilities": None + } + movie_search = item.find(".//searching/movie-search[@available='yes']") + tv_search = item.find(".//searching/tv-search[@available='yes']") + if movie_search is not None: + indexer_data["movie_search_capabilities"] = movie_search.attrib["supportedParams"].split(",") + if tv_search is not None: + indexer_data["tv_search_capabilities"] = tv_search.attrib["supportedParams"].split(",") + indexer = JackettIndexer(**indexer_data) + indexer_list.append(indexer) + return indexer_list + + def _fetch_results(self, url: str, params: Dict[str, str], indexer_title: str, search_type: str) -> List[Tuple[str, str]]: + """Fetch results from the given indexer""" + try: + response = self.request_handler.execute(HttpMethod.GET, url, params=params, timeout=self.settings.timeout) + return self._parse_xml(response.response.text) + except RateLimitExceeded: + logger.warning(f"Rate limit exceeded while fetching results for {search_type}: {indexer_title}") + return [] + except (HTTPError, ConnectionError, Timeout): + logger.debug(f"Indexer failed to fetch results for {search_type}: {indexer_title}") + except Exception as e: + if "Jackett.Common.IndexerException" in str(e): + logger.error(f"Indexer exception while fetching results from {indexer_title} ({search_type}): {e}") + else: + logger.error(f"Exception while fetching results from {indexer_title} ({search_type}): {e}") + return [] + + def _parse_xml(self, xml_content: str) -> list[tuple[str, str]]: + """Parse the 
torrents from the XML content""" + xml_root = ET.fromstring(xml_content) + result_list = [] + for item in xml_root.findall(".//item"): + infoHash = item.find( + ".//torznab:attr[@name='infohash']", + namespaces={"torznab": "http://torznab.com/schemas/2015/feed"} + ) + if infoHash is None or len(infoHash.attrib["value"]) != 40: + continue + result_list.append((item.find(".//title").text, infoHash.attrib["value"])) + return result_list + + def _log_indexers(self) -> None: + """Log the indexers information""" + for indexer in self.indexers: + # logger.debug(f"Indexer: {indexer.title} - {indexer.link} - {indexer.type}") + if not indexer.movie_search_capabilities: + logger.debug(f"Movie search not available for {indexer.title}") + if not indexer.tv_search_capabilities: + logger.debug(f"TV search not available for {indexer.title}") \ No newline at end of file diff --git a/src/program/services/scrapers/knightcrawler.py b/src/program/services/scrapers/knightcrawler.py new file mode 100644 index 0000000..84f8c70 --- /dev/null +++ b/src/program/services/scrapers/knightcrawler.py @@ -0,0 +1,107 @@ +""" Knightcrawler scraper module """ +from typing import Dict + +from loguru import logger +from requests import ConnectTimeout, ReadTimeout +from requests.exceptions import RequestException + +from program.media.item import MediaItem +from program.services.scrapers.shared import ( + ScraperRequestHandler, + _get_stremio_identifier, +) +from program.settings.manager import settings_manager +from program.utils.request import ( + HttpMethod, + RateLimitExceeded, + create_service_session, + get_rate_limit_params, +) + + +class Knightcrawler: + """Scraper for `Knightcrawler`""" + + def __init__(self): + self.key = "knightcrawler" + self.settings = settings_manager.settings.scraping.knightcrawler + self.timeout = self.settings.timeout + rate_limit_params = get_rate_limit_params(max_calls=1, period=5) if self.settings.ratelimit else None + session = create_service_session(rate_limit_params=rate_limit_params) + self.request_handler = ScraperRequestHandler(session) + self.initialized = self.validate() + if not self.initialized: + return + logger.success("Knightcrawler initialized!") + + def validate(self) -> bool: + """Validate the Knightcrawler settings.""" + if not self.settings.enabled: + return False + if not self.settings.url: + logger.error("Knightcrawler URL is not configured and will not be used.") + return False + if not isinstance(self.timeout, int) or self.timeout <= 0: + logger.error("Knightcrawler timeout is not set or invalid.") + return False + if not isinstance(self.settings.ratelimit, bool): + logger.error("Knightcrawler ratelimit must be a valid boolean.") + return False + try: + url = f"{self.settings.url}/{self.settings.filter}/manifest.json" + response = self.request_handler.execute(HttpMethod.GET, url, timeout=self.timeout) + if response.is_ok: + return True + except Exception as e: + logger.error(f"Knightcrawler failed to initialize: {e}", ) + return False + return True + + def run(self, item: MediaItem) -> Dict[str, str]: + """Scrape the knightcrawler site for the given media items + and update the object with scraped streams""" + if not item: + return {} + + try: + return self.scrape(item) + except RateLimitExceeded: + logger.debug(f"Knightcrawler rate limit exceeded for item: {item.log_string}") + except ConnectTimeout: + logger.warning(f"Knightcrawler connection timeout for item: {item.log_string}") + except ReadTimeout: + logger.warning(f"Knightcrawler read timeout for item: 
{item.log_string}") + except RequestException as e: + if e.response.status_code == 429: + logger.warning(f"Knightcrawler ratelimit exceeded for item: {item.log_string}") + else: + logger.error(f"Knightcrawler request exception: {e}") + except Exception as e: + logger.error(f"Knightcrawler exception thrown: {e}") + return {} + + def scrape(self, item: MediaItem) -> Dict[str, str]: + """Wrapper for `Knightcrawler` scrape method""" + identifier, scrape_type, imdb_id = _get_stremio_identifier(item) + + url = f"{self.settings.url}/{self.settings.filter}/stream/{scrape_type}/{imdb_id}" + if identifier: + url += identifier + + response = self.request_handler.execute(HttpMethod.GET, f"{url}.json", timeout=self.timeout) + if not response.is_ok or len(response.data.streams) <= 0: + logger.log("NOT_FOUND", f"No streams found for {item.log_string}") + return {} + + torrents = { + stream.infoHash: "\n".join(stream.title.split("\n")[:-1]).split("\n")[0] + for stream in response.data.streams + if stream.infoHash + } + + if torrents: + logger.log("SCRAPER", f"Found {len(torrents)} streams for {item.log_string}") + else: + logger.log("NOT_FOUND", f"No streams found for {item.log_string}") + + return torrents \ No newline at end of file diff --git a/src/program/services/scrapers/mediafusion.py b/src/program/services/scrapers/mediafusion.py new file mode 100644 index 0000000..64553ca --- /dev/null +++ b/src/program/services/scrapers/mediafusion.py @@ -0,0 +1,159 @@ +""" Mediafusion scraper module """ +import json +import re +from typing import Dict + +from loguru import logger +from requests import ConnectTimeout, ReadTimeout +from requests.exceptions import RequestException + +from program.media.item import MediaItem +from program.services.scrapers.shared import ( + ScraperRequestHandler, + _get_stremio_identifier, +) +from program.settings.manager import settings_manager +from program.settings.models import AppModel +from program.utils.request import ( + HttpMethod, + RateLimitExceeded, + ResponseType, + create_service_session, + get_rate_limit_params, +) + + +class Mediafusion: + """Scraper for `Mediafusion`""" + + def __init__(self): + self.key = "mediafusion" + self.api_key = None + self.downloader = None + self.app_settings: AppModel = settings_manager.settings + self.settings = self.app_settings.scraping.mediafusion + self.timeout = self.settings.timeout + self.encrypted_string = None + # https://github.com/elfhosted/infra/blob/ci/mediafusion/middleware-ratelimit-stream.yaml + rate_limit_params = get_rate_limit_params(max_calls=1, period=10) if self.settings.ratelimit else None + session = create_service_session(rate_limit_params=rate_limit_params) + self.request_handler = ScraperRequestHandler(session) + self.initialized = self.validate() + if not self.initialized: + return + logger.success("Mediafusion initialized!") + + def validate(self) -> bool: + """Validate the Mediafusion settings.""" + if not self.settings.enabled: + return False + if not self.settings.url: + logger.error("Mediafusion URL is not configured and will not be used.") + return False + if not isinstance(self.timeout, int) or self.timeout <= 0: + logger.error("Mediafusion timeout is not set or invalid.") + return False + if not isinstance(self.settings.ratelimit, bool): + logger.error("Mediafusion ratelimit must be a valid boolean.") + return False + if not self.settings.catalogs: + logger.error("Configure at least one Mediafusion catalog.") + return False + + if self.app_settings.downloaders.real_debrid.enabled: + self.api_key = 
self.app_settings.downloaders.real_debrid.api_key + self.downloader = "realdebrid" + elif self.app_settings.downloaders.torbox.enabled: + self.api_key = self.app_settings.downloaders.torbox.api_key + self.downloader = "torbox" + else: + logger.error("No downloader enabled, please enable at least one.") + return False + + payload = { + "sp": { + "sv": self.downloader, + "tk": self.api_key, + "ewc": False + }, + "sc": self.settings.catalogs, + "sr": ["4k", "2160p", "1440p", "1080p", "720p", "480p", None], + "ec": False, + "eim": False, + "sftn": True, + "tsp": ["cached"], # sort order, but this doesnt matter as we sort later + "nf": ["Disable"], # nudity filter + "cf": ["Disable"] # certification filter + } + + url = f"{self.settings.url}/encrypt-user-data" + headers = {"Content-Type": "application/json"} + + try: + response = self.request_handler.execute(HttpMethod.POST, url, overriden_response_type=ResponseType.DICT, json=payload, headers=headers) + if not response.data or response.data["status"] != "success": + logger.error(f"Failed to encrypt user data: {response.data['message']}") + return False + self.encrypted_string = response.data["encrypted_str"] + except Exception as e: + logger.error(f"Failed to encrypt user data: {e}") + return False + + try: + url = f"{self.settings.url}/manifest.json" + response = self.request_handler.execute(HttpMethod.GET, url, timeout=self.timeout) + return response.is_ok + except Exception as e: + logger.error(f"Mediafusion failed to initialize: {e}") + return False + + def run(self, item: MediaItem) -> Dict[str, str]: + """Scrape the mediafusion site for the given media items + and update the object with scraped streams""" + if not item: + return {} + + try: + return self.scrape(item) + except RateLimitExceeded: + logger.debug(f"Mediafusion ratelimit exceeded for item: {item.log_string}") + except ConnectTimeout: + logger.warning(f"Mediafusion connection timeout for item: {item.log_string}") + except ReadTimeout: + logger.warning(f"Mediafusion read timeout for item: {item.log_string}") + except RequestException as e: + logger.error(f"Mediafusion request exception: {e}") + except Exception as e: + logger.exception(f"Mediafusion exception thrown: {e}") + return {} + + def scrape(self, item: MediaItem) -> tuple[Dict[str, str], int]: + """Wrapper for `Mediafusion` scrape method""" + identifier, scrape_type, imdb_id = _get_stremio_identifier(item) + + url = f"{self.settings.url}/{self.encrypted_string}/stream/{scrape_type}/{imdb_id}" + if identifier: + url += identifier + + response = self.request_handler.execute(HttpMethod.GET, f"{url}.json", timeout=self.timeout) + if not response.is_ok or len(response.data.streams) <= 0: + logger.log("NOT_FOUND", f"No streams found for {item.log_string}") + return {} + + torrents: Dict[str, str] = {} + + for stream in response.data.streams: + if not hasattr(stream, "description") and hasattr(stream, "title") and "rate-limit exceeded" in stream.title: + raise RateLimitExceeded(f"Mediafusion rate-limit exceeded for item: {item.log_string}") + description_split = stream.description.replace("📂 ", "") + raw_title = description_split.split("/")[0] or description_split.split("\n")[0] # we want the torrent name if possible + info_hash = re.search(r"info_hash=([A-Za-z0-9]+)", stream.url).group(1) + if info_hash and info_hash not in torrents: + torrents[info_hash] = raw_title + + if torrents: + logger.log("SCRAPER", f"Found {len(torrents)} streams for {item.log_string}") + else: + logger.log("NOT_FOUND", f"No streams found for 
{item.log_string}") + + return torrents \ No newline at end of file diff --git a/src/program/services/scrapers/orionoid.py b/src/program/services/scrapers/orionoid.py new file mode 100644 index 0000000..8ad7188 --- /dev/null +++ b/src/program/services/scrapers/orionoid.py @@ -0,0 +1,170 @@ +""" Orionoid scraper module """ +from typing import Dict + +from loguru import logger + +from program.media.item import MediaItem +from program.services.scrapers.shared import ScraperRequestHandler +from program.settings.manager import settings_manager +from program.utils.request import ( + HttpMethod, + RateLimitExceeded, + create_service_session, + get_rate_limit_params, +) + +KEY_APP = "D3CH6HMX9KD9EMD68RXRCDUNBDJV5HRR" + + +class Orionoid: + """Scraper for `Orionoid`""" + + def __init__(self): + self.key = "orionoid" + self.base_url = "https://api.orionoid.com" + self.settings = settings_manager.settings.scraping.orionoid + self.timeout = self.settings.timeout + self.is_premium = False + self.is_unlimited = False + self.initialized = False + rate_limit_params = get_rate_limit_params(max_calls=1, period=5) if self.settings.ratelimit else None + session = create_service_session(rate_limit_params=rate_limit_params) + self.request_handler = ScraperRequestHandler(session) + if self.validate(): + self.is_premium = self.check_premium() + self.initialized = True + else: + return + logger.success("Orionoid initialized!") + + def validate(self) -> bool: + """Validate the Orionoid class_settings.""" + if not self.settings.enabled: + return False + if len(self.settings.api_key) != 32 or self.settings.api_key == "": + logger.error("Orionoid API Key is not valid or not set. Please check your settings.") + return False + if not isinstance(self.timeout, int) or self.timeout <= 0: + logger.error("Orionoid timeout is not set or invalid.") + return False + try: + url = f"{self.base_url}?keyapp={KEY_APP}&keyuser={self.settings.api_key}&mode=user&action=retrieve" + response = self.request_handler.execute(HttpMethod.GET, url, timeout=self.timeout) + if response.is_ok and hasattr(response.data, "result"): + if response.data.result.status != "success": + logger.error( + f"Orionoid API Key is invalid. 
Status: {response.data.result.status}", + ) + return False + if not response.is_ok: + logger.error( + f"Orionoid Status Code: {response.status_code}, Reason: {response.data.reason}", + ) + return False + if response.data.data.subscription.package.type == "unlimited": + self.is_unlimited = True + return True + except Exception as e: + logger.exception(f"Orionoid failed to initialize: {e}") + return False + + def check_premium(self) -> bool: + """Check if the user is active, has a premium account, and has RealDebrid service enabled.""" + url = f"{self.base_url}?keyapp={KEY_APP}&keyuser={self.settings.api_key}&mode=user&action=retrieve" + response = self.request_handler.execute(HttpMethod.GET, url) + if response.is_ok and hasattr(response.data, "data"): + active = response.data.data.status == "active" + premium = response.data.data.subscription.package.premium + debrid = response.data.data.service.realdebrid + if active and premium and debrid: + return True + return False + + def check_limit(self) -> bool: + """Check if the user has exceeded the rate limit for the Orionoid API.""" + url = f"{self.base_url}?keyapp={KEY_APP}&keyuser={self.settings.api_key}&mode=user&action=retrieve" + try: + response = self.request_handler.execute(HttpMethod.GET, url) + if response.is_ok and hasattr(response.data, "data"): + remaining = response.data.data.requests.streams.daily.remaining + if remaining is None: + return False + elif remaining and remaining <= 0: + return True + except Exception as e: + logger.error(f"Orionoid failed to check limit: {e}") + return False + + def run(self, item: MediaItem) -> Dict[str, str]: + """Scrape the orionoid site for the given media items and update the object with scraped streams.""" + if not item: + return {} + + if not self.is_unlimited: + limit_hit = self.check_limit() + if limit_hit: + logger.debug("Orionoid daily limits have been reached") + return {} + + try: + return self.scrape(item) + except RateLimitExceeded: + logger.debug(f"Orionoid ratelimit exceeded for item: {item.log_string}") + except Exception as e: + logger.exception(f"Orionoid exception for item: {item.log_string} - Exception: {e}") + return {} + + def _build_query_params(self, item: MediaItem) -> dict: + """Construct the query parameters for the Orionoid API based on the media item.""" + media_type = "movie" if item.type == "movie" else "show" + imdbid: str = item.get_top_imdb_id() + if not imdbid: + raise ValueError("IMDB ID is missing for the media item") + + params = { + "keyapp": KEY_APP, + "keyuser": self.settings.api_key, + "mode": "stream", + "action": "retrieve", + "type": media_type, + "idimdb": imdbid[2:], + "streamtype": "torrent", + "protocoltorrent": "magnet" + } + + if item.type == "season": + params["numberseason"] = item.number + elif item.type == "episode": + params["numberseason"] = item.parent.number + params["numberepisode"] = item.number + + if self.settings.cached_results_only: + params["access"] = "realdebridtorrent" + params["debridlookup"] = "realdebrid" + + for key, value in self.settings.parameters.items(): + if key not in params: + params[key] = value + + return params + + def scrape(self, item: MediaItem) -> Dict[str, str]: + """Wrapper for `Orionoid` scrape method""" + params = self._build_query_params(item) + response = self.request_handler.execute(HttpMethod.GET, self.base_url, params=params, timeout=self.timeout) + if not response.is_ok or not hasattr(response.data, "data"): + logger.log("NOT_FOUND", f"No streams found for {item.log_string}") + return {} + + torrents = 
{}
+        for stream in response.data.data.streams:
+            if not stream.file.hash or not stream.file.name:
+                continue
+            torrents[stream.file.hash] = stream.file.name
+
+        if torrents:
+            logger.log("SCRAPER", f"Found {len(torrents)} streams for {item.log_string}")
+        else:
+            logger.log("NOT_FOUND", f"No streams found for {item.log_string}")
+
+        return torrents
\ No newline at end of file
diff --git a/src/program/services/scrapers/prowlarr.py b/src/program/services/scrapers/prowlarr.py
new file mode 100644
index 0000000..fc57b8e
--- /dev/null
+++ b/src/program/services/scrapers/prowlarr.py
@@ -0,0 +1,290 @@
+""" Prowlarr scraper module """
+
+import json
+import queue
+import threading
+import time
+import xml.etree.ElementTree as ET
+from typing import Dict, List, Optional, Tuple
+
+import requests
+from loguru import logger
+from pydantic import BaseModel
+from requests import HTTPError, ReadTimeout, RequestException, Timeout
+
+from program.media.item import Episode, MediaItem, Movie, Season, Show
+from program.services.scrapers.shared import ScraperRequestHandler
+from program.settings.manager import settings_manager
+from program.utils.request import (
+    HttpMethod,
+    RateLimitExceeded,
+    create_service_session,
+    get_http_adapter,
+    get_rate_limit_params,
+)
+
+
+class ProwlarrIndexer(BaseModel):
+    """Indexer model for Prowlarr"""
+    title: Optional[str] = None
+    id: Optional[str] = None
+    link: Optional[str] = None
+    type: Optional[str] = None
+    language: Optional[str] = None
+    tv_search_capabilities: Optional[List[str]] = None
+    movie_search_capabilities: Optional[List[str]] = None
+
+
+class Prowlarr:
+    """Scraper for `Prowlarr`"""
+
+    def __init__(self):
+        self.key = "prowlarr"
+        self.api_key = None
+        self.indexers = None
+        self.settings = settings_manager.settings.scraping.prowlarr
+        self.timeout = self.settings.timeout
+        self.request_handler = None
+        self.initialized = self.validate()
+        if not self.initialized:
+            return
+        logger.success("Prowlarr initialized!")
+
+    def validate(self) -> bool:
+        """Validate Prowlarr settings."""
+        if not self.settings.enabled:
+            return False
+        if self.settings.url and self.settings.api_key:
+            self.api_key = self.settings.api_key
+            try:
+                if not isinstance(self.timeout, int) or self.timeout <= 0:
+                    logger.error("Prowlarr timeout is not set or invalid.")
+                    return False
+                if not isinstance(self.settings.ratelimit, bool):
+                    logger.error("Prowlarr ratelimit must be a valid boolean.")
+                    return False
+                indexers = self._get_indexers()
+                if not indexers:
+                    logger.error("No Prowlarr indexers configured.")
+                    return False
+                self.indexers = indexers
+                rate_limit_params = get_rate_limit_params(max_calls=len(self.indexers), period=self.settings.limiter_seconds) if self.settings.ratelimit else None
+                http_adapter = get_http_adapter(pool_connections=len(self.indexers), pool_maxsize=len(self.indexers))
+                session = create_service_session(rate_limit_params=rate_limit_params, session_adapter=http_adapter)
+                self.request_handler = ScraperRequestHandler(session)
+                self._log_indexers()
+                return True
+            except ReadTimeout:
+                logger.error("Prowlarr request timed out. 
Check your indexers, they may be too slow to respond.") + return False + except Exception as e: + logger.error(f"Prowlarr failed to initialize with API Key: {e}") + return False + logger.warning("Prowlarr is not configured and will not be used.") + return False + + def run(self, item: MediaItem) -> Dict[str, str]: + """Scrape the Prowlarr site for the given media items + and update the object with scraped streams""" + if not item: + return {} + + try: + return self.scrape(item) + except RateLimitExceeded: + logger.debug(f"Prowlarr ratelimit exceeded for item: {item.log_string}") + except RequestException as e: + logger.error(f"Prowlarr request exception: {e}") + except Exception as e: + logger.exception(f"Prowlarr failed to scrape item with error: {e}") + return {} + + def scrape(self, item: MediaItem) -> Dict[str, str]: + """Scrape the given media item using Prowlarr indexers""" + results_queue = queue.Queue() + threads = [ + threading.Thread(target=self._thread_target, args=(item, indexer, results_queue)) + for indexer in self.indexers + ] + + for thread in threads: + thread.start() + for thread in threads: + thread.join() + + results = [] + while not results_queue.empty(): + results.extend(results_queue.get()) + + torrents: Dict[str, str] = {} + for result in results: + if result[1] is None: + continue + torrents[result[1]] = result[0] + + if torrents: + logger.log("SCRAPER", f"Found {len(torrents)} streams for {item.log_string}") + else: + logger.log("NOT_FOUND", f"No streams found for {item.log_string}") + + return torrents + + def _thread_target(self, item: MediaItem, indexer: ProwlarrIndexer, results_queue: queue.Queue): + try: + start_time = time.perf_counter() + result = self._search_indexer(item, indexer) + search_duration = time.perf_counter() - start_time + except TypeError as e: + logger.error(f"Invalid Type for {item.log_string}: {e}") + result = [] + search_duration = 0 + item_title = item.log_string # probably not needed, but since its concurrent, it's better to be safe + logger.debug(f"Scraped {item_title} from {indexer.title} in {search_duration:.2f} seconds with {len(result)} results") + results_queue.put(result) + + def _search_indexer(self, item: MediaItem, indexer: ProwlarrIndexer) -> List[Tuple[str, str]]: + """Search for the given item on the given indexer""" + if isinstance(item, Movie): + return self._search_movie_indexer(item, indexer) + elif isinstance(item, (Show, Season, Episode)): + return self._search_series_indexer(item, indexer) + else: + raise TypeError("Only Movie and Series is allowed!") + + def _search_movie_indexer(self, item: MediaItem, indexer: ProwlarrIndexer) -> List[Tuple[str, str]]: + """Search for movies on the given indexer""" + if indexer.movie_search_capabilities is None: + return [] + params = { + "apikey": self.api_key, + "t": "movie", + "cat": "2000", + "q": item.title, + } + if indexer.movie_search_capabilities and "year" in indexer.movie_search_capabilities: + if hasattr(item.aired_at, "year") and item.aired_at.year: params["year"] = item.aired_at.year + if indexer.movie_search_capabilities and "imdbid" in indexer.movie_search_capabilities: + params["imdbid"] = item.imdb_id + url = f"{self.settings.url}/api/v1/indexer/{indexer.id}/newznab" + return self._fetch_results(url, params, indexer.title, "movie") + + def _search_series_indexer(self, item: MediaItem, indexer: ProwlarrIndexer) -> List[Tuple[str, str]]: + """Search for series on the given indexer""" + if indexer.tv_search_capabilities is None: + return [] + q, season, ep = 
self._get_series_search_params(item)
+
+        if not q:
+            logger.debug(f"No search query found for {item.log_string}")
+            return []
+
+        params = {
+            "apikey": self.api_key,
+            "t": "tvsearch",
+            "cat": "5000",
+            "q": q
+        }
+        if ep and indexer.tv_search_capabilities and "ep" in indexer.tv_search_capabilities: params["ep"] = ep
+        if season and indexer.tv_search_capabilities and "season" in indexer.tv_search_capabilities: params["season"] = season
+        if indexer.tv_search_capabilities and "imdbid" in indexer.tv_search_capabilities:
+            params["imdbid"] = item.imdb_id if isinstance(item, (Episode, Show)) else item.parent.imdb_id
+
+        url = f"{self.settings.url}/api/v1/indexer/{indexer.id}/newznab"
+        return self._fetch_results(url, params, indexer.title, "series")
+
+    def _get_series_search_params(self, item: MediaItem) -> Tuple[str, Optional[int], Optional[int]]:
+        """Get search parameters for series"""
+        title = item.get_top_title()
+        if isinstance(item, Show):
+            return title, None, None
+        elif isinstance(item, Season):
+            return title, item.number, None
+        elif isinstance(item, Episode):
+            return title, item.parent.number, item.number
+        return title, None, None
+
+    def _get_indexers(self) -> List[ProwlarrIndexer]:
+        """Get the indexers from Prowlarr"""
+        url = f"{self.settings.url}/api/v1/indexer?apikey={self.api_key}"
+        try:
+            response = requests.get(url, timeout=self.timeout)
+            response.raise_for_status()
+            return self._get_indexer_from_json(response.text)
+        except Exception as e:
+            logger.error(f"Exception while getting indexers from Prowlarr: {e}")
+            return []
+
+    def _get_indexer_from_json(self, json_content: str) -> list[ProwlarrIndexer]:
+        """Parse the indexers from the JSON content"""
+        indexer_list = []
+        for indexer in json.loads(json_content):
+            has_movies = any(
+                category["name"] == "Movies"
+                for category in indexer["capabilities"]["categories"]
+            )
+            has_tv = any(
+                category["name"] == "TV"
+                for category in indexer["capabilities"]["categories"]
+            )
+
+            indexer_list.append(
+                ProwlarrIndexer(
+                    title=indexer["name"],
+                    id=str(indexer["id"]),
+                    link=indexer["infoLink"],
+                    type=indexer["protocol"],
+                    language=indexer["language"],
+                    movie_search_capabilities=(
+                        list(indexer["capabilities"]["movieSearchParams"])
+                        if has_movies else None
+                    ),
+                    tv_search_capabilities=(
+                        list(indexer["capabilities"]["tvSearchParams"])
+                        if has_tv else None
+                    )
+                )
+            )
+
+        return indexer_list
+
+    def _fetch_results(self, url: str, params: Dict[str, str], indexer_title: str, search_type: str) -> List[Tuple[str, str]]:
+        """Fetch results from the given indexer"""
+        try:
+            response = self.request_handler.execute(HttpMethod.GET, url, params=params, timeout=self.timeout)
+            return self._parse_xml(response.response.text, indexer_title)
+        except (HTTPError, ConnectionError, Timeout):
+            logger.debug(f"Indexer failed to fetch results for {search_type.title()} with indexer {indexer_title}")
+        except Exception as e:
+            if "Prowlarr.Common.IndexerException" in str(e):
+                logger.error(f"Indexer exception while fetching results from {indexer_title} ({search_type}): {e}")
+            else:
+                logger.error(f"Exception while fetching results from {indexer_title} ({search_type}): {e}")
+        return []
+
+    def _parse_xml(self, xml_content: str, indexer_title: str) -> list[tuple[str, str]]:
+        """Parse the torrents from the XML content"""
+        xml_root = ET.fromstring(xml_content)
+        result_list = []
+        infohashes_found = False
+        data = xml_root.findall(".//item")
+        for item in data:
+            infoHash = item.find(
+                ".//torznab:attr[@name='infohash']",
+                namespaces={"torznab": 
"http://torznab.com/schemas/2015/feed"} + ) + if infoHash is None or len(infoHash.attrib["value"]) != 40: + continue + infohashes_found = True + result_list.append((item.find(".//title").text, infoHash.attrib["value"])) + len_data = len(data) + if infohashes_found is False and len_data > 0: + logger.warning(f"{self.key} Tracker {indexer_title} may never return infohashes, consider disabling: {len_data} items found, None contain infohash.") + return result_list + + def _log_indexers(self) -> None: + """Log the indexers information""" + for indexer in self.indexers: + if not indexer.movie_search_capabilities: + logger.debug(f"Movie search not available for {indexer.title}") + if not indexer.tv_search_capabilities: + logger.debug(f"TV search not available for {indexer.title}") diff --git a/src/program/services/scrapers/shared.py b/src/program/services/scrapers/shared.py new file mode 100644 index 0000000..b09b587 --- /dev/null +++ b/src/program/services/scrapers/shared.py @@ -0,0 +1,169 @@ +"""Shared functions for scrapers.""" +from typing import Dict, Optional, Set, Type, Union + +from loguru import logger +from RTN import RTN, ParsedData, Torrent, sort_torrents +from RTN.exceptions import GarbageTorrent + +from program.media.item import Episode, MediaItem, Movie, Season, Show +from program.media.state import States +from program.media.stream import Stream +from program.settings.manager import settings_manager +from program.settings.versions import models +from program.utils.request import ( + BaseRequestHandler, + HttpMethod, + ResponseObject, + ResponseType, + Session, +) + +enable_aliases = settings_manager.settings.scraping.enable_aliases +settings_model = settings_manager.settings.ranking +ranking_model = models.get(settings_model.profile) +rtn = RTN(settings_model, ranking_model) + + +class ScraperRequestHandler(BaseRequestHandler): + def __init__(self, session: Session, response_type=ResponseType.SIMPLE_NAMESPACE, custom_exception: Optional[Type[Exception]] = None, request_logging: bool = False): + super().__init__(session, response_type=response_type, custom_exception=custom_exception, request_logging=request_logging) + + def execute(self, method: HttpMethod, endpoint: str, overriden_response_type: ResponseType = None, **kwargs) -> ResponseObject: + return super()._request(method, endpoint, overriden_response_type=overriden_response_type, **kwargs) + + +def _get_stremio_identifier(item: MediaItem) -> tuple[str | None, str, str]: + """Get the stremio identifier for a media item based on its type.""" + if isinstance(item, Show): + identifier, scrape_type, imdb_id = ":1:1", "series", item.imdb_id + elif isinstance(item, Season): + identifier, scrape_type, imdb_id = f":{item.number}:1", "series", item.parent.imdb_id + elif isinstance(item, Episode): + identifier, scrape_type, imdb_id = f":{item.parent.number}:{item.number}", "series", item.parent.parent.imdb_id + elif isinstance(item, Movie): + identifier, scrape_type, imdb_id = None, "movie", item.imdb_id + else: + return None, None, None + return identifier, scrape_type, imdb_id + + +def _parse_results(item: MediaItem, results: Dict[str, str], log_msg: bool = True) -> Dict[str, Stream]: + """Parse the results from the scrapers into Torrent objects.""" + torrents: Set[Torrent] = set() + processed_infohashes: Set[str] = set() + correct_title: str = item.get_top_title() + + logger.log("SCRAPER", f"Processing {len(results)} results for {item.log_string}") + + if item.type in ["show", "season", "episode"]: + needed_seasons: list[int] = 
_get_needed_seasons(item) + + for infohash, raw_title in results.items(): + if infohash in processed_infohashes: + continue + + try: + torrent: Torrent = rtn.rank( + raw_title=raw_title, + infohash=infohash, + correct_title=correct_title, + remove_trash=settings_manager.settings.ranking.options["remove_all_trash"], + aliases=item.get_aliases() if enable_aliases else {} # in some cases we want to disable aliases + ) + + + if torrent.data.country and not item.is_anime: + if _get_item_country(item) != torrent.data.country: + if settings_manager.settings.scraping.parse_debug: + logger.debug(f"Skipping torrent for incorrect country with {item.log_string}: {raw_title}") + continue + + if item.type in ["show", "season", "episode"]: + if torrent.data.complete: + torrents.add(torrent) + processed_infohashes.add(infohash) + continue + + if item.type == "movie": + # Check if a movie is within a year range of +/- 1 year. + # Ex: [2018, 2019, 2020] for a 2019 movie + if _check_item_year(item, torrent.data): + torrents.add(torrent) + + elif item.type == "show": + if torrent.data.seasons and not torrent.data.episodes: + # We subtract one because Trakt doesn't always index + # shows according to uploaders + if len(torrent.data.seasons) >= (len(needed_seasons) - 1): + torrents.add(torrent) + + elif item.type == "season": + # If the torrent has the needed seasons and no episodes, we can add it + if any(season in torrent.data.seasons for season in needed_seasons) and not torrent.data.episodes: + torrents.add(torrent) + + elif item.type == "episode": + # If the torrent has the season and episode numbers, we can add it + if ( + item.number in torrent.data.episodes + and item.parent.number in torrent.data.seasons + ) or ( + len(item.parent.parent.seasons) == 1 + and not torrent.data.seasons + and item.number in torrent.data.episodes + ) or any( + season in torrent.data.seasons + for season in needed_seasons + ) and not torrent.data.episodes: + torrents.add(torrent) + + processed_infohashes.add(infohash) + + except (ValueError, AttributeError) as e: + # The only stuff I've seen that show up here is titles with a date. + # Dates can be sometimes parsed incorrectly by Arrow library, + # so we'll just ignore them. 
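+            # In practice these errors originate from rtn.rank() above failing to
+            # parse an odd raw title; skipping just this torrent keeps one bad
+            # result from aborting the rest of the batch.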
+ if settings_manager.settings.scraping.parse_debug and log_msg: + logger.debug(f"Skipping torrent: '{raw_title}' - {e}") + continue + except GarbageTorrent as e: + if settings_manager.settings.scraping.parse_debug and log_msg: + logger.debug(e) + continue + + if torrents: + logger.log("SCRAPER", f"Found {len(torrents)} streams for {item.log_string}") + torrents = sort_torrents(torrents) + torrents_dict = {} + for torrent in torrents.values(): + torrents_dict[torrent.infohash] = Stream(torrent) + return torrents_dict + return {} + + +# helper functions + +def _check_item_year(item: MediaItem, data: ParsedData) -> bool: + """Check if the year of the torrent is within the range of the item.""" + year_range = [item.aired_at.year - 1, item.aired_at.year, item.aired_at.year + 1] + if item.type == "movie" and data.year: + return data.year in year_range + return False + +def _get_item_country(item: MediaItem) -> str: + """Get the country code for a country.""" + if item.type == "season": + return item.parent.country.upper() + elif item.type == "episode": + return item.parent.parent.country.upper() + return item.country.upper() + +def _get_needed_seasons(item: Union[Show, Season, Episode]) -> list[int]: + """Get the seasons that are needed for the item.""" + if item.type == "show": + return [season.number for season in item.seasons if season.last_state != States.Completed] + elif item.type == "season": + return [season.number for season in item.parent.seasons if season.last_state != States.Completed] + elif item.type == "episode": + return [season.number for season in item.parent.parent.seasons if season.last_state != States.Completed] + return [] diff --git a/src/program/services/scrapers/torbox.py b/src/program/services/scrapers/torbox.py new file mode 100644 index 0000000..5b348fe --- /dev/null +++ b/src/program/services/scrapers/torbox.py @@ -0,0 +1,101 @@ +from typing import Dict + +from loguru import logger +from requests import RequestException +from requests.exceptions import ConnectTimeout + +from program.media.item import MediaItem +from program.services.scrapers.shared import ScraperRequestHandler +from program.settings.manager import settings_manager +from program.utils.request import HttpMethod, RateLimitExceeded, create_service_session + + +class TorBoxScraper: + def __init__(self): + self.key = "torbox" + self.settings = settings_manager.settings.scraping.torbox_scraper + self.base_url = "http://search-api.torbox.app" + self.headers = {"Authorization": f"Bearer {self.settings.api_key}"} + self.timeout = self.settings.timeout + session = create_service_session() + self.request_handler = ScraperRequestHandler(session) + self.initialized = self.validate() + if not self.initialized: + return + logger.success("TorBox Scraper is initialized") + + def validate(self) -> bool: + """Validate the TorBox Scraper as a service""" + if not self.settings.enabled: + return False + if not self.settings.api_key: + logger.error("TorBox API key is not set.") + return False + if not isinstance(self.timeout, int) or self.timeout <= 0: + logger.error("TorBox timeout is not set or invalid.") + return False + try: + response = self.request_handler.execute(HttpMethod.GET, f"{self.base_url}/torrents/imdb:tt0944947?metadata=false&season=1&episode=1", headers=self.headers, timeout=self.timeout) + return response.is_ok + except Exception as e: + logger.exception(f"Error validating TorBox Scraper: {e}") + return False + + def run(self, item: MediaItem) -> Dict[str, str]: + """Scrape Torbox with the given media 
item for streams""" + try: + return self.scrape(item) + except RateLimitExceeded: + logger.debug(f"TorBox rate limit exceeded for item: {item.log_string}") + except ConnectTimeout: + logger.log("NOT_FOUND", f"TorBox is caching request for {item.log_string}, will retry later") + except RequestException as e: + if e.response and e.response.status_code == 418: + logger.log("NOT_FOUND", f"TorBox has no metadata for item: {item.log_string}, unable to scrape") + elif e.response and e.response.status_code == 500: + logger.log("NOT_FOUND", f"TorBox is caching request for {item.log_string}, will retry later") + except Exception as e: + if "418 Client Error" in str(e): + logger.log("NOT_FOUND", f"TorBox has no metadata for item: {item.log_string}, unable to scrape") + else: + logger.exception(f"TorBox exception thrown: {e}") + return {} + + def _build_query_params(self, item: MediaItem) -> str: + """Build the query params for the TorBox API""" + imdb_id = item.get_top_imdb_id() + if item.type == "movie": + return f"torrents/imdb:{imdb_id}" + elif item.type == "show": + return f"torrents/imdb:{imdb_id}?season=1&episode=1" + elif item.type == "season": + return f"torrents/imdb:{imdb_id}?season={item.number}&episode=1" + elif item.type == "episode": + return f"torrents/imdb:{imdb_id}?season={item.parent.number}&episode={item.number}" + return "" + + def scrape(self, item: MediaItem) -> Dict[str, str]: + """Wrapper for `Torbox` scrape method using Torbox API""" + query_params = self._build_query_params(item) + url = f"{self.base_url}/{query_params}&metadata=false" + + response = self.request_handler.execute(HttpMethod.GET, url, headers=self.headers, timeout=self.timeout) + if not response.is_ok or not response.data.data.torrents: + logger.log("NOT_FOUND", f"No streams found for {item.log_string}") + return {} + + torrents = {} + for torrent_data in response.data.data.torrents: + raw_title = torrent_data.raw_title + info_hash = torrent_data.hash + if not info_hash or not raw_title: + continue + + torrents[info_hash] = raw_title + + if torrents: + logger.log("SCRAPER", f"Found {len(torrents)} streams for {item.log_string}") + else: + logger.log("NOT_FOUND", f"No streams found for {item.log_string}") + + return torrents \ No newline at end of file diff --git a/src/program/services/scrapers/torrentio.py b/src/program/services/scrapers/torrentio.py new file mode 100644 index 0000000..128c20c --- /dev/null +++ b/src/program/services/scrapers/torrentio.py @@ -0,0 +1,96 @@ +""" Torrentio scraper module """ +from typing import Dict + +from loguru import logger + +from program.media.item import MediaItem +from program.services.scrapers.shared import ( + ScraperRequestHandler, + _get_stremio_identifier, +) +from program.settings.manager import settings_manager +from program.settings.models import TorrentioConfig +from program.utils.request import ( + HttpMethod, + RateLimitExceeded, + create_service_session, + get_rate_limit_params, +) + + +class Torrentio: + """Scraper for `Torrentio`""" + + def __init__(self): + self.key = "torrentio" + self.settings: TorrentioConfig = settings_manager.settings.scraping.torrentio + self.timeout: int = self.settings.timeout + rate_limit_params = get_rate_limit_params(max_calls=1, period=5) if self.settings.ratelimit else None + session = create_service_session(rate_limit_params=rate_limit_params) + self.request_handler = ScraperRequestHandler(session) + self.headers = {"User-Agent": "Mozilla/5.0"} + self.initialized: bool = self.validate() + if not self.initialized: + return + 
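+        # Reached only when validate() returned True; the same construct-validate-log
+        # pattern is used by the other Stremio-style scrapers (e.g. Knightcrawler).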
logger.success("Torrentio initialized!") + + def validate(self) -> bool: + """Validate the Torrentio settings.""" + if not self.settings.enabled: + return False + if not self.settings.url: + logger.error("Torrentio URL is not configured and will not be used.") + return False + if not isinstance(self.timeout, int) or self.timeout <= 0: + logger.error("Torrentio timeout is not set or invalid.") + return False + try: + url = f"{self.settings.url}/{self.settings.filter}/manifest.json" + response = self.request_handler.execute(HttpMethod.GET, url, timeout=10, headers=self.headers) + if response.is_ok: + return True + except Exception as e: + logger.error(f"Torrentio failed to initialize: {e}", ) + return False + return True + + def run(self, item: MediaItem) -> Dict[str, str]: + """Scrape Torrentio with the given media item for streams""" + try: + return self.scrape(item) + except RateLimitExceeded: + logger.debug(f"Torrentio rate limit exceeded for item: {item.log_string}") + except Exception as e: + logger.exception(f"Torrentio exception thrown: {str(e)}") + return {} + + def scrape(self, item: MediaItem) -> tuple[Dict[str, str], int]: + """Wrapper for `Torrentio` scrape method""" + identifier, scrape_type, imdb_id = _get_stremio_identifier(item) + if not imdb_id: + return {} + + url = f"{self.settings.url}/{self.settings.filter}/stream/{scrape_type}/{imdb_id}" + if identifier: + url += identifier + + response = self.request_handler.execute(HttpMethod.GET, f"{url}.json", timeout=self.timeout, headers=self.headers) + if not response.is_ok or not hasattr(response.data, 'streams') or not response.data.streams: + logger.log("NOT_FOUND", f"No streams found for {item.log_string}") + return {} + + torrents: Dict[str, str] = {} + for stream in response.data.streams: + if not stream.infoHash: + continue + + stream_title = stream.title.split("\n👤")[0] + raw_title = stream_title.split("\n")[0] + torrents[stream.infoHash] = raw_title + + if torrents: + logger.log("SCRAPER", f"Found {len(torrents)} streams for {item.log_string}") + else: + logger.log("NOT_FOUND", f"No streams found for {item.log_string}") + + return torrents \ No newline at end of file diff --git a/src/program/services/scrapers/zilean.py b/src/program/services/scrapers/zilean.py new file mode 100644 index 0000000..c198f07 --- /dev/null +++ b/src/program/services/scrapers/zilean.py @@ -0,0 +1,94 @@ +""" Zilean scraper module """ + +from typing import Dict + +from loguru import logger + +from program.media.item import Episode, MediaItem, Season, Show +from program.services.scrapers.shared import ScraperRequestHandler +from program.settings.manager import settings_manager +from program.utils.request import ( + HttpMethod, + RateLimitExceeded, + create_service_session, + get_rate_limit_params, +) + + +class Zilean: + """Scraper for `Zilean`""" + + def __init__(self): + self.key = "zilean" + self.settings = settings_manager.settings.scraping.zilean + self.timeout = self.settings.timeout + rate_limit_params = get_rate_limit_params(max_calls=1, period=2) if self.settings.ratelimit else None + session = create_service_session(rate_limit_params=rate_limit_params) + self.request_handler = ScraperRequestHandler(session) + self.initialized = self.validate() + if not self.initialized: + return + logger.success("Zilean initialized!") + + def validate(self) -> bool: + """Validate the Zilean settings.""" + if not self.settings.enabled: + return False + if not self.settings.url: + logger.error("Zilean URL is not configured and will not be used.") + return 
False + if not isinstance(self.timeout, int) or self.timeout <= 0: + logger.error("Zilean timeout is not set or invalid.") + return False + try: + url = f"{self.settings.url}/healthchecks/ping" + response = self.request_handler.execute(HttpMethod.GET, url, timeout=self.timeout) + return response.is_ok + except Exception as e: + logger.error(f"Zilean failed to initialize: {e}") + return False + + def run(self, item: MediaItem) -> Dict[str, str]: + """Scrape the Zilean site for the given media items and update the object with scraped items""" + try: + return self.scrape(item) + except RateLimitExceeded: + logger.debug(f"Zilean rate limit exceeded for item: {item.log_string}") + except Exception as e: + logger.exception(f"Zilean exception thrown: {e}") + return {} + + def _build_query_params(self, item: MediaItem) -> Dict[str, str]: + """Build the query params for the Zilean API""" + params = {"Query": item.get_top_title()} + if isinstance(item, Show): + params["Season"] = 1 + elif isinstance(item, Season): + params["Season"] = item.number + elif isinstance(item, Episode): + params["Season"] = item.parent.number + params["Episode"] = item.number + return params + + def scrape(self, item: MediaItem) -> Dict[str, str]: + """Wrapper for `Zilean` scrape method""" + url = f"{self.settings.url}/dmm/filtered" + params = self._build_query_params(item) + + response = self.request_handler.execute(HttpMethod.GET, url, params=params, timeout=self.timeout) + if not response.is_ok or not response.data: + logger.log("NOT_FOUND", f"No streams found for {item.log_string}") + return {} + + torrents: Dict[str, str] = {} + for result in response.data: + if not result.raw_title or not result.info_hash: + continue + torrents[result.info_hash] = result.raw_title + + if torrents: + logger.log("SCRAPER", f"Found {len(torrents)} streams for {item.log_string}") + else: + logger.log("NOT_FOUND", f"No streams found for {item.log_string}") + + return torrents \ No newline at end of file diff --git a/src/program/services/updaters/__init__.py b/src/program/services/updaters/__init__.py new file mode 100644 index 0000000..13bc9dd --- /dev/null +++ b/src/program/services/updaters/__init__.py @@ -0,0 +1,51 @@ +"""Updater module""" +from loguru import logger + +from program.media.item import MediaItem +from program.services.updaters.emby import EmbyUpdater +from program.services.updaters.jellyfin import JellyfinUpdater +from program.services.updaters.plex import PlexUpdater + + +class Updater: + def __init__(self): + self.key = "updater" + self.services = { + PlexUpdater: PlexUpdater(), + JellyfinUpdater: JellyfinUpdater(), + EmbyUpdater: EmbyUpdater(), + } + self.initialized = True + + def validate(self) -> bool: + """Validate that at least one updater service is initialized.""" + initialized_services = [service for service in self.services.values() if service.initialized] + return len(initialized_services) > 0 + + def run(self, item: MediaItem): + if not self.initialized: + logger.error("Updater is not initialized properly.") + return + + for service_cls, service in self.services.items(): + if service.initialized: + try: + item = next(service.run(item)) + except Exception as e: + logger.error(f"{service_cls.__name__} failed to update {item.log_string}: {e}") + + # Lets update the attributes of the item and its children, we dont care if the service updated it or not. 
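+        # Setting update_folder to "updated" below is what stops get_items_to_update()
+        # from returning the same episodes on the next pass, even if every library
+        # updater above failed or is disabled.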
+ for _item in get_items_to_update(item): + _item.set("update_folder", "updated") + yield item + +def get_items_to_update(item: MediaItem) -> list[MediaItem]: + """Get items to update for a given item.""" + items_to_update = [] + if item.type in ["movie", "episode"]: + items_to_update = [item] + if item.type == "show": + items_to_update = [e for s in item.seasons for e in s.episodes if e.symlinked and e.get("update_folder") != "updated"] + elif item.type == "season": + items_to_update = [e for e in item.episodes if e.symlinked and e.update_folder != "updated"] + return items_to_update \ No newline at end of file diff --git a/src/program/services/updaters/emby.py b/src/program/services/updaters/emby.py new file mode 100644 index 0000000..9b8617f --- /dev/null +++ b/src/program/services/updaters/emby.py @@ -0,0 +1,121 @@ +"""Emby Updater module""" +from types import SimpleNamespace +from typing import Generator, Optional, Type + +from loguru import logger + +from program.media.item import MediaItem +from program.settings.manager import settings_manager +from program.utils.request import ( + BaseRequestHandler, + HttpMethod, + ResponseObject, + ResponseType, + Session, + create_service_session, +) + + +class EmbyRequestHandler(BaseRequestHandler): + def __init__(self, session: Session, response_type=ResponseType.SIMPLE_NAMESPACE, custom_exception: Optional[Type[Exception]] = None, request_logging: bool = False): + super().__init__(session, response_type=response_type, custom_exception=custom_exception, request_logging=request_logging) + + def execute(self, method: HttpMethod, endpoint: str, **kwargs) -> ResponseObject: + return super()._request(method, endpoint, **kwargs) + +class EmbyUpdater: + def __init__(self): + self.key = "emby" + self.initialized = False + self.settings = settings_manager.settings.updaters.emby + session = create_service_session() + self.request_handler = EmbyRequestHandler(session) + self.initialized = self.validate() + if not self.initialized: + return + logger.success("Emby Updater initialized!") + + def validate(self) -> bool: + """Validate Emby library""" + if not self.settings.enabled: + return False + if not self.settings.api_key: + logger.error("Emby API key is not set!") + return False + if not self.settings.url: + logger.error("Emby URL is not set!") + return False + try: + response = self.request_handler.execute(HttpMethod.GET, f"{self.settings.url}/Users?api_key={self.settings.api_key}") + if response.is_ok: + self.initialized = True + return True + except Exception as e: + logger.exception(f"Emby exception thrown: {e}") + return False + + def run(self, item: MediaItem) -> Generator[MediaItem, None, None]: + """Update Emby library for a single item or a season with its episodes""" + items_to_update = [] + + if item.type in ["movie", "episode"]: + items_to_update = [item] + elif item.type == "show": + for season in item.seasons: + items_to_update += [e for e in season.episodes if e.symlinked and e.update_folder != "updated"] + elif item.type == "season": + items_to_update = [e for e in item.episodes if e.symlinked and e.update_folder != "updated"] + + if not items_to_update: + logger.debug(f"No items to update for {item.log_string}") + return + + updated = False + updated_episodes = [] + + for item_to_update in items_to_update: + if self.update_item(item_to_update): + updated_episodes.append(item_to_update) + updated = True + + if updated: + if item.type in ["show", "season"]: + if len(updated_episodes) == len(items_to_update): + logger.log("EMBY", f"Updated 
all episodes for {item.log_string}") + else: + updated_episodes_log = ", ".join([str(ep.number) for ep in updated_episodes]) + logger.log("EMBY", f"Updated episodes {updated_episodes_log} in {item.log_string}") + else: + logger.log("EMBY", f"Updated {item.log_string}") + + yield item + + + def update_item(self, item: MediaItem) -> bool: + """Update the Emby item""" + if item.symlinked and item.update_folder != "updated" and item.symlink_path: + try: + response = self.request_handler.execute(HttpMethod.POST, + f"{self.settings.url}/Library/Media/Updated", + json={"Updates": [{"Path": item.symlink_path, "UpdateType": "Created"}]}, + params={"api_key": self.settings.api_key}, + ) + if response.is_ok: + return True + except Exception as e: + logger.error(f"Failed to update Emby item: {e}") + return False + + # not needed to update, but maybe useful in the future? + def get_libraries(self) -> list[SimpleNamespace]: + """Get the libraries from Emby""" + try: + response = self.request_handler.execute(HttpMethod.GET, + f"{self.settings.url}/Library/VirtualFolders", + params={"api_key": self.settings.api_key}, + ) + if response.is_ok and response.data: + return response.data + except Exception as e: + logger.error(f"Failed to get Emby libraries: {e}") + return [] diff --git a/src/program/services/updaters/jellyfin.py b/src/program/services/updaters/jellyfin.py new file mode 100644 index 0000000..0a9fb45 --- /dev/null +++ b/src/program/services/updaters/jellyfin.py @@ -0,0 +1,122 @@ +"""Jellyfin Updater module""" +from types import SimpleNamespace +from typing import Generator, Optional, Type + +from loguru import logger + +from program.media.item import MediaItem +from program.settings.manager import settings_manager +from program.utils.request import ( + BaseRequestHandler, + HttpMethod, + ResponseObject, + ResponseType, + Session, + create_service_session, +) + + +class JellyfinRequestHandler(BaseRequestHandler): + def __init__(self, session: Session, response_type=ResponseType.SIMPLE_NAMESPACE, custom_exception: Optional[Type[Exception]] = None, request_logging: bool = False): + super().__init__(session, response_type=response_type, custom_exception=custom_exception, request_logging=request_logging) + + def execute(self, method: HttpMethod, endpoint: str, **kwargs) -> ResponseObject: + return super()._request(method, endpoint, **kwargs) + +class JellyfinUpdater: + def __init__(self): + self.key = "jellyfin" + self.initialized = False + self.settings = settings_manager.settings.updaters.jellyfin + session = create_service_session() + self.request_handler = JellyfinRequestHandler(session) + self.initialized = self.validate() + if not self.initialized: + return + logger.success("Jellyfin Updater initialized!") + + def validate(self) -> bool: + """Validate Jellyfin library""" + if not self.settings.enabled: + return False + if not self.settings.api_key: + logger.error("Jellyfin API key is not set!") + return False + if not self.settings.url: + logger.error("Jellyfin URL is not set!") + return False + + try: + response = self.request_handler.execute(HttpMethod.GET, f"{self.settings.url}/Users", params={"api_key": self.settings.api_key}) + if response.is_ok: + self.initialized = True + return True + except Exception as e: + logger.exception(f"Jellyfin exception thrown: {e}") + return False + + def run(self, item: MediaItem) -> Generator[MediaItem, None, None]: + """Update Jellyfin library for a single item or a season with its episodes""" + items_to_update = [] + + if item.type in ["movie", 
"episode"]: + items_to_update = [item] + elif item.type == "show": + for season in item.seasons: + items_to_update += [e for e in season.episodes if e.symlinked and e.update_folder != "updated"] + elif item.type == "season": + items_to_update = [e for e in item.episodes if e.symlinked and e.update_folder != "updated"] + + if not items_to_update: + logger.debug(f"No items to update for {item.log_string}") + return + + updated = False + updated_episodes = [] + + for item_to_update in items_to_update: + if self.update_item(item_to_update): + updated_episodes.append(item_to_update) + updated = True + + if updated: + if item.type in ["show", "season"]: + if len(updated_episodes) == len(items_to_update): + logger.log("JELLYFIN", f"Updated all episodes for {item.log_string}") + else: + updated_episodes_log = ", ".join([str(ep.number) for ep in updated_episodes]) + logger.log("JELLYFIN", f"Updated episodes {updated_episodes_log} in {item.log_string}") + else: + logger.log("JELLYFIN", f"Updated {item.log_string}") + + yield item + + + def update_item(self, item: MediaItem) -> bool: + """Update the Jellyfin item""" + if item.symlinked and item.update_folder != "updated" and item.symlink_path: + try: + response = self.request_handler.execute(HttpMethod.POST, + f"{self.settings.url}/Library/Media/Updated", + json={"Updates": [{"Path": item.symlink_path, "UpdateType": "Created"}]}, + params={"api_key": self.settings.api_key}, + ) + if response.is_ok: + return True + except Exception as e: + logger.error(f"Failed to update Jellyfin item: {e}") + return False + + # not needed to update, but maybe useful in the future? + def get_libraries(self) -> list[SimpleNamespace]: + """Get the libraries from Jellyfin""" + try: + response = self.request_handler.execute(HttpMethod.GET, + f"{self.settings.url}/Library/VirtualFolders", + params={"api_key": self.settings.api_key}, + ) + if response.is_ok and response.data: + return response.data + except Exception as e: + logger.error(f"Failed to get Jellyfin libraries: {e}") + return [] diff --git a/src/program/services/updaters/plex.py b/src/program/services/updaters/plex.py new file mode 100644 index 0000000..a16dc11 --- /dev/null +++ b/src/program/services/updaters/plex.py @@ -0,0 +1,118 @@ +"""Plex Updater module""" +import os +from typing import Dict, Generator, List, Union + +from kink import di +from loguru import logger +from plexapi.exceptions import BadRequest, Unauthorized +from plexapi.library import LibrarySection +from requests.exceptions import ConnectionError as RequestsConnectionError +from urllib3.exceptions import MaxRetryError, NewConnectionError, RequestError + +from program.apis.plex_api import PlexAPI +from program.media.item import Episode, Movie, Season, Show +from program.settings.manager import settings_manager + + +class PlexUpdater: + def __init__(self): + self.key = "plexupdater" + self.initialized = False + self.library_path = os.path.abspath( + os.path.dirname(settings_manager.settings.symlink.library_path) + ) + self.settings = settings_manager.settings.updaters.plex + self.api = None + self.sections: Dict[LibrarySection, List[str]] = {} + self.initialized = self.validate() + if not self.initialized: + return + logger.success("Plex Updater initialized!") + + def validate(self) -> bool: # noqa: C901 + """Validate Plex library""" + if not self.settings.enabled: + return False + if not self.settings.token: + logger.error("Plex token is not set!") + return False + if not self.settings.url: + logger.error("Plex URL is not set!") + return 
False + if not self.library_path or not os.path.exists(self.library_path): + logger.error("Library path is not set or does not exist!") + return False + + try: + self.api = di[PlexAPI] + self.api.validate_server() + self.sections = self.api.map_sections_with_paths() + self.initialized = True + return True + except Unauthorized as e: + logger.error(f"Plex is not authorized!: {e}") + except TimeoutError as e: + logger.exception(f"Plex timeout error: {e}") + except BadRequest as e: + logger.exception(f"Plex is not configured correctly!: {e}") + except MaxRetryError as e: + logger.exception(f"Plex max retries exceeded: {e}") + except NewConnectionError as e: + logger.exception(f"Plex new connection error: {e}") + except RequestsConnectionError as e: + logger.exception(f"Plex requests connection error: {e}") + except RequestError as e: + logger.exception(f"Plex request error: {e}") + except Exception as e: + logger.exception(f"Plex exception thrown: {e}") + return False + + def run(self, item: Union[Movie, Show, Season, Episode]) -> Generator[Union[Movie, Show, Season, Episode], None, None]: + """Update Plex library section for a single item or a season with its episodes""" + + item_type = "movie" if isinstance(item, Movie) else "show" + updated = False + updated_episodes = [] + items_to_update = [] + + if isinstance(item, (Movie, Episode)): + items_to_update = [item] + elif isinstance(item, Show): + for season in item.seasons: + items_to_update += [e for e in season.episodes if e.symlinked and e.get("update_folder") != "updated" ] + elif isinstance(item, Season): + items_to_update = [e for e in item.episodes if e.symlinked and e.update_folder != "updated"] + + if not items_to_update: + logger.debug(f"No items to update for {item.log_string}") + return + + section_name = None + # any failures are usually because we are updating Plex too fast + for section, paths in self.sections.items(): + if section.type == item_type: + for path in paths: + if isinstance(item, (Show, Season)): + for episode in items_to_update: + if episode.update_folder and str(path) in str(episode.update_folder): + if self.api.update_section(section, episode): + updated_episodes.append(episode) + section_name = section.title + updated = True + elif isinstance(item, (Movie, Episode)): + if item.update_folder and str(path) in str(item.update_folder): + if self.api.update_section(section, item): + section_name = section.title + updated = True + + if updated: + if isinstance(item, (Show, Season)): + if len(updated_episodes) == len(items_to_update): + logger.log("PLEX", f"Updated section {section_name} with all episodes for {item.log_string}") + else: + updated_episodes_log = ", ".join([str(ep.number) for ep in updated_episodes]) + logger.log("PLEX", f"Updated section {section_name} for episodes {updated_episodes_log} in {item.log_string}") + else: + logger.log("PLEX", f"Updated section {section_name} for {item.log_string}") + + yield item diff --git a/src/program/settings/__init__.py b/src/program/settings/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/program/settings/manager.py b/src/program/settings/manager.py new file mode 100644 index 0000000..5b7ffbb --- /dev/null +++ b/src/program/settings/manager.py @@ -0,0 +1,88 @@ +import json +import os + +from loguru import logger +from pydantic import ValidationError + +from program.settings.models import AppModel, Observable +from program.utils import data_dir_path + + +class SettingsManager: + """Class that handles settings, ensuring they are validated 
against a Pydantic schema.""" + + def __init__(self): + self.observers = [] + self.filename = "settings.json" + self.settings_file = data_dir_path / self.filename + + Observable.set_notify_observers(self.notify_observers) + + if not self.settings_file.exists(): + self.settings = AppModel() + self.settings = AppModel.model_validate( + self.check_environment(json.loads(self.settings.model_dump_json()), "RIVEN") + ) + self.notify_observers() + else: + self.load() + + def register_observer(self, observer): + self.observers.append(observer) + + def notify_observers(self): + for observer in self.observers: + observer() + + def check_environment(self, settings, prefix="", seperator="_"): + checked_settings = {} + for key, value in settings.items(): + if isinstance(value, dict): + sub_checked_settings = self.check_environment(value, f"{prefix}{seperator}{key}") + checked_settings[key] = (sub_checked_settings) + else: + environment_variable = f"{prefix}_{key}".upper() + if os.getenv(environment_variable, None): + new_value = os.getenv(environment_variable) + if isinstance(value, bool): + checked_settings[key] = new_value.lower() == "true" or new_value == "1" + elif isinstance(value, int): + checked_settings[key] = int(new_value) + elif isinstance(value, float): + checked_settings[key] = float(new_value) + elif isinstance(value, list): + checked_settings[key] = json.loads(new_value) + else: + checked_settings[key] = new_value + else: + checked_settings[key] = value + return checked_settings + + def load(self, settings_dict: dict | None = None): + """Load settings from file, validating against the AppModel schema.""" + try: + if not settings_dict: + with open(self.settings_file, "r", encoding="utf-8") as file: + settings_dict = json.loads(file.read()) + if os.environ.get("RIVEN_FORCE_ENV", "false").lower() == "true": + settings_dict = self.check_environment(settings_dict, "RIVEN") + self.settings = AppModel.model_validate(settings_dict) + self.save() + except ValidationError as e: + logger.error(f"Error validating settings: {e}") + raise + except json.JSONDecodeError as e: + logger.error(f"Error parsing settings file: {e}") + raise + except FileNotFoundError: + logger.warning(f"Error loading settings: {self.settings_file} does not exist") + raise + self.notify_observers() + + def save(self): + """Save settings to file, using Pydantic model for JSON serialization.""" + with open(self.settings_file, "w", encoding="utf-8") as file: + file.write(self.settings.model_dump_json(indent=4)) + + +settings_manager = SettingsManager() \ No newline at end of file diff --git a/src/program/settings/migratable.py b/src/program/settings/migratable.py new file mode 100644 index 0000000..999a5ea --- /dev/null +++ b/src/program/settings/migratable.py @@ -0,0 +1,11 @@ +from pydantic import BaseModel + + +class MigratableBaseModel(BaseModel): + def __init__(self, **data): + for field_name, field in self.model_fields.items(): + if field_name not in data: + default_value = field.default if field.default is not None else None + data[field_name] = default_value + super().__init__(**data) + diff --git a/src/program/settings/models.py b/src/program/settings/models.py new file mode 100644 index 0000000..3e8a69e --- /dev/null +++ b/src/program/settings/models.py @@ -0,0 +1,377 @@ +"""Riven settings models""" +from pathlib import Path +from typing import Any, Callable, List + +from pydantic import BaseModel, Field, field_validator +from RTN.models import SettingsModel + +from program.settings.migratable import MigratableBaseModel 
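+# MigratableBaseModel (see migratable.py) backfills fields missing from an older
+# settings.json with their declared defaults, so adding new options to the models
+# below does not break loading of existing settings files.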
+from program.utils import generate_api_key, get_version + +deprecation_warning = "This has been deprecated and will be removed in a future version." + + +class Observable(MigratableBaseModel): + class Config: + arbitrary_types_allowed = True + + _notify_observers: Callable = None + + @classmethod + def set_notify_observers(cls, notify_observers_callable): + cls._notify_observers = notify_observers_callable + + def __setattr__(self, name, value): + super().__setattr__(name, value) + if self.__class__._notify_observers: + with self._notify_observers_context(): + self.__class__._notify_observers() + + @staticmethod + def _notify_observers_context(): + class NotifyContextManager: + def __enter__(self_): + pass + + def __exit__(self_, exc_type, exc_value, traceback): + pass + + return NotifyContextManager() + + +# Download Services + + +class RealDebridModel(Observable): + enabled: bool = False + api_key: str = "" + proxy_enabled: bool = False + proxy_url: str = "" + + +class AllDebridModel(Observable): + enabled: bool = False + api_key: str = "" + proxy_enabled: bool = False + proxy_url: str = "" + + +class TorboxModel(Observable): + enabled: bool = False + api_key: str = "" + + +class DownloadersModel(Observable): + video_extensions: List[str] = ["mp4", "mkv", "avi"] + prefer_speed_over_quality: bool = True + movie_filesize_mb_min: int = 0 # MB + movie_filesize_mb_max: int = -1 # MB (-1 is no limit) + episode_filesize_mb_min: int = 0 # MB + episode_filesize_mb_max: int = -1 # MB (-1 is no limit) + real_debrid: RealDebridModel = RealDebridModel() + all_debrid: AllDebridModel = AllDebridModel() + torbox: TorboxModel = TorboxModel() + + +# Symlink Service + + +class SymlinkModel(Observable): + rclone_path: Path = Path() + library_path: Path = Path() + separate_anime_dirs: bool = False + repair_symlinks: bool = False + repair_interval: float = 6 + retry_delays: List[int] = [5, 10, 20, 40, 80, 100, 120] # Fixed retry delays in seconds + + +# Content Services + + +class Updatable(Observable): + update_interval: int = 80 + + @field_validator("update_interval") + def check_update_interval(cls, v): + if v < (limit := 5): + raise ValueError(f"update_interval must be at least {limit} seconds") + return v + + +# Updaters + + +class PlexLibraryModel(Observable): + enabled: bool = False + token: str = "" + url: str = "http://localhost:32400" + + +class JellyfinLibraryModel(Observable): + enabled: bool = False + api_key: str = "" + url: str = "http://localhost:8096" + + +class EmbyLibraryModel(Observable): + enabled: bool = False + api_key: str = "" + url: str = "http://localhost:8096" + + +class UpdatersModel(Observable): + updater_interval: int = 120 + plex: PlexLibraryModel = PlexLibraryModel() + jellyfin: JellyfinLibraryModel = JellyfinLibraryModel() + emby: EmbyLibraryModel = EmbyLibraryModel() + + +# Content Services + + +class ListrrModel(Updatable): + enabled: bool = False + movie_lists: List[str] = [] + show_lists: List[str] = [] + api_key: str = "" + update_interval: int = 86400 + + +class MdblistModel(Updatable): + enabled: bool = False + api_key: str = "" + lists: List[int | str] = [] + update_interval: int = 86400 + + +class OverseerrModel(Updatable): + enabled: bool = False + url: str = "http://localhost:5055" + api_key: str = "" + use_webhook: bool = False + update_interval: int = 60 + + +class PlexWatchlistModel(Updatable): + enabled: bool = False + rss: List[str] = [] + update_interval: int = 60 + + +class TraktOauthModel(BaseModel): + oauth_client_id: str = "" + oauth_client_secret: str = 
"" + oauth_redirect_uri: str = "" + access_token: str = "" + refresh_token: str = "" + + +class TraktModel(Updatable): + enabled: bool = False + api_key: str = "" + watchlist: List[str] = [] + user_lists: List[str] = [] + collection: List[str] = [] + fetch_trending: bool = False + trending_count: int = 10 + fetch_popular: bool = False + popular_count: int = 10 + fetch_most_watched: bool = False + most_watched_period: str = "weekly" + most_watched_count: int = 10 + update_interval: int = 86400 + oauth: TraktOauthModel = TraktOauthModel() + + +class ContentModel(Observable): + overseerr: OverseerrModel = OverseerrModel() + plex_watchlist: PlexWatchlistModel = PlexWatchlistModel() + mdblist: MdblistModel = MdblistModel() + listrr: ListrrModel = ListrrModel() + trakt: TraktModel = TraktModel() + + +# Scraper Services + + +class TorrentioConfig(Observable): + enabled: bool = False + filter: str = "sort=qualitysize%7Cqualityfilter=480p,scr,cam" + url: str = "http://torrentio.strem.fun" + timeout: int = 30 + ratelimit: bool = Field(default=True, deprecated=deprecation_warning) + + +class KnightcrawlerConfig(Observable): + enabled: bool = False + filter: str = "sort=qualitysize%7Cqualityfilter=480p,scr,cam" + url: str = "https://knightcrawler.elfhosted.com" + timeout: int = 30 + ratelimit: bool = True + + +class CometConfig(Observable): + enabled: bool = False + url: str = "http://localhost:8000" + indexers: List[str] = [ + "bitsearch", + "eztv", + "thepiratebay", + "therarbg", + "yts" + ] + timeout: int = 30 + ratelimit: bool = True + + +class ZileanConfig(Observable): + enabled: bool = False + url: str = "http://localhost:8181" + timeout: int = 30 + ratelimit: bool = Field(default=True, deprecated=deprecation_warning) + + +class MediafusionConfig(Observable): + enabled: bool = False + url: str = "https://mediafusion.elfhosted.com" + timeout: int = 30 + ratelimit: bool = True + catalogs: List[str] = [ + "prowlarr_streams", + "torrentio_streams", + "zilean_dmm_streams" + ] + + +class OrionoidConfig(Observable): + enabled: bool = False + api_key: str = "" + cached_results_only: bool = False + parameters: dict[str, Any] = { + "video3d": "false", + "videoquality": "sd_hd8k", + "limitcount": 5 + } + timeout: int = 30 + ratelimit: bool = Field(default=True, deprecated=deprecation_warning) + + +class JackettConfig(Observable): + enabled: bool = False + url: str = "http://localhost:9117" + api_key: str = "" + timeout: int = 30 + ratelimit: bool = True + + +class ProwlarrConfig(Observable): + enabled: bool = False + url: str = "http://localhost:9696" + api_key: str = "" + timeout: int = 30 + ratelimit: bool = True + limiter_seconds: int = 60 + + +class TorBoxScraperConfig(Observable): + enabled: bool = False + api_key: str = "" + timeout: int = 30 + + +class ScraperModel(Observable): + after_2: float = 2 + after_5: int = 6 + after_10: int = 24 + parse_debug: bool = False + enable_aliases: bool = True + torrentio: TorrentioConfig = TorrentioConfig() + knightcrawler: KnightcrawlerConfig = KnightcrawlerConfig() + jackett: JackettConfig = JackettConfig() + prowlarr: ProwlarrConfig = ProwlarrConfig() + orionoid: OrionoidConfig = OrionoidConfig() + torbox_scraper: TorBoxScraperConfig = TorBoxScraperConfig() + mediafusion: MediafusionConfig = MediafusionConfig() + zilean: ZileanConfig = ZileanConfig() + comet: CometConfig = CometConfig() + + +# Version Ranking Model (set application defaults here!) + + +class RTNSettingsModel(SettingsModel, Observable): + ... 
+ + +# Application Settings + + +class IndexerModel(Observable): + update_interval: int = 60 * 60 + + +class LoggingModel(Observable): + ... + + +class DatabaseModel(Observable): + host: str = "postgresql+psycopg2://postgres:postgres@localhost/riven" + + +class NotificationsModel(Observable): + enabled: bool = False + title: str = "Riven completed something!" + on_item_type: List[str] = ["movie", "show", "season"] + service_urls: List[str] = [] + + +class SubliminalConfig(Observable): + enabled: bool = False + languages: List[str] = ["eng"] + providers: dict = { + "opensubtitles": { + "enabled": False, + "username": "", + "password": "" + }, + "opensubtitlescom": { + "enabled": False, + "username": "", + "password": "" + } + } + + +class PostProcessing(Observable): + subliminal: SubliminalConfig = SubliminalConfig() + + +class AppModel(Observable): + version: str = get_version() + api_key: str = "" + debug: bool = True + debug_database: bool = False + log: bool = True + force_refresh: bool = False + map_metadata: bool = True + tracemalloc: bool = False + symlink: SymlinkModel = SymlinkModel() + updaters: UpdatersModel = UpdatersModel() + downloaders: DownloadersModel = DownloadersModel() + content: ContentModel = ContentModel() + scraping: ScraperModel = ScraperModel() + ranking: RTNSettingsModel = RTNSettingsModel() + indexer: IndexerModel = IndexerModel() + database: DatabaseModel = DatabaseModel() + notifications: NotificationsModel = NotificationsModel() + post_processing: PostProcessing = PostProcessing() + + def __init__(self, **data: Any): + current_version = get_version() + existing_version = data.get("version", current_version) + super().__init__(**data) + if existing_version < current_version: + self.version = current_version + + if self.api_key == "": + self.api_key = generate_api_key() diff --git a/src/program/settings/versions.py b/src/program/settings/versions.py new file mode 100644 index 0000000..0314ccf --- /dev/null +++ b/src/program/settings/versions.py @@ -0,0 +1,36 @@ +from loguru import logger +from RTN.models import BaseRankingModel, BestRanking, DefaultRanking + + +class RankModels: + """ + The `RankModels` class represents a collection of ranking models for different categories. + Each ranking model is a subclass of the `BaseRankingModel` class. + + Attributes: + `default` (DefaultRanking): The default ranking model for getting best results for non-transcoded releases. + `custom` (BaseRankingModel): Uses a base ranking model for all categories with all ranks set to 0. + `best` (BestRanking): The best ranking model for getting the highest quality releases. + + Methods: + `get(name: str)` -> `BaseRankingModel`: Returns a ranking model based on the given name. + + Note: + If the name is not found, use the `custom` model which uses a base ranking model for all categories with all ranks set to 0. + """ + + custom: BaseRankingModel = BaseRankingModel() # All ranks set to 0 by default + default: DefaultRanking = DefaultRanking() # Good for 720p/1080p releases + best: BestRanking = BestRanking() # Good for 4K HDR REMUX releases + + @classmethod + def get(cls, name: str) -> BaseRankingModel: + """Get a ranking model by name.""" + model = getattr(cls, name, None) + if model is None: + logger.warning(f"Ranking model '{name}' not found. 
Setting to custom model.") + return cls.custom + return model + + +models = RankModels() diff --git a/src/program/state_transition.py b/src/program/state_transition.py new file mode 100644 index 0000000..9ecec85 --- /dev/null +++ b/src/program/state_transition.py @@ -0,0 +1,83 @@ +from loguru import logger +from program.media import MediaItem, States +from program.services.downloaders import Downloader +from program.services.indexers.trakt import TraktIndexer +from program.services.post_processing import PostProcessing, notify +from program.services.post_processing.subliminal import Subliminal +from program.services.scrapers import Scraping +from program.services.updaters import Updater +from program.settings.manager import settings_manager +from program.symlink import Symlinker +from program.types import ProcessedEvent, Service + + +def process_event(emitted_by: Service, existing_item: MediaItem | None = None, content_item: MediaItem | None = None) -> ProcessedEvent: + """Process an event and return the updated item, next service and items to submit.""" + next_service: Service = None + no_further_processing: ProcessedEvent = (None, []) + items_to_submit = [] + +#TODO - Reindex non-released badly indexed items here + if content_item or (existing_item is not None and existing_item.last_state == States.Requested): + next_service = TraktIndexer + logger.debug(f"Submitting {content_item.log_string if content_item else existing_item.log_string} to trakt indexer") + return next_service, [content_item or existing_item] + + elif existing_item is not None and existing_item.last_state in [States.PartiallyCompleted, States.Ongoing]: + if existing_item.type == "show": + for season in existing_item.seasons: + if season.last_state not in [States.Completed, States.Unreleased]: + _, sub_items = process_event(emitted_by, season, None) + items_to_submit += sub_items + elif existing_item.type == "season": + for episode in existing_item.episodes: + if episode.last_state != States.Completed: + _, sub_items = process_event(emitted_by, episode, None) + items_to_submit += sub_items + + elif existing_item is not None and existing_item.last_state == States.Indexed: + next_service = Scraping + if emitted_by != Scraping and Scraping.should_submit(existing_item): + items_to_submit = [existing_item] + elif existing_item.type == "show": + items_to_submit = [s for s in existing_item.seasons if s.last_state != States.Completed and Scraping.should_submit(s)] + elif existing_item.type == "season": + items_to_submit = [e for e in existing_item.episodes if e.last_state != States.Completed and Scraping.should_submit(e)] + + elif existing_item is not None and existing_item.last_state == States.Scraped: + next_service = Downloader + items_to_submit = [existing_item] + + elif existing_item is not None and existing_item.last_state == States.Downloaded: + next_service = Symlinker + items_to_submit = [existing_item] + + elif existing_item is not None and existing_item.last_state == States.Symlinked: + next_service = Updater + items_to_submit = [existing_item] + + elif existing_item is not None and existing_item.last_state == States.Completed: + # If a user manually retries an item, lets not notify them again + if emitted_by not in ["RetryItem", PostProcessing]: + notify(existing_item) + # Avoid multiple post-processing runs + if emitted_by != PostProcessing: + if settings_manager.settings.post_processing.subliminal.enabled: + next_service = PostProcessing + if existing_item.type in ["movie", "episode"] and 
Subliminal.should_submit(existing_item): + items_to_submit = [existing_item] + elif existing_item.type == "show": + items_to_submit = [e for s in existing_item.seasons for e in s.episodes if e.last_state == States.Completed and Subliminal.should_submit(e)] + elif existing_item.type == "season": + items_to_submit = [e for e in existing_item.episodes if e.last_state == States.Completed and Subliminal.should_submit(e)] + if not items_to_submit: + return no_further_processing + else: + + return no_further_processing + + # if items_to_submit and next_service: + # for item in items_to_submit: + # logger.debug(f"Submitting {item.log_string} ({item.id}) to {next_service if isinstance(next_service, str) else next_service.__name__}") + + return next_service, items_to_submit diff --git a/src/program/symlink.py b/src/program/symlink.py new file mode 100644 index 0000000..2c8fafe --- /dev/null +++ b/src/program/symlink.py @@ -0,0 +1,319 @@ +import os +import random +import shutil +from datetime import datetime, timedelta +from pathlib import Path +from typing import List, Optional, Union + +from loguru import logger +from sqlalchemy import select + +from program.db.db import db +from program.media.item import Episode, MediaItem, Movie, Season, Show +from program.media.state import States +from program.settings.manager import settings_manager + + +class Symlinker: + """ + A class that represents a symlinker thread. + + Settings Attributes: + rclone_path (str): The absolute path of the rclone mount root directory. + library_path (str): The absolute path of the location we will create our symlinks that point to the rclone_path. + """ + + def __init__(self): + self.key = "symlink" + self.settings = settings_manager.settings.symlink + self.rclone_path = self.settings.rclone_path + self.initialized = self.validate() + if not self.initialized: + return + logger.info(f"Rclone path symlinks are pointed to: {self.rclone_path}") + logger.info(f"Symlinks will be placed in: {self.settings.library_path}") + logger.success("Symlink initialized!") + + def validate(self): + """Validate paths and create the initial folders.""" + library_path = self.settings.library_path + if not self.rclone_path or not library_path: + logger.error("rclone_path or library_path not provided.") + return False + if self.rclone_path == Path(".") or library_path == Path("."): + logger.error("rclone_path or library_path is set to the current directory.") + return False + if not self.rclone_path.exists(): + logger.error(f"rclone_path does not exist: {self.rclone_path}") + return False + if not library_path.exists(): + logger.error(f"library_path does not exist: {library_path}") + return False + if not self.rclone_path.is_absolute(): + logger.error(f"rclone_path is not an absolute path: {self.rclone_path}") + return False + if not library_path.is_absolute(): + logger.error(f"library_path is not an absolute path: {library_path}") + return False + return self._create_initial_folders() + + def _create_initial_folders(self): + """Create the initial library folders.""" + try: + self.library_path_movies = self.settings.library_path / "movies" + self.library_path_shows = self.settings.library_path / "shows" + self.library_path_anime_movies = self.settings.library_path / "anime_movies" + self.library_path_anime_shows = self.settings.library_path / "anime_shows" + folders = [ + self.library_path_movies, + self.library_path_shows, + self.library_path_anime_movies, + self.library_path_anime_shows, + ] + for folder in folders: + if not folder.exists(): + 
folder.mkdir(parents=True, exist_ok=True) + except FileNotFoundError as e: + logger.error(f"Path not found when creating directory: {e}") + return False + except PermissionError as e: + logger.error(f"Permission denied when creating directory: {e}") + return False + except OSError as e: + logger.error(f"OS error when creating directory: {e}") + return False + return True + + def run(self, item: Union[Movie, Show, Season, Episode]): + """Check if the media item exists and create a symlink if it does""" + items = self._get_items_to_update(item) + if not self._should_submit(items): + if item.symlinked_times == 7: + logger.debug(f"Soft resetting {item.log_string} because required files were not found") + item.blacklist_active_stream() + item.reset() + yield item + next_attempt = self._calculate_next_attempt(item) + logger.debug(f"Waiting for {item.log_string} to become available, next attempt in {round((next_attempt - datetime.now()).total_seconds())} seconds") + item.symlinked_times += 1 + yield (item, next_attempt) + try: + for _item in items: + self._symlink(_item) + logger.log("SYMLINKER", f"Symlinks created for {item.log_string}") + except Exception as e: + logger.error(f"Exception thrown when creating symlink for {item.log_string}: {e}") + yield item + + def _calculate_next_attempt(self, item: Union[Movie, Show, Season, Episode]) -> datetime: + """Calculate next retry attempt time using fixed delays.""" + delays = self.settings.retry_delays + attempt = min(item.symlinked_times, len(delays) - 1) + delay = timedelta(seconds=delays[attempt]) + return datetime.now() + delay + + def _should_submit(self, items: Union[Movie, Show, Season, Episode]) -> bool: + """Check if the item should be submitted for symlink creation.""" + random_item = random.choice(items) + if not _get_item_path(random_item): + return False + else: + return True + + def _get_items_to_update(self, item: Union[Movie, Show, Season, Episode]) -> List[Union[Movie, Episode]]: + items = [] + if item.type in ["episode", "movie"]: + items.append(item) + item.set("folder", item.folder) + elif item.type == "show": + for season in item.seasons: + for episode in season.episodes: + if episode.state == States.Downloaded: + items.append(episode) + elif item.type == "season": + for episode in item.episodes: + if episode.state == States.Downloaded: + items.append(episode) + return items + + def symlink(self, item: Union[Movie, Episode]) -> bool: + """Create a symlink for the given media item if it does not already exist.""" + return self._symlink(item) + + def _symlink(self, item: Union[Movie, Episode]) -> bool: + """Create a symlink for the given media item if it does not already exist.""" + if not item: + logger.error(f"Invalid item sent to Symlinker: {item}") + return False + + source = _get_item_path(item) + if not source: + logger.error(f"Could not find path for {item.log_string}, cannot create symlink.") + return False + + filename = self._determine_file_name(item) + if not filename: + logger.error(f"Symlink filename is None for {item.log_string}, cannot create symlink.") + return False + + extension = os.path.splitext(item.file)[1][1:] + symlink_filename = f"{filename}.{extension}" + destination = self._create_item_folders(item, symlink_filename) + + try: + if os.path.islink(destination): + os.remove(destination) + os.symlink(source, destination) + except PermissionError as e: + # This still creates the symlinks, however they will have wrong perms. User needs to fix their permissions. 
+ # TODO: Maybe we validate symlink class by symlinking a test file, then try removing it and see if it still exists + logger.exception(f"Permission denied when creating symlink for {item.log_string}: {e}") + except OSError as e: + if e.errno == 36: + # This will cause a loop if it hits this.. users will need to fix their paths + # TODO: Maybe create an alternative naming scheme to cover this? + logger.error(f"Filename too long when creating symlink for {item.log_string}: {e}") + else: + logger.error(f"OS error when creating symlink for {item.log_string}: {e}") + return False + + if Path(destination).readlink() != source: + logger.error(f"Symlink validation failed: {destination} does not point to {source} for {item.log_string}") + return False + + item.set("symlinked", True) + item.set("symlinked_at", datetime.now()) + item.set("symlinked_times", item.symlinked_times + 1) + item.set("symlink_path", destination) + return True + + def _create_item_folders(self, item: Union[Movie, Show, Season, Episode], filename: str) -> str: + """Create necessary folders and determine the destination path for symlinks.""" + is_anime: bool = hasattr(item, "is_anime") and item.is_anime + + movie_path: Path = self.library_path_movies + show_path: Path = self.library_path_shows + + if self.settings.separate_anime_dirs and is_anime: + if isinstance(item, Movie): + movie_path = self.library_path_anime_movies + elif isinstance(item, (Show, Season, Episode)): + show_path = self.library_path_anime_shows + + def create_folder_path(base_path, *subfolders): + path = os.path.join(base_path, *subfolders) + os.makedirs(path, exist_ok=True) + return path + + if isinstance(item, Movie): + movie_folder = f"{item.title.replace('/', '-')} ({item.aired_at.year}) {{imdb-{item.imdb_id}}}" + destination_folder = create_folder_path(movie_path, movie_folder) + item.set("update_folder", destination_folder) + elif isinstance(item, Show): + folder_name_show = f"{item.title.replace('/', '-')} ({item.aired_at.year}) {{imdb-{item.imdb_id}}}" + destination_folder = create_folder_path(show_path, folder_name_show) + item.set("update_folder", destination_folder) + elif isinstance(item, Season): + show = item.parent + folder_name_show = f"{show.title.replace('/', '-')} ({show.aired_at.year}) {{imdb-{show.imdb_id}}}" + show_path = create_folder_path(show_path, folder_name_show) + folder_season_name = f"Season {str(item.number).zfill(2)}" + destination_folder = create_folder_path(show_path, folder_season_name) + item.set("update_folder", destination_folder) + elif isinstance(item, Episode): + show = item.parent.parent + folder_name_show = f"{show.title.replace('/', '-')} ({show.aired_at.year}) {{imdb-{show.imdb_id}}}" + show_path = create_folder_path(show_path, folder_name_show) + season = item.parent + folder_season_name = f"Season {str(season.number).zfill(2)}" + destination_folder = create_folder_path(show_path, folder_season_name) + item.set("update_folder", destination_folder) + + return os.path.join(destination_folder, filename.replace("/", "-")) + + def _determine_file_name(self, item: Union[Movie, Episode]) -> str | None: + """Determine the filename of the symlink.""" + filename = None + if isinstance(item, Movie): + filename = f"{item.title} ({item.aired_at.year}) " + "{imdb-" + item.imdb_id + "}" + elif isinstance(item, Season): + showname = item.parent.title + showyear = item.parent.aired_at.year + filename = f"{showname} ({showyear}) - Season {str(item.number).zfill(2)}" + elif isinstance(item, Episode): + episode_string = "" + 
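+            # A single file can carry several episodes; in that case build a range tag such as
+            # "e01-e03" below, otherwise fall back to a single "e02"-style tag.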
episode_number: List[int] = item.get_file_episodes() + if episode_number and item.number in episode_number: + if len(episode_number) > 1: + episode_string = f"e{str(episode_number[0]).zfill(2)}-e{str(episode_number[-1]).zfill(2)}" + else: + episode_string = f"e{str(item.number).zfill(2)}" + if episode_string != "": + showname = item.parent.parent.title + showyear = item.parent.parent.aired_at.year + filename = f"{showname} ({showyear}) - s{str(item.parent.number).zfill(2)}{episode_string}" + return filename + + def delete_item_symlinks(self, item: "MediaItem") -> bool: + """Delete symlinks and directories based on the item type.""" + if not isinstance(item, (Movie, Show)): + logger.debug(f"skipping delete symlink for {item.log_string}: Not a movie or show") + return False + item_path = None + if isinstance(item, Show): + base_path = self.library_path_anime_shows if item.is_anime else self.library_path_shows + item_path = base_path / f"{item.title.replace('/', '-')} ({item.aired_at.year}) {{imdb-{item.imdb_id}}}" + elif isinstance(item, Movie): + base_path = self.library_path_anime_movies if item.is_anime else self.library_path_movies + item_path = base_path / f"{item.title.replace('/', '-')} ({item.aired_at.year}) {{imdb-{item.imdb_id}}}" + return _delete_symlink(item, item_path) + + def delete_item_symlinks_by_id(self, item_id: int) -> bool: + """Delete symlinks and directories based on the item ID.""" + with db.Session() as session: + item = session.execute(select(MediaItem).where(MediaItem.id == item_id)).unique().scalar_one() + if not item: + logger.error(f"No item found with ID {item_id}") + return False + return self.delete_item_symlinks(item) + +def _delete_symlink(item: Union[Movie, Show], item_path: Path) -> bool: + try: + if item_path.exists(): + shutil.rmtree(item_path) + logger.debug(f"Deleted symlink Directory for {item.log_string}") + return True + else: + logger.debug(f"Symlink Directory for {item.log_string} does not exist, skipping symlink deletion") + return True + except FileNotFoundError as e: + logger.error(f"File not found error when deleting symlink for {item.log_string}: {e}") + except PermissionError as e: + logger.error(f"Permission denied when deleting symlink for {item.log_string}: {e}") + except Exception as e: + logger.error(f"Failed to delete symlink for {item.log_string}, error: {e}") + return False + +def _get_item_path(item: Union[Movie, Episode]) -> Optional[Path]: + """Quickly check if the file exists in the rclone path.""" + if not item.file: + return None + + rclone_path = Path(settings_manager.settings.symlink.rclone_path) + possible_folders = [item.folder, item.file, item.alternative_folder] + possible_folders_without_duplicates = list(set(possible_folders)) + if len(possible_folders_without_duplicates) == 1: + new_possible_folder = Path(possible_folders_without_duplicates[0]).with_suffix("") + possible_folders_without_duplicates.append(new_possible_folder) + + for folder in possible_folders_without_duplicates: + if folder: + file_path = rclone_path / folder / item.file + if file_path.exists(): + return file_path + + # Not in a folder? Perhaps it's just sitting in the root. 
+ file = rclone_path / item.file + if file.exists() and file.is_file(): + return file + return None \ No newline at end of file diff --git a/src/program/types.py b/src/program/types.py new file mode 100644 index 0000000..1482939 --- /dev/null +++ b/src/program/types.py @@ -0,0 +1,55 @@ +from dataclasses import dataclass +from datetime import datetime +from typing import Generator, Optional, Union + +from program.media.item import MediaItem +from program.services.content import ( + Listrr, + Mdblist, + Overseerr, + PlexWatchlist, + TraktContent, +) +from program.services.downloaders import AllDebridDownloader, RealDebridDownloader + +# TorBoxDownloader, +from program.services.libraries import SymlinkLibrary +from program.services.scrapers import ( + Comet, + Jackett, + Knightcrawler, + Mediafusion, + Orionoid, + Scraping, + TorBoxScraper, + Torrentio, + Zilean, +) +from program.services.updaters import Updater +from program.symlink import Symlinker + +# Typehint classes +Scraper = Union[Scraping, Torrentio, Knightcrawler, Mediafusion, Orionoid, Jackett, TorBoxScraper, Zilean, Comet] +Content = Union[Overseerr, PlexWatchlist, Listrr, Mdblist, TraktContent] +Downloader = Union[RealDebridDownloader, + # TorBoxDownloader, + AllDebridDownloader] +Service = Union[Content, SymlinkLibrary, Scraper, Downloader, Symlinker, Updater] +MediaItemGenerator = Generator[MediaItem, None, MediaItem | None] + +class ProcessedEvent: + service: Service + related_media_items: list[MediaItem] + +@dataclass +class Event: + emitted_by: Service + item_id: Optional[str] = None + content_item: Optional[MediaItem] = None + run_at: datetime = datetime.now() + + @property + def log_message(self): + if self.content_item: + return f"Event for {self.content_item.log_string}" + return f"Event for Item ID: {self.item_id}" \ No newline at end of file diff --git a/src/program/utils/__init__.py b/src/program/utils/__init__.py new file mode 100644 index 0000000..6da07ac --- /dev/null +++ b/src/program/utils/__init__.py @@ -0,0 +1,38 @@ +import os +import re +import secrets +import string +from pathlib import Path + +from loguru import logger + +root_dir = Path(__file__).resolve().parents[3] + +data_dir_path = root_dir / "data" +alembic_dir = data_dir_path / "alembic" + +def get_version() -> str: + with open(root_dir / "pyproject.toml") as file: + pyproject_toml = file.read() + + match = re.search(r'version = "(.+)"', pyproject_toml) + if match: + version = match.group(1) + else: + raise ValueError("Could not find version in pyproject.toml") + return version + +def generate_api_key(): + """Generate a secure API key of the specified length.""" + API_KEY = os.getenv("API_KEY", "") + if len(API_KEY) != 32: + logger.warning("env.API_KEY is not 32 characters long, generating a new one...") + characters = string.ascii_letters + string.digits + + # Generate the API key + api_key = "".join(secrets.choice(characters) for _ in range(32)) + logger.warning(f"New api key: {api_key}") + else: + api_key = API_KEY + + return api_key \ No newline at end of file diff --git a/src/program/utils/cli.py b/src/program/utils/cli.py new file mode 100644 index 0000000..188b301 --- /dev/null +++ b/src/program/utils/cli.py @@ -0,0 +1,67 @@ +import argparse + +from program.db.db_functions import ( + hard_reset_database, + hard_reset_database_pre_migration, +) +from program.services.libraries.symlink import fix_broken_symlinks +from program.settings.manager import settings_manager +from program.utils.logging import log_cleaner, logger + + +def handle_args(): + 
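+    """Parse CLI arguments; the maintenance flags (database resets, log cleanup, symlink repair) run immediately and exit."""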
parser = argparse.ArgumentParser() + parser.add_argument( + "--ignore_cache", + action="store_true", + help="Ignore the cached metadata, create new data from scratch.", + ) + parser.add_argument( + "--hard_reset_db", + action="store_true", + help="Hard reset the database.", + ) + parser.add_argument( + "--hard_reset_db_pre_migration", + action="store_true", + help="Hard reset the database.", + ) + parser.add_argument( + "--clean_logs", + action="store_true", + help="Clean old logs.", + ) + parser.add_argument( + "--fix_symlinks", + action="store_true", + help="Fix broken symlinks.", + ) + parser.add_argument( + "-p", "--port", + type=int, + default=8080, + help="Port to run the server on (default: 8000)" + ) + + args = parser.parse_args() + + if args.hard_reset_db: + hard_reset_database() + logger.info("Hard reset the database") + exit(0) + + if args.hard_reset_db_pre_migration: + hard_reset_database_pre_migration() + logger.info("Hard reset the database") + exit(0) + + if args.clean_logs: + log_cleaner() + logger.info("Cleaned old logs.") + exit(0) + + if args.fix_symlinks: + fix_broken_symlinks(settings_manager.settings.symlink.library_path, settings_manager.settings.symlink.rclone_path) + exit(0) + + return args diff --git a/src/program/utils/logging.py b/src/program/utils/logging.py new file mode 100644 index 0000000..f752ebd --- /dev/null +++ b/src/program/utils/logging.py @@ -0,0 +1,137 @@ +"""Logging utils""" + +import os +import sys +from datetime import datetime + +from loguru import logger +from rich.console import Console +from rich.progress import ( + BarColumn, + Progress, + SpinnerColumn, + TextColumn, + TimeRemainingColumn, +) + +from program.settings.manager import settings_manager +from program.utils import data_dir_path + +LOG_ENABLED: bool = settings_manager.settings.log + +def setup_logger(level): + """Setup the logger""" + logs_dir_path = data_dir_path / "logs" + os.makedirs(logs_dir_path, exist_ok=True) + timestamp = datetime.now().strftime("%Y%m%d-%H%M") + log_filename = logs_dir_path / f"riven-{timestamp}.log" + + # Helper function to get log settings from environment or use default + def get_log_settings(name, default_color, default_icon): + color = os.getenv(f"RIVEN_LOGGER_{name}_FG", default_color) + icon = os.getenv(f"RIVEN_LOGGER_{name}_ICON", default_icon) + return f"", icon + + # Define log levels and their default settings + log_levels = { + "PROGRAM": (36, "cc6600", "🤖"), + "DATABASE": (37, "d834eb", "🛢️"), + "DEBRID": (38, "cc3333", "🔗"), + "SYMLINKER": (39, "F9E79F", "🔗"), + "SCRAPER": (40, "3D5A80", "👻"), + "COMPLETED": (41, "FFFFFF", "🟢"), + "CACHE": (42, "527826", "📜"), + "NOT_FOUND": (43, "818589", "🤷‍"), + "NEW": (44, "e63946", "✨"), + "FILES": (45, "FFFFE0", "🗃️ "), + "ITEM": (46, "92a1cf", "🗃️ "), + "DISCOVERY": (47, "e56c49", "🔍"), + "API": (47, "006989", "👾"), + "PLEX": (47, "DAD3BE", "📽️ "), + "LOCAL": (48, "DAD3BE", "📽️ "), + "JELLYFIN": (48, "DAD3BE", "📽️ "), + "EMBY": (48, "DAD3BE", "📽️ "), + "TRAKT": (48, "1DB954", "🎵"), + } + + # Set log levels + for name, (no, default_color, default_icon) in log_levels.items(): + color, icon = get_log_settings(name, default_color, default_icon) + logger.level(name, no=no, color=color, icon=icon) + + # Default log levels + debug_color, debug_icon = get_log_settings("DEBUG", "98C1D9", "🐞") + info_color, info_icon = get_log_settings("INFO", "818589", "📰") + warning_color, warning_icon = get_log_settings("WARNING", "ffcc00", "⚠️ ") + critical_color, critical_icon = get_log_settings("CRITICAL", "ff0000", "") + 
success_color, success_icon = get_log_settings("SUCCESS", "00ff00", "✔️ ") + + logger.level("DEBUG", color=debug_color, icon=debug_icon) + logger.level("INFO", color=info_color, icon=info_icon) + logger.level("WARNING", color=warning_color, icon=warning_icon) + logger.level("CRITICAL", color=critical_color, icon=critical_icon) + logger.level("SUCCESS", color=success_color, icon=success_icon) + + # Log format to match the old log format, but with color + log_format = ( + "{time:YY-MM-DD} {time:HH:mm:ss} | " + "{level.icon} {level: <9} | " + "{module}.{function} - {message}" + ) + + logger.configure(handlers=[ + { + "sink": sys.stderr, + "level": level.upper() or "INFO", + "format": log_format, + "backtrace": False, + "diagnose": False, + "enqueue": True, + }, + { + "sink": log_filename, + "level": level.upper(), + "format": log_format, + "rotation": "25 MB", + "retention": "24 hours", + "compression": None, + "backtrace": False, + "diagnose": True, + "enqueue": True, + } + ]) + +def log_cleaner(): + """Remove old log files based on retention settings.""" + cleaned = False + try: + logs_dir_path = data_dir_path / "logs" + for log_file in logs_dir_path.glob("riven-*.log"): + # remove files older than 8 hours + if (datetime.now() - datetime.fromtimestamp(log_file.stat().st_mtime)).total_seconds() / 3600 > 8: + log_file.unlink() + cleaned = True + if cleaned: + logger.log("COMPLETED", "Cleaned up old logs that were older than 8 hours.") + except Exception as e: + logger.error(f"Failed to clean old logs: {e}") + +def create_progress_bar(total_items: int) -> tuple[Progress, Console]: + console = Console() + progress = Progress( + SpinnerColumn(), + TextColumn("[progress.description]{task.description}"), + BarColumn(), + TextColumn("[progress.percentage]{task.percentage:>3.0f}%"), + TimeRemainingColumn(), + TextColumn("[progress.completed]{task.completed}/{task.total}", justify="right"), + TextColumn("[progress.log]{task.fields[log]}", justify="right"), + console=console, + transient=True + ) + return progress, console + + +console = Console() +log_level = "DEBUG" if settings_manager.settings.debug else "INFO" +setup_logger(log_level) \ No newline at end of file diff --git a/src/program/utils/notifications.py b/src/program/utils/notifications.py new file mode 100644 index 0000000..bfcab40 --- /dev/null +++ b/src/program/utils/notifications.py @@ -0,0 +1,73 @@ +from pathlib import Path +from typing import List + +from apprise import Apprise +from loguru import logger + +from program.media.item import MediaItem +from program.settings.manager import settings_manager +from program.settings.models import NotificationsModel +from program.utils import root_dir + +ntfy = Apprise() +settings: NotificationsModel = settings_manager.settings.notifications +on_item_type: List[str] = settings.on_item_type +riven_logo: Path = root_dir / "assets" / "riven-light.png" + + +try: + for service_url in settings.service_urls: + ntfy.add(service_url) +except Exception as e: + logger.debug(f"Failed to add service URL {service_url}: {e}") + + +def notification(title: str, body: str) -> None: + """Send notifications to all services in settings.""" + for url in settings.service_urls: + if "discord" in url: + url = f"{url}?format=markdown" + try: + ntfy.notify( + title=title, + body=body, + ) + except Exception as e: + logger.debug(f"Failed to send notification to {url}: {e}") + continue + +def _build_discord_notification(item: MediaItem) -> str: + """Build a discord notification for the given item using markdown that lists 
the files completed.""" + notification_message = f"[{item.type.title()}] **{item.log_string}**\n" + + if item.type == "movie": + notification_message += f" - File: {item.file}\n" + + elif item.type == "show": + for season in item.seasons: + notification_message += f"- [Season {season.number}] **Season {season.number}**\n" + for episode in season.episodes: + notification_message += f" - [Episode {episode.number}] **{episode.log_string}**\n" + notification_message += f" - File: {episode.file}\n" + + elif item.type == "season": + notification_message += f"- [Season {item.number}] **Season {item.number}**\n" + for episode in item.episodes: + notification_message += f" - [Episode {episode.number}] **{episode.log_string}**\n" + notification_message += f" - File: {episode.file}\n" + + elif item.type == "episode": + notification_message += f" - [Episode {item.number}] **{item.log_string}**\n" + notification_message += f" - File: {item.file}\n" + + return notification_message + + +def notify_on_complete(item: MediaItem) -> None: + """Send notifications to all services in settings.""" + if item.type not in on_item_type: + return + + title = "Riven completed something!" if not settings.title else settings.title + body = _build_discord_notification(item) + notification(title, body) \ No newline at end of file diff --git a/src/program/utils/request.py b/src/program/utils/request.py new file mode 100644 index 0000000..fde7d26 --- /dev/null +++ b/src/program/utils/request.py @@ -0,0 +1,357 @@ +import json +from enum import Enum +from types import SimpleNamespace +from typing import Any, Dict, Optional, Type + +from loguru import logger +from lxml import etree +from pyrate_limiter import ( + Duration, + Limiter, + MemoryListBucket, + MemoryQueueBucket, + RequestRate, +) +from requests import Session +from requests.adapters import HTTPAdapter +from requests.exceptions import ConnectTimeout, HTTPError, RequestException +from requests.models import Response +from requests_cache import CachedSession, CacheMixin +from requests_ratelimiter import ( + LimiterAdapter, + LimiterMixin, + LimiterSession, + SQLiteBucket, +) +from urllib3.util.retry import Retry +from xmltodict import parse as parse_xml + +from program.utils import data_dir_path + + +class HttpMethod(Enum): + GET = "GET" + POST = "POST" + PUT = "PUT" + DELETE = "DELETE" + PATCH = "PATCH" + + +class ResponseType(Enum): + SIMPLE_NAMESPACE = "simple_namespace" + DICT = "dict" + + +class BaseRequestParameters: + """Holds base parameters that may be included in every request.""" + + def to_dict(self) -> Dict[str, Any]: + """Convert all non-None attributes to a dictionary for inclusion in requests.""" + return {key: value for key, value in self.__dict__.items() if value is not None} + + +class ResponseObject: + """Response object to handle different response formats. + + :param response: The response object to parse. + :param response_type: The response type to parse the content as. + """ + + def __init__(self, response: Response, response_type: ResponseType = ResponseType.SIMPLE_NAMESPACE): + self.response = response + self.is_ok = response.ok + self.status_code = response.status_code + self.response_type = response_type + self.data = self.handle_response(response, response_type) + + + def handle_response(self, response: Response, response_type: ResponseType) -> dict | SimpleNamespace: + """Parse the response content based on content type. + + :param response: The response object to parse. + :param response_type: The response type to parse the content as. 
+ :return: Parsed response content. + """ + + timeout_statuses = [408, 460, 504, 520, 524, 522, 598, 599] + rate_limit_statuses = [429] + client_error_statuses = list(range(400, 451)) # 400-450 + server_error_statuses = list(range(500, 512)) # 500-511 + + if self.status_code in timeout_statuses: + raise ConnectTimeout(f"Connection timed out with status {self.status_code}", response=response) + if self.status_code in rate_limit_statuses: + raise RateLimitExceeded(f"Rate Limit Exceeded {self.status_code}", response=response) + if self.status_code in client_error_statuses: + raise RequestException(f"Client error with status {self.status_code}", response=response) + if self.status_code in server_error_statuses: + raise RequestException(f"Server error with status {self.status_code}", response=response) + if not self.is_ok: + raise RequestException(f"Request failed with status {self.status_code}", response=response) + + content_type = response.headers.get("Content-Type", "") + if not content_type or response.content == b"": + return {} + + try: + if "application/json" in content_type: + if response_type == ResponseType.DICT: + return response.json() + return json.loads(response.content, object_hook=lambda item: SimpleNamespace(**item)) + elif "application/xml" in content_type or "text/xml" in content_type: + return xml_to_simplenamespace(response.content) + elif "application/rss+xml" in content_type or "application/atom+xml" in content_type: + return parse_xml(response.content) + else: + return {} + except Exception as e: + logger.error(f"Failed to parse response content: {e}", exc_info=True) + return {} + +class BaseRequestHandler: + """Base request handler for services. + + :param session: The session to use for requests. + :param response_type: The response type to parse the content as. + :param base_url: Optional base URL to use for requests. + :param base_params: Optional base parameters to include in requests. + :param custom_exception: Optional custom exception to raise on request failure. + :param request_logging: Boolean indicating if request logging should be enabled. + """ + def __init__(self, session: Session | LimiterSession, response_type: ResponseType = ResponseType.SIMPLE_NAMESPACE, base_url: Optional[str] = None, base_params: Optional[BaseRequestParameters] = None, + custom_exception: Optional[Type[Exception]] = None, request_logging: bool = False): + self.session = session + self.response_type = response_type + self.BASE_URL = base_url + self.BASE_REQUEST_PARAMS = base_params or BaseRequestParameters() + self.custom_exception = custom_exception or Exception + self.request_logging = request_logging + + def _request(self, method: HttpMethod, endpoint: str, ignore_base_url: Optional[bool] = None, overriden_response_type: ResponseType = None, **kwargs) -> ResponseObject: + """Generic request handler with error handling, using kwargs for flexibility. + + :param method: HTTP method to use for the request. + :param endpoint: Endpoint to request. + :param ignore_base_url: Boolean indicating if the base URL should be ignored. + :param overriden_response_type: Optional response type to use for the request. + :param retry_policy: Optional retry policy to use for the request. + :param kwargs: Additional parameters to pass to the request. + :return: ResponseObject with the response data. 
+ """ + try: + url = f"{self.BASE_URL}/{endpoint}".rstrip('/') if not ignore_base_url and self.BASE_URL else endpoint + + request_params = self.BASE_REQUEST_PARAMS.to_dict() + if request_params: + kwargs.setdefault('params', {}).update(request_params) + elif 'params' in kwargs and not kwargs['params']: + del kwargs['params'] + + if self.request_logging: + logger.debug(f"Making request to {url} with kwargs: {kwargs}") + + response = self.session.request(method.value, url, **kwargs) + response.raise_for_status() + + request_response_type = overriden_response_type or self.response_type + + response_obj = ResponseObject(response=response, response_type=request_response_type) + if self.request_logging: + logger.debug(f"ResponseObject: status_code={response_obj.status_code}, data={response_obj.data}") + return response_obj + + except HTTPError as e: + if e.response is not None and e.response.status_code == 429: + raise RateLimitExceeded(f"Rate limit exceeded for {url}", response=e.response) from e + else: + raise self.custom_exception(f"Request failed: {e}") from e + + +class RateLimitExceeded(Exception): + """Rate limit exceeded exception for requests.""" + def __init__(self, message, response=None): + super().__init__(message) + self.response = response + +class CachedLimiterSession(CacheMixin, LimiterMixin, Session): + """Session class with caching and rate-limiting behavior.""" + pass + +def create_service_session( + rate_limit_params: Optional[dict] = None, + use_cache: bool = False, + cache_params: Optional[dict] = None, + session_adapter: Optional[HTTPAdapter | LimiterAdapter] = None, + retry_policy: Optional[Retry] = None, + log_config: Optional[bool] = False, +) -> Session | CachedSession | CachedLimiterSession: + """ + Create a session for a specific service with optional caching and rate-limiting. + + :param rate_limit_params: Dictionary of rate-limiting parameters. + :param use_cache: Boolean indicating if caching should be enabled. + :param cache_params: Dictionary of caching parameters if caching is enabled. + :param session_adapter: Optional custom HTTP adapter to use for the session. + :param retry_policy: Optional retry policy to use for the session. + :param log_config: Boolean indicating if the session configuration should be logged. + :return: Configured session for the service. 
+ """ + if use_cache and not cache_params: + raise ValueError("Cache parameters must be provided if use_cache is True.") + + if use_cache and cache_params: + if log_config: + logger.debug(f"Rate Limit Parameters: {rate_limit_params}") + logger.debug(f"Cache Parameters: {cache_params}") + session_class = CachedLimiterSession if rate_limit_params else CachedSession + cache_session = session_class(**rate_limit_params, **cache_params) + _create_and_mount_session_adapter(cache_session, session_adapter, retry_policy, log_config) + return cache_session + + if rate_limit_params: + if log_config: + logger.debug(f"Rate Limit Parameters: {rate_limit_params}") + limiter_session = LimiterSession(**rate_limit_params) + _create_and_mount_session_adapter(limiter_session, session_adapter, retry_policy, log_config) + return limiter_session + + standard_session = Session() + _create_and_mount_session_adapter(standard_session, session_adapter, retry_policy, log_config) + return standard_session + + +def get_rate_limit_params( + custom_limiter: Optional[Limiter] = None, + per_second: Optional[int] = None, + per_minute: Optional[int] = None, + per_hour: Optional[int] = None, + calculated_rate: Optional[int] = None, + max_calls: Optional[int] = None, + period: Optional[int] = None, + db_name: Optional[str] = None, + use_memory_list: bool = False, + limit_statuses: Optional[list[int]] = None, + max_delay: Optional[int] = 0, + +) -> Dict[str, any]: + """ + Generate rate limit parameters for a service. If `db_name` is not provided, + use an in-memory bucket for rate limiting. + + :param custom_limiter: Optional custom limiter to use for rate limiting. + :param per_second: Requests per second limit. + :param per_minute: Requests per minute limit. + :param per_hour: Requests per hour limit. + :param calculated_rate: Optional calculated rate for requests per minute. + :param max_calls: Maximum calls allowed in a specified period. + :param period: Time period in seconds for max_calls. + :param db_name: Optional name for the SQLite database file for persistent rate limiting. + :param use_memory_list: If true, use MemoryListBucket instead of MemoryQueueBucket for in-memory limiting. + :param limit_statuses: Optional list of status codes to track for rate limiting. + :param max_delay: Optional maximum delay for rate limiting. + :return: Dictionary with rate limit configuration. 
+ """ + + bucket_class = SQLiteBucket if db_name else (MemoryListBucket if use_memory_list else MemoryQueueBucket) + bucket_kwargs = {"path": data_dir_path / f"{db_name}.db"} if db_name else {} + + rate_limits = [] + if per_second: + rate_limits.append(RequestRate(per_second, Duration.SECOND)) + if per_minute: + rate_limits.append(RequestRate(per_minute, Duration.MINUTE)) + if per_hour: + rate_limits.append(RequestRate(per_hour, Duration.HOUR)) + if calculated_rate: + rate_limits.append(RequestRate(calculated_rate, Duration.MINUTE)) + if max_calls and period: + rate_limits.append(RequestRate(max_calls, Duration.SECOND * period)) + + if not rate_limits: + raise ValueError("At least one rate limit (per_second, per_minute, per_hour, calculated_rate, or max_calls and period) must be specified.") + + limiter = custom_limiter or Limiter(*rate_limits, bucket_class=bucket_class, bucket_kwargs=bucket_kwargs) + + return { + 'limiter': limiter, + 'bucket_class': bucket_class, + 'bucket_kwargs': bucket_kwargs, + 'limit_statuses': limit_statuses or [429], + 'max_delay': max_delay, + } + + +def get_cache_params(cache_name: str = 'cache', expire_after: Optional[int] = 60) -> dict: + """Generate cache parameters for a service, ensuring the cache file is in the specified directory. + + :param cache_name: The name of the cache file excluding the extension. + :param expire_after: The time in seconds to expire the cache. + :return: Dictionary with cache configuration. + """ + cache_path = data_dir_path / f"{cache_name}.db" + return {'cache_name': cache_path, 'expire_after': expire_after} + + +def get_retry_policy(retries: int = 3, backoff_factor: float = 0.3, status_forcelist: Optional[list[int]] = None) -> Retry: + """ + Create a retry policy for requests. + + :param retries: The maximum number of retry attempts. + :param backoff_factor: A backoff factor to apply between attempts. + :param status_forcelist: A list of HTTP status codes that we should force a retry on. + :return: Configured Retry object. + """ + return Retry(total=retries, backoff_factor=backoff_factor, status_forcelist=status_forcelist or [500, 502, 503, 504]) + + +def get_http_adapter( + retry_policy: Optional[Retry] = None, + pool_connections: Optional[int] = 50, + pool_maxsize: Optional[int] = 100, + pool_block: Optional[bool] = True +) -> HTTPAdapter: + """ + Create an HTTP adapter with retry policy and optional rate limiting. + + :param retry_policy: The retry policy to use for the adapter. + :param pool_connections: The number of connection pools to allow. + :param pool_maxsize: The maximum number of connections to keep in the pool. + :param pool_block: Boolean indicating if the pool should block when full. 
+ """ + adapter_kwargs = { + 'max_retries': retry_policy, + 'pool_connections': pool_connections, + 'pool_maxsize': pool_maxsize, + 'pool_block': pool_block, + } + return HTTPAdapter(**adapter_kwargs) + + +def xml_to_simplenamespace(xml_string: str) -> SimpleNamespace: + """Convert an XML string to a SimpleNamespace object.""" + root = etree.fromstring(xml_string) + def element_to_simplenamespace(element): + children_as_ns = {child.tag: element_to_simplenamespace(child) for child in element} + attributes = {key: value for key, value in element.attrib.items()} + attributes.update(children_as_ns) + return SimpleNamespace(**attributes, text=element.text) + return element_to_simplenamespace(root) + + +def _create_and_mount_session_adapter( + session: Session, + adapter_instance: Optional[HTTPAdapter] = None, + retry_policy: Optional[Retry] = None, + log_config: Optional[bool] = False): + """ + Create and mount an HTTP adapter to a session. + + :param session: The session to mount the adapter to. + :param retry_policy: The retry policy to use for the adapter. + """ + adapter = adapter_instance or get_http_adapter(retry_policy) + session.mount("https://", adapter) + session.mount("http://", adapter) + + if log_config: + logger.debug(f"Mounted http adapter with params: {adapter.__dict__} to session.") diff --git a/src/program/utils/useragents.py b/src/program/utils/useragents.py new file mode 100644 index 0000000..99e33c6 --- /dev/null +++ b/src/program/utils/useragents.py @@ -0,0 +1,54 @@ +import random + + +class UserAgentFactory: + def __init__(self, user_agents: list): + self.user_agents = user_agents + + def get_random_user_agent(self): + return random.choice(self.user_agents) + + +# Sample user agents pool +user_agents_pool = [ + "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3", + "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3", + "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:89.0) Gecko/20100101 Firefox/89.0", + "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Safari/605.1.15", + "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:89.0) Gecko/20100101 Firefox/89.0", + "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36", + "Mozilla/5.0 (Windows NT 6.3; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0", + "Mozilla/5.0 (Windows NT 5.1; rv:40.0) Gecko/20100101 Firefox/40.0", + "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.104 Safari/537.36", + "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36", + "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36", + "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36", + "Mozilla/5.0 (Windows NT 6.2; WOW64; rv:28.0) Gecko/20100101 Firefox/28.0", + "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:28.0) Gecko/20100101 Firefox/28.0", + "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.110 Safari/537.36", + "Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:27.0) Gecko/20100101 Firefox/27.0", + "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.110 Safari/537.36", + "Mozilla/5.0 (Windows NT 6.1; WOW64) 
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36", + "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36", + "Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; AS; rv:11.0) like Gecko", + "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36", + "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.120 Safari/537.36", + "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko", + "Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:30.0) Gecko/20100101 Firefox/30.0", + "Mozilla/5.0 (X11; Linux x86_64; rv:31.0) Gecko/20100101 Firefox/31.0", + "Mozilla/5.0 (X11; Linux i686; rv:31.0) Gecko/20100101 Firefox/31.0", + "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:33.0) Gecko/20100101 Firefox/33.0", + "Mozilla/5.0 (X11; Linux x86_64; rv:32.0) Gecko/20100101 Firefox/32.0", + "Mozilla/5.0 (Windows NT 6.1; rv:29.0) Gecko/20100101 Firefox/29.0", + "Mozilla/5.0 (Windows NT 6.3; WOW64; rv:31.0) Gecko/20100101 Firefox/31.0", + "curl/7.64.1", + "curl/7.58.0", + "curl/7.61.1", + "curl/7.55.1", + "curl/7.54.0", + "curl/7.65.3", + "curl/7.50.3", + "curl/7.67.0", + "curl/7.63.0", +] +user_agent_factory = UserAgentFactory(user_agents_pool) \ No newline at end of file diff --git a/src/pytest.ini b/src/pytest.ini new file mode 100644 index 0000000..4b9acb9 --- /dev/null +++ b/src/pytest.ini @@ -0,0 +1,6 @@ +[pytest] +minversion = 7.0 +filterwarnings = ignore::DeprecationWarning +addopts = -vv +pythonpath = . +testpaths = tests \ No newline at end of file diff --git a/src/routers/__init__.py b/src/routers/__init__.py new file mode 100644 index 0000000..1e56787 --- /dev/null +++ b/src/routers/__init__.py @@ -0,0 +1,29 @@ +from fastapi import Depends, Request +from fastapi.routing import APIRouter + +from auth import resolve_api_key +from program.settings.manager import settings_manager +from routers.models.shared import RootResponse +from routers.secure.default import router as default_router +from routers.secure.items import router as items_router +from routers.secure.scrape import router as scrape_router +from routers.secure.settings import router as settings_router +from routers.secure.stream import router as stream_router +from routers.secure.webhooks import router as webhooks_router + +API_VERSION = "v1" + +app_router = APIRouter(prefix=f"/api/{API_VERSION}") +@app_router.get("/", operation_id="root") +async def root(_: Request) -> RootResponse: + return { + "message": "Riven is running!", + "version": settings_manager.settings.version, + } + +app_router.include_router(default_router, dependencies=[Depends(resolve_api_key)]) +app_router.include_router(items_router, dependencies=[Depends(resolve_api_key)]) +app_router.include_router(scrape_router, dependencies=[Depends(resolve_api_key)]) +app_router.include_router(settings_router, dependencies=[Depends(resolve_api_key)]) +app_router.include_router(webhooks_router, dependencies=[Depends(resolve_api_key)]) +app_router.include_router(stream_router, dependencies=[Depends(resolve_api_key)]) \ No newline at end of file diff --git a/src/routers/models/overseerr.py b/src/routers/models/overseerr.py new file mode 100644 index 0000000..551a257 --- /dev/null +++ b/src/routers/models/overseerr.py @@ -0,0 +1,67 @@ +from typing import Any, List, Literal, Optional + +from pydantic import BaseModel, field_validator + +MediaType = Literal["movie", "tv"] + + +class Media(BaseModel): + media_type: 
MediaType + status: str + imdbId: str | None = None + tmdbId: int + tvdbId: int | None = None + + @field_validator("imdbId", mode="after") + @classmethod + def stringify_imdb_id(cls, value: Any): + if value and isinstance(value, int): + return f"tt{int(value):07d}" + return None + + @field_validator("tvdbId", "tmdbId", mode="before") + @classmethod + def validate_ids(cls, value: Any): + if value and isinstance(value, str) and value != "": + return int(value) + return None + + +class RequestInfo(BaseModel): + request_id: str + requestedBy_email: str + requestedBy_username: str + requestedBy_avatar: Optional[str] + +class IssueInfo(BaseModel): + issue_id: str + issue_type: str + issue_status: str + reportedBy_email: str + reportedBy_username: str + reportedBy_avatar: Optional[str] + +class CommentInfo(BaseModel): + comment_message: str + commentedBy_email: str + commentedBy_username: str + commentedBy_avatar: Optional[str] + +class OverseerrWebhook(BaseModel): + notification_type: str + event: str + subject: str + message: Optional[str] = None + image: Optional[str] = None + media: Media + request: Optional[RequestInfo] = None + issue: Optional[IssueInfo] = None + comment: Optional[CommentInfo] = None + extra: List[dict[str, Any]] = [] + + @property + def requested_seasons(self) -> Optional[List[int]]: + for extra in self.extra: + if extra["name"] == "Requested Seasons": + return [int(x) for x in extra["value"].split(",")] + return None diff --git a/src/routers/models/plex.py b/src/routers/models/plex.py new file mode 100644 index 0000000..994175e --- /dev/null +++ b/src/routers/models/plex.py @@ -0,0 +1,46 @@ +from typing import Optional + +from pydantic import BaseModel, Field + + +class Account(BaseModel): + id: int + thumb: str + title: str + +class Server(BaseModel): + title: str + uuid: str + +class Player(BaseModel): + local: bool + publicAddress: str + title: str + uuid: str + +class Metadata(BaseModel): + librarySectionType: str + ratingKey: str + key: str + guid: str + type: str + title: str + librarySectionTitle: str + librarySectionID: int + librarySectionKey: str + contentRating: str + summary: str + rating: Optional[float] = Field(None, description="Rating of the media") + audienceRating: Optional[float] = Field(None, description="Audience rating of the media") + year: int + tagline: Optional[str] = Field(None, description="Tagline of the media") + thumb: str + +class PlexPayload(BaseModel): + event: str + user: bool + owner: bool + Account: Account + Server: Server + Player: Player + Metadata: Metadata diff --git a/src/routers/models/shared.py b/src/routers/models/shared.py new file mode 100644 index 0000000..62b8cca --- /dev/null +++ b/src/routers/models/shared.py @@ -0,0 +1,8 @@ +from pydantic import BaseModel + + +class MessageResponse(BaseModel): + message: str + +class RootResponse(MessageResponse): + version: str \ No newline at end of file diff --git a/src/routers/secure/__init__.py b/src/routers/secure/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/routers/secure/default.py b/src/routers/secure/default.py new file mode 100644 index 0000000..8be9e8d --- /dev/null +++ b/src/routers/secure/default.py @@ -0,0 +1,276 @@ +from typing import Literal + +import requests +from fastapi import APIRouter, HTTPException, Request +from kink import di +from loguru import logger +from pydantic import BaseModel, Field, HttpUrl +from sqlalchemy import func, select + +from program.apis import TraktAPI +from program.db.db import db +from 
program.managers.event_manager import EventUpdate +from program.media.item import Episode, MediaItem, Movie, Season, Show +from program.media.state import States +from program.settings.manager import settings_manager +from program.utils import generate_api_key + +from ..models.shared import MessageResponse + +router = APIRouter( + responses={404: {"description": "Not found"}}, +) + + +@router.get("/health", operation_id="health") +async def health(request: Request) -> MessageResponse: + return { + "message": str(request.app.program.initialized), + } + + +class RDUser(BaseModel): + id: int + username: str + email: str + points: int = Field(description="User's RD points") + locale: str + avatar: str = Field(description="URL to the user's avatar") + type: Literal["free", "premium"] + premium: int = Field(description="Premium subscription left in seconds") + + +@router.get("/rd", operation_id="rd") +async def get_rd_user() -> RDUser: + api_key = settings_manager.settings.downloaders.real_debrid.api_key + headers = {"Authorization": f"Bearer {api_key}"} + + proxy = ( + settings_manager.settings.downloaders.real_debrid.proxy_url + if settings_manager.settings.downloaders.real_debrid.proxy_enabled + else None + ) + + response = requests.get( + "https://api.real-debrid.com/rest/1.0/user", + headers=headers, + proxies=proxy if proxy else None, + timeout=10, + ) + + if response.status_code != 200: + return {"success": False, "message": response.json()} + + return response.json() + +@router.post("/generateapikey", operation_id="generateapikey") +async def generate_apikey() -> MessageResponse: + new_key = generate_api_key() + settings_manager.settings.api_key = new_key + settings_manager.save() + return { "message": new_key} + + +@router.get("/torbox", operation_id="torbox") +async def get_torbox_user(): + api_key = settings_manager.settings.downloaders.torbox.api_key + headers = {"Authorization": f"Bearer {api_key}"} + response = requests.get( + "https://api.torbox.app/v1/api/user/me", headers=headers, timeout=10 + ) + return response.json() + + +@router.get("/services", operation_id="services") +async def get_services(request: Request) -> dict[str, bool]: + data = {} + if hasattr(request.app.program, "services"): + for service in request.app.program.all_services.values(): + data[service.key] = service.initialized + if not hasattr(service, "services"): + continue + for sub_service in service.services.values(): + data[sub_service.key] = sub_service.initialized + return data + + +class TraktOAuthInitiateResponse(BaseModel): + auth_url: str + + +@router.get("/trakt/oauth/initiate", operation_id="trakt_oauth_initiate") +async def initiate_trakt_oauth(request: Request) -> TraktOAuthInitiateResponse: + trakt_api = di[TraktAPI] + if trakt_api is None: + raise HTTPException(status_code=404, detail="Trakt service not found") + auth_url = trakt_api.perform_oauth_flow() + return {"auth_url": auth_url} + + +@router.get("/trakt/oauth/callback", operation_id="trakt_oauth_callback") +async def trakt_oauth_callback(code: str, request: Request) -> MessageResponse: + trakt_api = di[TraktAPI] + trakt_api_key = settings_manager.settings.content.trakt.api_key + if trakt_api is None: + raise HTTPException(status_code=404, detail="Trakt Api not found") + if trakt_api_key is None: + raise HTTPException(status_code=404, detail="Trakt Api key not found in settings") + success = trakt_api.handle_oauth_callback(trakt_api_key, code) + if success: + return {"message": "OAuth token obtained successfully"} + else: + raise 
HTTPException(status_code=400, detail="Failed to obtain OAuth token") + + +class StatsResponse(BaseModel): + total_items: int + total_movies: int + total_shows: int + total_seasons: int + total_episodes: int + total_symlinks: int + incomplete_items: int + incomplete_retries: dict[str, int] = Field( + description="Media item log string: number of retries" + ) + states: dict[States, int] + + +@router.get("/stats", operation_id="stats") +async def get_stats(_: Request) -> StatsResponse: + payload = {} + with db.Session() as session: + # Ensure the connection is open for the entire duration of the session + with session.connection().execution_options(stream_results=True) as conn: + movies_symlinks = conn.execute(select(func.count(Movie.id)).where(Movie.symlinked == True)).scalar_one() + episodes_symlinks = conn.execute(select(func.count(Episode.id)).where(Episode.symlinked == True)).scalar_one() + total_symlinks = movies_symlinks + episodes_symlinks + + total_movies = conn.execute(select(func.count(Movie.id))).scalar_one() + total_shows = conn.execute(select(func.count(Show.id))).scalar_one() + total_seasons = conn.execute(select(func.count(Season.id))).scalar_one() + total_episodes = conn.execute(select(func.count(Episode.id))).scalar_one() + total_items = conn.execute(select(func.count(MediaItem.id))).scalar_one() + + # Use a server-side cursor for batch processing + incomplete_retries = {} + batch_size = 1000 + + result = conn.execute( + select(MediaItem.id, MediaItem.scraped_times) + .where(MediaItem.last_state != States.Completed) + ) + + while True: + batch = result.fetchmany(batch_size) + if not batch: + break + + for media_item_id, scraped_times in batch: + incomplete_retries[media_item_id] = scraped_times + + states = {} + for state in States: + states[state] = conn.execute(select(func.count(MediaItem.id)).where(MediaItem.last_state == state)).scalar_one() + + payload["total_items"] = total_items + payload["total_movies"] = total_movies + payload["total_shows"] = total_shows + payload["total_seasons"] = total_seasons + payload["total_episodes"] = total_episodes + payload["total_symlinks"] = total_symlinks + payload["incomplete_items"] = len(incomplete_retries) + payload["incomplete_retries"] = incomplete_retries + payload["states"] = states + + return payload + +class LogsResponse(BaseModel): + logs: str + + +@router.get("/logs", operation_id="logs") +async def get_logs() -> str: + log_file_path = None + for handler in logger._core.handlers.values(): + if ".log" in handler._name: + log_file_path = handler._sink._path + break + + if not log_file_path: + return {"success": False, "message": "Log file handler not found"} + + try: + with open(log_file_path, "r") as log_file: + log_contents = log_file.read() + return {"logs": log_contents} + except Exception as e: + logger.error(f"Failed to read log file: {e}") + raise HTTPException(status_code=500, detail="Failed to read log file") + + +@router.get("/events", operation_id="events") +async def get_events( + request: Request, +) -> dict[str, list[str]]: + events = request.app.program.em.get_event_updates() + return events + + +@router.get("/mount", operation_id="mount") +async def get_rclone_files() -> dict[str, str]: + """Get all files in the rclone mount.""" + import os + + rclone_dir = settings_manager.settings.symlink.rclone_path + file_map = {} + + def scan_dir(path): + with os.scandir(path) as entries: + for entry in entries: + if entry.is_file(): + file_map[entry.name] = entry.path + elif entry.is_dir(): + scan_dir(entry.path) + + 
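+    # Note: file_map is keyed by the bare filename, so files sharing a name in
+    # different subdirectories of the rclone mount overwrite one another and only
+    # the last path scanned is kept for that name.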
scan_dir(rclone_dir) # dict of `filename: filepath`` + return file_map + + +class UploadLogsResponse(BaseModel): + success: bool + url: HttpUrl = Field(description="URL to the uploaded log file. 50M Filesize limit. 180 day retention.") + +@router.post("/upload_logs", operation_id="upload_logs") +async def upload_logs() -> UploadLogsResponse: + """Upload the latest log file to paste.c-net.org""" + + log_file_path = None + for handler in logger._core.handlers.values(): + if ".log" in handler._name: + log_file_path = handler._sink._path + break + + if not log_file_path: + raise HTTPException(status_code=500, detail="Log file handler not found") + + try: + with open(log_file_path, "r") as log_file: + log_contents = log_file.read() + + response = requests.post( + "https://paste.c-net.org/", + data=log_contents.encode('utf-8'), + headers={"Content-Type": "text/plain"} + ) + + if response.status_code == 200: + logger.info(f"Uploaded log file to {response.text.strip()}") + return UploadLogsResponse(success=True, url=response.text.strip()) + else: + logger.error(f"Failed to upload log file: {response.status_code}") + raise HTTPException(status_code=500, detail="Failed to upload log file") + + except Exception as e: + logger.error(f"Failed to read or upload log file: {e}") + raise HTTPException(status_code=500, detail="Failed to read or upload log file") diff --git a/src/routers/secure/items.py b/src/routers/secure/items.py new file mode 100644 index 0000000..b5a4980 --- /dev/null +++ b/src/routers/secure/items.py @@ -0,0 +1,435 @@ +import asyncio +from datetime import datetime +from typing import Literal, Optional + +import Levenshtein +from fastapi import APIRouter, Depends, HTTPException, Request, status +from loguru import logger +from pydantic import BaseModel +from sqlalchemy import and_, func, or_, select +from sqlalchemy.exc import NoResultFound +from sqlalchemy.orm import Session + +from program.db import db_functions +from program.db.db import db, get_db +from program.media.item import MediaItem +from program.media.state import States +from program.services.content import Overseerr +from program.symlink import Symlinker +from program.types import Event + +from ..models.shared import MessageResponse + +router = APIRouter( + prefix="/items", + tags=["items"], + responses={404: {"description": "Not found"}}, +) + + +def handle_ids(ids: str) -> list[str]: + ids = [str(id) for id in ids.split(",")] if "," in ids else [str(ids)] + if not ids: + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="No item ID provided") + return ids + + +class StateResponse(BaseModel): + success: bool + states: list[str] + + +@router.get("/states", operation_id="get_states") +async def get_states() -> StateResponse: + return { + "success": True, + "states": [state for state in States], + } + + +class ItemsResponse(BaseModel): + success: bool + items: list[dict] + page: int + limit: int + total_items: int + total_pages: int + + +@router.get( + "", + summary="Retrieve Media Items", + description="Fetch media items with optional filters and pagination", + operation_id="get_items", +) +async def get_items( + _: Request, + limit: Optional[int] = 50, + page: Optional[int] = 1, + type: Optional[str] = None, + states: Optional[str] = None, + sort: Optional[ + Literal["date_desc", "date_asc", "title_asc", "title_desc"] + ] = "date_desc", + search: Optional[str] = None, + extended: Optional[bool] = False, + is_anime: Optional[bool] = False, +) -> ItemsResponse: + if page < 1: + raise 
HTTPException(status_code=400, detail="Page number must be 1 or greater.") + + if limit < 1: + raise HTTPException(status_code=400, detail="Limit must be 1 or greater.") + + query = select(MediaItem) + + if search: + search_lower = search.lower() + if search_lower.startswith("tt"): + query = query.where(MediaItem.imdb_id == search_lower) + else: + query = query.where( + (func.lower(MediaItem.title).like(f"%{search_lower}%")) + | (func.lower(MediaItem.imdb_id).like(f"%{search_lower}%")) + ) + + if states: + states = states.split(",") + filter_states = [] + for state in states: + filter_lower = state.lower() + for state_enum in States: + if Levenshtein.ratio(filter_lower, state_enum.name.lower()) >= 0.82: + filter_states.append(state_enum) + break + if 'All' not in states: + if len(filter_states) == len(states): + query = query.where(MediaItem.last_state.in_(filter_states)) + else: + valid_states = [state_enum.name for state_enum in States] + raise HTTPException( + status_code=400, + detail=f"Invalid filter states: {states}. Valid states are: {valid_states}", + ) + + if type: + if "," in type: + types = type.split(",") + for type in types: + if type not in ["movie", "show", "season", "episode", "anime"]: + raise HTTPException( + status_code=400, + detail=f"Invalid type: {type}. Valid types are: ['movie', 'show', 'season', 'episode', 'anime']", + ) + else: + types = [type] + if "anime" in types: + types = [type for type in types if type != "anime"] + query = query.where( + or_( + and_( + MediaItem.type.in_(["movie", "show"]), + MediaItem.is_anime == True, + ), + MediaItem.type.in_(types), + ) + ) + else: + query = query.where(MediaItem.type.in_(types)) + + if is_anime: + query = query.where(MediaItem.is_anime is True) + + if sort and not search: + sort_lower = sort.lower() + if sort_lower == "title_asc": + query = query.order_by(MediaItem.title.asc()) + elif sort_lower == "title_desc": + query = query.order_by(MediaItem.title.desc()) + elif sort_lower == "date_asc": + query = query.order_by(MediaItem.requested_at.asc()) + elif sort_lower == "date_desc": + query = query.order_by(MediaItem.requested_at.desc()) + else: + raise HTTPException( + status_code=400, + detail=f"Invalid sort: {sort}. 
Valid sorts are: ['title_asc', 'title_desc', 'date_asc', 'date_desc']", + ) + + with db.Session() as session: + total_items = session.execute( + select(func.count()).select_from(query.subquery()) + ).scalar_one() + items = ( + session.execute(query.offset((page - 1) * limit).limit(limit)) + .unique() + .scalars() + .all() + ) + + total_pages = (total_items + limit - 1) // limit + + return { + "success": True, + "items": [ + item.to_extended_dict() if extended else item.to_dict() + for item in items + ], + "page": page, + "limit": limit, + "total_items": total_items, + "total_pages": total_pages, + } + + +@router.post( + "/add", + summary="Add Media Items", + description="Add media items with bases on imdb IDs", + operation_id="add_items", +) +async def add_items(request: Request, imdb_ids: str = None) -> MessageResponse: + if not imdb_ids: + raise HTTPException(status_code=400, detail="No IMDb ID(s) provided") + + ids = imdb_ids.split(",") + + valid_ids = [] + for id in ids: + if not id.startswith("tt"): + logger.warning(f"Invalid IMDb ID {id}, skipping") + else: + valid_ids.append(id) + + if not valid_ids: + raise HTTPException(status_code=400, detail="No valid IMDb ID(s) provided") + + with db.Session() as _: + for id in valid_ids: + item = MediaItem( + {"imdb_id": id, "requested_by": "riven", "requested_at": datetime.now()} + ) + request.app.program.em.add_item(item) + + return {"message": f"Added {len(valid_ids)} item(s) to the queue"} + +@router.get( + "/{id}", + summary="Retrieve Media Item", + description="Fetch a single media item by ID", + operation_id="get_item", +) +async def get_item(_: Request, id: str, use_tmdb_id: Optional[bool] = False) -> dict: + with db.Session() as session: + try: + query = select(MediaItem) + if use_tmdb_id: + query = query.where(MediaItem.tmdb_id == id) + else: + query = query.where(MediaItem.id == id) + item = session.execute(query).unique().scalar_one() + except NoResultFound: + raise HTTPException(status_code=404, detail="Item not found") + return item.to_extended_dict(with_streams=False) + + +@router.get( + "/{imdb_ids}", + summary="Retrieve Media Items By IMDb IDs", + description="Fetch media items by IMDb IDs", + operation_id="get_items_by_imdb_ids", +) +async def get_items_by_imdb_ids(request: Request, imdb_ids: str) -> list[dict]: + ids = imdb_ids.split(",") + with db.Session() as session: + items = [] + for id in ids: + item = ( + session.execute(select(MediaItem).where(MediaItem.imdb_id == id)) + .unique() + .scalar_one() + ) + if item: + items.append(item) + return [item.to_extended_dict() for item in items] + + +class ResetResponse(BaseModel): + message: str + ids: list[str] + + +@router.post( + "/reset", + summary="Reset Media Items", + description="Reset media items with bases on item IDs", + operation_id="reset_items", +) +async def reset_items(request: Request, ids: str) -> ResetResponse: + ids = handle_ids(ids) + try: + for media_item in db_functions.get_items_by_ids(ids): + try: + request.app.program.em.cancel_job(media_item.id) + db_functions.clear_streams(media_item) + db_functions.reset_media_item(media_item) + except ValueError as e: + logger.error(f"Failed to reset item with id {media_item.id}: {str(e)}") + continue + except Exception as e: + logger.error(f"Unexpected error while resetting item with id {media_item.id}: {str(e)}") + continue + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) from e + return {"message": f"Reset items with id {ids}", "ids": ids} + + +class RetryResponse(BaseModel): + 
message: str + ids: list[str] + + +@router.post( + "/retry", + summary="Retry Media Items", + description="Retry media items with bases on item IDs", + operation_id="retry_items", +) +async def retry_items(request: Request, ids: str) -> RetryResponse: + """Re-add items to the queue""" + ids = handle_ids(ids) + for id in ids: + try: + item = db_functions.get_item_by_id(id) + if item: + with db.Session() as session: + item.scraped_at = None + item.scraped_times = 1 + session.merge(item) + session.commit() + request.app.program.em.add_event(Event("RetryItem", id)) + except ValueError as e: + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)) + + return {"message": f"Retried items with ids {ids}", "ids": ids} + + +class RemoveResponse(BaseModel): + message: str + ids: list[str] + + +@router.delete( + "/remove", + summary="Remove Media Items", + description="Remove media items based on item IDs", + operation_id="remove_item", +) +async def remove_item(request: Request, ids: str) -> RemoveResponse: + ids: list[str] = handle_ids(ids) + try: + media_items: list[MediaItem] = db_functions.get_items_by_ids(ids, ["movie", "show"]) + if not media_items: + return HTTPException(status_code=404, detail="Item(s) not found") + for item in media_items: + logger.debug(f"Removing item with ID {item.id}") + request.app.program.em.cancel_job(item.id) + await asyncio.sleep(0.2) # Ensure cancellation is processed + if item.type == "show": + for season in item.seasons: + for episode in season.episodes: + request.app.program.em.cancel_job(episode.id) + await asyncio.sleep(0.2) + db_functions.delete_media_item_by_id(episode.id) + request.app.program.em.cancel_job(season.id) + await asyncio.sleep(0.2) + db_functions.delete_media_item_by_id(season.id) + + db_functions.clear_streams_by_id(item.id) + + symlink_service = request.app.program.services.get(Symlinker) + if symlink_service: + symlink_service.delete_item_symlinks_by_id(item.id) + + if item.overseerr_id: + overseerr: Overseerr = request.app.program.services.get(Overseerr) + if overseerr: + overseerr.delete_request(item.overseerr_id) + logger.debug(f"Deleted request from Overseerr with ID {item.overseerr_id}") + + logger.debug(f"Deleting item from database with ID {item.id}") + db_functions.delete_media_item_by_id(item.id) + logger.info(f"Successfully removed item with ID {item.id}") + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + + return {"message": f"Removed items with ids {ids}", "ids": ids} + +@router.get( + "/{item_id}/streams" +) +async def get_item_streams(_: Request, item_id: str, db: Session = Depends(get_db)): + item: MediaItem = ( + db.execute( + select(MediaItem) + .where(MediaItem.id == item_id) + ) + .unique() + .scalar_one_or_none() + ) + + if not item: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Item not found") + + return { + "message": f"Retrieved streams for item {item_id}", + "streams": item.streams, + "blacklisted_streams": item.blacklisted_streams + } + +@router.post( + "/{item_id}/streams/{stream_id}/blacklist" +) +async def blacklist_stream(_: Request, item_id: str, stream_id: int, db: Session = Depends(get_db)): + item: MediaItem = ( + db.execute( + select(MediaItem) + .where(MediaItem.id == item_id) + ) + .unique() + .scalar_one_or_none() + ) + stream = next((stream for stream in item.streams if stream.id == stream_id), None) + + if not item or not stream: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Item or stream not 
found") + + db_functions.blacklist_stream(item, stream, db) + + return { + "message": f"Blacklisted stream {stream_id} for item {item_id}", + } + +@router.post( + "{item_id}/streams/{stream_id}/unblacklist" +) +async def unblacklist_stream(_: Request, item_id: str, stream_id: int, db: Session = Depends(get_db)): + item: MediaItem = ( + db.execute( + select(MediaItem) + .where(MediaItem.id == item_id) + ) + .unique() + .scalar_one_or_none() + ) + + stream = next((stream for stream in item.blacklisted_streams if stream.id == stream_id), None) + + if not item or not stream: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Item or stream not found") + + db_functions.unblacklist_stream(item, stream, db) + + return { + "message": f"Unblacklisted stream {stream_id} for item {item_id}", + } \ No newline at end of file diff --git a/src/routers/secure/scrape.py b/src/routers/secure/scrape.py new file mode 100644 index 0000000..570c5f5 --- /dev/null +++ b/src/routers/secure/scrape.py @@ -0,0 +1,415 @@ +import asyncio +from datetime import datetime, timedelta +from typing import Dict, List, Literal, Optional, TypeAlias, Union +from uuid import uuid4 + +from fastapi import APIRouter, BackgroundTasks, HTTPException, Request +from loguru import logger +from pydantic import BaseModel, RootModel +from RTN import ParsedData +from sqlalchemy import select + +from program.db import db_functions +from program.db.db import db +from program.media.item import Episode, MediaItem +from program.media.stream import Stream as ItemStream +from program.services.downloaders import Downloader +from program.services.downloaders.shared import hash_from_uri +from program.services.indexers.trakt import TraktIndexer +from program.services.scrapers import Scraping +from program.services.scrapers.shared import rtn +from program.types import Event + + +class Stream(BaseModel): + infohash: str + raw_title: str + parsed_title: str + parsed_data: ParsedData + rank: int + lev_ratio: float + is_cached: bool + +class ScrapeItemResponse(BaseModel): + message: str + streams: Dict[str, Stream] + +class StartSessionResponse(BaseModel): + message: str + session_id: str + torrent_id: str + torrent_info: dict + containers: Optional[List[dict]] + expires_at: str + +class SelectFilesResponse(BaseModel): + message: str + download_type: Literal["cached", "uncached"] + +class UpdateAttributesResponse(BaseModel): + message: str + +class SessionResponse(BaseModel): + message: str + +class ContainerFile(BaseModel): + """Individual file entry in a container""" + filename: str + filesize: Optional[int] = None + +ContainerMap: TypeAlias = Dict[str, ContainerFile] + +class Container(RootModel[ContainerMap]): + """ + Root model for container mapping file IDs to file information. + + Example: + { + "4": { + "filename": "show.s01e01.mkv", + "filesize": 30791392598 + }, + "5": { + "filename": "show.s01e02.mkv", + "filesize": 25573181861 + } + } + """ + root: ContainerMap + +SeasonEpisodeMap: TypeAlias = Dict[int, Dict[int, ContainerFile]] + +class ShowFileData(RootModel[SeasonEpisodeMap]): + """ + Root model for show file data that maps seasons to episodes to file data. 
+ + Example: + { + 1: { # Season 1 + 1: {"filename": "path/to/s01e01.mkv"}, # Episode 1 + 2: {"filename": "path/to/s01e02.mkv"} # Episode 2 + }, + 2: { # Season 2 + 1: {"filename": "path/to/s02e01.mkv"} # Episode 1 + } + } + """ + + root: SeasonEpisodeMap + +class ScrapingSession: + def __init__(self, id: str, item_id: str, magnet: str): + self.id = id + self.item_id = item_id + self.magnet = magnet + self.torrent_id: Optional[str] = None + self.torrent_info: Optional[dict] = None + self.containers: Optional[list] = None + self.selected_files: Optional[dict] = None + self.created_at: datetime = datetime.now() + self.expires_at: datetime = datetime.now() + timedelta(minutes=5) + +class ScrapingSessionManager: + def __init__(self): + self.sessions: Dict[str, ScrapingSession] = {} + self.downloader: Optional[Downloader] = None + + def set_downloader(self, downloader: Downloader): + self.downloader = downloader + + def create_session(self, item_id: str, magnet: str) -> ScrapingSession: + session_id = str(uuid4()) + session = ScrapingSession(session_id, item_id, magnet) + self.sessions[session_id] = session + return session + + def get_session(self, session_id: str) -> Optional[ScrapingSession]: + session = self.sessions.get(session_id) + if not session: + return None + + if datetime.now() > session.expires_at: + self.abort_session(session_id) + return None + + return session + + def update_session(self, session_id: str, **kwargs) -> Optional[ScrapingSession]: + session = self.get_session(session_id) + if not session: + return None + + for key, value in kwargs.items(): + if hasattr(session, key): + setattr(session, key, value) + + return session + + def abort_session(self, session_id: str): + session = self.sessions.pop(session_id, None) + if session and session.torrent_id and self.downloader: + try: + self.downloader.delete_torrent(session.torrent_id) + logger.debug(f"Deleted torrent for aborted session {session_id}") + except Exception as e: + logger.error(f"Failed to delete torrent for session {session_id}: {e}") + if session: + logger.debug(f"Aborted session {session_id} for item {session.item_id}") + + def complete_session(self, session_id: str): + session = self.get_session(session_id) + if not session: + return + + logger.debug(f"Completing session {session_id} for item {session.item_id}") + self.sessions.pop(session_id) + + def cleanup_expired(self, background_tasks: BackgroundTasks): + current_time = datetime.now() + expired = [ + session_id for session_id, session in self.sessions.items() + if current_time > session.expires_at + ] + for session_id in expired: + background_tasks.add_task(self.abort_session, session_id) + +session_manager = ScrapingSessionManager() + +router = APIRouter(prefix="/scrape", tags=["scrape"]) + +def initialize_downloader(downloader: Downloader): + """Initialize downloader if not already set""" + if not session_manager.downloader: + session_manager.set_downloader(downloader) + +@router.get( + "/scrape/{id}", + summary="Get streams for an item", + operation_id="scrape_item" +) +def scrape_item(request: Request, id: str) -> ScrapeItemResponse: + + if id.startswith("tt"): + imdb_id = id + item_id = None + else: + imdb_id = None + item_id = id + + if services := request.app.program.services: + indexer = services[TraktIndexer] + scraper = services[Scraping] + downloader = services[Downloader] + else: + raise HTTPException(status_code=412, detail="Scraping services not initialized") + + log_string = None + with db.Session() as db_session: + + if imdb_id: + 
prepared_item = MediaItem({"imdb_id": imdb_id}) + item = next(indexer.run(prepared_item)) + else: + item: MediaItem = ( + db_session.execute( + select(MediaItem) + .where(MediaItem.id == item_id) + ) + .unique() + .scalar_one_or_none() + ) + streams = scraper.scrape(item) + stream_containers = downloader.get_instant_availability([stream for stream in streams.keys()]) + for stream in streams.keys(): + if len(stream_containers.get(stream, [])) > 0: + streams[stream].is_cached = True + else: + streams[stream].is_cached = False + log_string = item.log_string + + return { + "message": f"Manually scraped streams for item {log_string}", + "streams": streams + } + +@router.post("/scrape/start_session") +async def start_manual_session( + request: Request, + background_tasks: BackgroundTasks, + item_id: str, + magnet: str +) -> StartSessionResponse: + session_manager.cleanup_expired(background_tasks) + info_hash = hash_from_uri(magnet).lower() + + # Identify item based on IMDb or database ID + if item_id.startswith("tt"): + imdb_id = item_id + item_id = None + else: + imdb_id = None + item_id = item_id + + if services := request.app.program.services: + indexer = services[TraktIndexer] + downloader = services[Downloader] + else: + raise HTTPException(status_code=412, detail="Required services not initialized") + + initialize_downloader(downloader) + + if imdb_id: + prepared_item = MediaItem({"imdb_id": imdb_id}) + item = next(indexer.run(prepared_item)) + else: + item = db_functions.get_item_by_id(item_id) + + if not item: + raise HTTPException(status_code=404, detail="Item not found") + + session = session_manager.create_session(item_id or imdb_id, info_hash) + + try: + torrent_id = downloader.add_torrent(info_hash) + torrent_info = downloader.get_torrent_info(torrent_id) + containers = downloader.get_instant_availability([session.magnet]).get(session.magnet, None) + session_manager.update_session(session.id, torrent_id=torrent_id, torrent_info=torrent_info, containers=containers) + except Exception as e: + background_tasks.add_task(session_manager.abort_session, session.id) + raise HTTPException(status_code=500, detail=str(e)) + + return { + "message": "Started manual scraping session", + "session_id": session.id, + "torrent_id": torrent_id, + "torrent_info": torrent_info, + "containers": containers, + "expires_at": session.expires_at.isoformat() + } + +@router.post( + "/scrape/select_files/{session_id}", + summary="Select files for torrent id, for this to be instant it requires files to be one of /manual/instant_availability response containers", + operation_id="manual_select" +) +def manual_select_files(request: Request, session_id, files: Container) -> SelectFilesResponse: + downloader: Downloader = request.app.program.services.get(Downloader) + session = session_manager.get_session(session_id) + if not session: + raise HTTPException(status_code=404, detail="Session not found or expired") + if not session.torrent_id: + session_manager.abort_session(session_id) + raise HTTPException(status_code=500, detail="") + + download_type = "uncached" + if files.model_dump() in session.containers: + download_type = "cached" + + try: + downloader.select_files(session.torrent_id, files.model_dump()) + session.selected_files = files.model_dump() + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + return { + "message": f"Selected files for {session.item_id}", + "download_type": download_type + } + +@router.post( + "/scrape/update_attributes/{session_id}", + summary="Match 
container files to item", + operation_id="manual_update_attributes" +) +async def manual_update_attributes(request: Request, session_id, data: Union[ContainerFile, ShowFileData]) -> UpdateAttributesResponse: + session = session_manager.get_session(session_id) + log_string = None + if not session: + raise HTTPException(status_code=404, detail="Session not found or expired") + if not session.item_id: + session_manager.abort_session(session_id) + raise HTTPException(status_code=500, detail="") + + with db.Session() as db_session: + if str(session.item_id).startswith("tt") and not db_functions.get_item_by_external_id(imdb_id=session.item_id) and not db_functions.get_item_by_id(session.item_id): + prepared_item = MediaItem({"imdb_id": session.item_id}) + item = next(TraktIndexer().run(prepared_item)) + db_session.merge(item) + db_session.commit() + else: + item = db_functions.get_item_by_id(session.item_id) + + if not item: + raise HTTPException(status_code=404, detail="Item not found") + + item_ids_to_submit = [] + + if item.type == "movie": + request.app.program.em.cancel_job(item.id) + item.reset() + item.file = data.filename + item.folder = data.filename + item.alternative_folder = session.torrent_info["original_filename"] + item.active_stream = {"infohash": session.magnet, "id": session.torrent_info["id"]} + torrent = rtn.rank(session.magnet, session.magnet) + item.streams.append(ItemStream(torrent)) + item_ids_to_submit.append(item.id) + else: + request.app.program.em.cancel_job(item.id) + await asyncio.sleep(0.2) + for season in item.seasons: + request.app.program.em.cancel_job(season.id) + await asyncio.sleep(0.2) + for season, episodes in data.root.items(): + for episode, episode_data in episodes.items(): + item_episode: Episode = next((_episode for _season in item.seasons if _season.number == season for _episode in _season.episodes if _episode.number == episode), None) + if item_episode: + request.app.program.em.cancel_job(item_episode.id) + await asyncio.sleep(0.2) + item_episode.reset() + item_episode.file = episode_data.filename + item_episode.folder = episode_data.filename + item_episode.alternative_folder = session.torrent_info["original_filename"] + item_episode.active_stream = {"infohash": session.magnet, "id": session.torrent_info["id"]} + torrent = rtn.rank(session.magnet, session.magnet) + item_episode.streams.append(ItemStream(torrent)) + item_ids_to_submit.append(item_episode.id) + item.store_state() + log_string = item.log_string + db_session.merge(item) + db_session.commit() + + for item_id in item_ids_to_submit: + request.app.program.em.add_event(Event("ManualAPI", item_id)) + + return {"message": f"Updated given data to {log_string}"} + +@router.post("/scrape/abort_session/{session_id}") +async def abort_manual_session( + _: Request, + background_tasks: BackgroundTasks, + session_id: str +) -> SessionResponse: + session = session_manager.get_session(session_id) + if not session: + raise HTTPException(status_code=404, detail="Session not found or expired") + + background_tasks.add_task(session_manager.abort_session, session_id) + return {"message": f"Aborted session {session_id}"} + +@router.post( + "/scrape/complete_session/{session_id}", + summary="Complete a manual scraping session", + operation_id="complete_manual_session" +) +async def complete_manual_session(_: Request, session_id: str) -> SessionResponse: + session = session_manager.get_session(session_id) + + if not session: + raise HTTPException(status_code=404, detail="Session not found or expired") + + if not 
all([session.torrent_id, session.selected_files]): + raise HTTPException(status_code=400, detail="Session is incomplete") + + session_manager.complete_session(session_id) + return {"message": f"Completed session {session_id}"} \ No newline at end of file diff --git a/src/routers/secure/settings.py b/src/routers/secure/settings.py new file mode 100644 index 0000000..3775515 --- /dev/null +++ b/src/routers/secure/settings.py @@ -0,0 +1,131 @@ +from copy import copy +from typing import Any, Dict, List + +from fastapi import APIRouter, HTTPException +from pydantic import BaseModel, ValidationError + +from program.settings.manager import settings_manager +from program.settings.models import AppModel + +from ..models.shared import MessageResponse + + +class SetSettings(BaseModel): + key: str + value: Any + + +router = APIRouter( + prefix="/settings", + tags=["settings"], + responses={404: {"description": "Not found"}}, +) + + +@router.get("/schema", operation_id="get_settings_schema") +async def get_settings_schema() -> dict[str, Any]: + """ + Get the JSON schema for the settings. + """ + return settings_manager.settings.model_json_schema() + +@router.get("/load", operation_id="load_settings") +async def load_settings() -> MessageResponse: + settings_manager.load() + return { + "message": "Settings loaded!", + } + +@router.post("/save", operation_id="save_settings") +async def save_settings() -> MessageResponse: + settings_manager.save() + return { + "message": "Settings saved!", + } + + +@router.get("/get/all", operation_id="get_all_settings") +async def get_all_settings() -> AppModel: + return copy(settings_manager.settings) + + +@router.get("/get/{paths}", operation_id="get_settings") +async def get_settings(paths: str) -> dict[str, Any]: + current_settings = settings_manager.settings.model_dump() + data = {} + for path in paths.split(","): + keys = path.split(".") + current_obj = current_settings + + for k in keys: + if k not in current_obj: + return None + current_obj = current_obj[k] + + data[path] = current_obj + return data + + +@router.post("/set/all", operation_id="set_all_settings") +async def set_all_settings(new_settings: Dict[str, Any]) -> MessageResponse: + current_settings = settings_manager.settings.model_dump() + + def update_settings(current_obj, new_obj): + for key, value in new_obj.items(): + if isinstance(value, dict) and key in current_obj: + update_settings(current_obj[key], value) + else: + current_obj[key] = value + + update_settings(current_settings, new_settings) + + # Validate and save the updated settings + try: + updated_settings = settings_manager.settings.model_validate(current_settings) + settings_manager.load(settings_dict=updated_settings.model_dump()) + settings_manager.save() # Ensure the changes are persisted + except Exception as e: + raise HTTPException(status_code=400, detail=str(e)) + + return { + "message": "All settings updated successfully!", + } + +@router.post("/set", operation_id="set_settings") +async def set_settings(settings: List[SetSettings]) -> MessageResponse: + current_settings = settings_manager.settings.model_dump() + + for setting in settings: + keys = setting.key.split(".") + current_obj = current_settings + + # Navigate to the last key's parent object, ensuring all keys exist. + for k in keys[:-1]: + if k not in current_obj: + raise HTTPException( + status_code=400, + detail=f"Path '{'.'.join(keys[:-1])}' does not exist.", + ) + current_obj = current_obj[k] + + # Ensure the final key exists before setting the value. 
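+            # e.g. a request body of [{"key": "downloaders.real_debrid.api_key", "value": "..."}]
+            # navigates current_settings["downloaders"]["real_debrid"] above and then
+            # overwrites its existing "api_key" entry below; unknown keys are rejected with a 400.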
+ if keys[-1] in current_obj: + current_obj[keys[-1]] = setting.value + else: + raise HTTPException( + status_code=400, + detail=f"Key '{keys[-1]}' does not exist in path '{'.'.join(keys[:-1])}'.", + ) + + # Validate and apply the updated settings to the AppModel instance + try: + updated_settings = settings_manager.settings.__class__(**current_settings) + settings_manager.load(settings_dict=updated_settings.model_dump()) + settings_manager.save() # Ensure the changes are persisted + except ValidationError as e: + raise HTTPException from e( + status_code=400, + detail=f"Failed to update settings: {str(e)}", + ) + + return {"message": "Settings updated successfully."} diff --git a/src/routers/secure/stream.py b/src/routers/secure/stream.py new file mode 100644 index 0000000..2c634f0 --- /dev/null +++ b/src/routers/secure/stream.py @@ -0,0 +1,38 @@ +import json +import logging +from datetime import datetime + +from fastapi import APIRouter, Request +from fastapi.responses import StreamingResponse +from loguru import logger +from pydantic import BaseModel + +from program.managers.sse_manager import sse_manager + +router = APIRouter( + responses={404: {"description": "Not found"}}, + prefix="/stream", + tags=["stream"], +) + +class EventResponse(BaseModel): + data: dict + +class SSELogHandler(logging.Handler): + def emit(self, record: logging.LogRecord): + log_entry = { + "time": datetime.fromtimestamp(record.created).isoformat(), + "level": record.levelname, + "message": record.msg + } + sse_manager.publish_event("logging", json.dumps(log_entry)) + +logger.add(SSELogHandler()) + +@router.get("/event_types") +async def get_event_types(): + return {"message": list(sse_manager.event_queues.keys())} + +@router.get("/{event_type}") +async def stream_events(_: Request, event_type: str) -> EventResponse: + return StreamingResponse(sse_manager.subscribe(event_type), media_type="text/event-stream") \ No newline at end of file diff --git a/src/routers/secure/webhooks.py b/src/routers/secure/webhooks.py new file mode 100644 index 0000000..2eb2ad0 --- /dev/null +++ b/src/routers/secure/webhooks.py @@ -0,0 +1,63 @@ +from typing import Any, Dict + +import pydantic +from fastapi import APIRouter, Request +from kink import di +from loguru import logger +from requests import RequestException + +from program.apis.trakt_api import TraktAPI +from program.media.item import MediaItem +from program.services.content.overseerr import Overseerr + +from ..models.overseerr import OverseerrWebhook + +router = APIRouter( + prefix="/webhook", + responses={404: {"description": "Not found"}}, +) + + +@router.post("/overseerr") +async def overseerr(request: Request) -> Dict[str, Any]: + """Webhook for Overseerr""" + try: + response = await request.json() + if response.get("subject") == "Test Notification": + logger.log("API", "Received test notification, Overseerr configured properly") + return {"success": True} + req = OverseerrWebhook.model_validate(response) + except (Exception, pydantic.ValidationError) as e: + logger.error(f"Failed to process request: {e}") + return {"success": False, "message": str(e)} + + imdb_id = get_imdbid_from_overseerr(req) + if not imdb_id: + logger.error(f"Failed to get imdb_id from Overseerr: {req.media.tmdbId}") + return {"success": False, "message": "Failed to get imdb_id from Overseerr"} + + overseerr: Overseerr = request.app.program.all_services[Overseerr] + if not overseerr.initialized: + logger.error("Overseerr not initialized") + return {"success": False, "message": "Overseerr not 
initialized"} + + new_item = MediaItem({"imdb_id": imdb_id, "requested_by": "overseerr", "overseerr_id": req.request.request_id}) + request.app.program.em.add_item(new_item, service="Overseerr") + return {"success": True} + + +def get_imdbid_from_overseerr(req: OverseerrWebhook) -> str: + """Get the imdb_id from the Overseerr webhook""" + imdb_id = req.media.imdbId + trakt_api = di[TraktAPI] + if not imdb_id: + try: + _type = req.media.media_type + if _type == "tv": + _type = "show" + imdb_id = trakt_api.get_imdbid_from_tmdb(str(req.media.tmdbId), type=_type) + if not imdb_id or not imdb_id.startswith("tt"): + imdb_id = trakt_api.get_imdbid_from_tvdb(str(req.media.tvdbId), type=_type) + except RequestException: + pass + return imdb_id \ No newline at end of file diff --git a/src/routers/secure/ws.py b/src/routers/secure/ws.py new file mode 100644 index 0000000..5d622ae --- /dev/null +++ b/src/routers/secure/ws.py @@ -0,0 +1,14 @@ +from fastapi import WebSocket +from utils.websockets import manager + +from .default import router + + +@router.websocket("/") +async def websocket_endpoint(websocket: WebSocket): + await manager.connect(websocket) + try: + while True: + await websocket.receive_text() + except Exception: + manager.disconnect(websocket) \ No newline at end of file diff --git a/src/tests/test_alldebrid_downloader.py b/src/tests/test_alldebrid_downloader.py new file mode 100644 index 0000000..e22cca9 --- /dev/null +++ b/src/tests/test_alldebrid_downloader.py @@ -0,0 +1,177 @@ +import json + +import pytest + +from program.services.downloaders import alldebrid +from program.services.downloaders.alldebrid import ( + AllDebridDownloader, + add_torrent, + get_instant_availability, + get_status, + get_torrents, +) +from program.settings.manager import settings_manager as settings + + +@pytest.fixture +def downloader(instant, upload, status, status_all, delete): + """Instance of AllDebridDownloader with API calls mocked""" + # mock API calls + _get = alldebrid.get + def get(url, **params): + match url: + case "user": + return {"data": { "user": { "isPremium": True, "premiumUntil": 1735514599, } } } + case "magnet/instant": + return instant(url, **params) + case "magnet/upload": + return upload(url, **params) + case "magnet/delete": + return delete(url, **params) + case "magnet/status": + if params.get("id", False): + return status(url, **params) + else: + return status_all(url, **params) + case _: + raise Exception("unmatched api call") + alldebrid.get = get + + alldebrid_settings = settings.settings.downloaders.all_debrid + alldebrid_settings.enabled = True + alldebrid_settings.api_key = "key" + + downloader = AllDebridDownloader() + assert downloader.initialized + yield downloader + + # tear down mock + alldebrid.get = get + + +## Downloader tests +def test_process_hashes(downloader): + hashes = downloader.process_hashes(["abc"], None, [False, True]) + assert len(hashes) == 1 + + +def test_download_cached(downloader): + torrent_id = downloader.download_cached({"infohash": "abc"}) + assert torrent_id == MAGNET_ID + + +def test_get_torrent_names(downloader): + names = downloader.get_torrent_names(123) + assert names == ("Ubuntu 24.04", None) + + +## API parsing tests +def test_get_instant_availability(instant): + alldebrid.get = instant + infohashes = [UBUNTU] + availability = get_instant_availability(infohashes) + assert len(availability[0].get("files", [])) == 2 + + +def test_get_instant_availability_unavailable(instant_unavailable): + alldebrid.get = instant_unavailable + infohashes = 
[UBUNTU] + availability = get_instant_availability(infohashes) + assert availability[0]["hash"] == UBUNTU + + +def test_add_torrent(upload): + alldebrid.get = upload + torrent_id = add_torrent(UBUNTU) + assert torrent_id == 251993753 + + +def test_add_torrent_cached(upload_ready): + alldebrid.get = upload_ready + torrent_id = add_torrent(UBUNTU) + assert torrent_id == 251993753 + + +def test_get_status(status): + alldebrid.get = status + torrent_status = get_status(251993753) + assert torrent_status["filename"] == "Ubuntu 24.04" + + +def test_get_status_unfinished(status_downloading): + alldebrid.get = status_downloading + torrent_status = get_status(251993753) + assert torrent_status["status"] == "Downloading" + + +def test_get_torrents(status_all): + alldebrid.get = status_all + torrents = get_torrents() + assert torrents[0]["status"] == "Ready" + + +def test_delete(delete): + alldebrid.get = delete + delete(123) + + +# Example requests - taken from real API calls +UBUNTU = "3648baf850d5930510c1f172b534200ebb5496e6" +MAGNET_ID = "251993753" +@pytest.fixture +def instant(): + """GET /magnet/instant?magnets[0]=infohash (torrent available)""" + with open("src/tests/test_data/alldebrid_magnet_instant.json") as f: + body = json.load(f) + return lambda url, **params: body + +@pytest.fixture +def instant_unavailable(): + """GET /magnet/instant?magnets[0]=infohash (torrent unavailable)""" + with open("src/tests/test_data/alldebrid_magnet_instant_unavailable.json") as f: + body = json.load(f) + return lambda url, **params: body + +@pytest.fixture +def upload(): + """GET /magnet/upload?magnets[]=infohash (torrent not ready yet)""" + with open("src/tests/test_data/alldebrid_magnet_upload_not_ready.json") as f: + body = json.load(f) + return lambda url, **params: body + +@pytest.fixture +def upload_ready(): + """GET /magnet/upload?magnets[]=infohash (torrent ready)""" + with open("src/tests/test_data/alldebrid_magnet_upload_ready.json") as f: + body = json.load(f) + return lambda url, **params: body + +@pytest.fixture +def status(): + """GET /magnet/status?id=123 (debrid links ready)""" + with open("src/tests/test_data/alldebrid_magnet_status_one_ready.json") as f: + body = json.load(f) + return lambda url, **params: body + +@pytest.fixture +def status_downloading(): + """GET /magnet/status?id=123 (debrid links not ready yet)""" + with open("src/tests/test_data/alldebrid_magnet_status_one_downloading.json") as f: + body = json.load(f) + return lambda url, **params: body + +@pytest.fixture +def status_all(): + """GET /magnet/status (gets a list of all links instead of a single object)""" + # The body is the same as a single item, but with all your magnets in a list. 
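+    # i.e. reuse the single-magnet "ready" fixture below and rewrap its magnet entry as
+    # {"status": "success", "data": {"magnets": [<that entry>]}}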
+ with open("src/tests/test_data/alldebrid_magnet_status_one_ready.json") as f: + body = json.load(f) + return lambda url, **params: {"status": "success", "data": {"magnets": [body["data"]["magnets"]]}} + +@pytest.fixture +def delete(): + """GET /delete""" + with open("src/tests/test_data/alldebrid_magnet_delete.json") as f: + body = json.load(f) + return lambda url, **params: body + diff --git a/src/tests/test_cache.sqlite b/src/tests/test_cache.sqlite new file mode 100644 index 0000000000000000000000000000000000000000..c869c8b234b0a777fa91bb67f6d8c8a18296d3b1 GIT binary patch literal 24576 zcmeI&Jx{_w7{Ku>ZyFO`HbyRSfy9^)2UkHahKQip5SW@OXVQp(v=AH}{a|i>AUExi z(x{0pPB#BbTAu4&@A~|D%Y|NDw!J{h+v)t?4dj6+3Cj{kQVJo`;gJiEO)>S%rV{>I zpW>gwwAf!h=ZkM5Tj+~oxA;Qwb=^=z)JDwgmjBfat;#^J`+aTmdKU3cPHoO&GQI_X?_ zw_ 0, "Levenshtein ratio should be greater than 0" \ No newline at end of file diff --git a/src/tests/test_rate_limiting.py b/src/tests/test_rate_limiting.py new file mode 100644 index 0000000..d70a9d5 --- /dev/null +++ b/src/tests/test_rate_limiting.py @@ -0,0 +1,297 @@ +import time +from unittest.mock import patch + +import pytest +import responses +from requests.exceptions import HTTPError + +from program.utils.request import ( + BaseRequestHandler, + HttpMethod, + RateLimitExceeded, + ResponseType, + create_service_session, + get_http_adapter, + get_rate_limit_params, + get_retry_policy, +) + + +@responses.activate +def test_rate_limiter_with_base_request_handler(): + # Setup: Define the URL and rate-limiting parameters + url = "https://api.example.com/endpoint" + rate_limit_params = get_rate_limit_params(per_second=1) # 1 request per second as an example + session = create_service_session(rate_limit_params=rate_limit_params) + + # Initialize the BaseRequestHandler with the rate-limited session + request_handler = BaseRequestHandler(session=session, response_type=ResponseType.DICT, base_url=url) + + for _ in range(3): + responses.add(responses.GET, url, json={"message": "OK"}, status=200) + + for _ in range(5): + responses.add(responses.GET, url, json={"error": "Rate limit exceeded"}, status=429) + + success_count = 0 + rate_limited_count = 0 + + for i in range(8): + try: + # Use BaseRequestHandler's _request method + response_obj = request_handler._request(HttpMethod.GET, "") + print(f"Request {i + 1}: Status {response_obj.status_code} - Success") + success_count += 1 + except RateLimitExceeded as e: + print(f"Request {i + 1}: Rate limit hit - {e}") + rate_limited_count += 1 + except HTTPError as e: + print(f"Request {i + 1}: Failed with error - {e}") + time.sleep(0.1) # Interval shorter than rate limit threshold + + # Assertions + assert success_count == 3, "Expected 3 successful requests before rate limiting" + assert rate_limited_count == 5, "Expected 5 rate-limited requests after threshold exceeded" + + +@responses.activate +def test_successful_requests_within_limit(): + """Test that requests succeed if within the rate limit.""" + url = "https://api.example.com/endpoint" + rate_limit_params = get_rate_limit_params(per_second=2) # 2 requests per second + session = create_service_session(rate_limit_params=rate_limit_params) + request_handler = BaseRequestHandler(session=session, response_type=ResponseType.DICT, base_url=url) + + # Mock responses for the first 2 requests + responses.add(responses.GET, url, json={"message": "OK"}, status=200) + responses.add(responses.GET, url, json={"message": "OK"}, status=200) + + success_count = 0 + + for i in range(2): + response_obj = 
request_handler._request(HttpMethod.GET, "") + print(f"Request {i + 1}: Status {response_obj.status_code} - Success") + success_count += 1 + + assert success_count == 2, "Expected both requests to succeed within the rate limit" + + +@responses.activate +def test_rate_limit_exceeded(): + """Test that requests are blocked after rate limit is reached.""" + url = "https://api.example.com/endpoint" + rate_limit_params = get_rate_limit_params(per_second=1) # 1 request per second + session = create_service_session(rate_limit_params=rate_limit_params) + request_handler = BaseRequestHandler(session=session, response_type=ResponseType.DICT, base_url=url) + + # First request is mocked as 200 OK, subsequent as 429 + responses.add(responses.GET, url, json={"message": "OK"}, status=200) + responses.add(responses.GET, url, json={"error": "Rate limit exceeded"}, status=429) + + # First request should succeed + success_count = 0 + rate_limited_count = 0 + + try: + response_obj = request_handler._request(HttpMethod.GET, "") + print(f"Request 1: Status {response_obj.status_code} - Success") + success_count += 1 + except RateLimitExceeded: + rate_limited_count += 1 + + # Second request should be rate-limited + try: + request_handler._request(HttpMethod.GET, "") + except RateLimitExceeded as e: + print("Request 2: Rate limit hit -", e) + rate_limited_count += 1 + + assert success_count == 1, "Expected the first request to succeed" + assert rate_limited_count == 1, "Expected the second request to be rate-limited" + + +@responses.activate +def test_rate_limit_reset(): + """Test that requests succeed after waiting for the rate limit to reset.""" + url = "https://api.example.com/endpoint" + rate_limit_params = get_rate_limit_params(per_second=1) # 1 request per second + session = create_service_session(rate_limit_params=rate_limit_params) + request_handler = BaseRequestHandler(session=session, response_type=ResponseType.DICT, base_url=url) + + # Mock the first request with 200 OK + responses.add(responses.GET, url, json={"message": "OK"}, status=200) + + # Mock the second request with 429 to simulate rate limit + responses.add(responses.GET, url, json={"error": "Rate limit exceeded"}, status=429) + + # Mock the third request after rate limit reset with 200 OK + responses.add(responses.GET, url, json={"message": "OK"}, status=200) + + success_count = 0 + rate_limited_count = 0 + + # First request should succeed + try: + response_obj = request_handler._request(HttpMethod.GET, "") + print(f"Request 1: Status {response_obj.status_code} - Success") + success_count += 1 + except RateLimitExceeded: + rate_limited_count += 1 + + # Second request immediately should be rate-limited + try: + request_handler._request(HttpMethod.GET, "") + except RateLimitExceeded as e: + print("Request 2: Rate limit hit -", e) + rate_limited_count += 1 + + # Wait for the rate limit to reset, then try again + time.sleep(1.1) + try: + response_obj = request_handler._request(HttpMethod.GET, "") + print(f"Request 3: Status {response_obj.status_code} - Success after reset") + success_count += 1 + except RateLimitExceeded: + rate_limited_count += 1 + + assert success_count == 2, "Expected two successful requests (first and after reset)" + assert rate_limited_count == 1, "Expected one rate-limited request (second request)" + + +def test_direct_rate_limiter(): + """Test the Limiter directly to confirm it enforces rate limiting.""" + from pyrate_limiter import Duration, Limiter, RequestRate + + rate_limits = [] + rate_limits.append(RequestRate(1, 
Duration.SECOND)) + rate_limits.append(RequestRate(60, Duration.MINUTE)) + limiter = Limiter(*rate_limits) # 1 request per second and 60 requests per minute + + success_count = 0 + rate_limited_count = 0 + + # First request should succeed + try: + limiter.try_acquire("test_key") + print("Request 1: Success") + success_count += 1 + except Exception as e: + print("Request 1: Rate limit hit") + rate_limited_count += 1 + + # Additional requests should be rate-limited + for i in range(4): + try: + limiter.try_acquire("test_key") + print(f"Request {i + 2}: Success") + success_count += 1 + except Exception as e: + print(f"Request {i + 2}: Rate limit hit") + rate_limited_count += 1 + time.sleep(0.2) # Short interval to exceed rate limit + + # Assertions + assert success_count == 1, "Expected only one successful request within the rate limit" + assert rate_limited_count >= 1, "Expected at least one rate-limited request after hitting the limit" + + +def test_limiter_session_with_basic_rate_limit(): + """Test a basic LimiterSession that enforces a rate limit of 5 requests per second.""" + rate_limit_params = get_rate_limit_params(per_second=1) + session = create_service_session(rate_limit_params=rate_limit_params) + start = time.time() + request_count = 20 + interval_limit = 5 + buffer_time = 0.8 + + # Store timestamps to analyze intervals + request_timestamps = [] + + # Send 20 requests, observing the time intervals to confirm rate limiting + for i in range(request_count): + response = session.get('https://httpbin.org/get') + current_time = time.time() + request_timestamps.append(current_time) + print(f'[t+{current_time - start:.2f}] Sent request {i + 1} - Status code: {response.status_code}') + + # Check time intervals every 5 requests to confirm rate limiting is applied + if (i + 1) % interval_limit == 0: + elapsed_time = request_timestamps[-1] - request_timestamps[-interval_limit] + assert elapsed_time >= 1 - buffer_time, ( + f"Rate limit exceeded: {interval_limit} requests in {elapsed_time:.2f} seconds" + ) + + # Final assertion to ensure all requests respected the rate limit + total_elapsed_time = request_timestamps[-1] - request_timestamps[0] + expected_min_time = (request_count / interval_limit) - buffer_time + assert total_elapsed_time >= expected_min_time, ( + f"Test failed: Expected at least {expected_min_time:.2f} seconds " + f"for {request_count} requests, got {total_elapsed_time:.2f} seconds" + ) + +@pytest.fixture +def retry_policy(): + return get_retry_policy(retries=5, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504]) + +@pytest.fixture +def connection_pool_params(): + return { + 'pool_connections': 20, + 'pool_maxsize': 50, + 'pool_block': True + } + + +def test_session_adapter_configuration(retry_policy, connection_pool_params): + with patch("program.utils.request.HTTPAdapter") as MockAdapter: + session = create_service_session( + retry_policy=retry_policy, + session_adapter=get_http_adapter( + retry_policy=retry_policy, + pool_connections=connection_pool_params["pool_connections"], + pool_maxsize=connection_pool_params["pool_maxsize"], + pool_block=connection_pool_params["pool_block"] + ) + ) + + MockAdapter.assert_called_with( + max_retries=retry_policy, + **connection_pool_params + ) + + assert session.adapters["http://"] == MockAdapter.return_value + assert session.adapters["https://"] == MockAdapter.return_value + + +def test_session_adapter_pool_configuration_and_request(retry_policy, connection_pool_params): + # Mock an HTTP endpoint to test request functionality + 
url = "https://api.example.com/test" + with responses.RequestsMock() as rsps: + rsps.add(rsps.GET, url, json={"message": "success"}, status=200) + + session = create_service_session( + retry_policy=retry_policy, + session_adapter=get_http_adapter( + retry_policy=retry_policy, + pool_connections=connection_pool_params["pool_connections"], + pool_maxsize=connection_pool_params["pool_maxsize"], + pool_block=connection_pool_params["pool_block"] + ) + ) + + adapter_http = session.adapters["http://"] + adapter_https = session.adapters["https://"] + + assert adapter_http == adapter_https, "HTTP and HTTPS adapters should be the same instance" + assert adapter_http._pool_connections == connection_pool_params["pool_connections"], \ + f"Expected pool_connections to be {connection_pool_params['pool_connections']}, got {adapter_http._pool_connections}" + assert adapter_http._pool_maxsize == connection_pool_params["pool_maxsize"], \ + f"Expected pool_maxsize to be {connection_pool_params['pool_maxsize']}, got {adapter_http._pool_maxsize}" + assert adapter_http._pool_block == connection_pool_params["pool_block"], \ + f"Expected pool_block to be {connection_pool_params['pool_block']}, got {adapter_http._pool_block}" + assert adapter_http.max_retries == retry_policy, \ + f"Expected max_retries to be {retry_policy}, got {adapter_http.max_retries}" + + response = session.get(url) + assert response.status_code == 200 + assert response.json() == {"message": "success"} \ No newline at end of file diff --git a/src/tests/test_requests.py b/src/tests/test_requests.py new file mode 100644 index 0000000..191d1f0 --- /dev/null +++ b/src/tests/test_requests.py @@ -0,0 +1,186 @@ +from types import SimpleNamespace +from unittest.mock import MagicMock + +import pytest +from requests import Session +from requests.exceptions import ConnectTimeout + +from program.utils.request import ( + CachedLimiterSession, + CachedSession, + LimiterSession, + MemoryQueueBucket, + RateLimitExceeded, + RequestException, + Response, + ResponseObject, + _make_request, + create_service_session, + delete, + get, + get_cache_params, + get_rate_limit_params, + ping, + post, + put, +) + + +class TestCodeUnderTest: + def test_create_service_session_default(self): + session = create_service_session() + assert isinstance(session, Session) + + def test_handle_empty_response_content(self, mocker): + mock_response = mocker.Mock() + mock_response.ok = True + mock_response.status_code = 200 + mock_response.content = b"" + mock_response.headers = {"Content-Type": "application/json"} + response_object = ResponseObject(mock_response) + assert response_object.data == {} + + def test_handle_json_response(self, mocker): + mock_session = mocker.MagicMock() + mock_response = MagicMock() + mock_response.content = b'{"key": "value"}' + mock_response.headers = {"Content-Type": "application/json"} + mock_response.ok = True + mock_response.status_code = 200 + mock_session.request.return_value = mock_response + response_object = get(mock_session, "https://example.com") + assert response_object.is_ok is True + assert response_object.status_code == 200 + assert response_object.data.key == "value" + + def test_handle_xml_response(self, mocker): + mock_session = mocker.MagicMock() + mock_response = MagicMock() + mock_response.content = b'value' + mock_response.headers = {"Content-Type": "application/xml"} + mock_response.ok = True + mock_response.status_code = 200 + mock_session.request.return_value = mock_response + response_object = get(mock_session, 
"https://example.com") + assert response_object.is_ok is True + assert response_object.status_code == 200 + assert response_object.data.key.text == "value" + + def test_create_service_session_without_cache_params_raises_error(self): + mock_rate_limit_params = get_rate_limit_params(per_minute=60) + with pytest.raises(ValueError, match="Cache parameters must be provided if use_cache is True."): + create_service_session(rate_limit_params=mock_rate_limit_params, use_cache=True) + + def test_apply_rate_limiting_valid_parameters(self, mocker): + mock_rate_limit_params = {'per_minute': 60, 'bucket_class': MemoryQueueBucket, 'bucket_kwargs': {}} + session = create_service_session(rate_limit_params=mock_rate_limit_params) + assert isinstance(session, LimiterSession) + + def test_apply_caching_valid_parameters(self, mocker): + mock_cache_params = {'cache_name': 'test_cache', 'expire_after': 60} + session = create_service_session(use_cache=True, cache_params=mock_cache_params) + assert isinstance(session, CachedSession) + + def test_apply_rate_limiting_and_caching_valid_parameters(self, mocker): + mock_rate_limit_params = {'per_minute': 60, 'bucket_class': MemoryQueueBucket, 'bucket_kwargs': {}} + mock_cache_params = {'cache_name': 'test_cache', 'expire_after': 60} + session = create_service_session(rate_limit_params=mock_rate_limit_params, use_cache=True, cache_params=mock_cache_params) + assert isinstance(session, CachedLimiterSession) + + def test_make_get_request_valid_response(self, mocker): + url = "https://api.example.com" + expected_response = ResponseObject(SimpleNamespace(ok=True, status_code=200, content={}, headers={})) + mocker.patch('program.utils.request.Session') + session_instance = Session() + mocker.patch('program.utils.request._make_request', return_value=expected_response) + response = get(session_instance, url) + assert response.is_ok is True + assert response.status_code == 200 + + def test_make_post_request_valid_response(self, mocker): + url = "https://api.example.com" + expected_response = ResponseObject(SimpleNamespace(ok=True, status_code=201, content={}, headers={})) + mocker.patch('program.utils.request.Session') + session_instance = Session() + mocker.patch('program.utils.request._make_request', return_value=expected_response) + response = post(session_instance, url) + assert response.is_ok is True + assert response.status_code == 201 + + def test_put_request_valid_response(self, mocker): + mock_response = MagicMock() + mock_response.ok = True + mock_response.status_code = 200 + mock_session = mocker.Mock() + mocker.patch('program.utils.request._make_request', return_value=ResponseObject(mock_response)) + response = put(mock_session, "https://example.com") + assert response.is_ok + assert response.status_code == 200 + + + def test_delete_request_valid_response(self, mocker): + url = "https://example.com" + expected_response = ResponseObject(SimpleNamespace(ok=True, status_code=200, content={}, headers={})) + mocker.patch('program.utils.request._make_request', return_value=expected_response) + mock_session = mocker.Mock() + response = delete(mock_session, url) + assert response.is_ok is True + assert response.status_code == 200 + + def test_handle_unsupported_content_types(self, mocker): + mock_response = mocker.Mock() + mock_response.headers.get.return_value = "unsupported/type" + mock_response.content = b"Unsupported content" + mock_session = mocker.Mock() + mock_session.request.return_value = mock_response + response_object = _make_request(mock_session, "GET", 
"https://example.com") + assert response_object.data == {} + + def test_raise_exceptions_timeout_status_codes(self, mocker): + mock_response = mocker.Mock() + mock_response.ok = False + mock_response.status_code = 504 + mock_session = mocker.Mock() + mock_session.request.return_value = mock_response + with pytest.raises(ConnectTimeout): + _make_request(mock_session, "GET", "https://example.com") + + def test_raise_rate_limit_exceptions(self, mocker): + mock_response = mocker.Mock() + mock_response.ok = False + mock_response.status_code = 429 + mock_response.headers = {"Content-Type": "application/json", "Connection": "keep-alive"} + mocker.patch('program.utils.request.Session.request', return_value=mock_response) + rate_limit_params = get_rate_limit_params(per_second=10, period=1) + cache_params = {'cache_name': 'test_cache', 'expire_after': 60} + session = create_service_session(rate_limit_params=rate_limit_params, use_cache=True, cache_params=cache_params) + with pytest.raises(RateLimitExceeded): + get(session, "https://api.example.com/data") + + def test_raise_client_error_exceptions(self, mocker): + mock_response = mocker.Mock() + mock_response.ok = False + mock_response.status_code = 400 + mock_response.headers = {"Content-Type": "application/json", "Connection": "keep-alive"} + mocker.patch('program.utils.request.Session.request', return_value=mock_response) + cache_params = {'cache_name': 'test_cache', 'expire_after': 60} + session = create_service_session(rate_limit_params=None, use_cache=True, cache_params=cache_params) + with pytest.raises(RequestException): + post(session, "https://api.example.com/data", data={"key": "value"}) + + def test_raise_exceptions_server_error_status_codes(self, mocker): + mocker.patch('program.utils.request._make_request', + side_effect=RequestException("Server error with status 500")) + mock_session = mocker.Mock() + with pytest.raises(RequestException, match="Server error with status 500"): + ping(mock_session, "https://example.com") + + + def test_log_errors_when_parsing_response_content_fails(self, mocker): + mock_logger = mocker.patch('logging.Logger.error') + response = Response() + response._content = b"invalid json content" + response.headers = {"Content-Type": "application/json"} + response.status_code = 200 + ResponseObject(response) + mock_logger.assert_called_with("Failed to parse response content: Expecting value: line 1 column 1 (char 0)", exc_info=True) diff --git a/src/tests/test_settings_migration.py b/src/tests/test_settings_migration.py new file mode 100644 index 0000000..981d9ad --- /dev/null +++ b/src/tests/test_settings_migration.py @@ -0,0 +1,70 @@ +import json +import os +from pathlib import Path + +from program.settings.manager import SettingsManager + +TEST_VERSION = "9.9.9" +DATA_PATH = Path(os.curdir) / "data" + +# Sample old settings data +old_settings_data = { + "version": "0.7.5", + "debug": True, + "log": True, + "force_refresh": False, + "map_metadata": True, + "tracemalloc": False, + "downloaders": { + # "movie_filesize_min": 200, + # "movie_filesize_max": -1, + # "episode_filesize_min": 40, + # "episode_filesize_max": -1, + "real_debrid": { + "enabled": False, + "api_key": "", + "proxy_enabled": False, + "proxy_url": "" + }, + "all_debrid": { + "enabled": True, + "api_key": "12345678", + "proxy_enabled": False, + "proxy_url": "https://no_proxy.com" + }, + "torbox": { + "enabled": False, + "api_key": "" + } + }, +} + + +def test_load_and_migrate_settings(): + temp_settings_file = Path.joinpath(DATA_PATH, 
"settings.json") + version_file = Path.joinpath(DATA_PATH, "VERSION") + + try: + temp_settings_file.write_text(json.dumps(old_settings_data)) + version_file.write_text("9.9.9") + + import program.settings.models + program.settings.manager.data_dir_path = DATA_PATH + program.settings.models.version_file_path = version_file + settings_manager = SettingsManager() + + assert settings_manager.settings.debug is True + assert settings_manager.settings.log is True + assert settings_manager.settings.force_refresh is False + assert settings_manager.settings.map_metadata is True + assert settings_manager.settings.tracemalloc is False + # assert settings_manager.settings.downloaders.movie_filesize_min == 200 + assert settings_manager.settings.downloaders.real_debrid.enabled is False + assert settings_manager.settings.downloaders.all_debrid.enabled is True + assert settings_manager.settings.downloaders.all_debrid.api_key == "12345678" + assert settings_manager.settings.downloaders.all_debrid.proxy_url == "https://no_proxy.com" + assert settings_manager.settings.database.host == "postgresql+psycopg2://postgres:postgres@localhost/riven" + assert settings_manager.settings.version == TEST_VERSION + finally: + temp_settings_file.unlink() + version_file.unlink() \ No newline at end of file diff --git a/src/tests/test_states_processing.py b/src/tests/test_states_processing.py new file mode 100644 index 0000000..a258c34 --- /dev/null +++ b/src/tests/test_states_processing.py @@ -0,0 +1,229 @@ +import pytest + +from program.media.item import Episode, MediaItem, Movie, Season, Show +from program.media.state import States +from program.program import Program +from program.services.downloaders.realdebrid import RealDebridDownloader +from program.services.indexers.trakt import TraktIndexer +from program.services.scrapers import Scraping +from program.services.updaters.plex import PlexUpdater +from program.state_transition import process_event +from program.symlink import Symlinker + + +@pytest.fixture +def movie(): + return Movie({"imdb_id": "tt1375666", "requested_by": "Iceberg"}) + +@pytest.fixture +def show(): + show = Show({"imdb_id": "tt0903747", "requested_by": "Iceberg"}) + season = Season({"number": 1}) + episode = Episode({"number": 1}) + season.add_episode(episode) + show.add_season(season) + return show + +@pytest.fixture +def media_item_movie(): + return MediaItem({"imdb_id": "tt1375666", "requested_by": "Iceberg"}) + +@pytest.fixture +def media_item_show(): + show = MediaItem({"imdb_id": "tt0903747", "requested_by": "Iceberg"}) + season = MediaItem({"number": 1}) + episode = MediaItem({"number": 1}) + season.add_episode(episode) + show.add_season(season) + return show + +@pytest.fixture +def season(show): + return show.seasons[0] + +@pytest.fixture +def episode(season): + return season.episodes[0] + +def test_initial_state(movie, show, season, episode): + """Test that items start in the Unknown state.""" + # Given: A new media item (movie, episode, season, show) + # When: The item is first created + # Then: The item's state should be Unknown + + # As long as we initialize Movies with an imdb_id and requested_by, + # it should end up as Requested. + assert movie.state == States.Requested, "Movie should start in Requested state" + + # Show, Season and Episode are Unknown until they are added to a Show. 
+ assert show.state == States.Unknown, "Show should start in Unknown state" + assert season.state == States.Unknown, "Season should start in Unknown state" + assert episode.state == States.Unknown, "Episode should start in Unknown state" + +def test_requested_state(movie): + """Test transition to the Requested state.""" + # Given: A media item (movie) + movie.set("requested_by", "user") + # When: The item is requested by a user + # Then: The item's state should be Requested + assert movie.state == States.Requested, "Movie should transition to Requested state" + +def test_indexed_state(movie): + """Test transition to the Indexed state.""" + # Given: A media item (movie) + movie.set("title", "Inception") + # When: The item has a title set + # Then: The item's state should be Indexed + assert movie.state == States.Indexed, "Movie should transition to Indexed state" + +def test_scraped_state(episode): + """Test transition to the Scraped state.""" + # Given: A media item (episode) + episode.set("streams", {"source1": {"cached": True}}) + # When: The item has streams available + # Then: The item's state should be Scraped + assert episode.state == States.Scraped, "Episode should transition to Scraped state" + +def test_downloaded_state(episode): + """Test transition to the Downloaded state.""" + # Given: A media item (episode) + episode.set("file", "/path/to/file") + episode.set("folder", "/path/to/folder") + # When: The item has file and folder set + # Then: The item's state should be Downloaded + assert episode.state == States.Downloaded, "Episode should transition to Downloaded state" + +def test_symlinked_state(episode): + """Test transition to the Symlinked state.""" + # Given: A media item (episode) + episode.set("symlinked", True) + # When: The item is symlinked + # Then: The item's state should be Symlinked + assert episode.state == States.Symlinked, "Episode should transition to Symlinked state" + +def test_completed_state(movie): + """Test transition to the Completed state.""" + # Given: A media item (movie) + movie.set("key", "some_key") + movie.set("update_folder", "updated") + # When: The item has a key and update_folder set + # Then: The item's state should be Completed + assert movie.state == States.Completed, "Movie should transition to Completed state" + +def test_show_state_transitions(show): + """Test full state transitions of a show.""" + # Given: A media item (show) + # When: The show has various states set for its episodes and seasons + show.seasons[0].episodes[0].set("file", "/path/to/file") + show.seasons[0].episodes[0].set("folder", "/path/to/folder") + show.seasons[0].episodes[0].set("symlinked", True) + show.seasons[0].episodes[0].set("key", "some_key") + show.seasons[0].episodes[0].set("update_folder", "updated") + + # Then: The show's state should transition based on its episodes and seasons + assert show.state == States.Completed, "Show should transition to Completed state" + +@pytest.mark.parametrize("state, service, next_service", [ + (States.Unknown, Program, TraktIndexer), + # (States.Requested, TraktIndexer, TraktIndexer), + (States.Indexed, TraktIndexer, Scraping), + (States.Scraped, Scraping, RealDebridDownloader), + (States.Downloaded, RealDebridDownloader, Symlinker), + (States.Symlinked, Symlinker, PlexUpdater), + (States.Completed, PlexUpdater, None) +]) +def test_process_event_transitions_movie(state, service, next_service, movie): + """Test processing events for state transitions.""" + # Given: A media item (movie) and a service + movie._determine_state 
= lambda: state # Manually override the state + + # When: The event is processed + updated_item, next_service_result, items_to_submit = process_event(None, service, movie) + + # Then: The next service should be as expected based on the current service + if next_service is None: + assert next_service_result is None, f"Next service should be None for {service}" + else: + assert next_service_result == next_service, f"Next service should be {next_service} for {service}" + + +@pytest.mark.parametrize("state, service, next_service", [ + (States.Unknown, Program, TraktIndexer), + # (States.Requested, TraktIndexer, TraktIndexer), + (States.Indexed, TraktIndexer, Scraping), + (States.Scraped, Scraping, RealDebridDownloader), + (States.Downloaded, RealDebridDownloader, Symlinker), + (States.Symlinked, Symlinker, PlexUpdater), + (States.Completed, PlexUpdater, None) +]) +def test_process_event_transition_shows(state, service, next_service, show): + """Test processing events for state transitions with shows.""" + # Given: A media item (show) and a service + show._determine_state = lambda: state # Manually override the state + + # Ensure the show has seasons and episodes + if not hasattr(show, "seasons"): + show.seasons = [] + for season in show.seasons: + if not hasattr(season, "episodes"): + season.episodes = [] + + # When: The event is processed + updated_item, next_service_result, items_to_submit = process_event(None, service, show) + + # Then: The next service should be as expected based on the current service + if next_service is None: + assert next_service_result is None, f"Next service should be None for {service}" + else: + assert next_service_result == next_service, f"Next service should be {next_service} for {service}" + +# test media item movie +@pytest.mark.parametrize("state, service, next_service", [ + (States.Unknown, Program, TraktIndexer), + # (States.Requested, TraktIndexer, TraktIndexer), + (States.Indexed, TraktIndexer, Scraping), + (States.Scraped, Scraping, RealDebridDownloader), + (States.Downloaded, RealDebridDownloader, Symlinker), + (States.Symlinked, Symlinker, PlexUpdater), + (States.Completed, PlexUpdater, None) +]) +def test_process_event_transitions_media_item_movie(state, service, next_service, media_item_movie): + """Test processing events for state transitions.""" + # Given: A media item (movie) and a service + media_item_movie._determine_state = lambda: state + + # When: The event is processed + updated_item, next_service_result, items_to_submit = process_event(None, service, media_item_movie) + + # Then: The next service should be as expected based on the current service + if next_service is None: + assert next_service_result is None, f"Next service should be None for {service}" + else: + assert next_service_result == next_service, f"Next service should be {next_service} for {service}" + +# test media item show +# @pytest.mark.parametrize("state, service, next_service", [ +# (States.Unknown, Program, TraktIndexer), +# # (States.Requested, TraktIndexer, TraktIndexer), +# (States.Indexed, TraktIndexer, Scraping), +# (States.Scraped, Scraping, Debrid), +# (States.Downloaded, Debrid, Symlinker), +# (States.Symlinked, Symlinker, PlexUpdater), +# (States.Completed, PlexUpdater, None) +# ]) +# def test_process_event_transitions_media_item_show(state, service, next_service, media_item_show): +# """Test processing events for state transitions.""" +# # Given: A media item (movie) and a service +# media_item_show._determine_state = lambda: state + +# # When: The event is 
processed +# updated_item, next_service_result, items_to_submit = process_event(None, service, media_item_show) + +# if next_service is Scraping: +# assert isinstance(updated_item, Show), "Updated item should be of type Show" + +# # Then: The next service should be as expected based on the current service +# if next_service is None: +# assert next_service_result is None, f"Next service should be None for {service}" +# else: +# assert next_service_result == next_service, f"Next service should be {next_service} for {service}" \ No newline at end of file diff --git a/src/tests/test_symlink_creation.py b/src/tests/test_symlink_creation.py new file mode 100644 index 0000000..6fd677d --- /dev/null +++ b/src/tests/test_symlink_creation.py @@ -0,0 +1,222 @@ +import shutil +from datetime import datetime +from pathlib import Path + +import pytest +from loguru import logger +from sqlalchemy import create_engine +from sqlalchemy.engine import URL +from sqlalchemy.orm import declarative_base, sessionmaker + +from program.media.item import Episode, Movie, Season, Show +from program.settings.manager import settings_manager +from program.symlink import Symlinker + +logger.disable("program") # Suppress + +Base = declarative_base() +url = URL.create( + drivername="postgresql", + username="coderpad", + host="/tmp/postgresql/socket", + database="coderpad", +) +engine = create_engine(url) +Session = sessionmaker(bind=engine) + + +@pytest.fixture(scope="module") +def db_session(): + Base.metadata.create_all(engine) + session = Session() + yield session + session.rollback() + session.close() + + +@pytest.fixture(scope="module") +def movie(): + movie = Movie({}) + movie.title = "Riven" + movie.aired_at = datetime(2020, 1, 1) + movie.imdb_id = "tt18278776" + return movie + + +@pytest.fixture(scope="module") +def episode(): + show = Show({}) + show.title = "Big Art" + show.aired_at = datetime(2015, 1, 1) + show.imdb_id = "tt4667710" + + season = Season({}) + season.title = "Season 01" + season.parent = show + season.number = 1 + + episode = Episode({}) + episode.title = "S01E06 Riven with Fire" + episode.parent = season + episode.number = 6 + episode.imdb_id = "tt14496350" + return episode + + +class MockSettings: + def __init__(self, library_path, rclone_path): + self.force_refresh = False + self.symlink = type( + "symlink", + (), + { + "library_path": Path(library_path), + "rclone_path": Path(rclone_path), + "separate_anime_dirs": True, + }, + ) + + +@pytest.fixture +def symlinker(fs): + library_path = "/fake/library" + fs.create_dir(f"{library_path}") + + rclone_path = "/fake/rclone" + fs.create_dir(f"{rclone_path}") + + settings_manager.settings = MockSettings(library_path, rclone_path) + return Symlinker() + + +def test_valid_symlinker(symlinker): + assert symlinker.initialized, "Library should be initialized successfully." + assert symlinker.library_path_movies.exists() + assert symlinker.library_path_shows.exists() + assert symlinker.library_path_anime_movies.exists() + assert symlinker.library_path_anime_shows.exists() + + +def test_invalid_library_structure(fs): + valid_path = "/valid" + invalid_path = "/invalid" + fs.create_dir(invalid_path) + + # Invalid library path + settings_manager.settings = MockSettings(invalid_path, valid_path) + library = Symlinker() + assert ( + not library.initialized + ), "Library should fail initialization with incorrect structure." 
+
+    # invalid rclone path
+    settings_manager.settings = MockSettings(valid_path, invalid_path)
+    library = Symlinker()
+    assert (
+        not library.initialized
+    ), "Library should fail initialization with incorrect structure."
+
+
+def test_symlink_create_invalid_item(symlinker):
+    assert symlinker.symlink(None) is False
+    assert symlinker.symlink(Movie({})) is False
+
+
+def test_symlink_movie(symlinker, movie, fs):
+    def symlink_path(movie: Movie) -> Path:
+        """
+        Simplistic version of Symlinker._create_item_folders
+        """
+        name = symlinker._determine_file_name(movie)
+        return symlinker.library_path_movies / name / (name + ".mkv")
+
+    def symlink_check(target: Path):
+        """
+        Runs symlinker, confirms the movie's symlink is in the right place and points to the real path.
+        """
+        # Create "real" file, run symlinker
+        fs.create_file(target)
+        assert symlinker._symlink(movie) is True
+
+        # Validate the symlink
+        assert Path(movie.symlink_path) == symlink_path(movie)
+        assert Path(movie.symlink_path).is_symlink()
+        assert Path(movie.symlink_path).readlink() == target
+
+        # cleanup: remove and recreate the fake rclone and library dirs between checks
+        shutil.rmtree(symlinker.rclone_path)
+        symlinker.rclone_path.mkdir()
+        shutil.rmtree(symlinker.library_path_movies)
+        symlinker.library_path_movies.mkdir()
+
+    file = f"{movie.title}.mkv"
+
+    movie.folder, movie.alternative_folder, movie.file = (movie.title, "other", file)
+    symlink_check(symlinker.rclone_path / movie.title / file)
+    symlink_check(symlinker.rclone_path / "other" / file)
+    symlink_check(symlinker.rclone_path / file / file)
+    symlink_check(symlinker.rclone_path / file)
+
+    # files in the root
+    movie.folder, movie.alternative_folder, movie.file = (None, None, file)
+    symlink_check(symlinker.rclone_path / file)
+
+
+def test_symlink_episode(symlinker, episode, fs):
+    season_name = "Season %02d" % episode.parent.number
+
+    def symlink_path(episode: Episode) -> Path:
+        """
+        Simplistic version of Symlinker._create_item_folders
+        """
+        show = episode.parent.parent
+        show_name = f"{show.title} ({show.aired_at.year}) {{imdb-{show.imdb_id}}}"
+        episode_name = symlinker._determine_file_name(episode)
+        return (
+            symlinker.library_path_shows
+            / show_name
+            / season_name
+            / (episode_name + ".mkv")
+        )
+
+    def symlink_check(target: Path):
+        """
+        Runs symlinker, confirms the episode's symlink is in the right place and points to the real path.
+        """
+        # Create "real" file, run symlinker
+        fs.create_file(target)
+        assert symlinker._symlink(episode) is True
+
+        # Validate the symlink
+        assert Path(episode.symlink_path) == symlink_path(episode)
+        assert Path(episode.symlink_path).is_symlink()
+        assert Path(episode.symlink_path).readlink() == target
+
+        # cleanup: remove and recreate the fake rclone and library dirs between checks
+        shutil.rmtree(symlinker.rclone_path)
+        symlinker.rclone_path.mkdir()
+        shutil.rmtree(symlinker.library_path_shows)
+        symlinker.library_path_shows.mkdir()
+
+    file = f"{episode.title}.mkv"
+
+    # Common namings
+    episode.folder, episode.alternative_folder, episode.file = (
+        episode.parent.parent.title,
+        "other",
+        file,
+    )
+    # symlink_check(symlinker.rclone_path / episode.parent.parent.title / season_name / file) # Not supported
+    symlink_check(symlinker.rclone_path / episode.parent.parent.title / file)
+    # symlink_check(symlinker.rclone_path / "other" / file)
+    symlink_check(symlinker.rclone_path / file / file)
+    symlink_check(symlinker.rclone_path / file)
+
+    # Somewhat less common: Show Name - Season 01 / file
+    name = str(episode.parent.parent.title + season_name)
+    episode.folder, episode.alternative_folder, episode.file = (name, None, file)
+    symlink_check(symlinker.rclone_path / name / file)
+
+    # Files in the rclone root
+    episode.folder, episode.alternative_folder, episode.file = (None, None, file)
+    symlink_check(symlinker.rclone_path / file)
diff --git a/src/tests/test_symlink_library.py b/src/tests/test_symlink_library.py
new file mode 100644
index 0000000..df376d2
--- /dev/null
+++ b/src/tests/test_symlink_library.py
@@ -0,0 +1,100 @@
+from pathlib import Path
+
+import pytest
+from pyfakefs.fake_filesystem_unittest import Patcher
+
+from program.media.item import Episode, Movie, Season, Show
+from program.media.state import States
+from program.services.libraries.symlink import SymlinkLibrary
+from program.settings.manager import settings_manager
+
+
+class MockSettings:
+    def __init__(self, library_path):
+        self.force_refresh = False
+        self.symlink = type("symlink", (), {
+            "library_path": Path(library_path),
+            "separate_anime_dirs": True,
+        })
+
+@pytest.fixture
+def symlink_library(fs):
+    library_path = "/fake/library"
+    fs.create_dir(f"{library_path}/movies")
+    fs.create_dir(f"{library_path}/shows")
+    fs.create_dir(f"{library_path}/anime_movies")
+    fs.create_dir(f"{library_path}/anime_shows")
+    settings_manager.settings = MockSettings(library_path)
+    return SymlinkLibrary()
+
+
+def test_valid_library_structure(symlink_library):
+    assert symlink_library.initialized, "Library should be initialized successfully."
+
+
+def test_invalid_library_structure(fs):
+    incorrect_path = "/invalid/library"
+    fs.create_dir(incorrect_path)
+    settings_manager.settings = MockSettings(incorrect_path)
+    library = SymlinkLibrary()
+    assert not library.initialized, "Library should fail initialization with incorrect structure."
+
+
+def test_movie_detection(symlink_library):
+    with Patcher() as patcher:
+        fs = patcher.fs
+        movie_path = "/fake/library/movies"
+        fs.create_file(f"{movie_path}/Top Gun (1986) tt0092099.mkv")
+        fs.create_file(f"{movie_path}/The Matrix (1999) tt0133093.mkv")
+        fs.create_file(f"{movie_path}/The Matrix Reloaded (2003) tt0234215.mkv")
+
+        movies = list(symlink_library.run())
+        assert len(movies) == 3, "Should detect 3 movies."
+        assert all(isinstance(movie, Movie) for movie in movies), "Detected objects should be of type Movie."
+ assert all(movie.state == States.Completed for movie in movies), "Detected objects should be in the Completed state." + + +def test_show_detection(symlink_library, fs): + shows_path = "/fake/library/shows" + fs.create_dir(f"{shows_path}/Vikings (2013) tt2306299/Season 01") + fs.create_file(f"{shows_path}/Vikings (2013) tt2306299/Season 01/Vikings (2013) - s01e01 - Rites of Passage.mkv") + fs.create_file(f"{shows_path}/Vikings (2013) tt2306299/Season 01/Vikings (2013) - s01e02 - Wrath of the Northmen.mkv") + fs.create_dir(f"{shows_path}/The Mandalorian (2019) tt8111088/Season 01") + fs.create_file(f"{shows_path}/The Mandalorian (2019) tt8111088/Season 01/The Mandalorian (2019) - s01e01 - Chapter 1.mkv") + fs.create_file(f"{shows_path}/The Mandalorian (2019) tt8111088/Season 01/The Mandalorian (2019) - s01e02 - Chapter 2.mkv") + + shows = list(symlink_library.run()) + assert len(shows) == 2, "Should detect 2 shows." + assert all(isinstance(show, Show) for show in shows), "Detected objects should be of type Show." + assert all(season.state == States.Completed for show in shows for season in show.seasons), "Detected seasons should be in the Completed state." + + +def test_season_detection(symlink_library, fs): + shows_path = "/fake/library/shows" + fs.create_dir(f"{shows_path}/Vikings (2013) tt2306299/Season 01") + fs.create_file(f"{shows_path}/Vikings (2013) tt2306299/Season 01/Vikings (2013) - s01e01 - Rites of Passage.mkv") + + shows = list(symlink_library.run()) + assert len(shows[0].seasons) == 1, "Should detect one season." + assert all(isinstance(season, Season) for season in shows[0].seasons), "Detected objects should be of type Season." + assert all(season.state == States.Completed for season in shows[0].seasons), "Detected objects should be in the Completed state." + + +def test_episode_detection(symlink_library, fs): + shows_path = "/fake/library/shows" + fs.create_dir(f"{shows_path}/Vikings (2013) tt2306299/Season 01") + fs.create_file(f"{shows_path}/Vikings (2013) tt2306299/Season 01/Vikings (2013) - s01e01 - Rites of Passage.mkv") + + shows = list(symlink_library.run()) + assert len(shows[0].seasons[0].episodes) == 1, "Should detect one episode." + assert all(isinstance(episode, Episode) for episode in shows[0].seasons[0].episodes), "Detected objects should be of type Episode." + assert all(episode.state == States.Completed for episode in shows[0].seasons[0].episodes), "Detected objects should be in the Completed state." + + +def test_media_item_creation(symlink_library, fs): + fs.create_file("/fake/library/movies/Top Gun (1986) tt0092099.mkv") + items = list(symlink_library.run()) + assert len(items) == 1, "Should create one media item." + assert items[0].imdb_id == "tt0092099", "Media item should have the correct IMDb ID." + assert isinstance(items[0], Movie), "The created item should be a Movie." + assert items[0].state == States.Completed, "The created item should be in the Completed state."