From d99f8a91a0c7ad333f49d6b3d5f9e8e0b8c828a6 Mon Sep 17 00:00:00 2001 From: Benjamin Harder Date: Tue, 20 Aug 2024 23:28:20 +0200 Subject: [PATCH] New feature: Skipping files that have less than 100% availability --- README.md | 11 ++++ config/config.conf-Example | 1 + config/definitions.py | 3 +- pyproject.toml | 1 + src/decluttarr.py | 93 ++++++---------------------- src/jobs/remove_unavailable_files.py | 50 +++++++++++++++ src/utils/loadScripts.py | 1 + src/utils/main.py | 77 ----------------------- 8 files changed, 86 insertions(+), 151 deletions(-) create mode 100644 src/jobs/remove_unavailable_files.py delete mode 100644 src/utils/main.py diff --git a/README.md b/README.md index b702fd6..1568a7d 100644 --- a/README.md +++ b/README.md @@ -21,6 +21,7 @@ Feature overview: - Automatically delete slow downloads, after they have been found to be slow multiple times in a row (& trigger download from another source) - Automatically delete downloads belonging to radarr/sonarr/etc. items that are unmonitored - Automatically delete downloads that failed importing since they are not a format upgrade (i.e. a better version is already present) +- Automatically set files to not download if they are not 100% available (missing peers) You may run this locally by launching main.py, or by pulling the docker image. You can find a sample docker-compose.yml in the docker folder. 
@@ -72,6 +73,7 @@ services: - REMOVE_SLOW=True - REMOVE_STALLED=True - REMOVE_UNMONITORED=True + - REMOVE_UNAVAILABLE_FILES=True - MIN_DOWNLOAD_SPEED=100 - PERMITTED_ATTEMPTS=3 - NO_STALLED_REMOVAL_QBIT_TAG=Don't Kill @@ -212,6 +214,15 @@ Steers which type of cleaning is applied to the downloads queue - Permissible Values: True, False - Is Mandatory: No (Defaults to False) +**REMOVE_UNAVAILABLE_FILES** +- Steers whether files within torrents are marked as 'not download' if they have less than 100% availability +- The overall download is not removed and will complete for the other files +- After import, the *arr app will trigger a search for the files that were not downloaded +- Note that this is only supported when qBittorrent is configured in decluttarr +- Type: Boolean +- Permissible Values: True, False +- Is Mandatory: No (Defaults to False) + **MIN_DOWNLOAD_SPEED** - Sets the minimum download speed for active downloads - If the increase in the downloaded file size of a download is less than this value between two consecutive checks, the download is considered slow and is removed if happening more ofthen than the permitted attempts diff --git a/config/config.conf-Example b/config/config.conf-Example index 80a2296..0bb5223 100644 --- a/config/config.conf-Example +++ b/config/config.conf-Example @@ -12,6 +12,7 @@ REMOVE_ORPHANS = True REMOVE_SLOW = True REMOVE_STALLED = True REMOVE_UNMONITORED = True +REMOVE_UNAVAILABLE_FILES = True MIN_DOWNLOAD_SPEED = 100 PERMITTED_ATTEMPTS = 3 NO_STALLED_REMOVAL_QBIT_TAG = Don't Kill diff --git a/config/definitions.py b/config/definitions.py index 3c54354..2438394 100644 --- a/config/definitions.py +++ b/config/definitions.py @@ -17,7 +17,8 @@ REMOVE_ORPHANS = get_config_value('REMOVE_ORPHANS' , 'features', False, bool, False) REMOVE_SLOW = get_config_value('REMOVE_SLOW' , 'features', False, bool, False) REMOVE_STALLED = get_config_value('REMOVE_STALLED', 'features', False, bool, False) -REMOVE_UNMONITORED = 
get_config_value('REMOVE_UNMONITORED' , 'features', False, bool, False) +REMOVE_UNMONITORED = get_config_value('REMOVE_UNMONITORED', 'features', False, bool, False) +REMOVE_UNAVAILABLE_FILES = get_config_value('REMOVE_UNAVAILABLE_FILES', 'features', False, bool, False) MIN_DOWNLOAD_SPEED = get_config_value('MIN_DOWNLOAD_SPEED', 'features', False, int, 0) PERMITTED_ATTEMPTS = get_config_value('PERMITTED_ATTEMPTS', 'features', False, int, 3) NO_STALLED_REMOVAL_QBIT_TAG = get_config_value('NO_STALLED_REMOVAL_QBIT_TAG', 'features', False, str, 'Don\'t Kill') diff --git a/pyproject.toml b/pyproject.toml index 1acbc47..36240bf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,5 +4,6 @@ exclude = ''' /(\.venv|venv|\.git|\.mypy_cache|\.pytest_cache|\.tox|build|dist)/ # Exclude virtual environments, caches, build directories | .*definitions\.py$ # Exclude specific files (e.g., definitions.py) | .*loadScripts\.py$ # Exclude loadScripts.py + | .*decluttarr\.py$ # Exclude decluttarr.py ) ''' \ No newline at end of file diff --git a/src/decluttarr.py b/src/decluttarr.py index 90934ce..80f1552 100644 --- a/src/decluttarr.py +++ b/src/decluttarr.py @@ -11,6 +11,7 @@ from src.jobs.remove_slow import remove_slow from src.jobs.remove_stalled import remove_stalled from src.jobs.remove_unmonitored import remove_unmonitored +from src.jobs.remove_unavailable_files import remove_unavailable_files from src.utils.trackers import Deleted_Downloads @@ -54,12 +55,12 @@ async def queueCleaner( sys.exit() # Cleans up the downloads queue - logger.verbose("Cleaning queue on %s:", NAME) + logger.verbose('Cleaning queue on %s:', NAME) # Refresh queue: - full_queue = await get_queue(BASE_URL, API_KEY, params={full_queue_param: True}) - if not full_queue: - logger.verbose(">>> Queue is empty.") + full_queue = await get_queue(BASE_URL, API_KEY, params = {full_queue_param: True}) + if not full_queue: + logger.verbose('>>> Queue is empty.') return else: logger.debug("queueCleaner/full_queue at 
start:") @@ -67,104 +68,50 @@ async def queueCleaner( deleted_downloads = Deleted_Downloads([]) items_detected = 0 - try: - if settingsDict["REMOVE_FAILED"]: + try: + if settingsDict['REMOVE_UNAVAILABLE_FILES']: + await remove_unavailable_files( + settingsDict, BASE_URL, API_KEY, NAME, protectedDownloadIDs, privateDowloadIDs, arr_type + ) + + if settingsDict['REMOVE_FAILED']: items_detected += await remove_failed( - settingsDict, - BASE_URL, - API_KEY, - NAME, - deleted_downloads, - defective_tracker, - protectedDownloadIDs, - privateDowloadIDs, + settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs ) if settingsDict["REMOVE_FAILED_IMPORTS"]: items_detected += await remove_failed_imports( - settingsDict, - BASE_URL, - API_KEY, - NAME, - deleted_downloads, - defective_tracker, - protectedDownloadIDs, - privateDowloadIDs, + settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs ) if settingsDict["REMOVE_METADATA_MISSING"]: items_detected += await remove_metadata_missing( - settingsDict, - BASE_URL, - API_KEY, - NAME, - deleted_downloads, - defective_tracker, - protectedDownloadIDs, - privateDowloadIDs, + settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs ) if settingsDict["REMOVE_MISSING_FILES"]: items_detected += await remove_missing_files( - settingsDict, - BASE_URL, - API_KEY, - NAME, - deleted_downloads, - defective_tracker, - protectedDownloadIDs, - privateDowloadIDs, + settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs ) if settingsDict["REMOVE_ORPHANS"]: items_detected += await remove_orphans( - settingsDict, - BASE_URL, - API_KEY, - NAME, - deleted_downloads, - defective_tracker, - protectedDownloadIDs, - privateDowloadIDs, - full_queue_param, + settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, 
defective_tracker, protectedDownloadIDs, privateDowloadIDs, full_queue_param ) if settingsDict["REMOVE_SLOW"]: items_detected += await remove_slow( - settingsDict, - BASE_URL, - API_KEY, - NAME, - deleted_downloads, - defective_tracker, - protectedDownloadIDs, - privateDowloadIDs, - download_sizes_tracker, + settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, download_sizes_tracker ) if settingsDict["REMOVE_STALLED"]: items_detected += await remove_stalled( - settingsDict, - BASE_URL, - API_KEY, - NAME, - deleted_downloads, - defective_tracker, - protectedDownloadIDs, - privateDowloadIDs, + settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs ) if settingsDict["REMOVE_UNMONITORED"]: items_detected += await remove_unmonitored( - settingsDict, - BASE_URL, - API_KEY, - NAME, - deleted_downloads, - defective_tracker, - protectedDownloadIDs, - privateDowloadIDs, - arr_type, + settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, arr_type ) if items_detected == 0: diff --git a/src/jobs/remove_unavailable_files.py b/src/jobs/remove_unavailable_files.py new file mode 100644 index 0000000..4a40fc1 --- /dev/null +++ b/src/jobs/remove_unavailable_files.py @@ -0,0 +1,50 @@ +from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, privateTrackerCheck, protectedDownloadCheck, execute_checks, permittedAttemptsCheck, remove_download, qBitOffline) +import sys, os, traceback +import logging, verboselogs +logger = verboselogs.VerboseLogger(__name__) +from src.utils.rest import rest_get, rest_post + + +async def remove_unavailable_files(settingsDict, BASE_URL, API_KEY, NAME, protectedDownloadIDs, privateDowloadIDs, arr_type): + # Checks if downloads have less than 100% availability and marks the underyling files that cause it as 'do not download' + # Only works in qbit + try: 
+ failType = '<100% availability' + queue = await get_queue(BASE_URL, API_KEY) + logger.debug('remove_unavailable_files/queue IN: %s', formattedQueueInfo(queue)) + if not queue: return 0 + if await qBitOffline(settingsDict, failType, NAME): return + # Find items affected + + qbitHashes = list(set(queueItem['downloadId'].upper() for queueItem in queue['records'])) + + # Remove private and protected trackers + if settingsDict['IGNORE_PRIVATE_TRACKERS']: + for qbitHash in reversed(qbitHashes): + if qbitHash in privateDowloadIDs: + qbitHashes.remove(qbitHash) + + if protectedDownloadIDs: + for qbitHash in reversed(qbitHashes): + if qbitHash in protectedDownloadIDs: + qbitHashes.remove(qbitHash) + + qbitItems = await rest_get(settingsDict['QBITTORRENT_URL']+'/torrents/info',params={'hashes': ('|').join(qbitHashes)}, cookies=settingsDict['QBIT_COOKIE']) + + for qbitItem in qbitItems: + if 'state' in qbitItem and 'availability' in qbitItem: + if qbitItem['state'] == 'downloading' and qbitItem['availability'] < 1: + logger.info('>>> Detected %s: %s', failType, qbitItem['name']) + logger.verbose('>>>>> Marking following files to "not download":') + qbitItemFiles = await rest_get(settingsDict['QBITTORRENT_URL']+'/torrents/files',params={'hash': qbitItem['hash']}, cookies=settingsDict['QBIT_COOKIE']) + for qbitItemFile in qbitItemFiles: + if all(key in qbitItemFile for key in ['availability', 'progress', 'priority', 'index', 'name']): + if qbitItemFile['availability'] < 1 and qbitItemFile['progress'] < 1 and qbitItemFile['priority'] != 0: + logger.verbose('>>>>> %s', qbitItemFile['name'].split('/')[-1]) + if not settingsDict['TEST_RUN']: + await rest_post(url=settingsDict['QBITTORRENT_URL']+'/torrents/filePrio', data={'hash': qbitItem['hash'].lower(), 'id': qbitItemFile['index'], 'priority': 0}, cookies=settingsDict['QBIT_COOKIE']) + + except Exception as error: + errorDetails(NAME, error) + return + diff --git a/src/utils/loadScripts.py 
b/src/utils/loadScripts.py index a7bb1e1..f0f579f 100644 --- a/src/utils/loadScripts.py +++ b/src/utils/loadScripts.py @@ -82,6 +82,7 @@ def showSettings(settingsDict): logger.info('%s | Removing slow downloads (%s)', str(settingsDict['REMOVE_SLOW']), 'REMOVE_SLOW') logger.info('%s | Removing stalled downloads (%s)', str(settingsDict['REMOVE_STALLED']), 'REMOVE_STALLED') logger.info('%s | Removing downloads belonging to unmonitored items (%s)', str(settingsDict['REMOVE_UNMONITORED']), 'REMOVE_UNMONITORED') + logger.info('%s | Cancelling files with <100%% availability (%s)', str(settingsDict['REMOVE_UNAVAILABLE_FILES']), 'REMOVE_UNAVAILABLE_FILES') logger.info('') logger.info('Running every: %s', fmt.format(rd(minutes=settingsDict['REMOVE_TIMER']))) if settingsDict['REMOVE_SLOW']: diff --git a/src/utils/main.py b/src/utils/main.py deleted file mode 100644 index 053fccf..0000000 --- a/src/utils/main.py +++ /dev/null @@ -1,77 +0,0 @@ -# Import Libraries -import asyncio -import logging, verboselogs -logger = verboselogs.VerboseLogger(__name__) -import json -# Import Functions -from config.definitions import settingsDict -from src.utils.loadScripts import * -from src.decluttarr import queueCleaner -from src.utils.rest import rest_get, rest_post -from src.utils.trackers import Defective_Tracker, Download_Sizes_Tracker - -# Hide SSL Verification Warnings -if settingsDict['SSL_VERIFICATION']==False: - import warnings - warnings.filterwarnings("ignore", message="Unverified HTTPS request") - -# Set up logging -setLoggingFormat(settingsDict) - -# Main function -async def main(settingsDict): -# Adds to settings Dict the instances that are actually configures - settingsDict['INSTANCES'] = [] - for arrApplication in settingsDict['SUPPORTED_ARR_APPS']: - if settingsDict[arrApplication + '_URL']: - settingsDict['INSTANCES'].append(arrApplication) - - # Pre-populates the dictionaries (in classes) that track the items that were already caught as having problems or removed - 
defectiveTrackingInstances = {} - for instance in settingsDict['INSTANCES']: - defectiveTrackingInstances[instance] = {} - defective_tracker = Defective_Tracker(defectiveTrackingInstances) - download_sizes_tracker = Download_Sizes_Tracker({}) - - # Get name of arr-instances - for instance in settingsDict['INSTANCES']: - settingsDict = await getArrInstanceName(settingsDict, instance) - - # Check outdated - upgradeChecks(settingsDict) - - # Welcome Message - showWelcome() - - # Current Settings - showSettings(settingsDict) - - # Check Minimum Version and if instances are reachable and retrieve qbit cookie - settingsDict = await instanceChecks(settingsDict) - - # Create qBit protection tag if not existing - await createQbitProtectionTag(settingsDict) - - # Show Logger Level - showLoggerLevel(settingsDict) - - # Start Cleaning - while True: - logger.verbose('-' * 50) - # Cache protected (via Tag) and private torrents - protectedDownloadIDs, privateDowloadIDs = await getProtectedAndPrivateFromQbit(settingsDict) - - # Run script for each instance - for instance in settingsDict['INSTANCES']: - await queueCleaner(settingsDict, instance, defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs) - logger.verbose('') - logger.verbose('Queue clean-up complete!') - - # Wait for the next run - await asyncio.sleep(settingsDict['REMOVE_TIMER']*60) - return - -if __name__ == '__main__': - asyncio.run(main(settingsDict)) - -