diff --git a/README.md b/README.md index eadc4c1..40e914f 100644 --- a/README.md +++ b/README.md @@ -99,6 +99,7 @@ services: "Not a Custom Format upgrade for existing", "Not an upgrade for existing" ]' + IGNORED_DOWNLOAD_CLIENTS: ["emulerr"] ## Radarr RADARR_URL: http://radarr:7878 @@ -335,6 +336,13 @@ If it you face issues, please first check the closed issues before opening a new - Recommended values: ["Not a Custom Format upgrade for existing", "Not an upgrade for existing"] - Is Mandatory: No (Defaults to [], which means all messages are failures) +**IGNORED_DOWNLOAD_CLIENTS** + +- If specified, downloads handled by the listed download clients are skipped entirely and are never removed +- Is useful if multiple download clients are used and some of them are known to have slow downloads that recover (and thus should not be subject to the slowness check), while other download clients should be monitored +- Type: List +- Is Mandatory: No (Defaults to [], which means no download clients are skipped) + --- ### **Radarr section** diff --git a/config/config.conf-Example b/config/config.conf-Example index a34995a..b3b75f9 100644 --- a/config/config.conf-Example +++ b/config/config.conf-Example @@ -20,6 +20,7 @@ PERMITTED_ATTEMPTS = 3 NO_STALLED_REMOVAL_QBIT_TAG = Don't Kill IGNORE_PRIVATE_TRACKERS = FALSE FAILED_IMPORT_MESSAGE_PATTERNS = ["Not a Custom Format upgrade for existing", "Not an upgrade for existing"] +IGNORED_DOWNLOAD_CLIENTS = ["emulerr"] [radarr] RADARR_URL = http://radarr:7878 diff --git a/config/definitions.py b/config/definitions.py index 79d5642..f483e23 100644 --- a/config/definitions.py +++ b/config/definitions.py @@ -27,6 +27,7 @@ NO_STALLED_REMOVAL_QBIT_TAG = get_config_value('NO_STALLED_REMOVAL_QBIT_TAG', 'feature_settings', False, str, 'Don\'t Kill') IGNORE_PRIVATE_TRACKERS = get_config_value('IGNORE_PRIVATE_TRACKERS', 'feature_settings', False, bool, True) FAILED_IMPORT_MESSAGE_PATTERNS = get_config_value('FAILED_IMPORT_MESSAGE_PATTERNS','feature_settings', False, 
list, []) +IGNORED_DOWNLOAD_CLIENTS = get_config_value('IGNORED_DOWNLOAD_CLIENTS', 'feature_settings', False, list, []) # Radarr RADARR_URL = get_config_value('RADARR_URL', 'radarr', False, str) diff --git a/src/decluttarr.py b/src/decluttarr.py index aaea68b..3fab741 100644 --- a/src/decluttarr.py +++ b/src/decluttarr.py @@ -58,7 +58,7 @@ async def queueCleaner( logger.verbose("Cleaning queue on %s:", NAME) # Refresh queue: try: - full_queue = await get_queue(BASE_URL, API_KEY, params={full_queue_param: True}) + full_queue = await get_queue(BASE_URL, API_KEY, settingsDict, params={full_queue_param: True}) if full_queue: logger.debug("queueCleaner/full_queue at start:") logger.debug(full_queue) diff --git a/src/jobs/remove_failed.py b/src/jobs/remove_failed.py index ecfdfe8..1b58c90 100644 --- a/src/jobs/remove_failed.py +++ b/src/jobs/remove_failed.py @@ -28,7 +28,7 @@ async def remove_failed( # Detects failed and triggers delete. Does not add to blocklist try: failType = "failed" - queue = await get_queue(BASE_URL, API_KEY) + queue = await get_queue(BASE_URL, API_KEY, settingsDict) logger.debug("remove_failed/queue IN: %s", formattedQueueInfo(queue)) if not queue: diff --git a/src/jobs/remove_failed_imports.py b/src/jobs/remove_failed_imports.py index 6f9f056..858dd0a 100644 --- a/src/jobs/remove_failed_imports.py +++ b/src/jobs/remove_failed_imports.py @@ -18,7 +18,7 @@ async def remove_failed_imports( # Detects downloads stuck downloading meta data and triggers repeat check and subsequent delete. 
Adds to blocklist try: failType = "failed import" - queue = await get_queue(BASE_URL, API_KEY) + queue = await get_queue(BASE_URL, API_KEY, settingsDict) logger.debug("remove_failed_imports/queue IN: %s", formattedQueueInfo(queue)) if not queue: return 0 diff --git a/src/jobs/remove_metadata_missing.py b/src/jobs/remove_metadata_missing.py index 8e08ef8..848f917 100644 --- a/src/jobs/remove_metadata_missing.py +++ b/src/jobs/remove_metadata_missing.py @@ -28,7 +28,7 @@ async def remove_metadata_missing( # Detects downloads stuck downloading meta data and triggers repeat check and subsequent delete. Adds to blocklist try: failType = "missing metadata" - queue = await get_queue(BASE_URL, API_KEY) + queue = await get_queue(BASE_URL, API_KEY, settingsDict) logger.debug("remove_metadata_missing/queue IN: %s", formattedQueueInfo(queue)) if not queue: return 0 diff --git a/src/jobs/remove_missing_files.py b/src/jobs/remove_missing_files.py index 8f09a29..ecd913d 100644 --- a/src/jobs/remove_missing_files.py +++ b/src/jobs/remove_missing_files.py @@ -28,7 +28,7 @@ async def remove_missing_files( # Detects downloads broken because of missing files. Does not add to blocklist try: failType = "missing files" - queue = await get_queue(BASE_URL, API_KEY) + queue = await get_queue(BASE_URL, API_KEY, settingsDict) logger.debug("remove_missing_files/queue IN: %s", formattedQueueInfo(queue)) if not queue: return 0 diff --git a/src/jobs/remove_orphans.py b/src/jobs/remove_orphans.py index a152a42..6cee734 100644 --- a/src/jobs/remove_orphans.py +++ b/src/jobs/remove_orphans.py @@ -28,8 +28,10 @@ async def remove_orphans( # Removes downloads belonging to movies/tv shows that have been deleted in the meantime. 
Does not add to blocklist try: failType = "orphan" - full_queue = await get_queue(BASE_URL, API_KEY, params={full_queue_param: True}) - queue = await get_queue(BASE_URL, API_KEY) + full_queue = await get_queue( + BASE_URL, API_KEY, settingsDict, params={full_queue_param: True} + ) + queue = await get_queue(BASE_URL, API_KEY, settingsDict) logger.debug("remove_orphans/full queue IN: %s", formattedQueueInfo(full_queue)) if not full_queue: return 0 # By now the queue may be empty @@ -63,7 +65,9 @@ async def remove_orphans( logger.debug( "remove_orphans/full queue OUT: %s", formattedQueueInfo( - await get_queue(BASE_URL, API_KEY, params={full_queue_param: True}) + await get_queue( + BASE_URL, API_KEY, settingsDict, params={full_queue_param: True} + ) ), ) return len(affectedItems) diff --git a/src/jobs/remove_slow.py b/src/jobs/remove_slow.py index 29b4341..ad1475b 100644 --- a/src/jobs/remove_slow.py +++ b/src/jobs/remove_slow.py @@ -30,7 +30,7 @@ async def remove_slow( # Detects slow downloads and triggers delete. Adds to blocklist try: failType = "slow" - queue = await get_queue(BASE_URL, API_KEY) + queue = await get_queue(BASE_URL, API_KEY, settingsDict) logger.debug("remove_slow/queue IN: %s", formattedQueueInfo(queue)) if not queue: return 0 diff --git a/src/jobs/remove_stalled.py b/src/jobs/remove_stalled.py index 00e9ac3..dd90427 100644 --- a/src/jobs/remove_stalled.py +++ b/src/jobs/remove_stalled.py @@ -28,7 +28,7 @@ async def remove_stalled( # Detects stalled and triggers repeat check and subsequent delete. 
Adds to blocklist try: failType = "stalled" - queue = await get_queue(BASE_URL, API_KEY) + queue = await get_queue(BASE_URL, API_KEY, settingsDict) logger.debug("remove_stalled/queue IN: %s", formattedQueueInfo(queue)) if not queue: return 0 diff --git a/src/jobs/remove_unmonitored.py b/src/jobs/remove_unmonitored.py index cf7cc72..c3c8bba 100644 --- a/src/jobs/remove_unmonitored.py +++ b/src/jobs/remove_unmonitored.py @@ -29,7 +29,7 @@ async def remove_unmonitored( # Removes downloads belonging to movies/tv shows that are not monitored. Does not add to blocklist try: failType = "unmonitored" - queue = await get_queue(BASE_URL, API_KEY) + queue = await get_queue(BASE_URL, API_KEY, settingsDict) logger.debug("remove_unmonitored/queue IN: %s", formattedQueueInfo(queue)) if not queue: return 0 diff --git a/src/jobs/run_periodic_rescans.py b/src/jobs/run_periodic_rescans.py index a5e0880..cfd04d3 100644 --- a/src/jobs/run_periodic_rescans.py +++ b/src/jobs/run_periodic_rescans.py @@ -23,7 +23,7 @@ async def run_periodic_rescans( if not arr_type in settingsDict["RUN_PERIODIC_RESCANS"]: return try: - queue = await get_queue(BASE_URL, API_KEY) + queue = await get_queue(BASE_URL, API_KEY, settingsDict) check_on_endpoint = [] RESCAN_SETTINGS = settingsDict["RUN_PERIODIC_RESCANS"][arr_type] if RESCAN_SETTINGS["MISSING"]: diff --git a/src/utils/loadScripts.py b/src/utils/loadScripts.py index 4599a0d..77eeef5 100644 --- a/src/utils/loadScripts.py +++ b/src/utils/loadScripts.py @@ -28,6 +28,7 @@ async def getArrInstanceName(settingsDict, arrApp): settingsDict[arrApp + '_NAME'] = arrApp.title() return settingsDict + async def getProtectedAndPrivateFromQbit(settingsDict): # Returns two lists containing the hashes of Qbit that are either protected by tag, or are private trackers (if IGNORE_PRIVATE_TRACKERS is true) protectedDownloadIDs = [] @@ -101,7 +102,8 @@ def showSettings(settingsDict): if settingsDict['QBITTORRENT_URL']: logger.info('Downloads with this tag will be skipped: 
\"%s\"', settingsDict['NO_STALLED_REMOVAL_QBIT_TAG']) logger.info('Private Trackers will be skipped: %s', settingsDict['IGNORE_PRIVATE_TRACKERS']) - + if settingsDict['IGNORED_DOWNLOAD_CLIENTS']: + logger.info('Download clients skipped: %s',", ".join(settingsDict['IGNORED_DOWNLOAD_CLIENTS'])) logger.info('') logger.info('*** Configured Instances ***') diff --git a/src/utils/shared.py b/src/utils/shared.py index 470eef4..29bd31c 100644 --- a/src/utils/shared.py +++ b/src/utils/shared.py @@ -22,7 +22,7 @@ async def get_arr_records(BASE_URL, API_KEY, params={}, end_point=""): return records["records"] -async def get_queue(BASE_URL, API_KEY, params={}): +async def get_queue(BASE_URL, API_KEY, settingsDict, params={}): # Refreshes and retrieves the current queue await rest_post( url=BASE_URL + "/command", @@ -31,6 +31,7 @@ async def get_queue(BASE_URL, API_KEY, params={}): ) queue = await get_arr_records(BASE_URL, API_KEY, params=params, end_point="queue") queue = filterOutDelayedQueueItems(queue) + queue = filterOutIgnoredDownloadClients(queue, settingsDict) return queue @@ -59,6 +60,28 @@ def filterOutDelayedQueueItems(queue): return filtered_queue +def filterOutIgnoredDownloadClients(queue, settingsDict): + """ + Filters out queue items whose download client is listed in IGNORED_DOWNLOAD_CLIENTS. 
+ """ + if queue is None: + return queue + filtered_queue = [] + + for queue_item in queue: + download_client = queue_item.get("downloadClient", "Unknown client") + if download_client in settingsDict["IGNORED_DOWNLOAD_CLIENTS"]: + logger.debug( + ">>> Queue item ignored due to ignored download client: %s (Download Client: %s)", + queue_item["title"], + download_client, + ) + else: + filtered_queue.append(queue_item) + + return filtered_queue + + def privateTrackerCheck(settingsDict, affectedItems, failType, privateDowloadIDs): # Ignores private tracker items (if setting is turned on) for affectedItem in reversed(affectedItems): @@ -154,7 +177,7 @@ async def execute_checks( ) # Exit Logs if settingsDict["LOG_LEVEL"] == "DEBUG": - queue = await get_queue(BASE_URL, API_KEY) + queue = await get_queue(BASE_URL, API_KEY, settingsDict) logger.debug( "execute_checks/queue OUT (failType: %s): %s", failType, @@ -318,7 +341,7 @@ def errorDetails(NAME, error): NAME, fname, exc_tb.tb_lineno, - traceback.format_exc() + traceback.format_exc(), ) return @@ -352,8 +375,10 @@ def formattedQueueInfo(queue): errorDetails("formattedQueueInfo", error) logger.debug("formattedQueueInfo/queue for debug: %s", str(queue)) if isinstance(error, KeyError): - logger.debug("formattedQueueInfo/queue_item with error for debug: %s", queue_item) - + logger.debug( + "formattedQueueInfo/queue_item with error for debug: %s", queue_item + ) + return "error"