Skip to content

Commit

Permalink
Merge pull request #202 from ManiMatter/ignore-download-clients
Browse files Browse the repository at this point in the history
Added "Ignore download clients" feature
  • Loading branch information
ManiMatter authored Dec 4, 2024
2 parents c156d4c + 0310bdd commit 6cc8e58
Show file tree
Hide file tree
Showing 15 changed files with 59 additions and 18 deletions.
8 changes: 8 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,7 @@ services:
"Not a Custom Format upgrade for existing",
"Not an upgrade for existing"
]'
IGNORED_DOWNLOAD_CLIENTS: ["emulerr"]

## Radarr
RADARR_URL: http://radarr:7878
Expand Down Expand Up @@ -335,6 +336,13 @@ If you face issues, please first check the closed issues before opening a new
- Recommended values: ["Not a Custom Format upgrade for existing", "Not an upgrade for existing"]
- Is Mandatory: No (Defaults to [], which means all messages are failures)

**IGNORED_DOWNLOAD_CLIENTS**

- If specified, downloads from the listed download clients are skipped entirely and never removed
- Useful when multiple download clients are in use and some of them are known to have slow downloads that eventually recover (and thus should not be subject to the slowness check), while the other download clients should still be monitored
- Type: List
- Is Mandatory: No (Defaults to [], which means no download clients are skipped)

---

### **Radarr section**
Expand Down
1 change: 1 addition & 0 deletions config/config.conf-Example
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ PERMITTED_ATTEMPTS = 3
NO_STALLED_REMOVAL_QBIT_TAG = Don't Kill
IGNORE_PRIVATE_TRACKERS = FALSE
FAILED_IMPORT_MESSAGE_PATTERNS = ["Not a Custom Format upgrade for existing", "Not an upgrade for existing"]
IGNORED_DOWNLOAD_CLIENTS = ["emulerr"]

[radarr]
RADARR_URL = http://radarr:7878
Expand Down
1 change: 1 addition & 0 deletions config/definitions.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
NO_STALLED_REMOVAL_QBIT_TAG = get_config_value('NO_STALLED_REMOVAL_QBIT_TAG', 'feature_settings', False, str, 'Don\'t Kill')
IGNORE_PRIVATE_TRACKERS = get_config_value('IGNORE_PRIVATE_TRACKERS', 'feature_settings', False, bool, True)
FAILED_IMPORT_MESSAGE_PATTERNS = get_config_value('FAILED_IMPORT_MESSAGE_PATTERNS','feature_settings', False, list, [])
IGNORED_DOWNLOAD_CLIENTS = get_config_value('IGNORED_DOWNLOAD_CLIENTS', 'feature_settings', False, list, [])

# Radarr
RADARR_URL = get_config_value('RADARR_URL', 'radarr', False, str)
Expand Down
2 changes: 1 addition & 1 deletion src/decluttarr.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ async def queueCleaner(
logger.verbose("Cleaning queue on %s:", NAME)
# Refresh queue:
try:
full_queue = await get_queue(BASE_URL, API_KEY, params={full_queue_param: True})
full_queue = await get_queue(BASE_URL, API_KEY, settingsDict, params={full_queue_param: True})
if full_queue:
logger.debug("queueCleaner/full_queue at start:")
logger.debug(full_queue)
Expand Down
2 changes: 1 addition & 1 deletion src/jobs/remove_failed.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ async def remove_failed(
# Detects failed and triggers delete. Does not add to blocklist
try:
failType = "failed"
queue = await get_queue(BASE_URL, API_KEY)
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
logger.debug("remove_failed/queue IN: %s", formattedQueueInfo(queue))

if not queue:
Expand Down
2 changes: 1 addition & 1 deletion src/jobs/remove_failed_imports.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ async def remove_failed_imports(
# Detects downloads stuck downloading meta data and triggers repeat check and subsequent delete. Adds to blocklist
try:
failType = "failed import"
queue = await get_queue(BASE_URL, API_KEY)
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
logger.debug("remove_failed_imports/queue IN: %s", formattedQueueInfo(queue))
if not queue:
return 0
Expand Down
2 changes: 1 addition & 1 deletion src/jobs/remove_metadata_missing.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ async def remove_metadata_missing(
# Detects downloads stuck downloading meta data and triggers repeat check and subsequent delete. Adds to blocklist
try:
failType = "missing metadata"
queue = await get_queue(BASE_URL, API_KEY)
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
logger.debug("remove_metadata_missing/queue IN: %s", formattedQueueInfo(queue))
if not queue:
return 0
Expand Down
2 changes: 1 addition & 1 deletion src/jobs/remove_missing_files.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ async def remove_missing_files(
# Detects downloads broken because of missing files. Does not add to blocklist
try:
failType = "missing files"
queue = await get_queue(BASE_URL, API_KEY)
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
logger.debug("remove_missing_files/queue IN: %s", formattedQueueInfo(queue))
if not queue:
return 0
Expand Down
10 changes: 7 additions & 3 deletions src/jobs/remove_orphans.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,10 @@ async def remove_orphans(
# Removes downloads belonging to movies/tv shows that have been deleted in the meantime. Does not add to blocklist
try:
failType = "orphan"
full_queue = await get_queue(BASE_URL, API_KEY, params={full_queue_param: True})
queue = await get_queue(BASE_URL, API_KEY)
full_queue = await get_queue(
BASE_URL, API_KEY, settingsDict, params={full_queue_param: True}
)
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
logger.debug("remove_orphans/full queue IN: %s", formattedQueueInfo(full_queue))
if not full_queue:
return 0 # By now the queue may be empty
Expand Down Expand Up @@ -63,7 +65,9 @@ async def remove_orphans(
logger.debug(
"remove_orphans/full queue OUT: %s",
formattedQueueInfo(
await get_queue(BASE_URL, API_KEY, params={full_queue_param: True})
await get_queue(
BASE_URL, API_KEY, settingsDict, params={full_queue_param: True}
)
),
)
return len(affectedItems)
Expand Down
2 changes: 1 addition & 1 deletion src/jobs/remove_slow.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ async def remove_slow(
# Detects slow downloads and triggers delete. Adds to blocklist
try:
failType = "slow"
queue = await get_queue(BASE_URL, API_KEY)
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
logger.debug("remove_slow/queue IN: %s", formattedQueueInfo(queue))
if not queue:
return 0
Expand Down
2 changes: 1 addition & 1 deletion src/jobs/remove_stalled.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ async def remove_stalled(
# Detects stalled and triggers repeat check and subsequent delete. Adds to blocklist
try:
failType = "stalled"
queue = await get_queue(BASE_URL, API_KEY)
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
logger.debug("remove_stalled/queue IN: %s", formattedQueueInfo(queue))
if not queue:
return 0
Expand Down
2 changes: 1 addition & 1 deletion src/jobs/remove_unmonitored.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ async def remove_unmonitored(
# Removes downloads belonging to movies/tv shows that are not monitored. Does not add to blocklist
try:
failType = "unmonitored"
queue = await get_queue(BASE_URL, API_KEY)
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
logger.debug("remove_unmonitored/queue IN: %s", formattedQueueInfo(queue))
if not queue:
return 0
Expand Down
2 changes: 1 addition & 1 deletion src/jobs/run_periodic_rescans.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ async def run_periodic_rescans(
if not arr_type in settingsDict["RUN_PERIODIC_RESCANS"]:
return
try:
queue = await get_queue(BASE_URL, API_KEY)
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
check_on_endpoint = []
RESCAN_SETTINGS = settingsDict["RUN_PERIODIC_RESCANS"][arr_type]
if RESCAN_SETTINGS["MISSING"]:
Expand Down
4 changes: 3 additions & 1 deletion src/utils/loadScripts.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ async def getArrInstanceName(settingsDict, arrApp):
settingsDict[arrApp + '_NAME'] = arrApp.title()
return settingsDict


async def getProtectedAndPrivateFromQbit(settingsDict):
# Returns two lists containing the hashes of Qbit that are either protected by tag, or are private trackers (if IGNORE_PRIVATE_TRACKERS is true)
protectedDownloadIDs = []
Expand Down Expand Up @@ -101,7 +102,8 @@ def showSettings(settingsDict):
if settingsDict['QBITTORRENT_URL']:
logger.info('Downloads with this tag will be skipped: \"%s\"', settingsDict['NO_STALLED_REMOVAL_QBIT_TAG'])
logger.info('Private Trackers will be skipped: %s', settingsDict['IGNORE_PRIVATE_TRACKERS'])

if settingsDict['IGNORED_DOWNLOAD_CLIENTS']:
logger.info('Download clients skipped: %s',", ".join(settingsDict['IGNORED_DOWNLOAD_CLIENTS']))
logger.info('')
logger.info('*** Configured Instances ***')

Expand Down
35 changes: 30 additions & 5 deletions src/utils/shared.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ async def get_arr_records(BASE_URL, API_KEY, params={}, end_point=""):
return records["records"]


async def get_queue(BASE_URL, API_KEY, params={}):
async def get_queue(BASE_URL, API_KEY, settingsDict, params={}):
# Refreshes and retrieves the current queue
await rest_post(
url=BASE_URL + "/command",
Expand All @@ -31,6 +31,7 @@ async def get_queue(BASE_URL, API_KEY, params={}):
)
queue = await get_arr_records(BASE_URL, API_KEY, params=params, end_point="queue")
queue = filterOutDelayedQueueItems(queue)
queue = filterOutIgnoredDownloadClients(queue, settingsDict)
return queue


Expand Down Expand Up @@ -59,6 +60,28 @@ def filterOutDelayedQueueItems(queue):
return filtered_queue


def filterOutIgnoredDownloadClients(queue, settingsDict):
    """
    Filters out queue items whose download client is listed in IGNORED_DOWNLOAD_CLIENTS.

    Parameters:
        queue (list | None): Queue records as returned by the *arr queue endpoint.
            Each record is expected to be a dict that may contain the keys
            "downloadClient" and "title".
        settingsDict (dict): Global settings; the "IGNORED_DOWNLOAD_CLIENTS" entry
            (a list of client names) is the only key read here.

    Returns:
        list | None: The queue without items belonging to ignored download clients.
        Returns the input unchanged when it is None or when no clients are ignored.
    """
    if queue is None:
        return queue

    # Defensive .get(): the config layer defaults this to [], but don't crash if absent
    ignored_clients = settingsDict.get("IGNORED_DOWNLOAD_CLIENTS", [])
    if not ignored_clients:
        # Common case: nothing to ignore, so skip the per-item scan entirely
        return queue

    filtered_queue = []
    for queue_item in queue:
        download_client = queue_item.get("downloadClient", "Unknown client")
        if download_client in ignored_clients:
            logger.debug(
                ">>> Queue item ignored due to ignored download client: %s (Download Client: %s)",
                # .get() keeps the debug log safe on malformed items (consistent
                # with the downloadClient lookup above)
                queue_item.get("title", "Unknown title"),
                download_client,
            )
        else:
            filtered_queue.append(queue_item)

    return filtered_queue


def privateTrackerCheck(settingsDict, affectedItems, failType, privateDowloadIDs):
# Ignores private tracker items (if setting is turned on)
for affectedItem in reversed(affectedItems):
Expand Down Expand Up @@ -154,7 +177,7 @@ async def execute_checks(
)
# Exit Logs
if settingsDict["LOG_LEVEL"] == "DEBUG":
queue = await get_queue(BASE_URL, API_KEY)
queue = await get_queue(BASE_URL, API_KEY, settingsDict)
logger.debug(
"execute_checks/queue OUT (failType: %s): %s",
failType,
Expand Down Expand Up @@ -318,7 +341,7 @@ def errorDetails(NAME, error):
NAME,
fname,
exc_tb.tb_lineno,
traceback.format_exc()
traceback.format_exc(),
)
return

Expand Down Expand Up @@ -352,8 +375,10 @@ def formattedQueueInfo(queue):
errorDetails("formattedQueueInfo", error)
logger.debug("formattedQueueInfo/queue for debug: %s", str(queue))
if isinstance(error, KeyError):
logger.debug("formattedQueueInfo/queue_item with error for debug: %s", queue_item)

logger.debug(
"formattedQueueInfo/queue_item with error for debug: %s", queue_item
)

return "error"


Expand Down

0 comments on commit 6cc8e58

Please sign in to comment.