Fixing problems with dict + dict issue // adding compatibility for delay profiles // code cleanup #70

Merged
merged 6 commits on Mar 30, 2024
1 change: 1 addition & 0 deletions .github/workflows/dev.yml
@@ -73,6 +73,7 @@ jobs:
--build-arg SHORT_COMMIT_ID=$SHORT_COMMIT_ID \
--push \
# "annotations": { "org.opencontainers.image.description": "DESCRIPTION" }

# - name: "Delete untagged versions"
# uses: actions/delete-package-versions@v4
4 changes: 2 additions & 2 deletions config/config.py
@@ -135,8 +135,8 @@ def get_config_value(key, config_section, is_mandatory, datatype, default_value
if QBITTORRENT_URL: QBITTORRENT_URL += '/api/v2'

########### Add Variables to Dictionary
-settings_dict = {}
+settingsDict = {}
for var_name in dir():
if var_name.isupper():
-        settings_dict[var_name] = locals()[var_name]
+        settingsDict[var_name] = locals()[var_name]

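The renamed settingsDict is filled by scanning the module namespace: every ALL_CAPS name defined above this point is treated as a config constant and copied into the dictionary. A minimal, self-contained sketch of the pattern (the two constants are hypothetical stand-ins for the real config values):

```python
# Sketch of the uppercase-locals harvesting pattern used in config/config.py.
# RADARR_URL and REMOVE_TIMER are hypothetical stand-ins, not the real config.
RADARR_URL = 'http://localhost:7878'
REMOVE_TIMER = 10

settingsDict = {}
for var_name in dir():           # all names defined so far at module level
    if var_name.isupper():       # convention: config constants are ALL_CAPS
        settingsDict[var_name] = locals()[var_name]

print(settingsDict)  # {'RADARR_URL': 'http://localhost:7878', 'REMOVE_TIMER': 10}
```

One consequence of this approach is that any stray uppercase variable in scope is silently swept into the settings, which is why the module keeps its namespace small.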
233 changes: 47 additions & 186 deletions main.py

Large diffs are not rendered by default.

76 changes: 38 additions & 38 deletions src/decluttarr.py
@@ -2,42 +2,42 @@
import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__)
from src.utils.shared import (errorDetails, get_queue)
-from src.remove_failed import remove_failed
-from src.remove_metadata_missing import remove_metadata_missing
-from src.remove_missing_files import remove_missing_files
-from src.remove_orphans import remove_orphans
-from src.remove_slow import remove_slow
-from src.remove_stalled import remove_stalled
-from src.remove_unmonitored import remove_unmonitored
+from src.jobs.remove_failed import remove_failed
+from src.jobs.remove_metadata_missing import remove_metadata_missing
+from src.jobs.remove_missing_files import remove_missing_files
+from src.jobs.remove_orphans import remove_orphans
+from src.jobs.remove_slow import remove_slow
+from src.jobs.remove_stalled import remove_stalled
+from src.jobs.remove_unmonitored import remove_unmonitored

class Deleted_Downloads:
# Keeps track of which downloads have already been deleted (to not double-delete)
def __init__(self, dict):
self.dict = dict


-async def queueCleaner(settings_dict, arr_type, defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs):
+async def queueCleaner(settingsDict, arr_type, defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs):
# Read out correct instance depending on radarr/sonarr flag
run_dict = {}
-    if arr_type == 'radarr':
-        BASE_URL = settings_dict['RADARR_URL']
-        API_KEY = settings_dict['RADARR_KEY']
-        NAME = settings_dict['RADARR_NAME']
+    if arr_type == 'RADARR':
+        BASE_URL = settingsDict['RADARR_URL']
+        API_KEY = settingsDict['RADARR_KEY']
+        NAME = settingsDict['RADARR_NAME']
full_queue_param = 'includeUnknownMovieItems'
-    elif arr_type == 'sonarr':
-        BASE_URL = settings_dict['SONARR_URL']
-        API_KEY = settings_dict['SONARR_KEY']
-        NAME = settings_dict['SONARR_NAME']
+    elif arr_type == 'SONARR':
+        BASE_URL = settingsDict['SONARR_URL']
+        API_KEY = settingsDict['SONARR_KEY']
+        NAME = settingsDict['SONARR_NAME']
full_queue_param = 'includeUnknownSeriesItems'
-    elif arr_type == 'lidarr':
-        BASE_URL = settings_dict['LIDARR_URL']
-        API_KEY = settings_dict['LIDARR_KEY']
-        NAME = settings_dict['LIDARR_NAME']
+    elif arr_type == 'LIDARR':
+        BASE_URL = settingsDict['LIDARR_URL']
+        API_KEY = settingsDict['LIDARR_KEY']
+        NAME = settingsDict['LIDARR_NAME']
full_queue_param = 'includeUnknownArtistItems'
-    elif arr_type == 'readarr':
-        BASE_URL = settings_dict['READARR_URL']
-        API_KEY = settings_dict['READARR_KEY']
-        NAME = settings_dict['READARR_NAME']
+    elif arr_type == 'READARR':
+        BASE_URL = settingsDict['READARR_URL']
+        API_KEY = settingsDict['READARR_KEY']
+        NAME = settingsDict['READARR_NAME']
full_queue_param = 'includeUnknownAuthorItems'
else:
logger.error('Unknown arr_type specified, exiting: %s', str(arr_type))
@@ -54,26 +54,26 @@ async def queueCleaner(settings_dict, arr_type, defective_tracker, download_size
deleted_downloads = Deleted_Downloads([])
items_detected = 0
try:
-        if settings_dict['REMOVE_FAILED']:
-            items_detected += await remove_failed( settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)
+        if settingsDict['REMOVE_FAILED']:
+            items_detected += await remove_failed( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)

-        if settings_dict['REMOVE_STALLED']:
-            items_detected += await remove_stalled( settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)
+        if settingsDict['REMOVE_STALLED']:
+            items_detected += await remove_stalled( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)

-        if settings_dict['REMOVE_METADATA_MISSING']:
-            items_detected += await remove_metadata_missing( settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)
+        if settingsDict['REMOVE_METADATA_MISSING']:
+            items_detected += await remove_metadata_missing( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)

-        if settings_dict['REMOVE_ORPHANS']:
-            items_detected += await remove_orphans( settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, full_queue_param)
+        if settingsDict['REMOVE_ORPHANS']:
+            items_detected += await remove_orphans( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, full_queue_param)

-        if settings_dict['REMOVE_UNMONITORED']:
-            items_detected += await remove_unmonitored( settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, arr_type)
+        if settingsDict['REMOVE_UNMONITORED']:
+            items_detected += await remove_unmonitored( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, arr_type)

-        if settings_dict['REMOVE_MISSING_FILES']:
-            items_detected += await remove_missing_files( settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)
+        if settingsDict['REMOVE_MISSING_FILES']:
+            items_detected += await remove_missing_files( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)

-        if settings_dict['REMOVE_SLOW']:
-            items_detected += await remove_slow( settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, download_sizes_tracker)
+        if settingsDict['REMOVE_SLOW']:
+            items_detected += await remove_slow( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, download_sizes_tracker)

if items_detected == 0:
logger.verbose('>>> Queue is clean.')
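Note that the arr_type comparisons are now against uppercase strings ('RADARR' rather than 'radarr'), so the caller in main.py must pass the type in uppercase. Since the settings keys all follow the <ARR_TYPE>_URL / _KEY / _NAME pattern, the if/elif chain above is equivalent to a simple lookup; a hedged sketch (resolve_instance is a hypothetical helper, not part of this PR):

```python
# Illustrative only: equivalent lookup for the if/elif dispatch in queueCleaner.
# The full_queue_param values are taken verbatim from the diff above.
FULL_QUEUE_PARAMS = {
    'RADARR':  'includeUnknownMovieItems',
    'SONARR':  'includeUnknownSeriesItems',
    'LIDARR':  'includeUnknownArtistItems',
    'READARR': 'includeUnknownAuthorItems',
}

def resolve_instance(settingsDict, arr_type):
    # Unknown types fail fast, mirroring the logger.error branch above
    if arr_type not in FULL_QUEUE_PARAMS:
        raise ValueError(f'Unknown arr_type specified: {arr_type}')
    return (settingsDict[arr_type + '_URL'],
            settingsDict[arr_type + '_KEY'],
            settingsDict[arr_type + '_NAME'],
            FULL_QUEUE_PARAMS[arr_type])
```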
4 changes: 2 additions & 2 deletions src/remove_failed.py → src/jobs/remove_failed.py
@@ -3,7 +3,7 @@
import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__)

-async def remove_failed(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
+async def remove_failed(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
# Detects failed and triggers delete. Does not add to blocklist
try:
failType = 'failed'
@@ -16,7 +16,7 @@ async def remove_failed(settings_dict, BASE_URL, API_KEY, NAME, deleted_download
if 'errorMessage' in queueItem and 'status' in queueItem:
if queueItem['status'] == 'failed':
affectedItems.append(queueItem)
-        affectedItems = await execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
+        affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
addToBlocklist = False,
doPrivateTrackerCheck = True,
doProtectedDownloadCheck = True,
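Each of the remove_* jobs moved into src/jobs/ follows the same shape: fetch the queue, filter items on status/errorMessage, then hand the matches to execute_checks with per-job flags. A hedged sketch of that skeleton (the execute_checks call mirrors the diffs; the get_queue usage and the 'records' queue format are simplified assumptions, and the flag list is abridged):

```python
# Hedged sketch of the shared remove_* job shape; not a drop-in implementation.
async def remove_example(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads,
                         defective_tracker, protectedDownloadIDs, privateDowloadIDs):
    failType = 'example'
    queue = await get_queue(BASE_URL, API_KEY)   # assumed helper from src.utils.shared
    if not queue:
        return 0
    # Job-specific filter: here, plainly failed items (as in remove_failed)
    affectedItems = [item for item in queue['records']
                     if item.get('status') == 'failed']
    affectedItems = await execute_checks(settingsDict, affectedItems, failType,
                                         BASE_URL, API_KEY, NAME, deleted_downloads,
                                         defective_tracker, privateDowloadIDs,
                                         protectedDownloadIDs,
                                         addToBlocklist=False,
                                         doPrivateTrackerCheck=True,
                                         doProtectedDownloadCheck=True)
    return len(affectedItems)
```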
src/remove_metadata_missing.py → src/jobs/remove_metadata_missing.py
@@ -3,7 +3,7 @@
import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__)

-async def remove_metadata_missing(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
+async def remove_metadata_missing(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
# Detects downloads stuck downloading meta data and triggers repeat check and subsequent delete. Adds to blocklist
try:
failType = 'missing metadata'
@@ -16,7 +16,7 @@ async def remove_metadata_missing(settings_dict, BASE_URL, API_KEY, NAME, delete
if 'errorMessage' in queueItem and 'status' in queueItem:
if queueItem['status'] == 'queued' and queueItem['errorMessage'] == 'qBittorrent is downloading metadata':
affectedItems.append(queueItem)
-        affectedItems = await execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
+        affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
addToBlocklist = True,
doPrivateTrackerCheck = True,
doProtectedDownloadCheck = True,
src/remove_missing_files.py → src/jobs/remove_missing_files.py
@@ -3,7 +3,7 @@
import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__)

-async def remove_missing_files(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
+async def remove_missing_files(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
# Detects downloads broken because of missing files. Does not add to blocklist
try:
failType = 'missing files'
@@ -18,7 +18,7 @@ async def remove_missing_files(settings_dict, BASE_URL, API_KEY, NAME, deleted_d
(queueItem['errorMessage'] == 'DownloadClientQbittorrentTorrentStateMissingFiles' or
queueItem['errorMessage'] == 'The download is missing files')):
affectedItems.append(queueItem)
-        affectedItems = await execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
+        affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
addToBlocklist = False,
doPrivateTrackerCheck = True,
doProtectedDownloadCheck = True,
4 changes: 2 additions & 2 deletions src/remove_orphans.py → src/jobs/remove_orphans.py
@@ -3,7 +3,7 @@
import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__)

-async def remove_orphans(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, full_queue_param):
+async def remove_orphans(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, full_queue_param):
# Removes downloads belonging to movies/tv shows that have been deleted in the meantime. Does not add to blocklist
try:
failType = 'orphan'
@@ -22,7 +22,7 @@ async def remove_orphans(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloa
if queueItem['id'] not in queueIDs:
affectedItems.append(queueItem)

-        affectedItems = await execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
+        affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
addToBlocklist = False,
doPrivateTrackerCheck = True,
doProtectedDownloadCheck = True,
18 changes: 9 additions & 9 deletions src/remove_slow.py → src/jobs/remove_slow.py
@@ -3,7 +3,7 @@
import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__)

-async def remove_slow(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, download_sizes_tracker):
+async def remove_slow(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, download_sizes_tracker):
# Detects slow downloads and triggers delete. Adds to blocklist
try:
failType = 'slow'
@@ -18,17 +18,17 @@ async def remove_slow(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads,
if queueItem['downloadId'] not in alreadyCheckedDownloadIDs:
alreadyCheckedDownloadIDs.append(queueItem['downloadId']) # One downloadId may occur in multiple queueItems - only check once for all of them per iteration
# determine if the downloaded bit on average between this and the last iteration is greater than the min threshold
-                downloadedSize, previousSize, increment, speed = await getDownloadedSize(settings_dict, queueItem, download_sizes_tracker, NAME)
+                downloadedSize, previousSize, increment, speed = await getDownloadedSize(settingsDict, queueItem, download_sizes_tracker, NAME)
if queueItem['status'] == 'downloading' and \
queueItem['downloadId'] in download_sizes_tracker.dict and \
speed is not None:
-                    if speed < settings_dict['MIN_DOWNLOAD_SPEED']:
+                    if speed < settingsDict['MIN_DOWNLOAD_SPEED']:
affectedItems.append(queueItem)
logger.debug('remove_slow/slow speed detected: %s (Speed: %d KB/s, KB now: %s, KB previous: %s, Diff: %s, In Minutes: %s', \
-                            queueItem['title'], speed, downloadedSize, previousSize, increment, settings_dict['REMOVE_TIMER'])
+                            queueItem['title'], speed, downloadedSize, previousSize, increment, settingsDict['REMOVE_TIMER'])


-        affectedItems = await execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
+        affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
addToBlocklist = True,
doPrivateTrackerCheck = True,
doProtectedDownloadCheck = True,
@@ -39,20 +39,20 @@ async def remove_slow(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads,
return 0

from src.utils.rest import (rest_get)
-async def getDownloadedSize(settings_dict, queueItem, download_sizes_tracker, NAME):
+async def getDownloadedSize(settingsDict, queueItem, download_sizes_tracker, NAME):
try:
# Determines the speed of download
# Since Sonarr/Radarr do not update the downloadedSize in realtime, fetch it directly from qBit where possible
-        if settings_dict['QBITTORRENT_URL'] and queueItem['downloadClient'] == 'qBittorrent':
-            qbitInfo = await rest_get(settings_dict['QBITTORRENT_URL']+'/torrents/info',params={'hashes': queueItem['downloadId']}, cookies=settings_dict['QBIT_COOKIE'] )
+        if settingsDict['QBITTORRENT_URL'] and queueItem['downloadClient'] == 'qBittorrent':
+            qbitInfo = await rest_get(settingsDict['QBITTORRENT_URL']+'/torrents/info',params={'hashes': queueItem['downloadId']}, cookies=settingsDict['QBIT_COOKIE'] )
downloadedSize = qbitInfo[0]['completed']
else:
logger.debug('getDownloadedSize/WARN: Using imprecise method to determine download increments because no direct qBIT query is possible')
downloadedSize = queueItem['size'] - queueItem['sizeleft']
if queueItem['downloadId'] in download_sizes_tracker.dict:
previousSize = download_sizes_tracker.dict.get(queueItem['downloadId'])
increment = downloadedSize - previousSize
-            speed = round(increment / 1000 / (settings_dict['REMOVE_TIMER'] * 60),1)
+            speed = round(increment / 1000 / (settingsDict['REMOVE_TIMER'] * 60),1)
else:
previousSize = None
increment = None
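The speed computation above converts a byte increment over one check interval into KB/s: increment / 1000 gives kilobytes, and REMOVE_TIMER * 60 gives the interval in seconds. A worked example, assuming REMOVE_TIMER = 10 and an assumed increment:

```python
# Worked example of the speed formula in getDownloadedSize (values assumed).
increment = 30_000_000   # bytes downloaded since the previous check
REMOVE_TIMER = 10        # check interval in minutes
speed = round(increment / 1000 / (REMOVE_TIMER * 60), 1)
print(speed)             # 50.0 KB/s; flagged as slow if below MIN_DOWNLOAD_SPEED
```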
4 changes: 2 additions & 2 deletions src/remove_stalled.py → src/jobs/remove_stalled.py
@@ -3,7 +3,7 @@
import logging, verboselogs
logger = verboselogs.VerboseLogger(__name__)

-async def remove_stalled(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
+async def remove_stalled(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
# Detects stalled and triggers repeat check and subsequent delete. Adds to blocklist
try:
failType = 'stalled'
@@ -16,7 +16,7 @@ async def remove_stalled(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloa
if 'errorMessage' in queueItem and 'status' in queueItem:
if queueItem['status'] == 'warning' and queueItem['errorMessage'] == 'The download is stalled with no connections':
affectedItems.append(queueItem)
-        affectedItems = await execute_checks(settings_dict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
+        affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
addToBlocklist = True,
doPrivateTrackerCheck = True,
doProtectedDownloadCheck = True,