From 513974c7edf850ae3765d3c3afca8f75585a4bdb Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 09:55:31 -0800
Subject: [PATCH 01/37] add patch creation to the image repos

---
 .github/workflows/patch.yml | 45 +++++++++++++++++++++++++++++++++++++
 1 file changed, 45 insertions(+)
 create mode 100644 .github/workflows/patch.yml

diff --git a/.github/workflows/patch.yml b/.github/workflows/patch.yml
new file mode 100644
index 0000000..cd9c984
--- /dev/null
+++ b/.github/workflows/patch.yml
@@ -0,0 +1,45 @@
+name: Docker Latest Release
+
+on:
+  push:
+    branches:
+      - '**patch-*' # Trigger on patch branches
+  delete:
+    branches:
+      - '**patch-*' # Delete only patch branches
+
+jobs:
+  docker-latest:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout the repo
+        uses: actions/checkout@v4
+
+      - name: Login to GitHub Container Registry
+        run: echo "${{ secrets.GH_TOKEN }}" | docker login ghcr.io -u ${{ secrets.GH_USERNAME }} --password-stdin
+
+      - name: Build and push to GitHub Container Registry
+        run: |
+          docker build --build-arg BRANCH=$(git rev-parse --abbrev-ref HEAD) -t ghcr.io/${{ secrets.GH_USERNAME }}/userscripts:${{ github.ref }} .
+          docker push ghcr.io/${{ secrets.GH_USERNAME }}/userscripts:${{ github.ref }}
+
+      - name: Login to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_TOKEN }}
+
+      - name: Build and push to Docker Hub
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          file: ./Dockerfile
+          push: true
+          tags: ${{ secrets.DOCKER_USERNAME }}/userscripts:${{ github.ref }}
+
+      - name: Remove container from remote repository if branch deleted
+        if: startsWith(github.ref, 'refs/heads/') && github.event_name == 'delete' && startsWith(github.ref, 'refs/heads/patch-')
+        run: |
+          docker rmi ghcr.io/${{ secrets.GH_USERNAME }}/userscripts:${{ github.ref }} || true
+          docker rmi ${{ secrets.DOCKER_USERNAME }}/userscripts:${{ github.ref }} || true

From d437de3a9e367bdb9a1288eea081a04c3476b71d Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 09:55:43 -0800
Subject: [PATCH 02/37] update: logger will rotate fresh each run

---
 util/logger.py | 41 +++++++++++++++++------------------------
 1 file changed, 17 insertions(+), 24 deletions(-)

diff --git a/util/logger.py b/util/logger.py
index f8c9275..6517b73 100755
--- a/util/logger.py
+++ b/util/logger.py
@@ -1,24 +1,18 @@
 import os
 import time
 import logging
-import logging.handlers
 import pathlib
 from util.utility import is_docker
-import logging
-import logging.handlers
-import os
-import pathlib
-import time
+from logging.handlers import RotatingFileHandler
 
-# Get the parent directory of the script file
-
-def setup_logger(log_level, script_name):
+def setup_logger(log_level, script_name, max_logs=9):
     """
     Setup the logger.
 
     Parameters:
         log_level (str): The log level to use
         script_name (str): The name of the script
+        max_logs (int): Maximum number of log files to keep
 
     Returns:
         A logger object for logging messages.
@@ -42,7 +36,17 @@ def setup_logger(log_level, script_name):
     today = time.strftime("%Y-%m-%d")
 
     # Define the log file path with the current date
-    log_file = f"{log_dir}/{script_name}_{today}.log"
+    log_file = f"{log_dir}/{script_name}.log"
+
+    # Check if log file already exists
+    if os.path.isfile(log_file):
+        # Rename existing log files and rotate logs
+        for i in range(max_logs - 1, 0, -1):
+            old_log = f"{log_dir}/{script_name}.log.{i}"
+            new_log = f"{log_dir}/{script_name}.log.{i + 1}"
+            if os.path.exists(old_log):
+                os.rename(old_log, new_log)
+        os.rename(log_file, f"{log_dir}/{script_name}.log.1")
 
     # Create a logger object with the script name
     logger = logging.getLogger(script_name)
@@ -63,8 +67,8 @@ def setup_logger(log_level, script_name):
     # Define the log message format
     formatter = logging.Formatter(fmt='%(asctime)s %(levelname)s: %(message)s', datefmt='%I:%M %p')
 
-    # Create a TimedRotatingFileHandler for log files
-    handler = logging.handlers.TimedRotatingFileHandler(log_file, when='midnight', interval=1, backupCount=3)
+    # Create a RotatingFileHandler for log files
+    handler = RotatingFileHandler(log_file, delay=True, mode="w", backupCount=max_logs)
     handler.setFormatter(formatter)
 
     # Add the file handler to the logger
@@ -82,15 +86,4 @@ def setup_logger(log_level, script_name):
     # Add the console handler to the logger
     logger.addHandler(console_handler)
 
-    # Remove older log files, keeping only the latest 3 log files
-    log_files = [f for f in os.listdir(log_dir) if os.path.isfile(os.path.join(log_dir, f)) and f.startswith(f"{script_name}_")]
-    log_files.sort(key=lambda x: os.path.getmtime(os.path.join(log_dir, x)), reverse=True)
-    for file in log_files[3:]:
-        os.remove(os.path.join(log_dir, file))
-
-    # Overwrite previous logger if exists
-    logging.getLogger(script_name).handlers.clear()
-    logging.getLogger(script_name).addHandler(handler)
-    logging.getLogger(script_name).addHandler(console_handler)
-
-    return logger
\ No newline at end of file
+    return logger
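A quick usage sketch of the rotation behavior this patch introduces (assuming util/logger.py from the patch is importable; the script name is a hypothetical placeholder). Each call starts a fresh <script_name>.log and shifts older runs to <script_name>.log.1 through <script_name>.log.9 before the oldest is dropped:

    from util.logger import setup_logger

    # "my_script" is a placeholder name, not one of the repo's real modules
    logger = setup_logger("INFO", "my_script")
    logger.info("This run begins with a fresh my_script.log")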
From cfbe8c8f7b671b734a9c659a3b530ed0c5741b11 Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 09:55:59 -0800
Subject: [PATCH 03/37] Add: combine like functions from scripts

---
 util/utility.py | 87 ++++++++++++++++++++++++++++++++++++++++++++++---
 1 file changed, 83 insertions(+), 4 deletions(-)

diff --git a/util/utility.py b/util/utility.py
index c0d59b4..f600269 100755
--- a/util/utility.py
+++ b/util/utility.py
@@ -20,6 +20,8 @@
 illegal_chars_regex = re.compile(r'[<>:"/\\|?*\x00-\x1f]+')
 # Regex to extract the year from parentheses in the title
 year_regex = re.compile(r"\s?\((\d{4})\).*")
+# Regex to extract the year from parentheses in the folder name
+folder_year_regex = re.compile(r"(.*)\s\((\d{4})\)")
 # Regex to remove special characters from the title
 remove_special_chars = re.compile(r'[^a-zA-Z0-9\s]+')
 # Season number regex
@@ -76,6 +78,9 @@ def normalize_file_names(file_name):
     # Remove trailing whitespaces
     file_name = file_name.rstrip()
 
+    # Remove leading whitespaces
+    file_name = file_name.lstrip()
+
     # Replace '&' with 'and'
     file_name = file_name.replace('&', 'and')
 
@@ -113,18 +118,21 @@ def normalize_titles(title):
 
     # Convert special characters to ASCII equivalent
     normalized_title = unidecode(html.unescape(normalized_title))
-    
+
     # Remove trailing whitespaces
     normalized_title = normalized_title.rstrip()
+
+    # Remove leading whitespaces
+    normalized_title = normalized_title.lstrip()
 
     # Replace '&' with 'and'
     normalized_title = normalized_title.replace('&', 'and')
-    
+
     # Remove special characters using regex
     normalized_title = re.sub(remove_special_chars, '', normalized_title).lower()
 
     # Remove spaces in the title
-    normalized_title = normalized_title.replace(' ', '')
+    normalized_title = normalized_title.replace(' ', ' ')
 
     return normalized_title
@@ -780,4 +788,75 @@ def sort_assets(assets_list):
         else:
             assets_dict['movies'].append(item)
 
-    return assets_dict
\ No newline at end of file
+    return assets_dict
+
+def compare_strings(string1, string2):
+    """
+    Compare two strings for equality
+
+    Args:
+        string1 (str): The first string to compare
+        string2 (str): The second string to compare
+
+    Returns:
+        bool: True if the strings are equal, False otherwise
+    """
+    string1 = re.sub(r'\W+', '', string1)
+    string2 = re.sub(r'\W+', '', string2)
+
+    return string1.lower() == string2.lower()
+
+def is_match(asset, media):
+    """
+    Check if the asset matches the media
+
+    Args:
+        asset (dict): The asset to check
+        media (dict): The media to check
+
+    Returns:
+        bool: True if the asset matches the media, False otherwise
+    """
+    no_prefix = asset.get('no_prefix', [])
+    no_suffix = asset.get('no_suffix', [])
+    no_prefix_normalized = asset.get('no_prefix_normalized', [])
+    no_suffix_normalized = asset.get('no_suffix_normalized', [])
+    alternate_titles = media.get('alternate_titles', [])
+    normalized_alternate_titles = media.get('normalized_alternate_titles', [])
+    secondary_year = media.get('secondary_year', None)
+    original_title = media.get('original_title', None)
+    folder = media.get('folder', None)
+    folder_title = None
+    folder_year = None
+    normalized_folder_title = None
+    if folder:
+        folder_base_name = os.path.basename(folder)
+        match = re.search(folder_year_regex, folder_base_name)
+        if match:
+            folder_title, folder_year = match.groups()
+            folder_year = int(folder_year)
+            normalized_folder_title = normalize_titles(folder_title)
+
+    # Matching criteria for media and asset
+    if (
+        asset['title'] == media['title'] or
+        asset['normalized_title'] == media['normalized_title'] or
+        asset['title'] in alternate_titles or
+        asset['normalized_title'] in normalized_alternate_titles or
+        asset['title'] == original_title or
+        asset['title'] == folder_title or
+        asset['normalized_title'] == normalized_folder_title or
+        media['title'] in no_prefix or
+        media['title'] in no_suffix or
+        (media['normalized_title'] in no_prefix_normalized) or
+        (media['normalized_title'] in no_suffix_normalized) or
+        compare_strings(asset['title'], media['title']) or
+        compare_strings(asset['normalized_title'], media['normalized_title'])
+    ) and (
+        asset['year'] == media['year'] or
+        asset['year'] == secondary_year or
+        asset['year'] == folder_year
+    ):
+        return True
+    else:
+        return False
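A hedged usage sketch for the new is_match() helper with minimal hypothetical dicts; 'title', 'normalized_title', and 'year' are the only keys the function indexes directly, everything else is read with .get() defaults:

    from util.utility import is_match

    # Hypothetical asset/media entries with only the required keys
    asset = {'title': 'The Matrix', 'normalized_title': 'thematrix', 'year': 1999}
    media = {'title': 'The Matrix', 'normalized_title': 'thematrix', 'year': 1999}
    print(is_match(asset, media))  # True -- titles and years line up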
From 1f4e8742dc71fc69bffa10408e88654ea67b4fe2 Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 09:56:34 -0800
Subject: [PATCH 04/37] Update: combine like functionalities into utility.py

---
 modules/poster_cleanarr.py  | 66 +++++++------------------------------
 modules/unmatched_assets.py | 56 ++++++++++---------------------
 2 files changed, 29 insertions(+), 93 deletions(-)

diff --git a/modules/poster_cleanarr.py b/modules/poster_cleanarr.py
index b6736e4..5ae6d3f 100755
--- a/modules/poster_cleanarr.py
+++ b/modules/poster_cleanarr.py
@@ -43,15 +43,6 @@
 log_level = config.log_level
 dry_run = config.dry_run
 
-logging.getLogger("requests").setLevel(logging.WARNING)
-logging.getLogger('urllib3').setLevel(logging.WARNING)
-
-year_regex = re.compile(r"(.*)\s\((\d{4})\)")
-
-season_name_info = [
-    "_Season",
-]
-
 def match_assets(assets_dict, media_dict, ignore_collections):
     """
     Match assets to media.
 
     Args:
         assets_dict (dict): Dictionary of assets.
         media_dict (dict): Dictionary of media.
 
     Returns:
         dict: Dictionary of unmatched assets.
     """
-    # Define media types to be matched
-    media_types = ['movies', 'series', 'collections']
-
-    # Initialize a dictionary to store unmatched assets for each media type
-    unmatched_assets = {media_type: [] for media_type in media_types}
-
-    # Iterate through each media type
-    for media_type in media_types:
+
+    # Initialize dictionary to store unmatched assets by media types
+    unmatched_assets = {}
+    # Loop through different media types
+    for media_type in ['movies', 'series', 'collections']:
+        unmatched_assets[media_type] = {}
         # Check if the media type exists in both assets and media dictionaries
         if media_type in media_dict and media_type in assets_dict:
             # Iterate through each asset in the asset dictionary of the given media type
             for asset_data in tqdm(assets_dict[media_type], desc=f"Matching {media_type}", unit="assets", total=len(assets_dict[media_type]), disable=None, leave=True):
                 # Initialize a flag to track if an asset is matched with media
                 matched = False
+
                 # Skip collections if in ignore_collections
                 if ignore_collections:
                     if media_type == 'collections' and asset_data['title'] in ignore_collections:
                         continue
+
                 # Iterate through each media data of the same media type
                 for media_data in media_dict[media_type]:
                     # Check if the normalized title and year match between the asset and media
-                    no_prefix = asset_data.get('no_prefix', None)
-                    no_suffix = asset_data.get('no_suffix', None)
-                    no_prefix_normalized = asset_data.get('no_prefix_normalized', None)
-                    no_suffix_normalized = asset_data.get('no_suffix_normalized', None)
-                    alternate_titles = media_data.get('alternate_titles', [])
-                    normalized_alternate_titles = media_data.get('normalized_alternate_titles', [])
-                    secondary_year = media_data.get('secondary_year', None)
-                    original_title = media_data.get('original_title', None)
                     asset_seasons_numbers = asset_data.get('season_numbers', None)
-                    folder = media_data.get('folder', None)
+
                     # Get title and year from folder base_name
-                    if folder:
-                        folder_base_name = os.path.basename(folder)
-                        match = re.search(year_regex, folder_base_name)
-                        if match:
-                            folder_title, folder_year = match.groups()
-                            folder_year = int(folder_year)
-                            normalized_folder_title = normalize_titles(folder_title)
                     if media_type == 'series':
                         media_seasons_numbers = [season['season_number'] for season in media_data.get('seasons', [])]
-                    # Skip the iteration if the asset is already matched
-                    if matched:
-                        continue
-                    # Matching criteria for media and asset
-                    if (
-                        asset_data['title'] == media_data['title'] or
-                        asset_data['normalized_title'] == media_data['normalized_title'] or
-                        asset_data['title'] in alternate_titles or
-                        asset_data['normalized_title'] in normalized_alternate_titles or
-                        asset_data['title'] == original_title or
-                        folder_title == asset_data['title'] or
-                        normalized_folder_title == asset_data['normalized_title'] or
-                        (no_prefix and media_data['title'] in no_prefix) or
-                        (no_suffix and media_data['title'] in no_suffix) or
-                        (no_prefix_normalized and media_data['normalized_title'] in no_prefix_normalized) or
-                        (no_suffix_normalized and media_data['normalized_title'] in no_suffix_normalized)
-                    ) and (
-                        asset_data['year'] == media_data['year'] or
-                        asset_data['year'] == secondary_year or
-                        folder_year == asset_data['year']
-                    ):
+
+                    # Check if the asset is a match
+                    if is_match(asset_data, media_data):
                         matched = True
                         # For series, check for missing seasons in the media
                         if media_type == 'series':
diff --git a/modules/unmatched_assets.py b/modules/unmatched_assets.py
index f739c8e..d64477e 100755
--- a/modules/unmatched_assets.py
+++ b/modules/unmatched_assets.py
@@ -38,8 +38,6 @@
 log_level = config.log_level
 logger = setup_logger(log_level, script_name)
 
-year_regex = re.compile(r"(.*)\s\((\d{4})\)")
-
 def match_assets(assets_dict, media_dict, ignore_root_folders):
     """
     Matches assets to media and returns a dictionary of unmatched assets.
@@ -62,62 +60,42 @@ def match_assets(assets_dict, media_dict, ignore_root_folders):
         if media_type in media_dict and media_type in assets_dict:
             # Iterate through each media data in the media dictionary of the current type
             for media_data in tqdm(media_dict[media_type], desc=f"Matching {media_type}", unit="media", total=len(media_dict[media_type]), leave=True, disable=None):
+                # Initialize variable to store whether a match was found
+                matched = False
+
                 # Check if the media is released, ended, or continuing or not monitored
                 if media_type in ['series', 'movies'] and not media_data['status'] in ['released', 'ended', 'continuing'] or media_type in ['series', 'movies'] and not media_data['monitored']:
                     continue
+
+                # Get location of media
                 if media_type == "collections":
                     location = media_data['location']
                 else:
                     location = media_data['root_folder']
+
+                # Get root folder name
                 root_folder = os.path.basename(location.rstrip('/')).lower()
+
+                # Check if the root folder should be ignored
                 if ignore_root_folders:
                     if root_folder in ignore_root_folders or location in ignore_root_folders:
                         continue
-                # Initialize variable to store whether a match was found
-                matched = False
+
+                # Check if location is in unmatched_assets
                 if location not in unmatched_assets[media_type]:
                     unmatched_assets[media_type][location] = []
+
                 # Get season numbers for series
                 if media_type == 'series':
                     media_seasons_numbers = [season['season_number'] for season in media_data.get('seasons', []) if season['season_has_episodes']]
+
+                # Compare media data with each asset data for the same media type
                 for asset_data in assets_dict[media_type]:
-                    no_prefix = asset_data.get('no_prefix', None)
-                    no_suffix = asset_data.get('no_suffix', None)
-                    no_prefix_normalized = asset_data.get('no_prefix_normalized', None)
-                    no_suffix_normalized = asset_data.get('no_suffix_normalized', None)
-                    alternate_titles = media_data.get('alternate_titles', [])
-                    normalized_alternate_titles = media_data.get('normalized_alternate_titles', [])
-                    secondary_year = media_data.get('secondary_year', None)
-                    original_title = media_data.get('original_title', None)
                     asset_seasons_numbers = asset_data.get('season_numbers', None)
-                    folder = media_data.get('folder', None)
-                    # Get title and year from folder base_name
-                    if folder:
-                        folder_base_name = os.path.basename(folder)
-                        match = re.search(year_regex, folder_base_name)
-                        if match:
-                            folder_title, folder_year = match.groups()
-                            folder_year = int(folder_year)
-                            normalized_folder_title = normalize_titles(folder_title)
-                    # If normalized title and year match between asset and media, check for missing seasons
-                    if (
-                        asset_data['title'] == media_data['title'] or
-                        asset_data['normalized_title'] == media_data['normalized_title'] or
-                        asset_data['title'] in alternate_titles or
-                        asset_data['normalized_title'] in normalized_alternate_titles or
-                        asset_data['title'] == original_title or
-                        folder_title == asset_data['title'] or
-                        normalized_folder_title == asset_data['normalized_title'] or
-                        (no_prefix and media_data['title'] in no_prefix) or
-                        (no_suffix and media_data['title'] in no_suffix) or
-                        (no_prefix_normalized and media_data['normalized_title'] in no_prefix_normalized) or
-                        (no_suffix_normalized and media_data['normalized_title'] in no_suffix_normalized)
-                    ) and (
-                        asset_data['year'] == media_data['year'] or
-                        asset_data['year'] == secondary_year or
-                        folder_year == asset_data['year']
-                    ):
+
+                    # Check if the asset matches the media
+                    if is_match(asset_data, media_data):
                         matched = True
                         if media_type == 'series':
                             if asset_seasons_numbers and media_seasons_numbers:
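The season comparison that follows the is_match() call is mostly unchanged context in these hunks; a plausible sketch of the missing-season check both modules perform (hypothetical data, not the repo's verbatim code):

    media_seasons_numbers = [1, 2, 3]   # seasons the media server knows about
    asset_seasons_numbers = [1, 3]      # seasons the asset files cover
    missing_seasons = [s for s in media_seasons_numbers if s not in asset_seasons_numbers]
    print(missing_seasons)  # [2]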
re.compile(r"(.*)\s\((\d{4})\)") - -season_name_info = [ - "_Season", -] - def match_assets(assets_dict, media_dict, ignore_collections): """ Match assets to media. @@ -63,68 +54,35 @@ def match_assets(assets_dict, media_dict, ignore_collections): Returns: dict: Dictionary of unmatched assets. """ - # Define media types to be matched - media_types = ['movies', 'series', 'collections'] - - # Initialize a dictionary to store unmatched assets for each media type - unmatched_assets = {media_type: [] for media_type in media_types} - - # Iterate through each media type - for media_type in media_types: + + # Initialize dictionary to store unmatched assets by media types + unmatched_assets = {} + # Loop through different media types + for media_type in ['movies', 'series', 'collections']: + unmatched_assets[media_type] = {} # Check if the media type exists in both assets and media dictionaries if media_type in media_dict and media_type in assets_dict: # Iterate through each asset in the asset dictionary of the given media type for asset_data in tqdm(assets_dict[media_type], desc=f"Matching {media_type}", unit="assets", total=len(assets_dict[media_type]), disable=None, leave=True): # Initialize a flag to track if an asset is matched with media matched = False + # Skip collections if in ignore_collections if ignore_collections: if media_type == 'collections' and asset_data['title'] in ignore_collections: continue + # Iterate through each media data of the same media type for media_data in media_dict[media_type]: # Check if the normalized title and year match between the asset and media - no_prefix = asset_data.get('no_prefix', None) - no_suffix = asset_data.get('no_suffix', None) - no_prefix_normalized = asset_data.get('no_prefix_normalized', None) - no_suffix_normalized = asset_data.get('no_suffix_normalized', None) - alternate_titles = media_data.get('alternate_titles', []) - normalized_alternate_titles = media_data.get('normalized_alternate_titles', []) - secondary_year = media_data.get('secondary_year', None) - original_title = media_data.get('original_title', None) asset_seasons_numbers = asset_data.get('season_numbers', None) - folder = media_data.get('folder', None) + # Get title and year from folder base_name - if folder: - folder_base_name = os.path.basename(folder) - match = re.search(year_regex, folder_base_name) - if match: - folder_title, folder_year = match.groups() - folder_year = int(folder_year) - normalized_folder_title = normalize_titles(folder_title) if media_type == 'series': media_seasons_numbers = [season['season_number'] for season in media_data.get('seasons', [])] - # Skip the iteration if the asset is already matched - if matched: - continue - # Matching criteria for media and asset - if ( - asset_data['title'] == media_data['title'] or - asset_data['normalized_title'] == media_data['normalized_title'] or - asset_data['title'] in alternate_titles or - asset_data['normalized_title'] in normalized_alternate_titles or - asset_data['title'] == original_title or - folder_title == asset_data['title'] or - normalized_folder_title == asset_data['normalized_title'] or - (no_prefix and media_data['title'] in no_prefix) or - (no_suffix and media_data['title'] in no_suffix) or - (no_prefix_normalized and media_data['normalized_title'] in no_prefix_normalized) or - (no_suffix_normalized and media_data['normalized_title'] in no_suffix_normalized) - ) and ( - asset_data['year'] == media_data['year'] or - asset_data['year'] == secondary_year or - folder_year == asset_data['year'] - ): + + 
From e4e11dc271f0c924b7b202fd53c1b7b4d14b5290 Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 11:00:34 -0800
Subject: [PATCH 06/37] update workflows

---
 .github/workflows/patch.yml | 32 ++++++++------------------------
 1 file changed, 8 insertions(+), 24 deletions(-)

diff --git a/.github/workflows/patch.yml b/.github/workflows/patch.yml
index cd9c984..195b950 100644
--- a/.github/workflows/patch.yml
+++ b/.github/workflows/patch.yml
@@ -3,43 +3,27 @@ name: Docker Latest Release
 on:
   push:
     branches:
-      - '**patch-*' # Trigger on patch branches
+      - '**.patch-*' # Trigger on patch branches
   delete:
     branches:
-      - '**patch-*' # Delete only patch branches
+      - '**.patch-*' # Trigger on patch branch deletion
 
 jobs:
   docker-latest:
     runs-on: ubuntu-latest
-
+
     steps:
       - name: Checkout the repo
         uses: actions/checkout@v4
-
-      - name: Login to GitHub Container Registry
-        run: echo "${{ secrets.GH_TOKEN }}" | docker login ghcr.io -u ${{ secrets.GH_USERNAME }} --password-stdin
-
-      - name: Build and push to GitHub Container Registry
+
+      - name: Build and push Docker image (patch-* branch)
+        if: startsWith(github.ref, 'refs/heads/patch-') && github.event_name == 'push'
         run: |
           docker build --build-arg BRANCH=$(git rev-parse --abbrev-ref HEAD) -t ghcr.io/${{ secrets.GH_USERNAME }}/userscripts:${{ github.ref }} .
           docker push ghcr.io/${{ secrets.GH_USERNAME }}/userscripts:${{ github.ref }}
 
-      - name: Login to Docker Hub
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_TOKEN }}
-
-      - name: Build and push to Docker Hub
-        uses: docker/build-push-action@v5
-        with:
-          context: .
-          file: ./Dockerfile
-          push: true
-          tags: ${{ secrets.DOCKER_USERNAME }}/userscripts:${{ github.ref }}
-
-      - name: Remove container from remote repository if branch deleted
-        if: startsWith(github.ref, 'refs/heads/') && github.event_name == 'delete' && startsWith(github.ref, 'refs/heads/patch-')
+      - name: Remove container from remote repositories (branch deletion)
+        if: startsWith(github.ref, 'refs/heads/patch-') && github.event_name == 'delete'
         run: |
           docker rmi ghcr.io/${{ secrets.GH_USERNAME }}/userscripts:${{ github.ref }} || true
           docker rmi ${{ secrets.DOCKER_USERNAME }}/userscripts:${{ github.ref }} || true
From f17231e701e8d44ea60f7685a7848c310c249a37 Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 11:06:04 -0800
Subject: [PATCH 07/37] Update patch.yml

---
 .github/workflows/patch.yml | 57 +++++++++++++++++++++++++++----------
 1 file changed, 42 insertions(+), 15 deletions(-)

diff --git a/.github/workflows/patch.yml b/.github/workflows/patch.yml
index 195b950..9d3a93e 100644
--- a/.github/workflows/patch.yml
+++ b/.github/workflows/patch.yml
@@ -1,29 +1,56 @@
-name: Docker Latest Release
+name: Docker Develop Release
 
 on:
   push:
     branches: [ patch-* ]
   pull_request:
     branches: [ patch-* ]
   delete:
     branches:
       - patch-*
 
 jobs:
 
   docker-latest:
     runs-on: ubuntu-latest
 
     steps:
       - uses: actions/checkout@v4
       - name: checkout the repo
         run: |
           docker login --username ${{ secrets.GH_USERNAME }} --password ${{ secrets.GH_TOKEN }} ghcr.io
           docker build --build-arg BRANCH=$(git rev-parse --abbrev-ref HEAD) -t ghcr.io/drazzilb08/userscripts:dev .
           docker push ghcr.io/drazzilb08/userscripts:${{ github.ref_slug }}
 
       - name: Login to Docker Hub
         uses: docker/login-action@v3
         with:
           username: ${{ secrets.DOCKER_USERNAME }}
           password: ${{ secrets.DOCKER_TOKEN }}
 
       - name: Build and push
         id: docker_build
         uses: docker/build-push-action@v5
         with:
           context: .
           file: ./Dockerfile
           build-args: |
             "BRANCH=$(git rev-parse --abbrev-ref HEAD)"
           push: true
           tags: ${{ secrets.DOCKER_USERNAME }}/userscripts:${{ github.ref_slug }}
 
       - name: Delete image from GitHub Container Registry if branch was deleted
         if: github.event_name == 'delete'
         run: |
           docker logout ghcr.io
           docker login --username ${{ secrets.GH_USERNAME }} --password ${{ secrets.GH_TOKEN }} ghcr.io
           docker image rm ghcr.io/drazzilb08/userscripts:${{ github.ref_slug }}
 
       - name: Delete image from Docker Hub if branch was deleted
         if: github.event_name == 'delete'
         run: |
           docker logout
           docker login --username ${{ secrets.DOCKER_USERNAME }} --password ${{ secrets.DOCKER_TOKEN }}
           docker image rm ${{ secrets.DOCKER_USERNAME }}/userscripts:${{ github.ref_slug }}
From 16129bd6d52b3ba1ff3e9da5c6163374480320c2 Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 11:08:33 -0800
Subject: [PATCH 08/37] Update patch.yml

---
 .github/workflows/patch.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/patch.yml b/.github/workflows/patch.yml
index 9d3a93e..a941378 100644
--- a/.github/workflows/patch.yml
+++ b/.github/workflows/patch.yml
@@ -19,7 +19,7 @@ jobs:
       - name: checkout the repo
         run: |
           docker login --username ${{ secrets.GH_USERNAME }} --password ${{ secrets.GH_TOKEN }} ghcr.io
-          docker build --build-arg BRANCH=$(git rev-parse --abbrev-ref HEAD) -t ghcr.io/drazzilb08/userscripts:dev .
+          docker build --build-arg BRANCH=$(git rev-parse --abbrev-ref HEAD) -t ghcr.io/drazzilb08/userscripts:${{ github.ref_slug }} .
           docker push ghcr.io/drazzilb08/userscripts:${{ github.ref_slug }}

From 8a91de25f91e029875c3ad78712a3e86622d76be Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 11:25:51 -0800
Subject: [PATCH 09/37] Add new workflows

---
 .github/workflows/delete.yml | 22 ++++++++++++
 .github/workflows/patch.yml  | 69 +++++++++++++-----------------------
 2 files changed, 47 insertions(+), 44 deletions(-)
 create mode 100644 .github/workflows/delete.yml

diff --git a/.github/workflows/delete.yml b/.github/workflows/delete.yml
new file mode 100644
index 0000000..d24e5b9
--- /dev/null
+++ b/.github/workflows/delete.yml
@@ -0,0 +1,22 @@
+name: Delete Docker Images if Branch is Deleted
+
+on:
+  delete:
+    branches:
+      - patch-*
+
+jobs:
+  delete:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Delete Docker Image from GHCR
+        run: |
+          docker login --username ${{ secrets.GH_USERNAME }} --password ${{ secrets.GH_TOKEN }} ghcr.io
+          docker image rm ghcr.io/drazzilb08/userscripts:${{ github.ref_slug }}
+          docker logout ghcr.io
+
+      - name: Delete Docker Image from Docker
+        run: |
+          docker login --username ${{ secrets.DOCKER_USERNAME }} --password ${{ secrets.DOCKER_TOKEN }}
+          docker image rm ${{ secrets.DOCKER_USERNAME }}/userscripts:${{ github.ref_slug }}
+          docker logout
diff --git a/.github/workflows/patch.yml b/.github/workflows/patch.yml
index a941378..d75802a 100644
--- a/.github/workflows/patch.yml
+++ b/.github/workflows/patch.yml
@@ -5,52 +5,33 @@ on:
     branches: [ patch-* ]
   pull_request:
     branches: [ patch-* ]
-  delete:
-    branches:
-      - patch-*
 
 jobs:
-
-  docker-latest:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - name: checkout the repo
-        run: |
-          docker login --username ${{ secrets.GH_USERNAME }} --password ${{ secrets.GH_TOKEN }} ghcr.io
-          docker build --build-arg BRANCH=$(git rev-parse --abbrev-ref HEAD) -t ghcr.io/drazzilb08/userscripts:${{ github.ref_slug }} .
+  docker-latest:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v4
+      - name: checkout the repo
+        run: |
+          docker login --username ${{ secrets.GH_USERNAME }} --password ${{ secrets.GH_TOKEN }} ghcr.io
+          docker build --build-arg BRANCH=$(git rev-parse --abbrev-ref HEAD) -t ghcr.io/drazzilb08/userscripts:${{ github.ref_slug }} .
           docker push ghcr.io/drazzilb08/userscripts:${{ github.ref_slug }}
 
-      - name: Login to Docker Hub
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_TOKEN }}
+      - name: Login to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_TOKEN }}
 
-      - name: Build and push
-        id: docker_build
-        uses: docker/build-push-action@v5
-        with:
-          context: .
-          file: ./Dockerfile
-          build-args: |
-            "BRANCH=$(git rev-parse --abbrev-ref HEAD)"
-          push: true
-          tags: ${{ secrets.DOCKER_USERNAME }}/userscripts:${{ github.ref_slug }}
-
-      - name: Delete image from GitHub Container Registry if branch was deleted
-        if: github.event_name == 'delete'
-        run: |
-          docker logout ghcr.io
-          docker login --username ${{ secrets.GH_USERNAME }} --password ${{ secrets.GH_TOKEN }} ghcr.io
-          docker image rm ghcr.io/drazzilb08/userscripts:${{ github.ref_slug }}
-
-      - name: Delete image from Docker Hub if branch was deleted
-        if: github.event_name == 'delete'
-        run: |
-          docker logout
-          docker login --username ${{ secrets.DOCKER_USERNAME }} --password ${{ secrets.DOCKER_TOKEN }}
-          docker image rm ${{ secrets.DOCKER_USERNAME }}/userscripts:${{ github.ref_slug }}
-
-
+      - name: Build and push
+        id: docker_build
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          file: ./Dockerfile
+          build-args: |
+            "BRANCH=patch"
+          push: true
+          tags: ${{ secrets.DOCKER_USERNAME }}/userscripts:${{ github.ref_slug }}
\ No newline at end of file
From 0dfcb62e618e4eab56f38877156a55674369490e Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 11:25:58 -0800
Subject: [PATCH 10/37] update debugging

---
 modules/border_replacerr.py | 51 +++++++++++++++++++++----------------
 1 file changed, 29 insertions(+), 22 deletions(-)

diff --git a/modules/border_replacerr.py b/modules/border_replacerr.py
index 5d70f9b..b52d496 100755
--- a/modules/border_replacerr.py
+++ b/modules/border_replacerr.py
@@ -346,11 +346,9 @@
     Copies the files in the input directory to the output directory.
 
     Args:
-        source_dirs (str): The input directory.
+        assets_dict (dict): The dictionary of assets.
         destination_dir (str): The output directory.
-        asset_folders (bool): Whether to use asset folders.
         dry_run (bool): Whether to perform a dry run.
-
     Returns:
         None
     """
@@ -380,10 +378,8 @@
                 if not dry_run:
                     if not os.path.exists(output_path):
                         os.makedirs(output_path)
-                    else:
-                        output_path = destination_dir
                 else:
-                    logger.debug(f"Creating {output_path}")
+                    logger.debug(f"Would have created {output_path}")
             else:
                 output_path = destination_dir
 
@@ -399,10 +395,18 @@
             if not dry_run:
                 if os.path.isfile(final_path):
                     if not filecmp.cmp(final_path, input_file):
-                        shutil.copy(input_file, final_path)
+                        try:
+                            shutil.copy(input_file, final_path)
+                        except shutil.SameFileError:
+                            logger.debug(f"Input file {input_file} is the same as {final_path}, skipping")
+                        logger.debug(f"Input file {input_file} is different from {final_path}, copying to {output_basename}")
                         messages.append(f"Copied {data['title']}{year} - {file_name} to {output_basename}")
                 else:
-                    shutil.copy(input_file, final_path)
+                    try:
+                        shutil.copy(input_file, final_path)
+                    except shutil.SameFileError:
+                        logger.debug(f"Input file {input_file} is the same as {final_path}, skipping")
+                    logger.debug(f"Input file {input_file} does not exist in {output_path}, copying to {output_basename}")
                     messages.append(f"Copied {data['title']}{year} - {file_name} to {output_basename}")
             else:
                 messages.append(f"Would have copied {data['title']}{year} - {file_name} to {output_basename}")
@@ -423,14 +427,28 @@ def process_files(source_dirs, destination_dir, asset_folders, dry_run):
 
     # Obtain script configuration details
     script_config = config.script_config
-    schedule = script_config['schedule']
-    border_colors = script_config['border_colors']
-    skip = script_config['skip']
+    schedule = script_config.get('schedule', None)
+    border_colors = script_config.get('border_colors', None)
+    skip = script_config.get('skip', False)
 
     # Convert single string border color to a list if necessary
     border_colors = [border_colors] if isinstance(border_colors, str) else border_colors
     source_dirs = [source_dirs] if isinstance(source_dirs, str) else source_dirs
 
+    table = [
+        ["Script Settings"],
+    ]
+    logger.debug(create_table(table))
+    logger.debug(f'{"Dry_run:":<20}{config.dry_run}')
+    logger.debug(f'{"Log Level:":<20}{config.log_level}')
+    logger.debug(f'{"Input Dir:":<20}{source_dirs}')
+    logger.debug(f'{"Output Dir:":<20}{destination_dir}')
+    logger.debug(f'{"Asset Folders:":<20}{asset_folders}')
+    logger.debug(f'{"Border Colors:":<20}{border_colors}')
+    logger.debug(f'{"Skip:":<20}{skip}')
+    logger.debug(f'{"Schedule:":<20}{schedule}')
+    logger.debug(create_bar("-"))
+
     run_holiday = False
 
     # Check for a scheduled event to update border colors if provided
@@ -520,17 +538,6 @@ def main():
     if isinstance(border_colors, str):
         border_colors = [border_colors]
 
-    # Creating a table to log script settings in debug mode
-    table = [
-        ["Script Settings"],
-    ]
-    logger.debug(create_table(table))
-    logger.debug(f'{"Dry_run:":<20}{config.dry_run}')
-    logger.debug(f'{"Log Level:":<20}{config.log_level}')
-    logger.debug(f'{"Input Dir:":<20}{source_dirs}')
-    logger.debug(f'{"Output Dir:":<20}{destination_dir}')
-    logger.debug(f'{"Schedule:":<20}{schedule}')
-    logger.debug(create_bar("-"))
 
     # Process files in the input directory with specified settings
     process_files(source_dirs, destination_dir, asset_folders, dry_run)
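For reference, a minimal reproduction of the failure mode the new try/except guards against -- shutil.copy() raises shutil.SameFileError when source and destination resolve to the same file (the path is a hypothetical example):

    import shutil
    from pathlib import Path

    Path("/tmp/poster.jpg").touch()  # hypothetical file so the call is runnable
    try:
        shutil.copy("/tmp/poster.jpg", "/tmp/poster.jpg")
    except shutil.SameFileError:
        print("same file, skipping copy")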
From ccbdc526fa3cdf9d6f2afe7d0cf5b58d37243a7e Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 11:32:04 -0800
Subject: [PATCH 11/37] Update patch.yml

---
 .github/workflows/patch.yml | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/patch.yml b/.github/workflows/patch.yml
index d75802a..7247640 100644
--- a/.github/workflows/patch.yml
+++ b/.github/workflows/patch.yml
@@ -13,11 +13,16 @@ jobs:
 
     steps:
       - uses: actions/checkout@v4
+      - name: Extract branch name
+        shell: bash
+        run: echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT
+        id: extract_branch
+
       - name: checkout the repo
         run: |
           docker login --username ${{ secrets.GH_USERNAME }} --password ${{ secrets.GH_TOKEN }} ghcr.io
-          docker build --build-arg BRANCH=$(git rev-parse --abbrev-ref HEAD) -t ghcr.io/drazzilb08/userscripts:${{ github.ref_slug }} .
-          docker push ghcr.io/drazzilb08/userscripts:${{ github.ref_slug }}
+          docker build --build-arg BRANCH=$(git rev-parse --abbrev-ref HEAD) -t ghcr.io/drazzilb08/userscripts:${{ steps.extract_branch.outputs.branch }} .
+          docker push ghcr.io/drazzilb08/userscripts:
 
       - name: Login to Docker Hub
         uses: docker/login-action@v3
@@ -34,4 +39,4 @@ jobs:
         build-args: |
           "BRANCH=patch"
         push: true
-        tags: ${{ secrets.DOCKER_USERNAME }}/userscripts:${{ github.ref_slug }}
\ No newline at end of file
+        tags: ${{ secrets.DOCKER_USERNAME }}/userscripts:${{ steps.extract_branch.outputs.branch }}
\ No newline at end of file

From 8bf937910404f251113722ebdab10f3b1f5600ca Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 11:35:30 -0800
Subject: [PATCH 12/37] Update patch.yml

---
 .github/workflows/patch.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/patch.yml b/.github/workflows/patch.yml
index 7247640..0b53512 100644
--- a/.github/workflows/patch.yml
+++ b/.github/workflows/patch.yml
@@ -12,12 +12,12 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@v4
       - name: Extract branch name
         shell: bash
         run: echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT
         id: extract_branch
-
+
+      - uses: actions/checkout@v4
       - name: checkout the repo
         run: |
           docker login --username ${{ secrets.GH_USERNAME }} --password ${{ secrets.GH_TOKEN }} ghcr.io
From f046ddd9d3bbebedfe019f7b7321cd66b8022e4e Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 11:37:52 -0800
Subject: [PATCH 13/37] error fix

---
 .github/workflows/patch.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/patch.yml b/.github/workflows/patch.yml
index 0b53512..02bf623 100644
--- a/.github/workflows/patch.yml
+++ b/.github/workflows/patch.yml
@@ -16,13 +16,13 @@ jobs:
         shell: bash
         run: echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT
         id: extract_branch
-
+
       - uses: actions/checkout@v4
       - name: checkout the repo
         run: |
           docker login --username ${{ secrets.GH_USERNAME }} --password ${{ secrets.GH_TOKEN }} ghcr.io
           docker build --build-arg BRANCH=$(git rev-parse --abbrev-ref HEAD) -t ghcr.io/drazzilb08/userscripts:${{ steps.extract_branch.outputs.branch }} .
-          docker push ghcr.io/drazzilb08/userscripts:
+          docker push ghcr.io/drazzilb08/userscripts:${{ steps.extract_branch.outputs.branch }}
 
       - name: Login to Docker Hub
         uses: docker/login-action@v3
@@ -39,4 +39,4 @@ jobs:
         build-args: |
           "BRANCH=patch"
         push: true
-        tags: ${{ secrets.DOCKER_USERNAME }}/userscripts:${{ steps.extract_branch.outputs.branch }}
\ No newline at end of file
+        tags: ${{ secrets.DOCKER_USERNAME }}/userscripts:${{ steps.extract_branch.outputs.branch }}

From 00429bbff900cfcb87e5a27c4a7e00d3f3950d5a Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 11:45:09 -0800
Subject: [PATCH 14/37] Update delete.yml

---
 .github/workflows/delete.yml | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/delete.yml b/.github/workflows/delete.yml
index d24e5b9..9fb3b08 100644
--- a/.github/workflows/delete.yml
+++ b/.github/workflows/delete.yml
@@ -8,15 +8,21 @@ on:
 jobs:
   delete:
     runs-on: ubuntu-latest
+
     steps:
+      - name: Extract branch name
+        shell: bash
+        run: echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT
+        id: extract_branch
+
       - name: Delete Docker Image from GHCR
         run: |
           docker login --username ${{ secrets.GH_USERNAME }} --password ${{ secrets.GH_TOKEN }} ghcr.io
-          docker image rm ghcr.io/drazzilb08/userscripts:${{ github.ref_slug }}
+          docker image rm ghcr.io/drazzilb08/userscripts:${{ steps.extract_branch.outputs.branch }}
           docker logout ghcr.io
 
       - name: Delete Docker Image from Docker
         run: |
           docker login --username ${{ secrets.DOCKER_USERNAME }} --password ${{ secrets.DOCKER_TOKEN }}
-          docker image rm ${{ secrets.DOCKER_USERNAME }}/userscripts:${{ github.ref_slug }}
+          docker image rm ${{ secrets.DOCKER_USERNAME }}/userscripts:${{ steps.extract_branch.outputs.branch }}
           docker logout
From 091176e0922d66433189825315863be42369ba72 Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 12:01:09 -0800
Subject: [PATCH 15/37] add ignore files and folders to actions

---
 .github/workflows/delete.yml |  1 +
 .github/workflows/dev.yml    |  3 ++
 .github/workflows/latest.yml |  3 ++
 .github/workflows/patch.yml  | 69 +++++++++++++++++++-----------------
 4 files changed, 43 insertions(+), 33 deletions(-)

diff --git a/.github/workflows/delete.yml b/.github/workflows/delete.yml
index 9fb3b08..60530ef 100644
--- a/.github/workflows/delete.yml
+++ b/.github/workflows/delete.yml
@@ -4,6 +4,7 @@ on:
   delete:
     branches:
       - patch-*
+
 
 jobs:
   delete:
diff --git a/.github/workflows/dev.yml b/.github/workflows/dev.yml
index 43e3d62..fac93af 100644
--- a/.github/workflows/dev.yml
+++ b/.github/workflows/dev.yml
@@ -3,6 +3,9 @@ name: Docker Develop Release
 on:
   push:
     branches: [ dev ]
+    paths-ignore:
+      - '**/.*'
+      - '**/README.md'
   pull_request:
     branches: [ dev ]
diff --git a/.github/workflows/latest.yml b/.github/workflows/latest.yml
index cd12f84..b948f97 100644
--- a/.github/workflows/latest.yml
+++ b/.github/workflows/latest.yml
@@ -3,6 +3,9 @@ name: Docker Latest Release
 on:
   push:
     branches: [ master ]
+    paths-ignore:
+      - '**/.*'
+      - '**/README.md'
 
 jobs:
diff --git a/.github/workflows/patch.yml b/.github/workflows/patch.yml
index 02bf623..b81855d 100644
--- a/.github/workflows/patch.yml
+++ b/.github/workflows/patch.yml
@@ -2,41 +2,44 @@ name: Docker Develop Release
 
 on:
   push:
-    branches: [ patch-* ]
+    branches:
+      - patch-*
+    paths-ignore:
+      - '**/.*'
+      - '**/README.md'
   pull_request:
-    branches: [ patch-* ]
+    branches:
+      - patch-*
 
 jobs:
-
-  docker-latest:
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Extract branch name
-        shell: bash
-        run: echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT
-        id: extract_branch
-
-      - uses: actions/checkout@v4
-      - name: checkout the repo
-        run: |
-          docker login --username ${{ secrets.GH_USERNAME }} --password ${{ secrets.GH_TOKEN }} ghcr.io
-          docker build --build-arg BRANCH=$(git rev-parse --abbrev-ref HEAD) -t ghcr.io/drazzilb08/userscripts:${{ steps.extract_branch.outputs.branch }} .
-          docker push ghcr.io/drazzilb08/userscripts:${{ steps.extract_branch.outputs.branch }}
-
-      - name: Login to Docker Hub
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_TOKEN }}
-
-      - name: Build and push
-        id: docker_build
-        uses: docker/build-push-action@v5
-        with:
-          context: .
-          file: ./Dockerfile
-          build-args: |
-            "BRANCH=patch"
-          push: true
-          tags: ${{ secrets.DOCKER_USERNAME }}/userscripts:${{ steps.extract_branch.outputs.branch }}
+  docker-latest:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Extract branch name
+        shell: bash
+        run: echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT
+        id: extract_branch
+
+      - uses: actions/checkout@v4
+      - name: checkout the repo
+        run: |
+          docker login --username ${{ secrets.GH_USERNAME }} --password ${{ secrets.GH_TOKEN }} ghcr.io
+          docker build --build-arg BRANCH=$(git rev-parse --abbrev-ref HEAD) -t ghcr.io/drazzilb08/userscripts:${{ steps.extract_branch.outputs.branch }} .
+          docker push ghcr.io/drazzilb08/userscripts:${{ steps.extract_branch.outputs.branch }}
+
+      - name: Login to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_TOKEN }}
+
+      - name: Build and push
+        id: docker_build
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          file: ./Dockerfile
+          build-args: |
+            "BRANCH=patch"
+          push: true
+          tags: ${{ secrets.DOCKER_USERNAME }}/userscripts:${{ steps.extract_branch.outputs.branch }}
From c69300d5adc54ddf0707a9844c2166a418860bf6 Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 12:22:44 -0800
Subject: [PATCH 16/37] Update upgradinatorr.py

---
 modules/upgradinatorr.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/modules/upgradinatorr.py b/modules/upgradinatorr.py
index 824563d..26f1383 100755
--- a/modules/upgradinatorr.py
+++ b/modules/upgradinatorr.py
@@ -168,6 +168,7 @@ def process_instance(instance_type, instance_settings, app):
         ready = app.wait_for_command(search_response['id'])
         if ready:
             queue = app.get_queue(instance_type)
+            logger.debug(f"queue:\n{json.dumps(queue, indent=4)}")
             queue_dict = process_queue(queue, instance_type, media_ids)
             logger.debug(f"queue_dict:\n{json.dumps(queue_dict, indent=4)}")
             for item in filtered_media_dict:

From 532a0978671362aa90ce5471148cb7be564d0048 Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 12:49:43 -0800
Subject: [PATCH 17/37] set proper env variable

---
 .github/workflows/patch.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/patch.yml b/.github/workflows/patch.yml
index b81855d..3f0aa3c 100644
--- a/.github/workflows/patch.yml
+++ b/.github/workflows/patch.yml
@@ -40,6 +40,6 @@ jobs:
           context: .
           file: ./Dockerfile
           build-args: |
-            "BRANCH=patch"
+            "BRANCH=${{ steps.extract_branch.outputs.branch }}"
           push: true
           tags: ${{ secrets.DOCKER_USERNAME }}/userscripts:${{ steps.extract_branch.outputs.branch }}
From 70347460a33772c9cc6d5d53ba0cfe1fe7abec75 Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 12:53:20 -0800
Subject: [PATCH 18/37] Updated name

---
 .github/workflows/patch.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/patch.yml b/.github/workflows/patch.yml
index 3f0aa3c..9409e6a 100644
--- a/.github/workflows/patch.yml
+++ b/.github/workflows/patch.yml
@@ -1,4 +1,4 @@
-name: Docker Develop Release
+name: Docker Patch Release
 
 on:
   push:

From 19860a5918d39fb0b028b9df9765c1d35b836674 Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 12:59:20 -0800
Subject: [PATCH 19/37] silence output if chown command fails - added env
 variable START_DEBUG that can be set to true if this output needs to be seen

---
 start.sh | 21 +++++++++++++--------
 1 file changed, 13 insertions(+), 8 deletions(-)

diff --git a/start.sh b/start.sh
index b9442c3..6809c25 100755
--- a/start.sh
+++ b/start.sh
@@ -3,6 +3,7 @@
 PUID=${PUID:-100}
 PGID=${PGID:-99}
 UMASK=${UMASK:-002}
+START_DEBUG=${START_DEBUG:-false}
 
 export RCLONE_CONFIG="${CONFIG_DIR}/rclone/rclone.conf"
 
@@ -63,14 +64,18 @@ fi
 echo "Starting userScripts as $(whoami) running userscripts with UID: $PUID and GID: $PGID"
 
 # Set permissions
-if ! chown -R ${PUID}:${PGID} /${CONFIG_DIR} /data /app; then
-    echo "Failed to change ownership."
-    echo "DEBUG: ${PUID}:${PGID} /${CONFIG_DIR}"
-    ls -la /${CONFIG_DIR}
-    echo "DEBUG: ${PUID}:${PGID} /data"
-    ls -la /data
-    echo "DEBUG: ${PUID}:${PGID} /app"
-    ls -la /app
+if [ "$START_DEBUG" = "true" ]; then
+    if ! chown -R ${PUID}:${PGID} /${CONFIG_DIR} /data /app; then
+        echo "Failed to change ownership."
+        echo "DEBUG: ${PUID}:${PGID} /${CONFIG_DIR}"
+        ls -la /${CONFIG_DIR}
+        echo "DEBUG: ${PUID}:${PGID} /data"
+        ls -la /data
+        echo "DEBUG: ${PUID}:${PGID} /app"
+        ls -la /app
+    fi
+else
+    chown -R ${PUID}:${PGID} /${CONFIG_DIR} /data /app > /dev/null 2>&1
 fi
 
 # Run main.py as the dockeruser
From 371c141363ef6105bddbf4fd2a665e3de42f9a31 Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 13:04:48 -0800
Subject: [PATCH 20/37] remove .file ignores

---
 .github/workflows/dev.yml    | 1 -
 .github/workflows/latest.yml | 1 -
 .github/workflows/patch.yml  | 1 -
 3 files changed, 3 deletions(-)

diff --git a/.github/workflows/dev.yml b/.github/workflows/dev.yml
index fac93af..48dc1e4 100644
--- a/.github/workflows/dev.yml
+++ b/.github/workflows/dev.yml
@@ -4,7 +4,6 @@ on:
   push:
     branches: [ dev ]
     paths-ignore:
-      - '**/.*'
      - '**/README.md'
   pull_request:
     branches: [ dev ]
diff --git a/.github/workflows/latest.yml b/.github/workflows/latest.yml
index b948f97..9b71437 100644
--- a/.github/workflows/latest.yml
+++ b/.github/workflows/latest.yml
@@ -4,7 +4,6 @@ on:
   push:
     branches: [ master ]
     paths-ignore:
-      - '**/.*'
       - '**/README.md'
 
 jobs:
diff --git a/.github/workflows/patch.yml b/.github/workflows/patch.yml
index 9409e6a..ed661fa 100644
--- a/.github/workflows/patch.yml
+++ b/.github/workflows/patch.yml
@@ -5,7 +5,6 @@ on:
     branches:
       - patch-*
     paths-ignore:
-      - '**/.*'
       - '**/README.md'
   pull_request:
     branches:

From 0f713d73b5db280215e7ebd0ae7735ce4610ed75 Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 13:15:35 -0800
Subject: [PATCH 21/37] Update workflows

---
 .github/workflows/delete.yml  | 29 -----------------------------
 .github/workflows/dev.yml     |  4 +++-
 .github/workflows/latest.yml  |  4 +++-
 .github/workflows/patch.yml   |  4 +++-
 .github/workflows/version.yml |  3 ++-
 5 files changed, 11 insertions(+), 33 deletions(-)
 delete mode 100644 .github/workflows/delete.yml

diff --git a/.github/workflows/delete.yml b/.github/workflows/delete.yml
deleted file mode 100644
index 60530ef..0000000
--- a/.github/workflows/delete.yml
+++ /dev/null
@@ -1,29 +0,0 @@
-name: Delete Docker Images if Branch is Deleted
-
-on:
-  delete:
-    branches:
-      - patch-*
-
-
-jobs:
-  delete:
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Extract branch name
-        shell: bash
-        run: echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT
-        id: extract_branch
-
-      - name: Delete Docker Image from GHCR
-        run: |
-          docker login --username ${{ secrets.GH_USERNAME }} --password ${{ secrets.GH_TOKEN }} ghcr.io
-          docker image rm ghcr.io/drazzilb08/userscripts:${{ steps.extract_branch.outputs.branch }}
-          docker logout ghcr.io
-
-      - name: Delete Docker Image from Docker
-        run: |
-          docker login --username ${{ secrets.DOCKER_USERNAME }} --password ${{ secrets.DOCKER_TOKEN }}
-          docker image rm ${{ secrets.DOCKER_USERNAME }}/userscripts:${{ steps.extract_branch.outputs.branch }}
-          docker logout
diff --git a/.github/workflows/dev.yml b/.github/workflows/dev.yml
index 48dc1e4..20c0c56 100644
--- a/.github/workflows/dev.yml
+++ b/.github/workflows/dev.yml
@@ -5,12 +5,14 @@ on:
     branches: [ dev ]
     paths-ignore:
       - '**/README.md'
+      # Ignore updates to .github
+      - '**/.github/**'
   pull_request:
     branches: [ dev ]
 
 jobs:
 
-  docker-latest:
+  docker-dev:
     runs-on: ubuntu-latest
 
     steps:
diff --git a/.github/workflows/latest.yml b/.github/workflows/latest.yml
index 9b71437..d2ebad0 100644
--- a/.github/workflows/latest.yml
+++ b/.github/workflows/latest.yml
@@ -4,7 +4,9 @@ on:
   push:
     branches: [ master ]
     paths-ignore:
-      - '**/README.md'
+      - '**/README.md'
+      # Ignore updates to .github
+      - '**/.github/**'
 
 jobs:
diff --git a/.github/workflows/patch.yml b/.github/workflows/patch.yml
index ed661fa..34b5f05 100644
--- a/.github/workflows/patch.yml
+++ b/.github/workflows/patch.yml
@@ -6,12 +6,14 @@ on:
       - patch-*
     paths-ignore:
       - '**/README.md'
+      # Ignore updates to .github
+      - '**/.github/**'
   pull_request:
     branches:
       - patch-*
 
 jobs:
-  docker-latest:
+  docker-patch:
     runs-on: ubuntu-latest
     steps:
       - name: Extract branch name
diff --git a/.github/workflows/version.yml b/.github/workflows/version.yml
index d576099..c9a8599 100644
--- a/.github/workflows/version.yml
+++ b/.github/workflows/version.yml
@@ -5,9 +5,10 @@ on:
     tags:
       - v*
 
+
 jobs:
 
-  docker-latest:
+  docker-version:
     runs-on: ubuntu-latest
 
     steps:
From 178149008e4b240897ae8bcff78a8676335b4ef2 Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 16:27:06 -0800
Subject: [PATCH 22/37] Add: a pause for queue items to populate

---
 modules/upgradinatorr.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/modules/upgradinatorr.py b/modules/upgradinatorr.py
index 26f1383..d61ca83 100755
--- a/modules/upgradinatorr.py
+++ b/modules/upgradinatorr.py
@@ -16,6 +16,7 @@
 
 import json
 import sys
+import time
 
 from util.config import Config
 from util.logger import setup_logger
@@ -69,6 +70,7 @@ def process_queue(queue, instance_type, media_ids):
     Returns:
         queue_dict (list): A list of dictionaries containing the download_id, media_id, and torrent.
     """
+
     if instance_type == "radarr":
         id_type = "movieId"
     elif instance_type == "sonarr":
@@ -165,8 +167,12 @@ def process_instance(instance_type, instance_settings, app):
         media_ids = [item['media_id'] for item in filtered_media_dict]
         search_response = app.search_media(media_ids)
         app.add_tags(media_ids, tag_id)
+        print(f"Waiting for searches to complete...")
         ready = app.wait_for_command(search_response['id'])
         if ready:
+            sleep_time = 5 # Set the sleep time to 5 seconds
+            print(f"Waiting for {sleep_time} seconds to allow for search results to populate in the queue...")
+            time.sleep(5)
             queue = app.get_queue(instance_type)
             logger.debug(f"queue:\n{json.dumps(queue, indent=4)}")
             queue_dict = process_queue(queue, instance_type, media_ids)
+ echo "DEBUG: ${PUID}:${PGID} /${CONFIG_DIR}" + ls -la /${CONFIG_DIR} + echo "DEBUG: ${PUID}:${PGID} /data" + ls -la /data + echo "DEBUG: ${PUID}:${PGID} /app" + ls -la /app + fi +else + chown -R ${PUID}:${PGID} /${CONFIG_DIR} /data /app > /dev/null 2>&1 fi # Run main.py as the dockeruser From 371c141363ef6105bddbf4fd2a665e3de42f9a31 Mon Sep 17 00:00:00 2001 From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com> Date: Thu, 15 Feb 2024 13:04:48 -0800 Subject: [PATCH 20/37] remove .file ignores --- .github/workflows/dev.yml | 1 - .github/workflows/latest.yml | 1 - .github/workflows/patch.yml | 1 - 3 files changed, 3 deletions(-) diff --git a/.github/workflows/dev.yml b/.github/workflows/dev.yml index fac93af..48dc1e4 100644 --- a/.github/workflows/dev.yml +++ b/.github/workflows/dev.yml @@ -4,7 +4,6 @@ on: push: branches: [ dev ] paths-ignore: - - '**/.*' - '**/README.md' pull_request: branches: [ dev ] diff --git a/.github/workflows/latest.yml b/.github/workflows/latest.yml index b948f97..9b71437 100644 --- a/.github/workflows/latest.yml +++ b/.github/workflows/latest.yml @@ -4,7 +4,6 @@ on: push: branches: [ master ] paths-ignore: - - '**/.*' - '**/README.md' jobs: diff --git a/.github/workflows/patch.yml b/.github/workflows/patch.yml index 9409e6a..ed661fa 100644 --- a/.github/workflows/patch.yml +++ b/.github/workflows/patch.yml @@ -5,7 +5,6 @@ on: branches: - patch-* paths-ignore: - - '**/.*' - '**/README.md' pull_request: branches: From 0f713d73b5db280215e7ebd0ae7735ce4610ed75 Mon Sep 17 00:00:00 2001 From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com> Date: Thu, 15 Feb 2024 13:15:35 -0800 Subject: [PATCH 21/37] Update worlkflows --- .github/workflows/delete.yml | 29 ----------------------------- .github/workflows/dev.yml | 4 +++- .github/workflows/latest.yml | 4 +++- .github/workflows/patch.yml | 4 +++- .github/workflows/version.yml | 3 ++- 5 files changed, 11 insertions(+), 33 deletions(-) delete mode 100644 .github/workflows/delete.yml diff --git a/.github/workflows/delete.yml b/.github/workflows/delete.yml deleted file mode 100644 index 60530ef..0000000 --- a/.github/workflows/delete.yml +++ /dev/null @@ -1,29 +0,0 @@ -name: Delete Docker Images if Branch is Deleted - -on: - delete: - branches: - - patch-* - - -jobs: - delete: - runs-on: ubuntu-latest - - steps: - - name: Extract branch name - shell: bash - run: echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT - id: extract_branch - - - name: Delete Docker Image from GHCR - run: | - docker login --username ${{ secrets.GH_USERNAME }} --password ${{ secrets.GH_TOKEN }} ghcr.io - docker image rm ghcr.io/drazzilb08/userscripts:${{ steps.extract_branch.outputs.branch }} - docker logout ghcr.io - - - name: Delete Docker Image from Docker - run: | - docker login --username ${{ secrets.DOCKER_USERNAME }} --password ${{ secrets.DOCKER_TOKEN }} - docker image rm ${{ secrets.DOCKER_USERNAME }}/userscripts:${{ steps.extract_branch.outputs.branch }} - docker logout diff --git a/.github/workflows/dev.yml b/.github/workflows/dev.yml index 48dc1e4..20c0c56 100644 --- a/.github/workflows/dev.yml +++ b/.github/workflows/dev.yml @@ -5,12 +5,14 @@ on: branches: [ dev ] paths-ignore: - '**/README.md' + # Ignore updates to .github + - '**/.github/**' pull_request: branches: [ dev ] jobs: - docker-latest: + docker-dev: runs-on: ubuntu-latest steps: diff --git a/.github/workflows/latest.yml b/.github/workflows/latest.yml index 9b71437..d2ebad0 100644 --- a/.github/workflows/latest.yml +++ 
From 485eebe9261bc59637596c8b666a712d963e828f Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 17:03:15 -0800
Subject: [PATCH 25/37] fix: key error

---
 modules/queinatorr.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/modules/queinatorr.py b/modules/queinatorr.py
index f8f193b..a16c008 100755
--- a/modules/queinatorr.py
+++ b/modules/queinatorr.py
@@ -180,7 +180,7 @@ def handle_queue(queue_dict, app):
                 else:
                     messages_dict[id]['messages'][message] = 1
             if error:
-                if error in messages_dict[torrent]['messages']:
+                if error in messages_dict[id]['messages']:
                     messages_dict[id]['messages'][error] += 1
                 else:
                     messages_dict[id]['messages'][error] = 1
@@ -338,6 +338,8 @@ def process_instance(instance_type, url, api, pre_import_category, post_import_c
 
     # Retrieve the queue from Radarr or Sonarr instance
     queue = app.get_queue(instance_type)
+    logger.debug(f"Queue '{instance_type}'\n{json.dumps(queue, indent=4)}\n")
+
     queue_dict = queued_items(queue, instance_type)
     logger.debug(f"Queue items for '{instance_type}'\n{json.dumps(queue_dict, indent=4)}\n")
@@ -360,8 +362,6 @@
     if messages_dict:
         output_dict['queue'] = messages_dict
 
-    logger.debug(f"Queue items for '{instance_type}'\n{json.dumps(queue_dict, indent=4)}\n")
-
     # Handle moving torrents from the queue to the specified categories in qBittorrent
     messages_dict = handle_qbit(queue_dict, qb, post_import_category, pre_import_category, days_to_keep)
     if messages_dict:
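The KeyError fixed above came from indexing messages_dict by torrent instead of id. As a design note, collections.Counter removes this kind of check-then-increment bookkeeping entirely (a sketch with hypothetical messages, not the repo's code):

    from collections import Counter

    messages = Counter()
    for message in ["stalled", "stalled", "missing files"]:
        messages[message] += 1
    print(messages)  # Counter({'stalled': 2, 'missing files': 1})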
""" + print(f"Waiting for searches to complete...") while True: endpoint = f"{self.url}/api/v3/command/{command_id}" response = self.make_get_request(endpoint) From 4d709783db3b555cbbc4dc2c1ecf3c477f6d2116 Mon Sep 17 00:00:00 2001 From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com> Date: Thu, 15 Feb 2024 16:29:44 -0800 Subject: [PATCH 24/37] Updated wait to 10 seconds --- modules/upgradinatorr.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/modules/upgradinatorr.py b/modules/upgradinatorr.py index d61ca83..c92e0b8 100755 --- a/modules/upgradinatorr.py +++ b/modules/upgradinatorr.py @@ -167,10 +167,9 @@ def process_instance(instance_type, instance_settings, app): media_ids = [item['media_id'] for item in filtered_media_dict] search_response = app.search_media(media_ids) app.add_tags(media_ids, tag_id) - print(f"Waiting for searches to complete...") ready = app.wait_for_command(search_response['id']) if ready: - sleep_time = 5 # Set the sleep time to 5 seconds + sleep_time = 10 # Set the sleep time to 5 seconds print(f"Waiting for {sleep_time} seconds to allow for search results to populate in the queue...") time.sleep(5) queue = app.get_queue(instance_type) From 485eebe9261bc59637596c8b666a712d963e828f Mon Sep 17 00:00:00 2001 From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com> Date: Thu, 15 Feb 2024 17:03:15 -0800 Subject: [PATCH 25/37] fix: key error --- modules/queinatorr.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/queinatorr.py b/modules/queinatorr.py index f8f193b..a16c008 100755 --- a/modules/queinatorr.py +++ b/modules/queinatorr.py @@ -180,7 +180,7 @@ def handle_queue(queue_dict, app): else: messages_dict[id]['messages'][message] = 1 if error: - if error in messages_dict[torrent]['messages']: + if error in messages_dict[id]['messages']: messages_dict[id]['messages'][error] += 1 else: messages_dict[id]['messages'][error] = 1 @@ -338,6 +338,8 @@ def process_instance(instance_type, url, api, pre_import_category, post_import_c # Retrieve the queue from Radarr or Sonarr instance queue = app.get_queue(instance_type) + logger.debug(f"Queue'{instance_type}'\n{json.dumps(queue, indent=4)}\n") + queue_dict = queued_items(queue, instance_type) logger.debug(f"Queue items for '{instance_type}'\n{json.dumps(queue_dict, indent=4)}\n") @@ -360,8 +362,6 @@ def process_instance(instance_type, url, api, pre_import_category, post_import_c if messages_dict: output_dict['queue'] = messages_dict - logger.debug(f"Queue items for '{instance_type}'\n{json.dumps(queue_dict, indent=4)}\n") - # Handle moving torrents from the queue to the specified categories in qBittorrent messages_dict = handle_qbit(queue_dict, qb, post_import_category, pre_import_category, days_to_keep) if messages_dict: From 4082927c910496d75efb45d85b04ecd0c9cecd8b Mon Sep 17 00:00:00 2001 From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com> Date: Thu, 15 Feb 2024 17:03:26 -0800 Subject: [PATCH 26/37] Fix multiple logging to docker --- util/logger.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/util/logger.py b/util/logger.py index 6517b73..10cb97c 100755 --- a/util/logger.py +++ b/util/logger.py @@ -73,7 +73,7 @@ def setup_logger(log_level, script_name, max_logs=9): # Add the file handler to the logger logger.addHandler(handler) - + # Configure console logging with the specified log level console_handler = logging.StreamHandler() if log_level == 'DEBUG': @@ -85,5 +85,10 @@ def setup_logger(log_level, script_name, 
max_logs=9):
 
     # Add the console handler to the logger
     logger.addHandler(console_handler)
+    
+    # Overwrite previous logger if exists
+    logging.getLogger(script_name).handlers.clear()
+    logging.getLogger(script_name).addHandler(handler)
+    logging.getLogger(script_name).addHandler(console_handler)
 
     return logger

From 62b9396b6fc72206b8e97fd2669721f99e5d2eac Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 17:54:24 -0800
Subject: [PATCH 27/37] adjust wait timer

---
 modules/upgradinatorr.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/modules/upgradinatorr.py b/modules/upgradinatorr.py
index c92e0b8..df5c8b7 100755
--- a/modules/upgradinatorr.py
+++ b/modules/upgradinatorr.py
@@ -169,9 +169,9 @@ def process_instance(instance_type, instance_settings, app):
             app.add_tags(media_ids, tag_id)
             ready = app.wait_for_command(search_response['id'])
             if ready:
-            sleep_time = 10 # Set the sleep time to 5 seconds
+            sleep_time = 15 # Set the sleep time to 5 seconds
             print(f"Waiting for {sleep_time} seconds to allow for search results to populate in the queue...")
-            time.sleep(5)
+            time.sleep(sleep_time)
             queue = app.get_queue(instance_type)
             logger.debug(f"queue:\n{json.dumps(queue, indent=4)}")
             queue_dict = process_queue(queue, instance_type, media_ids)

From d7f976afe68b0fa1ffe83e63ad5471f50b842856 Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Thu, 15 Feb 2024 18:38:05 -0800
Subject: [PATCH 28/37] adjusted variable names to better reflect their
 contents

---
 modules/upgradinatorr.py | 34 +++++++++++++++++-----------------
 1 file changed, 17 insertions(+), 17 deletions(-)

diff --git a/modules/upgradinatorr.py b/modules/upgradinatorr.py
index df5c8b7..3fc506e 100755
--- a/modules/upgradinatorr.py
+++ b/modules/upgradinatorr.py
@@ -60,7 +60,7 @@ def filter_media(media_dict, tag_id, count):
 
 def process_queue(queue, instance_type, media_ids):
     """
-    Process the queue to return a list of dictionaries containing the download_id, media_id, and torrent.
+    Process the queue to return a list of dictionaries containing the download_id, media_id, and download.
 
     Args:
         queue (dict): A dictionary containing the queue information.
@@ -68,7 +68,7 @@ def process_queue(queue, instance_type, media_ids):
         media_ids (list): A list of media_ids to filter the queue with.
 
     Returns:
-        queue_dict (list): A list of dictionaries containing the download_id, media_id, and torrent.
+        queue_dict (list): A list of dictionaries containing the download_id, media_id, and download.
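(A note on the fixed wait these patches keep tuning: sleep_time goes from 5 to 10 to 15 and back to 10 seconds because the queue takes an unknown amount of time to populate after a search. A hedged alternative is to poll the queue until a searched item appears, roughly as sketched below; app.get_queue and the paged "records" payload shape are assumptions based on how this module reads the queue elsewhere.)

    import time

    def wait_for_queue_sketch(app, instance_type, media_ids, poll_interval=5, max_wait=60):
        # Poll the queue until at least one searched media id shows up, or give up
        id_type = "movieId" if instance_type == "radarr" else "seriesId"
        deadline = time.monotonic() + max_wait
        while time.monotonic() < deadline:
            queue = app.get_queue(instance_type)
            records = queue.get("records", []) if queue else []
            if any(record.get(id_type) in media_ids for record in records):
                return queue
            time.sleep(poll_interval)
        return app.get_queue(instance_type)  # Fall back to whatever populated in time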
""" if instance_type == "radarr": @@ -85,7 +85,7 @@ def process_queue(queue, instance_type, media_ids): queue_dict.append({ 'download_id': item['downloadId'], 'media_id': media_id, - 'torrent': item['title'], + 'download': item['title'], 'torrent_custom_format_score': item['customFormatScore'], }) # Remove duplicate dictionaries in the queue_dict list and convert it to a list of unique dictionaries @@ -169,7 +169,7 @@ def process_instance(instance_type, instance_settings, app): app.add_tags(media_ids, tag_id) ready = app.wait_for_command(search_response['id']) if ready: - sleep_time = 15 # Set the sleep time to 5 seconds + sleep_time = 10 # Set the sleep time to 5 seconds print(f"Waiting for {sleep_time} seconds to allow for search results to populate in the queue...") time.sleep(sleep_time) queue = app.get_queue(instance_type) @@ -177,15 +177,15 @@ def process_instance(instance_type, instance_settings, app): queue_dict = process_queue(queue, instance_type, media_ids) logger.debug(f"queue_dict:\n{json.dumps(queue_dict, indent=4)}") for item in filtered_media_dict: - torrents = {} + downloads = {} for queue_item in queue_dict: if item['media_id'] == queue_item['media_id']: - torrents[queue_item['torrent']] = queue_item['torrent_custom_format_score'] + downloads[queue_item['download']] = queue_item['torrent_custom_format_score'] output_dict['data'].append({ 'media_id': item['media_id'], 'title': item['title'], 'year': item['year'], - 'torrent': torrents + 'download': downloads }) else: for item in filtered_media_dict: @@ -193,7 +193,7 @@ def process_instance(instance_type, instance_settings, app): 'media_id': item['media_id'], 'title': item['title'], 'year': item['year'], - 'torrent': None, + 'download': None, 'torrent_custom_format_score': None }) return output_dict @@ -222,10 +222,10 @@ def print_output(output_dict): for item in instance_data: logger.info(f"{item['title']} ({item['year']})") - # Print torrents and their format scores associated with the media - if item['torrent']: - for torrent, format_score in item['torrent'].items(): - logger.info(f"\t{torrent}\tScore: {format_score}") + # Print downloads and their format scores associated with the media + if item['download']: + for download, format_score in item['download'].items(): + logger.info(f"\t{download}\tScore: {format_score}") else: logger.info("\tNo upgrades found for this item.") @@ -256,18 +256,18 @@ def notification(output_dict): for item in instance_data: title = item['title'] year = item['year'] - torrent = item['torrent'] + download = item['download'] torrent_list = [] torrent_list.append(f"{title} ({year})") - # Construct a list of torrents and their format scores associated with the media - if torrent: - for torrent_item, format_score in torrent.items(): + # Construct a list of downloads and their format scores associated with the media + if download: + for torrent_item, format_score in download.items(): torrent_list.append(f"\t{torrent_item}\n\tCF Score: {format_score}") else: torrent_list.append("\tNo upgrades found for this item.") server_list.append("\n".join(torrent_list)) value = "\n".join(server_list) - # Construct a Discord field containing the server name and associated media/torrents + # Construct a Discord field containing the server name and associated media/downloads if server_list: fields.append({ "name": server_name, From 52a5b4161e6ceef9ffba2da8886da4a5580e2d61 Mon Sep 17 00:00:00 2001 From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com> Date: Thu, 15 Feb 2024 18:38:18 -0800 Subject: [PATCH 
29/37] made queue retrieval protocol-agnostic

---
 util/arrpy.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/util/arrpy.py b/util/arrpy.py
index df89c82..1625460 100755
--- a/util/arrpy.py
+++ b/util/arrpy.py
@@ -629,7 +629,7 @@ def get_queue(self, instance_type):
         dict: A dictionary representing the queue.
     """
     if instance_type == 'radarr':
-        url_addon = "page=1&pageSize=200&includeMovie=true&protocol=torrent"
+        url_addon = "page=1&pageSize=200&includeMovie=true"
     elif instance_type == 'sonarr':
         url_addon = "page=1&pageSize=200&includeSeries=true"
 

From b276d20281370b2a91df4499103977b1149b4023 Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Fri, 16 Feb 2024 07:05:20 -0800
Subject: [PATCH 30/37] Fix: initialize unmatched_assets entries as lists

---
 modules/poster_cleanarr.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/poster_cleanarr.py b/modules/poster_cleanarr.py
index 5ae6d3f..c5a77b5 100755
--- a/modules/poster_cleanarr.py
+++ b/modules/poster_cleanarr.py
@@ -59,7 +59,7 @@ def match_assets(assets_dict, media_dict, ignore_collections):
     unmatched_assets = {}
     # Loop through different media types
     for media_type in ['movies', 'series', 'collections']:
-        unmatched_assets[media_type] = {}
+        unmatched_assets[media_type] = []
         # Check if the media type exists in both assets and media dictionaries
         if media_type in media_dict and media_type in assets_dict:
             # Iterate through each asset in the asset dictionary of the given media type

From 22c983a683ca2e05b76c84becc5ed40a4f85e59a Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Fri, 16 Feb 2024 13:19:09 -0800
Subject: [PATCH 31/37] asset_folders is no longer required for some scripts

---
 config/config.sample.yml | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/config/config.sample.yml b/config/config.sample.yml
index 737c440..ea1a613 100755
--- a/config/config.sample.yml
+++ b/config/config.sample.yml
@@ -282,7 +282,6 @@ border_replacerr:
     # Colors: https://www.w3schools.com/colors/colors_picker.asp
     log_level: info
     dry_run: true
-    asset_folders: false
     source_dirs:
         - /path/to/posters/
     destination_dir: /path/to/output/
@@ -327,7 +326,6 @@ border_replacerr:
 
 unmatched_assets:
     log_level: info
-    asset_folders: false
     instances:
         - plex_1
         - radarr_1
@@ -369,7 +367,6 @@ unmatched_assets:
 
 poster_cleanarr:
     log_level: info
     dry_run: true
-    asset_folders: true
     instances:
         - plex_1
         - radarr_1

From 3c326688d44f196ca95b039409a8e6ba06eb8e6a Mon Sep 17 00:00:00 2001
From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com>
Date: Fri, 16 Feb 2024 13:19:44 -0800
Subject: [PATCH 32/37] Update: How assets are gathered

---
 modules/border_replacerr.py | 2 +-
 modules/poster_renamerr.py  | 2 +-
 modules/unmatched_assets.py | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/modules/border_replacerr.py b/modules/border_replacerr.py
index b52d496..344f984 100755
--- a/modules/border_replacerr.py
+++ b/modules/border_replacerr.py
@@ -458,7 +458,7 @@ def process_files(source_dirs, destination_dir, asset_folders, dry_run):
     assets_list = []
     # Categorize files in the input directory into assets
     for path in source_dirs:
-        results = categorize_files(path, asset_folders)
+        results = categorize_files(path)
         if results:
             assets_list.extend(results)
         else:
diff --git a/modules/poster_renamerr.py b/modules/poster_renamerr.py
index 44ac3db..265dc4f 100755
--- a/modules/poster_renamerr.py
+++ b/modules/poster_renamerr.py
@@ -62,7 +62,7 @@ def get_assets_files(source_dirs):
 
     # Iterate through
each source directory for source_dir in source_dirs: - new_assets = categorize_files(source_dir, asset_folders=False) + new_assets = categorize_files(source_dir) if new_assets: # Merge new_assets with final_assets for new in new_assets: diff --git a/modules/unmatched_assets.py b/modules/unmatched_assets.py index d64477e..4a223d3 100755 --- a/modules/unmatched_assets.py +++ b/modules/unmatched_assets.py @@ -265,7 +265,7 @@ def main(): assets_list = [] for path in source_dirs: - results = categorize_files(path, asset_folders) + results = categorize_files(path) if results: assets_list.extend(results) else: From 23f2f7622d0aae7e04c1db5396b9df2e0220b7a6 Mon Sep 17 00:00:00 2001 From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com> Date: Fri, 16 Feb 2024 13:19:51 -0800 Subject: [PATCH 33/37] rework to improve reliability --- modules/poster_cleanarr.py | 132 ++++++++++++++++--------------------- 1 file changed, 58 insertions(+), 74 deletions(-) diff --git a/modules/poster_cleanarr.py b/modules/poster_cleanarr.py index c5a77b5..bd1a72f 100755 --- a/modules/poster_cleanarr.py +++ b/modules/poster_cleanarr.py @@ -54,91 +54,79 @@ def match_assets(assets_dict, media_dict, ignore_collections): Returns: dict: Dictionary of unmatched assets. """ - # Initialize dictionary to store unmatched assets by media types unmatched_assets = {} # Loop through different media types for media_type in ['movies', 'series', 'collections']: unmatched_assets[media_type] = [] - # Check if the media type exists in both assets and media dictionaries - if media_type in media_dict and media_type in assets_dict: - # Iterate through each asset in the asset dictionary of the given media type - for asset_data in tqdm(assets_dict[media_type], desc=f"Matching {media_type}", unit="assets", total=len(assets_dict[media_type]), disable=None, leave=True): - # Initialize a flag to track if an asset is matched with media - matched = False + # Iterate through each asset in the asset dictionary of the given media type + for asset_data in tqdm(assets_dict[media_type], desc=f"Matching {media_type}", unit="assets", total=len(assets_dict[media_type]), disable=None, leave=True): + # Initialize a flag to track if an asset is matched with media + matched = False - # Skip collections if in ignore_collections - if ignore_collections: - if media_type == 'collections' and asset_data['title'] in ignore_collections: - continue + # Skip collections if in ignore_collections + if ignore_collections: + if media_type == 'collections' and asset_data['title'] in ignore_collections: + continue - # Iterate through each media data of the same media type - for media_data in media_dict[media_type]: - # Check if the normalized title and year match between the asset and media - asset_seasons_numbers = asset_data.get('season_numbers', None) + if not asset_data['files']: + unmatched_assets[media_type].append({ + 'title': asset_data['title'], + 'year': asset_data['year'], + 'files': asset_data['files'], + 'path': asset_data.get('path', None) + }) + continue - # Get title and year from folder base_name - if media_type == 'series': - media_seasons_numbers = [season['season_number'] for season in media_data.get('seasons', [])] - - # Check if the asset is a match - if is_match(asset_data, media_data): - matched = True - # For series, check for missing seasons in the media - if media_type == 'series': - if asset_seasons_numbers and media_seasons_numbers: - missing_seasons = [] - for season in asset_seasons_numbers: - if season not in media_seasons_numbers: - 
missing_seasons.append(season) - # Remove all files that are not missing from asset['files'] - if missing_seasons: - files_to_remove = [] - for file in asset_data['files']: - file_name = os.path.basename(file) - if '_Season' in file_name: - season_number_match = re.search(r'_Season(\d+)', file_name) - if season_number_match: - season_number = int(season_number_match.group(1)) - if season_number not in missing_seasons: - files_to_remove.append(file) - elif '_Season' not in file: - files_to_remove.append(file) + # Iterate through each media data of the same media type + for media_data in media_dict[media_type]: - # Remove the files that need to be removed - for file in files_to_remove: - asset_data['files'].remove(file) - - # If missing seasons exist, add details to the unmatched assets + if is_match(asset_data, media_data): + matched = True + # For series, check for missing seasons in the media + if media_type == 'series': + media_seasons_numbers = media_data.get('season_numbers', None) + asset_seasons_numbers = asset_data.get('season_numbers', None) + if asset_seasons_numbers and media_seasons_numbers: + missing_seasons = [] + for season in asset_seasons_numbers: + if season not in media_seasons_numbers: + missing_seasons.append(season) + files = [] + for season in missing_seasons: + season = str(season).zfill(2) + season = f"Season{season}" + for file in asset_data['files']: + if season in file: + files.append(file) if missing_seasons: unmatched_assets[media_type].append({ 'title': asset_data['title'], 'year': asset_data['year'], - 'files': asset_data['files'], + 'files': files, 'path': asset_data.get('path', None), 'missing_season': True, 'missing_seasons': missing_seasons }) break - - # If no match is found, add the asset to unmatched assets based on media type - if not matched: - if media_type == 'series': - unmatched_assets[media_type].append({ - 'title': asset_data['title'], - 'year': asset_data['year'], - 'files': asset_data['files'], - 'path': asset_data.get('path', None), - 'missing_season': False, - 'missing_seasons': asset_data['season_numbers'] - }) - else: - unmatched_assets[media_type].append({ - 'title': asset_data['title'], - 'year': asset_data['year'], - 'files': asset_data['files'], - 'path': asset_data.get('path', None) - }) + # If no match is found, add the asset to unmatched assets based on media type + if not matched: + if media_type == 'series': + unmatched_assets[media_type].append({ + 'title': asset_data['title'], + 'year': asset_data['year'], + 'files': asset_data['files'], + 'path': asset_data.get('path', None), + 'missing_season': False, + 'missing_seasons': asset_data['season_numbers'] + }) + else: + unmatched_assets[media_type].append({ + 'title': asset_data['title'], + 'year': asset_data['year'], + 'files': asset_data['files'], + 'path': asset_data.get('path', None) + }) return unmatched_assets def remove_assets(unmatched_dict, source_dirs): @@ -159,7 +147,6 @@ def remove_assets(unmatched_dict, source_dirs): # Initialize a list to track items to be removed remove_list = [] - # Iterate through each asset type for asset_type in asset_types: # Iterate through each asset data within the unmatched assets of the given asset type @@ -291,7 +278,6 @@ def main(): logger.error("Invalid script configuration. 
Exiting.") return library_names = script_config.get('library_names', []) - asset_folders = script_config.get('asset_folders', False) media_paths = script_config.get('media_paths', []) source_dirs = script_config.get('source_dirs', []) ignore_collections = script_config.get('ignore_collections', []) @@ -304,7 +290,6 @@ def main(): logger.debug(create_table(table)) logger.debug(f'{"Log level:":<20}{log_level}') logger.debug(f'{"Dry_run:":<20}{dry_run}') - logger.debug(f'{"Asset Folders:":<20}{asset_folders}') logger.debug(f'{"Assets paths:":<20}{source_dirs}') logger.debug(f'{"Media paths:":<20}{media_paths}') logger.debug(f'{"Library names:":<20}{library_names}') @@ -316,12 +301,11 @@ def main(): assets_list = [] for path in source_dirs: - results = categorize_files(path, asset_folders) + results = categorize_files(path) if results: assets_list.extend(results) else: logger.error(f"No assets found in {path}.") - # Checking for assets and logging if assets_list: assets_dict = sort_assets(assets_list) @@ -387,8 +371,8 @@ def main(): if any(remove_data.values()): logger.debug(f"Remove Data:\n{json.dumps(remove_data, indent=4)}") print_output(remove_data) - else: - logger.info(f"No assets removed.") + else: + logger.info(f"No assets removed.") except KeyboardInterrupt: print("Keyboard Interrupt detected. Exiting...") From de45dea638683c0c3f6ce30e079f85991f69f64c Mon Sep 17 00:00:00 2001 From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com> Date: Fri, 16 Feb 2024 13:20:02 -0800 Subject: [PATCH 34/37] adde auto asset_folder detection --- util/utility.py | 87 +++++++++++++++++++++++++++++++------------------ 1 file changed, 55 insertions(+), 32 deletions(-) diff --git a/util/utility.py b/util/utility.py index f600269..2f361ab 100755 --- a/util/utility.py +++ b/util/utility.py @@ -136,7 +136,25 @@ def normalize_titles(title): return normalized_title -def categorize_files(folder_path, asset_folders): +def _is_asset_folders(folder_path): + """ + Check if the folder contains asset folders + + Args: + folder_path (str): The path to the folder to check + + Returns: + bool: True if the folder contains asset folders, False otherwise + """ + if not os.path.exists(folder_path): + return False + else: + for item in os.listdir(folder_path): + if os.path.isdir(os.path.join(folder_path, item)): + return True + return False + +def categorize_files(folder_path): """ Categorize files into movies, collections, and series @@ -148,6 +166,8 @@ def categorize_files(folder_path, asset_folders): list: A list of dictionaries containing the sorted files """ + asset_folders = _is_asset_folders(folder_path) + assets_dict = [] # Define asset types to categorize @@ -265,7 +285,7 @@ def categorize_files(folder_path, asset_folders): # Sort the season numbers and file paths for the current series entry if series_entry is not None: # Remove duplicates - series_entry['season_numbers'] = list(set(series_entry['season_numbers'])) + series_entry['season_numbers'] = list(set(map(int, series_entry['season_numbers']))) series_entry['season_numbers'].sort() # Remove duplicates series_entry['files'] = list(set(series_entry['files'])) @@ -291,21 +311,22 @@ def categorize_files(folder_path, asset_folders): if not year: # If year is not found in the folder name # Categorize as a collection # Process files within the folder and add to the collection - for file in files: + files = [] + for file in os.listdir(dir): if file.startswith('.'): continue - else: - assets_dict.append({ - 'title': title, - 'year': year, - 'normalized_title': 
normalize_title, - 'no_prefix': [title.replace(prefix, '').strip() for prefix in prefixes if title.startswith(prefix)], - 'no_suffix': [title.replace(suffix, '').strip() for suffix in suffixes if title.endswith(suffix)], - 'no_prefix_normalized': [normalize_titles(title.replace(prefix, '').strip()) for prefix in prefixes if title.startswith(prefix)], - 'no_suffix_normalized': [normalize_titles(title.replace(suffix, '').strip()) for suffix in suffixes if title.endswith(suffix)], - 'path': dir, - 'files': [f"{dir}/{file}"], - }) + files.append(f"{dir}/{file}") + assets_dict.append({ + 'title': title, + 'year': year, + 'normalized_title': normalize_title, + 'no_prefix': [title.replace(prefix, '').strip() for prefix in prefixes if title.startswith(prefix)], + 'no_suffix': [title.replace(suffix, '').strip() for suffix in suffixes if title.endswith(suffix)], + 'no_prefix_normalized': [normalize_titles(title.replace(prefix, '').strip()) for prefix in prefixes if title.startswith(prefix)], + 'no_suffix_normalized': [normalize_titles(title.replace(suffix, '').strip()) for suffix in suffixes if title.endswith(suffix)], + 'path': dir, + 'files': files, + }) else: # If year is found in the folder name # Check if the folder contains series or movies based on certain criteria @@ -317,7 +338,7 @@ def categorize_files(folder_path, asset_folders): if file.startswith('.'): continue if "season" in file.lower(): - season_numbers = re.search(r'Season\s*(\d+)', file).group(1) + season_numbers = int(re.search(r'Season\s*(\d+)', file).group(1)) if season_numbers not in list_of_season_numbers: list_of_season_numbers.append(season_numbers) if file not in list_of_files: @@ -340,17 +361,18 @@ def categorize_files(folder_path, asset_folders): }) else: - for file in files: + files = [] + for file in os.listdir(dir): if file.startswith('.'): continue - else: - assets_dict.append({ - 'title': title, - 'year': year, - 'normalized_title': normalize_title, - 'path': dir, - 'files': [f"{dir}/{file}"], - }) + files.append(f"{dir}/{file}") + assets_dict.append({ + 'title': title, + 'year': year, + 'normalized_title': normalize_title, + 'path': dir, + 'files': files, + }) except FileNotFoundError: return None @@ -520,25 +542,25 @@ def handle_starr_data(app, instance_type): file_id = item.get('movieFile', {}).get('id', None) # Fetch file ID for Radarr elif instance_type == "sonarr": season_data = item.get('seasons', []) # Fetch season data for Sonarr - season_dict = [] # Initialize a list to hold season data + season_list = [] # Initialize a list to hold season data for season in season_data: episode_data = app.get_episode_data_by_season(item['id'], season['seasonNumber']) # Fetch episode data for each season - episode_dict = [] # Initialize a list to hold episode data + episode_list = [] # Initialize a list to hold episode data for episode in episode_data: - episode_dict.append({ + episode_list.append({ 'episode_number': episode['episodeNumber'], 'monitored': episode['monitored'], 'episode_file_id': episode['episodeFileId'], 'episode_id': episode['id'], 'has_file': episode['hasFile'], }) # Append episode data to the episode dictionary - if episode_dict: - season_dict.append({ + if episode_list: + season_list.append({ 'season_number': season['seasonNumber'], 'monitored': season['monitored'], 'season_pack': season['statistics']['episodeCount'] == season['statistics']['totalEpisodeCount'], 'season_has_episodes': season['statistics']['episodeCount'] > 0, - 'episode_data': episode_dict, + 'episode_data': episode_list, }) # Append 
season data to the season dictionary alternate_titles = [] @@ -574,7 +596,8 @@ def handle_starr_data(app, instance_type): 'folder': os.path.basename(os.path.normpath(item['path'])), 'has_file': item['hasFile'] if instance_type == "radarr" else None, 'tags': item['tags'], - 'seasons': season_dict if instance_type == "sonarr" else None, # Add season_dict for Sonarr items + 'seasons': season_list if instance_type == "sonarr" else None, # Add season_list for Sonarr items + 'season_numbers': [season['season_number'] for season in season_list] if instance_type == "sonarr" else None, }) # Append the constructed dictionary to media_dict else: return None From 216777f56d779b79bc8ddee24ad4cfbdf1f087fc Mon Sep 17 00:00:00 2001 From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com> Date: Fri, 16 Feb 2024 13:30:32 -0800 Subject: [PATCH 35/37] Update: Forgotten edits --- modules/border_replacerr.py | 7 ++----- modules/poster_renamerr.py | 2 +- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/modules/border_replacerr.py b/modules/border_replacerr.py index 344f984..f4b4f91 100755 --- a/modules/border_replacerr.py +++ b/modules/border_replacerr.py @@ -412,14 +412,13 @@ def copy_files(assets_dict, destination_dir, dry_run): messages.append(f"Would have copied {data['title']}{year} - {file_name} to {output_basename}") return messages -def process_files(source_dirs, destination_dir, asset_folders, dry_run): +def process_files(source_dirs, destination_dir, dry_run): """ Processes the files in the input directory. Args: source_dirs (str): The input directory. destination_dir (str): The output directory. - asset_folders (bool): Whether to use asset folders. Returns: None @@ -443,7 +442,6 @@ def process_files(source_dirs, destination_dir, asset_folders, dry_run): logger.debug(f'{"Log Level:":<20}{config.log_level}') logger.debug(f'{"Input Dir:":<20}{source_dirs}') logger.debug(f'{"Output Dir:":<20}{destination_dir}') - logger.debug(f'{"Asset Folders:":<20}{asset_folders}') logger.debug(f'{"Border Colors:":<20}{border_colors}') logger.debug(f'{"Skip:":<20}{skip}') logger.debug(f'{"Schedule:":<20}{schedule}') @@ -531,7 +529,6 @@ def main(): destination_dir = script_config['destination_dir'] schedule = script_config['schedule'] border_colors = script_config['border_colors'] - asset_folders = script_config['asset_folders'] dry_run = config.dry_run # Convert single string border color to a list if necessary @@ -540,7 +537,7 @@ def main(): # Process files in the input directory with specified settings - process_files(source_dirs, destination_dir, asset_folders, dry_run) + process_files(source_dirs, destination_dir, dry_run) logger.info(f"Border Replacer Complete") # Log completion message except KeyboardInterrupt: diff --git a/modules/poster_renamerr.py b/modules/poster_renamerr.py index 265dc4f..902369a 100755 --- a/modules/poster_renamerr.py +++ b/modules/poster_renamerr.py @@ -659,7 +659,7 @@ def main(): logger.info(f"Running border_replacerr.py") tmp_dir = os.path.join(destination_dir, 'tmp') from modules.border_replacerr import process_files - process_files(tmp_dir, destination_dir, asset_folders, dry_run) + process_files(tmp_dir, destination_dir, dry_run) except KeyboardInterrupt: print("Keyboard Interrupt detected. 
Exiting...") sys.exit() From 58484298ed53101e867fc643e3c560f4c2ff996d Mon Sep 17 00:00:00 2001 From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com> Date: Fri, 16 Feb 2024 13:55:20 -0800 Subject: [PATCH 36/37] Update: Error handling missing folder --- modules/border_replacerr.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/modules/border_replacerr.py b/modules/border_replacerr.py index f4b4f91..f8427a5 100755 --- a/modules/border_replacerr.py +++ b/modules/border_replacerr.py @@ -452,6 +452,10 @@ def process_files(source_dirs, destination_dir, dry_run): # Check for a scheduled event to update border colors if provided if schedule: border_colors, run_holiday, holiday = check_holiday(schedule, border_colors) + + if not os.path.exists(destination_dir): + logger.error(f"Output directory {destination_dir} does not exist.") + return assets_list = [] # Categorize files in the input directory into assets @@ -539,7 +543,6 @@ def main(): # Process files in the input directory with specified settings process_files(source_dirs, destination_dir, dry_run) - logger.info(f"Border Replacer Complete") # Log completion message except KeyboardInterrupt: print("Keyboard Interrupt detected. Exiting...") sys.exit() From 54dc09dbaa098f44dfad546b0847cac2c37a7cc7 Mon Sep 17 00:00:00 2001 From: Drazzilb <65743746+Drazzilb08@users.noreply.github.com> Date: Fri, 16 Feb 2024 19:29:32 -0800 Subject: [PATCH 37/37] Rework cleanarr - Overhauled how matching is facilitated with this script - No more does it match based upon suspected type (movies, series & collections) but now will straight match. - Script is now a bit slower as it's combining all Media and Assets into their respective lists and parsing each of them. --- modules/poster_cleanarr.py | 243 +++++++++++++++---------------------- 1 file changed, 101 insertions(+), 142 deletions(-) diff --git a/modules/poster_cleanarr.py b/modules/poster_cleanarr.py index bd1a72f..f7428d9 100755 --- a/modules/poster_cleanarr.py +++ b/modules/poster_cleanarr.py @@ -43,7 +43,7 @@ log_level = config.log_level dry_run = config.dry_run -def match_assets(assets_dict, media_dict, ignore_collections): +def match_assets(assets_list, media_dict): """ Match assets to media. @@ -55,78 +55,62 @@ def match_assets(assets_dict, media_dict, ignore_collections): dict: Dictionary of unmatched assets. 
""" # Initialize dictionary to store unmatched assets by media types - unmatched_assets = {} + unmatched_assets = [] # Loop through different media types - for media_type in ['movies', 'series', 'collections']: - unmatched_assets[media_type] = [] - # Iterate through each asset in the asset dictionary of the given media type - for asset_data in tqdm(assets_dict[media_type], desc=f"Matching {media_type}", unit="assets", total=len(assets_dict[media_type]), disable=None, leave=True): - # Initialize a flag to track if an asset is matched with media - matched = False + # Iterate through each asset in the asset dictionary of the given media type + for asset_data in tqdm(assets_list, desc=f"Matching...", unit="assets", total=len(assets_list), disable=None, leave=True): + # Initialize a flag to track if an asset is matched with media + matched = False - # Skip collections if in ignore_collections - if ignore_collections: - if media_type == 'collections' and asset_data['title'] in ignore_collections: - continue - - if not asset_data['files']: - unmatched_assets[media_type].append({ - 'title': asset_data['title'], - 'year': asset_data['year'], - 'files': asset_data['files'], - 'path': asset_data.get('path', None) - }) - continue + if not asset_data['files']: + unmatched_assets.append({ + 'title': asset_data['title'], + 'year': asset_data['year'], + 'files': asset_data['files'], + 'path': asset_data.get('path', None) + }) + continue - # Iterate through each media data of the same media type - for media_data in media_dict[media_type]: + # Iterate through each media data of the same media type + for media_data in media_dict: - if is_match(asset_data, media_data): - matched = True - # For series, check for missing seasons in the media - if media_type == 'series': - media_seasons_numbers = media_data.get('season_numbers', None) - asset_seasons_numbers = asset_data.get('season_numbers', None) - if asset_seasons_numbers and media_seasons_numbers: - missing_seasons = [] - for season in asset_seasons_numbers: - if season not in media_seasons_numbers: - missing_seasons.append(season) - files = [] - for season in missing_seasons: - season = str(season).zfill(2) - season = f"Season{season}" - for file in asset_data['files']: - if season in file: - files.append(file) - if missing_seasons: - unmatched_assets[media_type].append({ - 'title': asset_data['title'], - 'year': asset_data['year'], - 'files': files, - 'path': asset_data.get('path', None), - 'missing_season': True, - 'missing_seasons': missing_seasons - }) - break - # If no match is found, add the asset to unmatched assets based on media type - if not matched: - if media_type == 'series': - unmatched_assets[media_type].append({ - 'title': asset_data['title'], - 'year': asset_data['year'], - 'files': asset_data['files'], - 'path': asset_data.get('path', None), - 'missing_season': False, - 'missing_seasons': asset_data['season_numbers'] - }) - else: - unmatched_assets[media_type].append({ - 'title': asset_data['title'], - 'year': asset_data['year'], - 'files': asset_data['files'], - 'path': asset_data.get('path', None) - }) + if is_match(asset_data, media_data): + matched = True + + # For series, check for missing seasons in the media + if media_data.get('season_numbers', None): + media_seasons_numbers = media_data.get('season_numbers', None) + asset_seasons_numbers = asset_data.get('season_numbers', None) + if asset_seasons_numbers and media_seasons_numbers: + missing_seasons = [] + for season in asset_seasons_numbers: + if season not in 
media_seasons_numbers: + missing_seasons.append(season) + files = [] + for season in missing_seasons: + season = str(season).zfill(2) + season = f"Season{season}" + for file in asset_data['files']: + if season in file: + files.append(file) + if missing_seasons: + unmatched_assets.append({ + 'title': asset_data['title'], + 'year': asset_data['year'], + 'files': files, + 'path': asset_data.get('path', None), + 'missing_season': True, + 'missing_seasons': missing_seasons + }) + break + # If no match is found, add the asset to unmatched assets based on media type + if not matched: + unmatched_assets.append({ + 'title': asset_data['title'], + 'year': asset_data['year'], + 'files': asset_data['files'], + 'path': asset_data.get('path', None) + }) return unmatched_assets def remove_assets(unmatched_dict, source_dirs): @@ -140,36 +124,34 @@ def remove_assets(unmatched_dict, source_dirs): dict: Dictionary of assets removed. """ # Define the types of assets - asset_types = ['movies', 'series', 'collections'] # Initialize a dictionary to store removed asset data categorized by asset types - remove_data = {media_type: [] for media_type in asset_types} + remove_data = [] # Initialize a list to track items to be removed remove_list = [] # Iterate through each asset type - for asset_type in asset_types: - # Iterate through each asset data within the unmatched assets of the given asset type - for asset_data in unmatched_dict[asset_type]: - messages = [] - - # Check if the asset has no associated files (empty folder) - if not asset_data['files'] and asset_data['path']: - # Add the path of the empty folder to the removal list and log a message - remove_list.append(asset_data['path']) - messages.append(f"Removing empty folder: {os.path.basename(asset_data['path'])}") - else: - # For each file associated with the asset, add it to the removal list and log a message - for file in asset_data['files']: - remove_list.append(file) - messages.append(f"Removing file: {os.path.basename(file)}") + # Iterate through each asset data within the unmatched assets of the given asset type + for asset_data in unmatched_dict: + messages = [] + + # Check if the asset has no associated files (empty folder) + if not asset_data['files'] and asset_data['path']: + # Add the path of the empty folder to the removal list and log a message + remove_list.append(asset_data['path']) + messages.append(f"Removing empty folder: {os.path.basename(asset_data['path'])}") + else: + # For each file associated with the asset, add it to the removal list and log a message + for file in asset_data['files']: + remove_list.append(file) + messages.append(f"Removing file: {os.path.basename(file)}") - # Store removal data for the current asset type - remove_data[asset_type].append({ - 'title': asset_data['title'], - 'year': asset_data['year'], - 'messages': messages - }) + # Store removal data for the current asset type + remove_data.append({ + 'title': asset_data['title'], + 'year': asset_data['year'], + 'messages': messages + }) # If not a dry run, perform the removal operations if not dry_run: @@ -216,41 +198,24 @@ def print_output(remove_data): """ # Define the types of assets - asset_types = ['collections', 'movies', 'series'] count = 0 # Counter to track the total number of assets removed - - # Iterate through each asset type - # If any asset asset types in remove_data have data statement is true - if any(remove_data[asset_type] for asset_type in asset_types): - for asset_type in asset_types: - if asset_type in remove_data: - if 
remove_data[asset_type]: - table = [ - [f"{asset_type.capitalize()}"] - ] - logger.info(create_table(table)) - # Iterate through each removed asset of the current type - for data in remove_data[asset_type]: - title = data['title'] - year = data['year'] - - # Log the title and year (if available) of the removed asset - if year: - logger.info(f"\t{title} ({year})") - else: - logger.info(f"\t{title}") - - # Log messages related to the removal of files or folders associated with the asset - asset_messages = data['messages'] - for message in asset_messages: - logger.info(f"\t\t{message}") - count += 1 # Increment the counter for each removed asset message - logger.info("") # Add an empty line for better readability - else: - table = [ - ["No assets removed"] - ] - logger.info(create_table(table)) + + for data in remove_data: + title = data['title'] + year = data['year'] + + # Log the title and year (if available) of the removed asset + if year: + logger.info(f"\t{title} ({year})") + else: + logger.info(f"\t{title}") + + # Log messages related to the removal of files or folders associated with the asset + asset_messages = data['messages'] + for message in asset_messages: + logger.info(f"\t\t{message}") + count += 1 # Increment the counter for each removed asset message + logger.info("") # Add an empty line for better readability # Log the total number of assets removed across all types logger.info(f"\nTotal number of assets removed: {count}") @@ -280,7 +245,6 @@ def main(): library_names = script_config.get('library_names', []) media_paths = script_config.get('media_paths', []) source_dirs = script_config.get('source_dirs', []) - ignore_collections = script_config.get('ignore_collections', []) instances = script_config.get('instances', None) # Log script settings for debugging purposes @@ -293,7 +257,6 @@ def main(): logger.debug(f'{"Assets paths:":<20}{source_dirs}') logger.debug(f'{"Media paths:":<20}{media_paths}') logger.debug(f'{"Library names:":<20}{library_names}') - logger.debug(f'{"Ignore Collections:":<20}{ignore_collections}') logger.debug(f'{"Instances:":<20}{instances}') logger.debug(create_bar("-")) @@ -308,18 +271,14 @@ def main(): logger.error(f"No assets found in {path}.") # Checking for assets and logging if assets_list: - assets_dict = sort_assets(assets_list) - logger.debug(f"Assets:\n{json.dumps(assets_dict, indent=4)}") + # assets_dict = sort_assets(assets_list) + logger.debug(f"Assets:\n{json.dumps(assets_list, indent=4)}") else: logger.error("No assets found, Check source_dirs setting in your config. Exiting.") return # Fetch information from Plex and StARR - media_dict = { - 'movies': [], - 'series': [], - 'collections': [] - } + media_dict = [] if instances: for instance_type, instance_data in config.instances_config.items(): for instance in instances: @@ -335,7 +294,7 @@ def main(): if library_names and app: print("Getting Plex data...") results = get_plex_data(app, library_names, logger, include_smart=True, collections_only=True) - media_dict['collections'].extend(results) + media_dict.extend(results) else: logger.warning("No library names specified in config.yml. 
Skipping Plex.") else: @@ -347,9 +306,9 @@ def main(): results = handle_starr_data(app, instance_type) if results: if instance_type == "radarr": - media_dict['movies'].extend(results) + media_dict.extend(results) elif instance_type == "sonarr": - media_dict['series'].extend(results) + media_dict.extend(results) else: logger.error(f"No {instance_type.capitalize()} data found.") @@ -357,18 +316,18 @@ def main(): logger.error(f"No instances found. Exiting script...") return - if not any(media_dict.values()): + if not media_dict: logger.error("No media found, Check instances setting in your config. Exiting.") return else: logger.debug(f"Media:\n{json.dumps(media_dict, indent=4)}") # Match assets with media and log the results - unmatched_dict = match_assets(assets_dict, media_dict, ignore_collections) - if any(unmatched_dict.values()): + unmatched_dict = match_assets(assets_list, media_dict) + if unmatched_dict: logger.debug(f"Unmatched:\n{json.dumps(unmatched_dict, indent=4)}") remove_data = remove_assets(unmatched_dict, source_dirs) - if any(remove_data.values()): + if remove_data: logger.debug(f"Remove Data:\n{json.dumps(remove_data, indent=4)}") print_output(remove_data) else: