diff --git a/.editorconfig b/.editorconfig index ac0331d697..7e7880565b 100644 --- a/.editorconfig +++ b/.editorconfig @@ -1,15 +1,10 @@ -# top-most EditorConfig file root = true -# Unix-style newlines with a newline ending every file -# and trailing whitespace removed [*] end_of_line = lf insert_final_newline = true trim_trailing_whitespace = true -# Matches multiple files with brace expansion notation -# Set default charset [*.{py,mako}] charset = utf-8 indent_style = space @@ -20,12 +15,9 @@ charset = utf-8 indent_style = space indent_size = 4 -# Matches configuration files for services like Travis, AppVeyor and Codecov [*.yml] indent_style = space indent_size = 2 -[{package.json, bower.json, webpack.config.js}] -charset = utf-8 -indent_style = space +[{package.json,bower.json,webpack.config.js}] indent_size = 2 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 19c6f246e2..2b6a671273 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -2,11 +2,9 @@ name: Build develop branch on: workflow_run: - workflows: [Python Packaging] - branches: | - - develop - types: - - completed + workflows: [ Python Packaging ] + branches: [ develop ] + types: [ completed, requested ] jobs: version: diff --git a/SickChill.py b/SickChill.py index d11749a6c0..70d0ecfc23 100755 --- a/SickChill.py +++ b/SickChill.py @@ -1,6 +1,7 @@ #!/usr/bin/env python3 import datetime +import mimetypes import os import platform import shutil @@ -28,14 +29,12 @@ setup_gettext() -import mimetypes - mimetypes.add_type("text/css", ".css") mimetypes.add_type("application/sfont", ".otf") mimetypes.add_type("application/sfont", ".ttf") mimetypes.add_type("application/javascript", ".js") mimetypes.add_type("application/font-woff", ".woff") -# Not sure about this one, but we also have halflings in .woff so I think it wont matter +# Not sure about this one, but we also have halflings in .woff, so I think it won't matter # 
mimetypes.add_type("application/font-woff2", ".woff2") from configobj import ConfigObj @@ -67,6 +66,8 @@ def __init__(self): self.run_as_daemon = False # web server constants + self.flask_server = None + self.web_server = None self.forced_port = None self.no_launch = False @@ -253,7 +254,7 @@ def load_shows_from_db(): for sql_show in sql_results: try: cur_show = TVShow(sql_show["indexer"], sql_show["indexer_id"]) - cur_show.nextEpisode() + cur_show.next_episode() settings.showList.append(cur_show) except Exception as error: logger.exception("There was an error creating the show in {}: Error {}".format(sql_show["location"], error)) @@ -301,7 +302,7 @@ def shutdown(self, event): """ if settings.started: sickchill.start.halt() # stop all tasks - sickchill.start.saveAll() # save all shows to DB + sickchill.start.save_all() # save all shows to DB # shutdown web server if self.web_server: diff --git a/sickchill/gui/slick/js/core.js b/sickchill/gui/slick/js/core.js index 89d2b87f43..7ee067ee3e 100644 --- a/sickchill/gui/slick/js/core.js +++ b/sickchill/gui/slick/js/core.js @@ -137,7 +137,7 @@ const SICKCHILL = { + ' ' + '', confirm(event) { - location.href = event.context.href + ($('#deleteFiles')[0].checked ? '&full=1' : ''); + location.href = event.context.href + ($('#deleteFiles')[0].checked ? '&full=1' : '&full=0'); }, }); diff --git a/sickchill/gui/slick/views/addShows_favoriteShows.mako b/sickchill/gui/slick/views/addShows_favoriteShows.mako index b588f8950a..6074c460ce 100644 --- a/sickchill/gui/slick/views/addShows_favoriteShows.mako +++ b/sickchill/gui/slick/views/addShows_favoriteShows.mako @@ -96,7 +96,7 @@ ${cur_result.siteRatingCount}
${_('Episode')} | ${_('Name')} | ${_('Airdate')} |
---|---|---|
${whichStr} | +||
${which_str} | - ${curResult["name"]} + ${cur_result["name"]} |
% try:
- % if int(curResult['airdate']) > 1:
- <% air_date = datetime.datetime.fromordinal(curResult['airdate']) %>
- % if air_date > datetime.datetime.utcfromtimestamp(0) and curShow.network:
- <% air_date = scdatetime.scdatetime.convert_to_setting(network_timezones.parse_date_time(curResult['airdate'], curShow.airs, curShow.network)) %>
+ % if int(cur_result['airdate']) > 1:
+ <% air_date = datetime.datetime.fromordinal(cur_result['airdate']) %>
+ % if air_date > datetime.datetime.utcfromtimestamp(0) and cur_show.network:
+ <% air_date = scdatetime.scdatetime.convert_to_setting(network_timezones.parse_date_time(cur_result['airdate'], cur_show.airs, cur_show.network)) %>
% endif
% else:
diff --git a/sickchill/gui/slick/views/restart.mako b/sickchill/gui/slick/views/restart.mako
index 7ae284da4a..af72fc3d63 100644
--- a/sickchill/gui/slick/views/restart.mako
+++ b/sickchill/gui/slick/views/restart.mako
@@ -1,6 +1,6 @@
<%inherit file="/layouts/main.mako" />
<%block name="metas">
-
+
%block>
<%block name="css">
diff --git a/sickchill/gui/slick/views/testRename.mako b/sickchill/gui/slick/views/testRename.mako
index 469a0f9ce4..4b38c31e7d 100644
--- a/sickchill/gui/slick/views/testRename.mako
+++ b/sickchill/gui/slick/views/testRename.mako
@@ -78,14 +78,14 @@
% for current_episode in sorted(show.episodes[current_season], reverse=True):
<%
episode_object = show.episodes[current_season][current_episode]
- if not (episode_object and episode_object._location):
+ if not (episode_object and episode_object.location):
continue
episode_list = episode_object.sorted_episode_list
if episode_object.episode != min(episode_list):
continue
- location = episode_object.location[len(show._location)+1:]
+ location = episode_object.location[len(show.get_location)+1:]
extension = location.split('.')[-1]
new_location = episode_object.proper_path() + '.' + extension
%>
diff --git a/sickchill/gui/slick/views/trendingShows.mako b/sickchill/gui/slick/views/trendingShows.mako
index a02b4a82e5..4dfdcba4e3 100644
--- a/sickchill/gui/slick/views/trendingShows.mako
+++ b/sickchill/gui/slick/views/trendingShows.mako
@@ -40,7 +40,7 @@
${cur_show['show']['votes']} ${_('votes')}
${_('Add Show')}
+ class="btn btn-xs">${_('Add Show')}
% if black_list:
${_('Remove Show')}
diff --git a/sickchill/oldbeard/classes.py b/sickchill/oldbeard/classes.py
index 6bdfc70ed3..1726f9a156 100644
--- a/sickchill/oldbeard/classes.py
+++ b/sickchill/oldbeard/classes.py
@@ -76,7 +76,7 @@ def make_result(cls, result_dict):
return show[0]
show = show[1]
- episode_objects = [show.getEpisode(result_dict.get("season"), ep) for ep in result_dict.get("episodes").split("|") if ep]
+ episode_objects = [show.get_episode(result_dict.get("season"), ep) for ep in result_dict.get("episodes").split("|") if ep]
result = cls(episode_objects)
result.from_json(result_dict)
result.show = show
diff --git a/sickchill/oldbeard/common.py b/sickchill/oldbeard/common.py
index 39cd90ff8a..2e7b209192 100644
--- a/sickchill/oldbeard/common.py
+++ b/sickchill/oldbeard/common.py
@@ -445,9 +445,9 @@ def sceneQualityFromName(name, quality):
# 2 corresponds to SDDVD quality
if quality == 2:
- if re.search(r"b(r|d|rd)?(-| |\.)?(rip|mux)", name.lower()):
+ if re.search(r"b(r|d|rd)?([- .])?(rip|mux)", name.lower()):
rip_type = " BDRip"
- elif re.search(r"(dvd)(-| |\.)?(rip|mux)?", name.lower()):
+ elif re.search(r"(dvd)([- .])?(rip|mux)?", name.lower()):
rip_type = " DVDRip"
else:
rip_type = ""
diff --git a/sickchill/oldbeard/dailysearcher.py b/sickchill/oldbeard/dailysearcher.py
index 9704e9557e..d2c8e40cfb 100644
--- a/sickchill/oldbeard/dailysearcher.py
+++ b/sickchill/oldbeard/dailysearcher.py
@@ -66,7 +66,7 @@ def run(self, force=False):
if air_time > curTime:
continue
- ep = show.getEpisode(sqlEp["season"], sqlEp["episode"])
+ ep = show.get_episode(sqlEp["season"], sqlEp["episode"])
with ep.lock:
prefix = _("New episode {episode_string} airs today,").format(episode_string=ep.pretty_name)
if ep.season == 0:
diff --git a/sickchill/oldbeard/failedProcessor.py b/sickchill/oldbeard/failedProcessor.py
index 302ebeade1..9b445c4052 100644
--- a/sickchill/oldbeard/failedProcessor.py
+++ b/sickchill/oldbeard/failedProcessor.py
@@ -56,7 +56,7 @@ def process(self):
self._log(f"{parsed.air_date}", logger.DEBUG)
for episode in parsed.episode_numbers:
- segment = parsed.show.getEpisode(parsed.season_number, episode)
+ segment = parsed.show.get_episode(parsed.season_number, episode)
cur_failed_queue_item = search_queue.FailedQueueItem(parsed.show, [segment])
settings.searchQueueScheduler.action.add_item(cur_failed_queue_item)
diff --git a/sickchill/oldbeard/helpers.py b/sickchill/oldbeard/helpers.py
index 10084c6b75..ba0f3beba6 100644
--- a/sickchill/oldbeard/helpers.py
+++ b/sickchill/oldbeard/helpers.py
@@ -659,7 +659,7 @@ def get_all_episodes_from_absolute_number(show, absolute_numbers, indexer_id=Non
show = Show.find(settings.showList, indexer_id)
for absolute_number in absolute_numbers if show else []:
- ep = show.getEpisode(None, None, absolute_number=absolute_number)
+ ep = show.get_episode(None, None, absolute_number=absolute_number)
if ep:
episodes.append(ep.episode)
season = ep.season # this will always take the last found season so eps that cross the season border are not handeled well
@@ -891,22 +891,22 @@ def _check_against_names(nameInQuestion, show, season=-1):
return False
-def get_show(name, tryIndexers=False):
+def get_show(name, try_indexers=False):
if not settings.showList:
return
- showObj = None
- fromCache = False
+ show_object = None
+ from_cache = False
if not name:
- return showObj
+ return show_object
try:
# check cache for show
cache = sickchill.oldbeard.name_cache.get_id_from_name(name)
if cache:
- fromCache = True
- showObj = Show.find(settings.showList, int(cache))
+ from_cache = True
+ show_object = Show.find(settings.showList, int(cache))
else:
check_names = [full_sanitizeSceneName(name), name]
show_matches = [
@@ -917,31 +917,31 @@ def get_show(name, tryIndexers=False):
]
if len(show_matches) == 1:
- showObj = show_matches[0]
+ show_object = show_matches[0]
# try indexers
- if not showObj and tryIndexers:
+ if not show_object and try_indexers:
result = sickchill.indexer.search_indexers_for_series_id(name=full_sanitizeSceneName(name))[1]
if result:
- showObj = Show.find(settings.showList, result.id)
+ show_object = Show.find(settings.showList, result.id)
# try scene exceptions
- if not showObj:
+ if not show_object:
scene_exceptions = sickchill.oldbeard.scene_exceptions.get_scene_exception_by_name_multiple(name)
for scene_exception in scene_exceptions:
if scene_exception[0]:
- showObj = Show.find(settings.showList, scene_exception[0])
- if showObj:
+ show_object = Show.find(settings.showList, scene_exception[0])
+ if show_object:
break
# add show to cache
- if showObj and not fromCache:
- sickchill.oldbeard.name_cache.add_name(name, showObj.indexerid)
+ if show_object and not from_cache:
+ sickchill.oldbeard.name_cache.add_name(name, show_object.indexerid)
except Exception as error:
logger.debug(_("There was a problem when attempting to find {name} in SickChill. Error: {error}").format(name=name, error=error))
logger.debug(traceback.format_exc())
- return showObj
+ return show_object
def is_hidden_folder(folder):
diff --git a/sickchill/oldbeard/name_parser/parser.py b/sickchill/oldbeard/name_parser/parser.py
index c5072c61c2..5232d60fb9 100644
--- a/sickchill/oldbeard/name_parser/parser.py
+++ b/sickchill/oldbeard/name_parser/parser.py
@@ -3,9 +3,10 @@
import re
import time
from collections import OrderedDict
+from datetime import date
from operator import attrgetter
from threading import Lock
-from typing import TYPE_CHECKING
+from typing import Any, TYPE_CHECKING
from dateutil.parser import parse
@@ -26,17 +27,17 @@ class NameParser(object):
NORMAL_REGEX = 1
ANIME_REGEX = 2
- def __init__(self, filename: bool = True, showObj=None, tryIndexers: bool = False, naming_pattern: bool = False, parse_method: str = None):
+ def __init__(self, filename: bool = True, show_object=None, try_indexers: bool = False, naming_pattern: bool = False, parse_method: str = None):
self.filename: bool = filename
- self.showObj: TVShow = showObj
- self.tryIndexers: bool = tryIndexers
+ self.show_object: TVShow = show_object
+ self.try_indexers: bool = try_indexers
self.compiled_regexes: List = []
self.naming_pattern = naming_pattern
- if (self.showObj and not self.showObj.is_anime) or parse_method == "normal":
+ if (self.show_object and not self.show_object.is_anime) or parse_method == "normal":
self._compile_regexes(self.NORMAL_REGEX)
- elif (self.showObj and self.showObj.is_anime) or parse_method == "anime":
+ elif (self.show_object and self.show_object.is_anime) or parse_method == "anime":
self._compile_regexes(self.ANIME_REGEX)
else:
self._compile_regexes(self.ALL_REGEX)
@@ -57,14 +58,14 @@ def clean_series_name(series_name):
series_name = re.sub(r"\.(?!\s)(\D)", " \\1", series_name)
series_name = series_name.replace("_", " ")
series_name = re.sub(r"-$", "", series_name)
- series_name = re.sub(r"^\[.*\]", "", series_name)
+ series_name = re.sub(r"^\[.*]", "", series_name)
return series_name.strip()
- def _compile_regexes(self, regexMode):
- if regexMode == self.ANIME_REGEX:
+ def _compile_regexes(self, regex_mode):
+ if regex_mode == self.ANIME_REGEX:
dbg_str = "ANIME"
uncompiled_regex = [regexes.anime_regexes]
- elif regexMode == self.NORMAL_REGEX:
+ elif regex_mode == self.NORMAL_REGEX:
dbg_str = "NORMAL"
uncompiled_regex = [regexes.normal_regexes]
else:
@@ -75,8 +76,8 @@ def _compile_regexes(self, regexMode):
for cur_pattern_num, (cur_pattern_name, cur_pattern) in enumerate(regexItem):
try:
cur_regex = re.compile(cur_pattern, re.VERBOSE | re.I)
- except re.error as errormsg:
- logger.info(f"WARNING: Invalid episode_pattern using {dbg_str} regexs, {errormsg}. {cur_pattern}")
+ except re.error as error_message:
+ logger.info(f"WARNING: Invalid episode_pattern using {dbg_str} regexes, {error_message}. {cur_pattern}")
else:
self.compiled_regexes.append((cur_pattern_num, cur_pattern_name, cur_regex))
@@ -145,11 +146,10 @@ def _parse_string(self, name, skip_scene_detection=False):
# Workaround for shows that get interpreted as 'air_date' incorrectly.
# Shows so far are 11.22.63 and 9-1-1
excluded_shows = ["112263", "911"]
- assert re.sub(r"[^\d]*", "", air_date) not in excluded_shows
+ assert re.sub(r"\D*", "", air_date) not in excluded_shows
- # noinspection PyUnresolvedReferences
try:
- check = parse(air_date, fuzzy_with_tokens=True)[0].date()
+ check: date = parse(air_date, fuzzy=True).date()
# Make sure a 20th century date isn't returned as a 21st century date
# 1 Year into the future (No releases should be coming out a year ahead of time, that's just insane)
if check > check.today() and (check - check.today()).days // 365 > 1:
@@ -157,7 +157,8 @@ def _parse_string(self, name, skip_scene_detection=False):
result.air_date = check
result.score += 1
- except:
+ except Exception as error:
+ logger.debug(error)
continue
except Exception as error:
logger.debug(error)
@@ -193,21 +194,21 @@ def _parse_string(self, name, skip_scene_detection=False):
# matches = [x for x in matches if x.series_name]
if matches:
- # pick best match with highest score based on placement
+ # pick the best match with the highest score based on placement
best_result = max(sorted(matches, reverse=True, key=attrgetter("which_regex")), key=attrgetter("score"))
show = None
if best_result and best_result.series_name and not self.naming_pattern:
# try and create a show object for this result
- show = helpers.get_show(best_result.series_name, self.tryIndexers)
+ show = helpers.get_show(best_result.series_name, self.try_indexers)
# confirm passed in show object indexer id matches result show object indexer id
if show:
- if self.showObj and show.indexerid != self.showObj.indexerid:
+ if self.show_object and show.indexerid != self.show_object.indexerid:
show = None
best_result.show = show
- elif self.showObj and not show:
- best_result.show = self.showObj
+ elif self.show_object and not show:
+ best_result.show = self.show_object
# Only allow anime matches if resolved show or specified show is anime
best_result = self.check_anime_preferred(best_result, matches)
@@ -241,10 +242,11 @@ def _parse_string(self, name, skip_scene_detection=False):
if season_number is None or not episode_numbers:
try:
- epObj = sickchill.indexer.episode(best_result.show, firstAired=best_result.air_date)
- season_number = epObj["airedSeason"]
- episode_numbers = [epObj["airedEpisode"]]
- except Exception:
+ episode_object = sickchill.indexer.episode(best_result.show, firstAired=best_result.air_date)
+ season_number = episode_object["airedSeason"]
+ episode_numbers = [episode_object["airedEpisode"]]
+ except Exception as error:
+ logger.debug(error)
logger.warning(f"Unable to find episode with date {best_result.air_date} for show {best_result.show.name}, skipping")
episode_numbers = []
@@ -264,7 +266,7 @@ def _parse_string(self, name, skip_scene_detection=False):
if best_result.show.is_scene and not skip_scene_detection:
a = scene_numbering.get_indexer_absolute_numbering(
- best_result.show.indexerid, best_result.show.indexer, epAbsNo, True, best_result.scene_season
+ best_result.show.indexerid, best_result.show.indexer, epAbsNo, scene_season=best_result.scene_season
)
(s, e) = helpers.get_all_episodes_from_absolute_number(best_result.show, [a])
@@ -288,7 +290,7 @@ def _parse_string(self, name, skip_scene_detection=False):
new_episode_numbers.append(e)
new_season_numbers.append(s)
- # need to do a quick sanity check heregex. It's possible that we now have episodes
+ # need to do a quick sanity check here. It's possible that we now have episodes
# from more than one season (by tvdb numbering), and this is just too much
# for oldbeard, so we'd need to flag it.
new_season_numbers = list(set(new_season_numbers)) # remove duplicates
@@ -297,8 +299,7 @@ def _parse_string(self, name, skip_scene_detection=False):
f"Scene numbering results episodes from seasons {new_season_numbers}, (i.e. more than one) and sickchill does not support this. Sorry."
)
- # I guess it's possible that we'd have duplicate episodes too, so lets
- # eliminate them
+ # I guess it's possible that we'd have duplicate episodes too, so let's eliminate them
new_episode_numbers = sorted(set(new_episode_numbers))
# maybe even duplicate absolute numbers so why not do them as well
@@ -321,8 +322,8 @@ def _parse_string(self, name, skip_scene_detection=False):
return best_result
def check_anime_preferred(self, best_result, matches):
- show = self.showObj or best_result.show
- if (best_result.show and best_result.show.is_anime and not self.showObj) or (self.showObj and self.showObj.is_anime):
+ show = self.show_object or best_result.show
+ if (best_result.show and best_result.show.is_anime and not self.show_object) or (self.show_object and self.show_object.is_anime):
anime_matches = [x for x in matches if "anime" in x.which_regex[0]]
if anime_matches:
best_result_anime = max(sorted(anime_matches, reverse=True, key=attrgetter("which_regex")), key=attrgetter("score"))
@@ -335,7 +336,7 @@ def check_anime_preferred(self, best_result, matches):
return best_result
@staticmethod
- def _combine_results(first, second, attr):
+ def _combine_results(first: Any, second: Any, attr: str) -> Any:
# if the first doesn't exist then return the second or nothing
if not first:
if not second:
@@ -347,18 +348,18 @@ def _combine_results(first, second, attr):
if not second:
return getattr(first, attr)
- a = getattr(first, attr)
- b = getattr(second, attr)
+ first_value = getattr(first, attr)
+ second_value = getattr(second, attr)
- # if a is good use it
- if a is not None or (isinstance(a, list) and a):
- return a
- # if not use b (if b isn't set it'll just be default)
+ # if first_value is good use it
+ if first_value is not None or (isinstance(first_value, list) and first_value):
+ return first_value
+ # if not use second_value (if second_value isn't set it'll just be default)
else:
- return b
+ return second_value
@staticmethod
- def _unicodify(obj, encoding="utf-8"):
+ def _to_unicode(obj, encoding="utf-8"):
if isinstance(obj, bytes):
obj = str(obj, encoding, "replace")
return obj
@@ -379,7 +380,8 @@ def _convert_number(org_number):
else:
number = 0
- except Exception:
+ except Exception as error:
+ logger.debug(error)
# on error try converting from Roman numerals
roman_to_int_map = (
("M", 1000),
@@ -409,7 +411,7 @@ def _convert_number(org_number):
return number
def parse(self, name, cache_result=True, skip_scene_detection=False):
- name = self._unicodify(name)
+ name = self._to_unicode(name)
if self.naming_pattern:
cache_result = False
@@ -583,7 +585,7 @@ def __str__(self):
to_return += f" [ABD: {self.is_air_by_date}] [ANIME: {self.is_anime}] [whichReg: {self.which_regex}] Score: {self.score}"
- return re.sub(r"[ ]+", " ", to_return)
+ return re.sub(r" +", " ", to_return)
@property
def is_air_by_date(self):
diff --git a/sickchill/oldbeard/name_parser/regexes.py b/sickchill/oldbeard/name_parser/regexes.py
index 6d72165c81..6fa308a6a7 100644
--- a/sickchill/oldbeard/name_parser/regexes.py
+++ b/sickchill/oldbeard/name_parser/regexes.py
@@ -55,7 +55,7 @@
"newpct",
# American Horror Story - Temporada 4 HDTV x264[Cap.408_409]SPANISH AUDIO -NEWPCT
# American Horror Story - Temporada 4 [HDTV][Cap.408][Espanol Castellano]
- # American Horror Story - Temporada 4 HDTV x264[Cap.408]SPANISH AUDIO –NEWPCT)
+ # American Horror Story - Temporada 4 HDTV x264[Cap.408]SPANISH AUDIO –NEWPCT
r"""
(?P Error: unable to connect to D-Bus session bus: Are you running SickChill in a desktop session?").format( - html.escape(error) - ) + return f" Error: unable to connect to D-Bus session bus: Are you running SickChill in a desktop session?" try: bus.get_object("org.freedesktop.Notifications", "/org/freedesktop/Notifications") except dbus.DBusException as error: - return ( - " Error: there doesn't seem to be a notification daemon available: Try installing notification-daemon or notify-osd." - ).format(html.escape(error)) + return f" Error: there doesn't seem to be a notification daemon available: Try installing notification-daemon or notify-osd." return " Error: Unable to send notification."
diff --git a/sickchill/oldbeard/nzbSplitter.py b/sickchill/oldbeard/nzbSplitter.py
index 4935eaea6d..1162c195cd 100644
--- a/sickchill/oldbeard/nzbSplitter.py
+++ b/sickchill/oldbeard/nzbSplitter.py
@@ -128,7 +128,7 @@ def split_result(obj):
# parse the season ep name
try:
- parsed_obj = NameParser(False, showObj=obj.show).parse(obj.name)
+ parsed_obj = NameParser(False, show_object=obj.show).parse(obj.name)
except (InvalidNameException, InvalidShowException) as error:
logger.debug(f"{error}")
return []
@@ -149,7 +149,7 @@ def split_result(obj):
# parse the name
try:
- parsed_obj = NameParser(False, showObj=obj.show).parse(new_nzb)
+ parsed_obj = NameParser(False, show_object=obj.show).parse(new_nzb)
except (InvalidNameException, InvalidShowException) as error:
logger.debug(f"{error}")
return []
@@ -164,7 +164,7 @@ def split_result(obj):
want_ep = True
for ep_num in parsed_obj.episode_numbers:
- if not obj.show.wantEpisode(season, ep_num, obj.quality):
+ if not obj.show.want_episode(season, ep_num, obj.quality):
logger.debug("Ignoring result: " + new_nzb)
want_ep = False
break
@@ -172,7 +172,7 @@ def split_result(obj):
continue
# get all the associated episode objects
- ep_obj_list = [obj.show.getEpisode(season, ep) for ep in parsed_obj.episode_numbers]
+ ep_obj_list = [obj.show.get_episode(season, ep) for ep in parsed_obj.episode_numbers]
# make a result
cur_obj = classes.NZBDataSearchResult(ep_obj_list)
diff --git a/sickchill/oldbeard/postProcessor.py b/sickchill/oldbeard/postProcessor.py
index b6fff5683a..dfe3ebe228 100644
--- a/sickchill/oldbeard/postProcessor.py
+++ b/sickchill/oldbeard/postProcessor.py
@@ -4,8 +4,9 @@
import re
import stat
import subprocess
+from datetime import datetime
from pathlib import Path
-from typing import TYPE_CHECKING, Union
+from typing import List, TYPE_CHECKING, Union
if TYPE_CHECKING:
from processTV import ParseResult
@@ -99,10 +100,8 @@ def _log(self, message, level=logging.INFO):
def _checkForExistingFile(self, existing_file):
"""
Checks if a file exists already and if it does whether it's bigger or smaller than
- the file we are post processing
-
- ;param existing_file: The file to compare to
-
+ the file we are post-processing
+ :param existing_file: The file to compare to
:return:
DOESNT_EXIST if the file doesn't exist
EXISTS_LARGER if the file exists and is larger than the file we are post processing
@@ -137,6 +136,9 @@ def list_associated_files(self, file_path, subtitles_only=False, subfolders=Fals
"""
For a given file path searches for files with the same name but different extension and returns their absolute paths
+ :param subtitles_only: only look for subtitles
+ :param subfolders: check in subfolders
+ :param rename: if we are looking for all files to rename
:param file_path: The file to check for associated files
:return: A list containing all files which are associated to the given file
"""
@@ -264,7 +266,7 @@ def _delete(self, file_path, associated_files=False):
# do the library update for synoindex
notifiers.synoindex_notifier.deleteFile(cur_file)
- def _combined_file_operation(self, file_path, new_path, new_base_name, associated_files=False, action=None, subtitles=False):
+ def _combined_file_operation(self, file_path, new_path: str, new_base_name, associated_files=False, action=None, subtitles=False):
"""
Performs a generic operation (move or copy) on a file. Can rename the file as well as change its location,
and optionally move associated files too.
@@ -412,7 +414,7 @@ def _int_hard_link(cur_file_path, new_file_path):
self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_hard_link, subtitles=subtitles)
- def _moveAndSymlink(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False):
+ def _move_and_symlink(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False):
"""
Move file, symlink source location back to destination, and set proper permissions
@@ -501,7 +503,7 @@ def _history_lookup(self):
# search the database for a possible match and return immediately if we find one
main_db_con = db.DBConnection()
for curName in names:
- search_name = re.sub(r"[\.\- ]", "_", curName)
+ search_name = re.sub(r"[.\- ]", "_", curName)
sql_results = main_db_con.select(
"SELECT showid, season, quality, version, resource FROM history WHERE resource LIKE ? AND (action % 100 = 4 OR action % 100 = 6)", [search_name]
)
@@ -606,17 +608,17 @@ def _analyze_name(self, name):
return to_return
@staticmethod
- def _build_anidb_episode(connection, filePath):
+ def _build_anidb_episode(connection, file_path):
"""
Look up anidb properties for an episode
:param connection: anidb connection handler
- :param filePath: file to check
+ :param file_path: file to check
:return: episode object
"""
ep = adba.Episode(
connection,
- file_path=Path(filePath),
+ file_path=Path(file_path),
paramsF=["quality", "anidb_file_name", "crc32"],
paramsA=["epno", "english_name", "short_name_list", "other_name", "synonym_list"],
)
@@ -647,7 +649,7 @@ def _find_info(self):
"""
show = season = quality = version = None
- episodes = []
+ episodes: Union[List[int], List[datetime]] = []
# try to look up the release in history
attempt_list = [
@@ -728,10 +730,10 @@ def _find_info(self):
# if there's no season then we can hopefully just use 1 automatically
elif season is None and show:
main_db_con = db.DBConnection()
- numseasonsSQlResult = main_db_con.select(
- "SELECT COUNT(DISTINCT season) FROM tv_episodes WHERE showid = ? and indexer = ? and season != 0", [show.indexerid, show.indexer]
+ total_seasons_sql_result = main_db_con.select(
+ "SELECT COUNT(DISTINCT season) as count FROM tv_episodes WHERE showid = ? and indexer = ? and season != 0", [show.indexerid, show.indexer]
)
- if int(numseasonsSQlResult[0][0]) == 1 and season is None:
+ if int(total_seasons_sql_result[0]["count"]) == 1 and season is None:
self._log(_("Don't have a season number, but this show appears to only have 1 season, setting season number to 1..."), logger.DEBUG)
season = 1
@@ -758,7 +760,7 @@ def _get_ep_obj(self, show, season, episodes):
# now that we've figured out which episode this file is just load it manually
try:
- curEp = show.getEpisode(season, cur_episode)
+ curEp = show.get_episode(season, cur_episode)
if not curEp:
raise EpisodeNotFoundException()
except EpisodeNotFoundException as error:
@@ -840,7 +842,7 @@ def _run_extra_scripts(self, episode_object):
self._log(f"Absolute path to script: {script_cmd[0]}", logger.DEBUG)
script_cmd += [
- episode_object._location,
+ episode_object.location,
self.directory,
str(episode_object.show.indexerid),
str(episode_object.season),
@@ -989,9 +991,11 @@ def process(self):
# Only proceed if the file season is > 0
if int(episode_object.season) > 0:
main_db_con = db.DBConnection()
- max_season = main_db_con.select("SELECT MAX(season) FROM tv_episodes WHERE showid = ? and indexer = ?", [show.indexerid, show.indexer])
+ max_season = main_db_con.select(
+ "SELECT MAX(season) as last_season FROM tv_episodes WHERE showid = ? and indexer = ?", [show.indexerid, show.indexer]
+ )
- if not isinstance(max_season[0][0], int) or max_season[0][0] < 0:
+ if not isinstance(max_season[0]["last_season"], int) or max_season[0]["last_season"] < 0:
self._log(
f"File has season {episode_object.season}, while the database does not have any known seasons yet. "
"Try forcing a full update on the show and process this file again. "
@@ -1000,15 +1004,15 @@ def process(self):
return False
# If the file season (episode_object.season) is bigger than the indexer season (max_season[0][0]), skip the file
- newest_season_num = max_season[0][0]
+ newest_season = max_season[0]["last_season"]
episode_season = episode_object.season
- if int(episode_season) > newest_season_num:
+ if int(episode_season) > newest_season:
self._log(
_(
- "File has season {episode_season}, while the indexer is on season {newest_season_num}. "
+ "File has season {episode_season}, while the indexer is on season {newest_season}. "
"Try forcing a full update on the show and process this file again. "
"The file may be incorrectly labeled or fake, aborting."
- ).format(episode_season=episode_season, newest_season_num=newest_season_num)
+ ).format(episode_season=episode_season, newest_season=newest_season)
)
return False
@@ -1020,7 +1024,7 @@ def process(self):
if settings.USE_FREE_SPACE_CHECK:
if not helpers.is_file_locked(self.directory):
if not verify_freespace(
- self.directory, episode_object.show._location, [episode_object] + episode_object.related_episodes, method=self.process_method
+ self.directory, episode_object.show.get_location, [episode_object] + episode_object.related_episodes, method=self.process_method
):
self._log(_("Not enough disk space to continue processing, exiting"), logger.WARNING)
return False
@@ -1034,7 +1038,7 @@ def process(self):
# clean up any left over folders
if cur_ep.location:
- helpers.delete_empty_folders(os.path.dirname(cur_ep.location), keep_dir=episode_object.show._location)
+ helpers.delete_empty_folders(os.path.dirname(cur_ep.location), keep_dir=episode_object.show.get_location)
# clean up download-related properties
cur_ep.cleanup_download_properties()
@@ -1046,19 +1050,19 @@ def process(self):
# curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)
# if the show directory doesn't exist then make it if allowed
- if not os.path.isdir(episode_object.show._location) and settings.CREATE_MISSING_SHOW_DIRS:
+ if not os.path.isdir(episode_object.show.get_location) and settings.CREATE_MISSING_SHOW_DIRS:
self._log(_("Show directory doesn't exist, creating it"), logger.DEBUG)
try:
- os.mkdir(episode_object.show._location)
- helpers.chmodAsParent(episode_object.show._location)
+ os.mkdir(episode_object.show.get_location)
+ helpers.chmodAsParent(episode_object.show.get_location)
# do the library update for synoindex
- notifiers.synoindex_notifier.addFolder(episode_object.show._location)
+ notifiers.synoindex_notifier.addFolder(episode_object.show.get_location)
except (OSError, IOError):
- raise EpisodePostProcessingFailedException(_("Unable to create the show directory: ") + episode_object.show._location)
+ raise EpisodePostProcessingFailedException(_("Unable to create the show directory: ") + episode_object.show.get_location)
# get metadata for the show (but not episode because it hasn't been fully processed)
- episode_object.show.writeMetadata(True)
+ episode_object.show.write_metadata(True)
# update the ep info before we rename so the quality & release name go into the name properly
sql_l = []
@@ -1117,7 +1121,7 @@ def process(self):
# figure out the base name of the resulting episode file
if settings.RENAME_EPISODES:
- old_path = Path(self.filename)
+ old_path = Path(str(self.filename))
orig_extension = old_path.suffix
new_base_name = os.path.basename(proper_path)
new_filename = f"{new_base_name}{orig_extension}"
@@ -1147,7 +1151,7 @@ def process(self):
elif self.process_method == METHOD_SYMLINK:
if helpers.is_file_locked(self.directory, True):
raise EpisodePostProcessingFailedException(_("File is locked for reading/writing"))
- self._moveAndSymlink(
+ self._move_and_symlink(
self.directory, dest_path, new_base_name, settings.MOVE_ASSOCIATED_FILES, settings.USE_SUBTITLES and episode_object.show.subtitles
)
elif self.process_method == METHOD_SYMLINK_REVERSED:
@@ -1174,14 +1178,14 @@ def process(self):
main_db_con = db.DBConnection()
main_db_con.mass_action(sql_l)
- episode_object.airdateModifyStamp()
+ episode_object.airdate_modify_stamp()
if settings.USE_ICACLS and os.name == "nt":
- os.popen('icacls "' + episode_object._location + '"* /reset /T')
+ os.popen(f'icacls "{episode_object.location}"* /reset /T')
# generate nfo/tbn
try:
- episode_object.createMetaFiles()
+ episode_object.create_meta_files()
except Exception:
logger.info(_("Could not create/update meta files. Continuing with postProcessing..."))
@@ -1232,10 +1236,10 @@ def guessit_findit(name: str) -> Union["ParseResult", None]:
logger.debug(f"Trying a new way to verify if we can parse this file")
title = guessit(name, {"type": "episode"}).get("title")
if title:
- show: "TVShow" = helpers.get_show(title, False)
+ show: "TVShow" = helpers.get_show(title)
if show:
try:
- np = NameParser(showObj=show).parse(name, cache_result=False)
+ np = NameParser(show_object=show).parse(name, cache_result=False)
return np
except (InvalidNameException, InvalidShowException) as error:
logger.debug(f"Sorry, guessit failed to parse the file name for {show}: {name} (Error: {error} ... continuing with the old way")
diff --git a/sickchill/oldbeard/properFinder.py b/sickchill/oldbeard/properFinder.py
index e16e85631e..ddc5e4ea27 100644
--- a/sickchill/oldbeard/properFinder.py
+++ b/sickchill/oldbeard/properFinder.py
@@ -222,7 +222,7 @@ def _download_propers(self, proper_list):
continue
# get the episode object
- episode_object = proper.show.getEpisode(proper.season, proper.episode)
+ episode_object = proper.show.get_episode(proper.season, proper.episode)
# make the result object
result = proper.provider.get_result([episode_object])
diff --git a/sickchill/oldbeard/providers/bjshare.py b/sickchill/oldbeard/providers/bjshare.py
index 7bff78fcd2..e60d363fea 100644
--- a/sickchill/oldbeard/providers/bjshare.py
+++ b/sickchill/oldbeard/providers/bjshare.py
@@ -11,9 +11,7 @@
class Provider(TorrentProvider):
- """
- BJ-Share Torrent provider.
- """
+ """BJ-Share Torrent provider."""
def __init__(self):
"""Initialize the class."""
diff --git a/sickchill/oldbeard/providers/ilcorsaronero.py b/sickchill/oldbeard/providers/ilcorsaronero.py
index 973dacc238..af96fc40f8 100644
--- a/sickchill/oldbeard/providers/ilcorsaronero.py
+++ b/sickchill/oldbeard/providers/ilcorsaronero.py
@@ -156,7 +156,7 @@ def _is_english(name):
@staticmethod
def _is_season_pack(name):
try:
- parse_result = NameParser(tryIndexers=True).parse(name)
+ parse_result = NameParser(try_indexers=True).parse(name)
except (InvalidNameException, InvalidShowException) as error:
logger.debug(f"{error}")
return False
diff --git a/sickchill/oldbeard/providers/norbits.py b/sickchill/oldbeard/providers/norbits.py
index c9418d7b98..fc4cb6a7e3 100644
--- a/sickchill/oldbeard/providers/norbits.py
+++ b/sickchill/oldbeard/providers/norbits.py
@@ -27,7 +27,7 @@ def __init__(self):
def _check_auth(self):
if not self.username or not self.passkey:
- raise AuthException(("Your authentication credentials for {} are " "missing, check your config.").format(self.name))
+ raise AuthException(f"Your authentication credentials for {self.name} are missing, check your config.")
return True
diff --git a/sickchill/oldbeard/providers/tntvillage.py b/sickchill/oldbeard/providers/tntvillage.py
index 4997ec9804..a3dcfdd66f 100644
--- a/sickchill/oldbeard/providers/tntvillage.py
+++ b/sickchill/oldbeard/providers/tntvillage.py
@@ -231,7 +231,7 @@ def _is_english(torrent_rows):
@staticmethod
def _is_season_pack(name):
try:
- parse_result = NameParser(tryIndexers=True).parse(name)
+ parse_result = NameParser(try_indexers=True).parse(name)
except (InvalidNameException, InvalidShowException) as error:
logger.debug(f"{error}")
return False
diff --git a/sickchill/oldbeard/scene_numbering.py b/sickchill/oldbeard/scene_numbering.py
index 42427e92aa..cc9bfb640a 100644
--- a/sickchill/oldbeard/scene_numbering.py
+++ b/sickchill/oldbeard/scene_numbering.py
@@ -23,6 +23,7 @@ def get_scene_numbering(indexer_id, indexer, season, episode, fallback_to_xem=Tr
(so the return values will always be set)
:param indexer_id: int
+ :param indexer: int
:param season: int
:param episode: int
:param fallback_to_xem: bool If set (the default), check xem for matches if there is no local scene numbering
@@ -209,7 +210,7 @@ def set_scene_numbering(indexer_id, indexer, season=None, episode=None, absolute
# Reload data from DB so that cache and db are in sync
show = Show.find(settings.showList, indexer_id)
- show.flushEpisodes()
+ show.flush_episodes()
def find_xem_numbering(indexer_id, indexer, season, episode):
@@ -247,6 +248,7 @@ def find_xem_absolute_numbering(indexer_id, indexer, absolute_number):
Refreshes/Loads as needed.
:param indexer_id: int
+ :param indexer: int
:param absolute_number: int
:return: int
"""
@@ -305,6 +307,7 @@ def get_indexer_absolute_numbering_for_xem(indexer_id, indexer, sceneAbsoluteNum
:param indexer_id: int
:param indexer: int
:param sceneAbsoluteNumber: int
+ :param scene_season: default None
:return: int
"""
if indexer_id is None or sceneAbsoluteNumber is None:
@@ -459,6 +462,8 @@ def xem_refresh(indexer_id, indexer, force=False):
Refresh data from xem for a tv show
:param indexer_id: int
+ :param indexer: int
+ :param force: default False
"""
if not indexer_id or indexer_id < 1:
return
diff --git a/sickchill/oldbeard/search.py b/sickchill/oldbeard/search.py
index 0148851dae..1a63bc11c0 100644
--- a/sickchill/oldbeard/search.py
+++ b/sickchill/oldbeard/search.py
@@ -317,7 +317,7 @@ def wanted_episodes(show, from_date):
elif quality in allowed_qualities:
continue
- episode_object = show.getEpisode(result["season"], result["episode"])
+ episode_object = show.get_episode(result["season"], result["episode"])
episode_object.wantedQuality = [i for i in all_qualities if i > quality and i != common.Quality.UNKNOWN]
wanted.append(episode_object)
@@ -526,7 +526,7 @@ def search_providers(show, episodes, manual=False, downCurQuality=False):
some_wanted = False
for episode_number in all_episodes:
for season in (x.season for x in episodes):
- if not show.wantEpisode(season, episode_number, season_quality, downCurQuality):
+ if not show.want_episode(season, episode_number, season_quality, downCurQuality):
all_wanted = False
else:
some_wanted = True
@@ -537,7 +537,7 @@ def search_providers(show, episodes, manual=False, downCurQuality=False):
episode_objects = []
for episode_number in all_episodes:
for season in {x.season for x in episodes}:
- episode_objects.append(show.getEpisode(season, episode_number))
+ episode_objects.append(show.get_episode(season, episode_number))
best_season_result.episodes = episode_objects
# Remove provider from thread name before return results
@@ -575,7 +575,7 @@ def search_providers(show, episodes, manual=False, downCurQuality=False):
episode_objects = []
for episode_number in all_episodes:
for season in {x.season for x in episodes}:
- episode_objects.append(show.getEpisode(season, episode_number))
+ episode_objects.append(show.get_episode(season, episode_number))
best_season_result.episodes = episode_objects
if MULTI_EP_RESULT in found_results[curProvider.name]:
diff --git a/sickchill/oldbeard/searchBacklog.py b/sickchill/oldbeard/searchBacklog.py
index 1659184249..bf46f04445 100644
--- a/sickchill/oldbeard/searchBacklog.py
+++ b/sickchill/oldbeard/searchBacklog.py
@@ -15,7 +15,7 @@ def nextRun(self):
if self.action.lastBacklog <= 1:
return datetime.date.today()
else:
- return datetime.date.fromordinal(self.action.lastBacklog + self.action.cycleTime)
+ return datetime.date.fromordinal(int(self.action.lastBacklog + self.action.cycleTime))
class BacklogSearcher(object):
@@ -139,7 +139,7 @@ def _get_segments(show, fromDate):
elif cur_quality in allowed_qualities:
continue
- episode_object = show.getEpisode(sql_result["season"], sql_result["episode"])
+ episode_object = show.get_episode(sql_result["season"], sql_result["episode"])
if episode_object.season not in wanted:
wanted[episode_object.season] = [episode_object]
diff --git a/sickchill/oldbeard/show_queue.py b/sickchill/oldbeard/show_queue.py
index 87373d264c..883b995211 100644
--- a/sickchill/oldbeard/show_queue.py
+++ b/sickchill/oldbeard/show_queue.py
@@ -418,7 +418,7 @@ def run(self):
# If we have the show in our list, but the location is wrong, lets fix it and refresh!
existing_show = Show.find(settings.showList, self.indexer_id)
# noinspection PyProtectedMember
- if existing_show and not os.path.isdir(existing_show._location):
+ if existing_show and not os.path.isdir(existing_show.get_location):
new_show = existing_show
else:
raise error
@@ -480,7 +480,7 @@ def run(self):
self.show.load_imdb_info()
try:
- self.show.saveToDB()
+ self.show.save_to_db()
except Exception as error:
logger.exception(f"Error saving the show to the database: {error}")
logger.debug(traceback.format_exc())
@@ -492,7 +492,7 @@ def run(self):
settings.showList.append(self.show)
try:
- self.show.loadEpisodesFromIndexer(force_all=True)
+ self.show.load_episodes_from_indexer(force_all=True)
except Exception as error:
logger.exception(f"Error with {self.show.idxr.name}, not creating episode list: {error}")
logger.debug(traceback.format_exc())
@@ -501,7 +501,7 @@ def run(self):
name_cache.build_name_cache(self.show)
try:
- self.show.loadEpisodesFromDir()
+ self.show.load_episodes_from_dir()
except Exception as error:
logger.exception(f"Error searching dir for episodes: {error}")
logger.debug(traceback.format_exc())
@@ -512,11 +512,11 @@ def run(self):
logger.info("Launching backlog for this show since its episodes are WANTED")
settings.backlogSearchScheduler.action.searchBacklog([self.show])
- self.show.writeMetadata()
- self.show.updateMetadata()
- self.show.populateCache()
+ self.show.write_metadata()
+ self.show.update_metadata()
+ self.show.populate_cache()
- self.show.flushEpisodes()
+ self.show.flush_episodes()
if settings.USE_TRAKT:
# if there are specific episodes that need to be added by trakt
@@ -565,11 +565,11 @@ def run(self):
logger.info(f"Performing refresh on {self.show.name}")
- self.show.refreshDir()
- self.show.writeMetadata()
+ self.show.refresh_dir()
+ self.show.write_metadata()
if self.force:
- self.show.updateMetadata()
- self.show.populateCache()
+ self.show.update_metadata()
+ self.show.populate_cache()
# Load XEM data to DB for show
scene_numbering.xem_refresh(self.show.indexerid, self.show.indexer)
@@ -597,7 +597,7 @@ def run(self):
ep_obj_rename_list = []
- ep_obj_list = self.show.getAllEpisodes(has_location=True)
+ ep_obj_list = self.show.get_all_episodes(has_location=True)
for cur_ep_obj in ep_obj_list:
# Only want to rename if we have a location
if cur_ep_obj.location:
@@ -655,18 +655,18 @@ def run(self):
# have to save show before reading episodes from db
try:
- self.show.saveToDB()
+ self.show.save_to_db()
except Exception as error:
logger.exception(f"Error saving show info to the database: {error}")
logger.debug(traceback.format_exc())
# get episode list from DB
- DBEpList = self.show.loadEpisodesFromDB()
+ DBEpList = self.show.load_episodes_from_db()
# get episode list from TVDB
logger.debug(f"Loading all episodes from {self.show.idxr.name}")
try:
- IndexerEpList = self.show.loadEpisodesFromIndexer(self.force)
+ IndexerEpList = self.show.load_episodes_from_indexer(self.force)
except Exception as error:
logger.exception(f"Unable to get info from {self.show.idxr.name}, the show info will not be refreshed: {error}")
IndexerEpList = None
@@ -674,8 +674,8 @@ def run(self):
if IndexerEpList:
for curSeason in IndexerEpList:
for curEpisode in IndexerEpList[curSeason]:
- curEp = self.show.getEpisode(curSeason, curEpisode)
- curEp.saveToDB()
+ curEp = self.show.get_episode(curSeason, curEpisode)
+ curEp.save_to_db()
if curSeason in DBEpList and curEpisode in DBEpList[curSeason]:
del DBEpList[curSeason][curEpisode]
@@ -684,15 +684,15 @@ def run(self):
for curSeason in DBEpList:
for curEpisode in DBEpList[curSeason]:
logger.info("Permanently deleting episode {0:02d}E{1:02d} from the database".format(curSeason, curEpisode))
- curEp = self.show.getEpisode(curSeason, curEpisode)
+ curEp = self.show.get_episode(curSeason, curEpisode)
try:
- curEp.deleteEpisode()
+ curEp.delete_episode()
except EpisodeDeletedException:
pass
# save show again, in case episodes have changed
try:
- self.show.saveToDB()
+ self.show.save_to_db()
except Exception as error:
logger.exception(f"Error saving show info to the database: {error}")
logger.debug(traceback.format_exc())
@@ -716,7 +716,7 @@ def __init__(self, show=None, full=False):
def run(self):
super(QueueItemRemove, self).run()
logger.info(f"Removing {self.show.name}")
- self.show.deleteShow(full=self.full)
+ self.show.delete_show(full=self.full)
if settings.USE_TRAKT:
try:
@@ -743,7 +743,7 @@ def run(self):
# nmj_notifier kicks off its library update when the notify_download is issued (inside notifiers)
# do the library update for Synology Indexer
- notifiers.synoindex_notifier.addFolder(self.show._location)
+ notifiers.synoindex_notifier.addFolder(self.show.get_location)
# do the library update for pyTivo
notifiers.pytivo_notifier.update_library(self.show)
diff --git a/sickchill/oldbeard/subtitles.py b/sickchill/oldbeard/subtitles.py
index 74da7ffc01..ff01e83a69 100644
--- a/sickchill/oldbeard/subtitles.py
+++ b/sickchill/oldbeard/subtitles.py
@@ -22,8 +22,7 @@
# https://github.com/Diaoul/subliminal/issues/536
# provider_manager.register('napiprojekt = subliminal.providers.napiprojekt:NapiProjektProvider')
-# if 'legendastv' not in subliminal.provider_manager.names():
-# subliminal.provider_manager.register('legendastv = subliminal.providers.legendastv:LegendasTVProvider')
+# 'legendastv' closed down
if "itasa" not in subliminal.provider_manager.names():
subliminal.provider_manager.register("itasa = sickchill.providers.subtitle.itasa:ItaSAProvider")
if "wizdom" not in subliminal.provider_manager.names():
@@ -38,16 +37,13 @@
subliminal.region.configure("dogpile.cache.memory")
PROVIDER_URLS = {
- "addic7ed": "http://www.addic7ed.com",
+ "addic7ed": "https://www.addic7ed.com",
"bsplayer": "http://bsplayer-subtitles.com",
"itasa": "http://www.italiansubs.net/",
- "legendastv": "http://www.legendas.tv",
"napiprojekt": "http://www.napiprojekt.pl",
"opensubtitles": "https://www.opensubtitles.com",
"podnapisi": "http://www.podnapisi.net",
- "subscenter": "http://www.subscenter.info",
"subtitulamos": "https://www.subtitulamos.tv",
- "thesubdb": "http://www.thesubdb.com",
"wizdom": "http://wizdom.xyz",
"tvsubtitles": "http://www.tvsubtitles.net",
}
@@ -78,7 +74,6 @@ def __init_instance():
provider_configs = {
"addic7ed": {"username": settings.ADDIC7ED_USER, "password": settings.ADDIC7ED_PASS},
"itasa": {"username": settings.ITASA_USER, "password": settings.ITASA_PASS},
- "legendastv": {"username": settings.LEGENDASTV_USER, "password": settings.LEGENDASTV_PASS},
"opensubtitles": {"username": settings.OPENSUBTITLES_USER, "password": settings.OPENSUBTITLES_PASS},
"subscenter": {"username": settings.SUBSCENTER_USER, "password": settings.SUBSCENTER_PASS},
}
@@ -106,7 +101,7 @@ def __getattr__(self, attr):
def sorted_service_list():
new_list = []
- lmgtfy = "https://lmgtfy.com/?q=%s"
+ lmgtfy = "https://blog.lmgtfy.com/?q=%s"
current_index = 0
for current_service in settings.SUBTITLES_SERVICES_LIST:
@@ -446,7 +441,7 @@ def dhm(td):
logger.debug(f"Show with ID {ep_show_id} not found in the database")
continue
- episode_object = show_object.getEpisode(ep_season, ep_episode)
+ episode_object = show_object.get_episode(ep_season, ep_episode)
if isinstance(episode_object, str):
logger.debug(f"{ep_show_name} {ep_string} not found in the database")
continue
diff --git a/sickchill/oldbeard/traktChecker.py b/sickchill/oldbeard/traktChecker.py
index 3e449c8fc7..c35b507669 100644
--- a/sickchill/oldbeard/traktChecker.py
+++ b/sickchill/oldbeard/traktChecker.py
@@ -594,7 +594,7 @@ def _set_episode_to_wanted(show, season, episode):
"""
Sets an episode to wanted, only if it is currently skipped
"""
- episode_object = show.getEpisode(season, episode)
+ episode_object = show.get_episode(season, episode)
if episode_object:
with episode_object.lock:
if episode_object.status != SKIPPED or episode_object.airdate == datetime.date.min:
@@ -604,7 +604,7 @@ def _set_episode_to_wanted(show, season, episode):
# figure out what segment the episode is in and remember it, so we can backlog it
episode_object.status = WANTED
- episode_object.saveToDB()
+ episode_object.save_to_db()
cur_backlog_queue_item = search_queue.BacklogQueueItem(show, [episode_object])
settings.searchQueueScheduler.action.add_item(cur_backlog_queue_item)
diff --git a/sickchill/oldbeard/tvcache.py b/sickchill/oldbeard/tvcache.py
index bde58fbb84..99666f7b46 100644
--- a/sickchill/oldbeard/tvcache.py
+++ b/sickchill/oldbeard/tvcache.py
@@ -346,7 +346,7 @@ def add_cache_entry(self, name, url, size, seeders, leechers, parse_result=None,
show_obj = Show.find(settings.showList, indexer_id)
try:
- parse_result = NameParser(showObj=show_obj).parse(name)
+ parse_result = NameParser(show_object=show_obj).parse(name)
except (InvalidNameException, InvalidShowException) as error:
logger.debug(f"{error}")
return None
@@ -419,7 +419,7 @@ def list_propers(self, date=None):
propers_results = cache_db_con.select(sql, [self.provider_id])
return [x for x in propers_results if x["indexerid"]]
- def find_needed_episodes(self, episode, manualSearch=False, downCurQuality=False):
+ def find_needed_episodes(self, episode, manual_search=False, down_cur_quality=False):
needed_eps = {}
cl = []
@@ -477,11 +477,11 @@ def find_needed_episodes(self, episode, manualSearch=False, downCurQuality=False
cur_version = cur_result["version"]
# if the show says we want that episode then add it to the list
- if not show_obj.wantEpisode(cur_season, cur_ep, cur_quality, manualSearch, downCurQuality):
+ if not show_obj.want_episode(cur_season, cur_ep, cur_quality, manual_search, down_cur_quality):
logger.debug("Ignoring " + cur_result["name"])
continue
- episode_object = show_obj.getEpisode(cur_season, cur_ep)
+ episode_object = show_obj.get_episode(cur_season, cur_ep)
# build a result object
title = cur_result["name"]
diff --git a/sickchill/providers/GenericProvider.py b/sickchill/providers/GenericProvider.py
index 00346295bb..dcdd5870f6 100644
--- a/sickchill/providers/GenericProvider.py
+++ b/sickchill/providers/GenericProvider.py
@@ -281,7 +281,7 @@ def find_search_results(self, show, episodes, search_mode, manual_search=False,
continue
for episode_number in actual_episodes:
- if not show_object.wantEpisode(actual_season, episode_number, quality, manual_search, download_current_quality):
+ if not show_object.want_episode(actual_season, episode_number, quality, manual_search, download_current_quality):
skip_release = True
break
@@ -293,7 +293,7 @@ def find_search_results(self, show, episodes, search_mode, manual_search=False,
episode_objects = []
for current_episode in actual_episodes:
- episode_objects.append(show_object.getEpisode(actual_season, current_episode))
+ episode_objects.append(show_object.get_episode(actual_season, current_episode))
result = self.get_result(episode_objects)
result.show = show_object
@@ -587,9 +587,6 @@ def add_cookies_from_ui(self):
return False, f"No Cookies added from ui for provider: {self.name}"
- def has_option(self, option):
- return hasattr(self, option)
-
def check_set_option(self, view, option, default="", cast: Callable = str, unhide=False):
if hasattr(self, option):
if view.request.method == "GET":
@@ -636,7 +633,7 @@ def __restore_original_urls(self):
def check_and_update_urls(self):
has_custom_url = False
custom_url_valid = True
- if hasattr(self, "custom_url") and self.has_option("url") and self.has_option("urls"):
+ if hasattr(self, "custom_url") and hasattr(self, "url") and hasattr(self, "urls"):
has_custom_url = bool(self.custom_url)
custom_url_valid = self.valid_url(self.custom_url)
if has_custom_url and custom_url_valid:
diff --git a/sickchill/providers/metadata/generic.py b/sickchill/providers/metadata/generic.py
index de04b71f28..9c0b83f75d 100644
--- a/sickchill/providers/metadata/generic.py
+++ b/sickchill/providers/metadata/generic.py
@@ -115,7 +115,7 @@ def _check_exists(location):
def _has_show_metadata(self, show_obj):
return self._check_exists(self.get_show_file_path(show_obj))
- def _has_episode_metadata(self, episode_object):
+ def has_episode_metadata(self, episode_object):
return self._check_exists(self.get_episode_file_path(episode_object))
def _has_fanart(self, show_obj):
@@ -127,7 +127,7 @@ def _has_poster(self, show_obj):
def _has_banner(self, show_obj):
return self._check_exists(self.get_banner_path(show_obj))
- def _has_episode_thumb(self, episode_object):
+ def has_episode_thumb(self, episode_object):
return self._check_exists(self.get_episode_thumb_path(episode_object))
def _has_season_poster(self, show_obj, season):
@@ -248,7 +248,7 @@ def update_show_indexer_metadata(self, show_obj):
if indexerid is not None:
if indexerid.text == str(show_obj.indexerid):
return True
- indexerid.text = str(show_obj.indexerid)
+ indexerid.text = str(show_obj.indexerid)
else:
ElementTree.SubElement(root, "id").text = str(show_obj.indexerid)
@@ -263,13 +263,13 @@ def update_show_indexer_metadata(self, show_obj):
logger.error(f"Unable to write file to {nfo_file_path} - are you sure the folder is writable? {error}")
def create_episode_metadata(self, episode_object):
- if self.episode_metadata and episode_object and not self._has_episode_metadata(episode_object):
+ if self.episode_metadata and episode_object and not self.has_episode_metadata(episode_object):
logger.debug(f"[{self.name} META] Creating episode metadata for {episode_object.pretty_name}")
return self.write_ep_file(episode_object)
return False
def update_episode_metadata(self, episode_object):
- if self.episode_metadata and episode_object and self._has_episode_metadata(episode_object):
+ if self.episode_metadata and episode_object and self.has_episode_metadata(episode_object):
logger.debug(f"[{self.name} META] Updating episode indexer info metadata file for {episode_object.pretty_name}")
nfo_file_path = self.get_episode_file_path(episode_object)
@@ -342,7 +342,7 @@ def create_banner(self, show_obj):
return False
def create_episode_thumb(self, episode_object):
- if self.episode_thumbnails and episode_object and not self._has_episode_thumb(episode_object):
+ if self.episode_thumbnails and episode_object and not self.has_episode_thumb(episode_object):
logger.debug(f"[{self.name} META] Creating episode thumbnail for {episode_object.pretty_name}")
return self.save_thumbnail(episode_object)
return False
diff --git a/sickchill/providers/torrent/FrenchProvider.py b/sickchill/providers/torrent/FrenchProvider.py
index 492047d346..934acd6f63 100644
--- a/sickchill/providers/torrent/FrenchProvider.py
+++ b/sickchill/providers/torrent/FrenchProvider.py
@@ -62,15 +62,18 @@ def _retrieve_dllink_from_url(self, inner_url):
return ""
- def _get_custom_url(self):
+ @property
+ def custom_url(self):
return self._custom_url
- def _set_custom_url(self, url):
+ @custom_url.setter
+ def custom_url(self, url):
if self._custom_url != url:
self._custom_url = url
self._recheck_url = True
- def _get_provider_url(self):
+ @property
+ def url(self):
if self._recheck_url:
if self.custom_url:
if self.valid_url(self.custom_url):
@@ -82,14 +85,10 @@ def _get_provider_url(self):
return self._used_url
- def _set_provider_url(self, url):
+ @url.setter
+ def url(self, url):
self._used_url = url
- # noinspection PyTypeChecker
- url = property(_get_provider_url, _set_provider_url)
- # noinspection PyTypeChecker
- custom_url = property(_get_custom_url, _set_custom_url)
-
def get_season_search_strings(self, episode: "TVEpisode") -> List[Dict]:
search_string = {"Season": set()}
for show_name in allPossibleShowNames(episode.show, season=episode.scene_season):
diff --git a/sickchill/providers/torrent/TorrentProvider.py b/sickchill/providers/torrent/TorrentProvider.py
index 027524b8a2..1a9f4c2931 100644
--- a/sickchill/providers/torrent/TorrentProvider.py
+++ b/sickchill/providers/torrent/TorrentProvider.py
@@ -31,7 +31,7 @@ def find_propers(self, search_date=None):
show = Show.find(settings.showList, int(result["showid"]))
if show:
- episode = show.getEpisode(result["season"], result["episode"])
+ episode = show.get_episode(result["season"], result["episode"])
self.current_episode_object = episode
for term in self.proper_strings:
diff --git a/sickchill/show/ComingEpisodes.py b/sickchill/show/ComingEpisodes.py
index b531506cd0..03fee34290 100644
--- a/sickchill/show/ComingEpisodes.py
+++ b/sickchill/show/ComingEpisodes.py
@@ -76,7 +76,7 @@ def get_coming_episodes(categories, sort, group, paused=settings.COMING_EPS_DISP
sql_l = []
for show_obj in settings.showList:
- next_air_date = show_obj.nextEpisode()
+ next_air_date = show_obj.next_episode()
sql_l.append(
[
"SELECT DISTINCT {0} ".format(fields_to_select) + "FROM tv_episodes e, tv_shows s "
diff --git a/sickchill/show/History.py b/sickchill/show/History.py
index 8ce3e5141c..aa55d98a82 100644
--- a/sickchill/show/History.py
+++ b/sickchill/show/History.py
@@ -312,7 +312,7 @@ def revert_episode(self, episode_object: "TVEpisode"):
else:
logger.debug("Episode don't have a previous snatched status to revert. Setting it back to WANTED")
episode_object.status = WANTED
- episode_object.saveToDB()
+ episode_object.save_to_db()
except EpisodeNotFoundException as error:
logger.warning(f"Unable to create episode, please set its status manually: {error}")
@@ -332,7 +332,7 @@ def mark_failed(self, episode_object: "TVEpisode"):
with episode_object.lock:
quality = Quality.splitCompositeStatus(episode_object.status)[1]
episode_object.status = Quality.compositeStatus(FAILED, quality)
- episode_object.saveToDB()
+ episode_object.save_to_db()
except EpisodeNotFoundException as error:
logger.warning(f"Unable to get episode, please set its status manually: {error}")
diff --git a/sickchill/show/Show.py b/sickchill/show/Show.py
index 36b0f76570..204f8e2b39 100644
--- a/sickchill/show/Show.py
+++ b/sickchill/show/Show.py
@@ -161,7 +161,7 @@ def pause(indexer_id, pause=None):
else:
show.paused = pause
- show.saveToDB()
+ show.save_to_db()
return None, show
diff --git a/sickchill/show_updater.py b/sickchill/show_updater.py
index 8d9f2f9cec..db14cbf1e0 100644
--- a/sickchill/show_updater.py
+++ b/sickchill/show_updater.py
@@ -50,7 +50,7 @@ def run(self, force=False):
pi_list = []
for cur_show in settings.showList:
try:
- cur_show.nextEpisode()
+ cur_show.next_episode()
skip_update = False
# Skip ended or paused shows until interval is met
diff --git a/sickchill/start.py b/sickchill/start.py
index ea7592a809..5c82a0e08d 100644
--- a/sickchill/start.py
+++ b/sickchill/start.py
@@ -124,18 +124,17 @@ def initialize(console_logging: bool = True, debug: bool = False, dbdebug: bool
restore_dir = os.path.join(settings.DATA_DIR, "restore")
if os.path.exists(restore_dir) and os.path.exists(os.path.join(restore_dir, "cache")):
- def restore_cache(srcDir, dstDir):
+ def restore_cache(source, destination):
def path_leaf(path):
head, tail = os.path.split(path)
return tail or os.path.basename(head)
try:
- if os.path.isdir(dstDir):
- # noinspection PyTypeChecker
- bakFilename = "{0}-{1}".format(path_leaf(dstDir), datetime.datetime.strftime(datetime.datetime.now(), "%Y%m%d_%H%M%S"))
- shutil.move(dstDir, os.path.join(os.path.dirname(dstDir), bakFilename))
+ if os.path.isdir(destination):
+ backup_name = "{0}-{1}".format(path_leaf(destination), datetime.datetime.strftime(datetime.datetime.now(), "%Y%m%d_%H%M%S"))
+ shutil.move(destination, os.path.join(os.path.dirname(destination), backup_name))
- shutil.move(srcDir, dstDir)
+ shutil.move(source, destination)
logger.info("Restore: restoring cache successful")
except Exception as er:
logger.exception(f"Restore: restoring cache failed: {er}")
@@ -701,9 +700,6 @@ def path_leaf(path):
settings.ITASA_USER = check_setting_str(settings.CFG, "Subtitles", "itasa_username", censor_log=True)
settings.ITASA_PASS = check_setting_str(settings.CFG, "Subtitles", "itasa_password", censor_log=True)
- settings.LEGENDASTV_USER = check_setting_str(settings.CFG, "Subtitles", "legendastv_username", censor_log=True)
- settings.LEGENDASTV_PASS = check_setting_str(settings.CFG, "Subtitles", "legendastv_password", censor_log=True)
-
settings.OPENSUBTITLES_USER = check_setting_str(settings.CFG, "Subtitles", "opensubtitles_username", censor_log=True)
settings.OPENSUBTITLES_PASS = check_setting_str(settings.CFG, "Subtitles", "opensubtitles_password", censor_log=True)
@@ -784,59 +780,59 @@ def path_leaf(path):
curProvider.enabled = (curProvider.can_daily or curProvider.can_backlog) and check_setting_bool(
settings.CFG, curProvider.get_id().upper(), curProvider.get_id()
)
- if curProvider.has_option("custom_url"):
+ if hasattr(curProvider, "custom_url"):
curProvider.custom_url = check_setting_str(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_custom_url"), "", censor_log=True)
- if curProvider.has_option("api_key"):
+ if hasattr(curProvider, "api_key"):
curProvider.api_key = check_setting_str(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_api_key"), censor_log=True)
- if curProvider.has_option("hash"):
+ if hasattr(curProvider, "hash"):
curProvider.hash = check_setting_str(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_hash"), censor_log=True)
- if curProvider.has_option("digest"):
+ if hasattr(curProvider, "digest"):
curProvider.digest = check_setting_str(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_digest"), censor_log=True)
- if curProvider.has_option("username"):
+ if hasattr(curProvider, "username"):
curProvider.username = check_setting_str(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_username"), censor_log=True)
- if curProvider.has_option("password"):
+ if hasattr(curProvider, "password"):
curProvider.password = check_setting_str(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_password"), censor_log=True)
- if curProvider.has_option("passkey"):
+ if hasattr(curProvider, "passkey"):
curProvider.passkey = check_setting_str(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_passkey"), censor_log=True)
- if curProvider.has_option("pin"):
+ if hasattr(curProvider, "pin"):
curProvider.pin = check_setting_str(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_pin"), censor_log=True)
- if curProvider.has_option("confirmed"):
+ if hasattr(curProvider, "confirmed"):
curProvider.confirmed = check_setting_bool(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_confirmed"), True)
- if curProvider.has_option("ranked"):
+ if hasattr(curProvider, "ranked"):
curProvider.ranked = check_setting_bool(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_ranked"), True)
- if curProvider.has_option("engrelease"):
+ if hasattr(curProvider, "engrelease"):
curProvider.engrelease = check_setting_bool(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_engrelease"))
- if curProvider.has_option("only_spanish_search"):
+ if hasattr(curProvider, "only_spanish_search"):
curProvider.only_spanish_search = check_setting_bool(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_only_spanish_search"))
- if curProvider.has_option("sorting"):
+ if hasattr(curProvider, "sorting"):
curProvider.sorting = check_setting_str(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_sorting"), "seeders")
- if curProvider.has_option("options"):
+ if hasattr(curProvider, "options"):
curProvider.options = check_setting_str(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_options"), "")
- if curProvider.has_option("ratio"):
+ if hasattr(curProvider, "ratio"):
curProvider.ratio = check_setting_str(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_ratio"), "")
- if curProvider.has_option("minseed"):
+ if hasattr(curProvider, "minseed"):
curProvider.minseed = check_setting_int(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_minseed"), 1, min_val=0)
- if curProvider.has_option("minleech"):
- curProvider.minleech = check_setting_int(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_minleech"), 0, min_val=0)
- if curProvider.has_option("freeleech"):
+ if hasattr(curProvider, "minleech"):
+ curProvider.minleech = check_setting_int(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_minleech"), min_val=0)
+ if hasattr(curProvider, "freeleech"):
curProvider.freeleech = check_setting_bool(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_freeleech"))
- if curProvider.has_option("search_mode"):
+ if hasattr(curProvider, "search_mode"):
curProvider.search_mode = check_setting_str(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_search_mode"), "episode")
- if curProvider.has_option("search_fallback"):
+ if hasattr(curProvider, "search_fallback"):
curProvider.search_fallback = check_setting_bool(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_search_fallback"))
- if curProvider.has_option("enable_daily"):
+ if hasattr(curProvider, "enable_daily"):
curProvider.enable_daily = curProvider.can_daily and check_setting_bool(
settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_enable_daily"), True
)
- if curProvider.has_option("enable_backlog"):
+ if hasattr(curProvider, "enable_backlog"):
curProvider.enable_backlog = curProvider.can_backlog and check_setting_bool(
settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_enable_backlog"), curProvider.can_backlog
)
- if curProvider.has_option("cat"):
- curProvider.cat = check_setting_int(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_cat"), 0)
- if curProvider.has_option("subtitle"):
+ if hasattr(curProvider, "cat"):
+ curProvider.cat = check_setting_int(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_cat"))
+ if hasattr(curProvider, "subtitle"):
curProvider.subtitle = check_setting_bool(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_subtitle"))
- if curProvider.has_option("cookies"):
+ if hasattr(curProvider, "cookies"):
curProvider.cookies = check_setting_str(settings.CFG, curProvider.get_id().upper(), curProvider.get_id("_cookies"), censor_log=True)
providers.check_enabled_providers()
@@ -1081,11 +1077,11 @@ def sig_handler(signum=None, frame=None):
Shutdown.stop(settings.PID)
-def saveAll():
+def save_all():
# write all shows
logger.info("Saving all shows to the database")
for show in settings.showList:
- show.saveToDB()
+ show.save_to_db()
# save config
logger.info("Saving config file to disk")
@@ -1095,60 +1091,60 @@ def saveAll():
def save_config():
new_config = ConfigObj(settings.CONFIG_FILE, encoding="UTF-8", indent_type=" ")
- # For passwords you must include the word `password` in the item_name and add `helpers.encrypt(settings.ITEM_NAME, settings.ENCRYPTION_VERSION)` in save_config()
+ # For passwords, you must include the word `password` in the item_name and add `helpers.encrypt(settings.ITEM_NAME, settings.ENCRYPTION_VERSION)` in save_config()
# dynamically save provider settings
for curProvider in providers.sorted_provider_list():
new_config[curProvider.get_id().upper()] = {}
new_config[curProvider.get_id().upper()][curProvider.get_id()] = int(curProvider.enabled)
- if curProvider.has_option("custom_url"):
+ if hasattr(curProvider, "custom_url"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_custom_url")] = curProvider.custom_url
- if curProvider.has_option("digest"):
+ if hasattr(curProvider, "digest"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_digest")] = curProvider.digest
- if curProvider.has_option("hash"):
+ if hasattr(curProvider, "hash"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_hash")] = curProvider.hash
- if curProvider.has_option("api_key"):
+ if hasattr(curProvider, "api_key"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_api_key")] = curProvider.api_key
- if curProvider.has_option("username"):
+ if hasattr(curProvider, "username"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_username")] = curProvider.username
- if curProvider.has_option("password"):
+ if hasattr(curProvider, "password"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_password")] = helpers.encrypt(curProvider.password, settings.ENCRYPTION_VERSION)
- if curProvider.has_option("passkey"):
+ if hasattr(curProvider, "passkey"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_passkey")] = curProvider.passkey
- if curProvider.has_option("pin"):
+ if hasattr(curProvider, "pin"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_pin")] = curProvider.pin
- if curProvider.has_option("confirmed"):
+ if hasattr(curProvider, "confirmed"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_confirmed")] = int(curProvider.confirmed)
- if curProvider.has_option("ranked"):
+ if hasattr(curProvider, "ranked"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_ranked")] = int(curProvider.ranked)
- if curProvider.has_option("engrelease"):
+ if hasattr(curProvider, "engrelease"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_engrelease")] = int(curProvider.engrelease)
- if curProvider.has_option("only_spanish_search"):
+ if hasattr(curProvider, "only_spanish_search"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_only_spanish_search")] = int(curProvider.only_spanish_search)
- if curProvider.has_option("sorting"):
+ if hasattr(curProvider, "sorting"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_sorting")] = curProvider.sorting
- if curProvider.has_option("ratio"):
+ if hasattr(curProvider, "ratio"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_ratio")] = curProvider.ratio
- if curProvider.has_option("minseed"):
+ if hasattr(curProvider, "minseed"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_minseed")] = int(curProvider.minseed)
- if curProvider.has_option("minleech"):
+ if hasattr(curProvider, "minleech"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_minleech")] = int(curProvider.minleech)
- if curProvider.has_option("options"):
+ if hasattr(curProvider, "options"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_options")] = curProvider.options
- if curProvider.has_option("freeleech"):
+ if hasattr(curProvider, "freeleech"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_freeleech")] = int(curProvider.freeleech)
- if curProvider.has_option("search_mode"):
+ if hasattr(curProvider, "search_mode"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_search_mode")] = curProvider.search_mode
- if curProvider.has_option("search_fallback"):
+ if hasattr(curProvider, "search_fallback"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_search_fallback")] = int(curProvider.search_fallback)
- if curProvider.has_option("enable_daily"):
+ if hasattr(curProvider, "enable_daily"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_enable_daily")] = int(curProvider.enable_daily and curProvider.can_daily)
- if curProvider.has_option("enable_backlog"):
+ if hasattr(curProvider, "enable_backlog"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_enable_backlog")] = int(curProvider.enable_backlog and curProvider.can_backlog)
- if curProvider.has_option("cat"):
+ if hasattr(curProvider, "cat"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_cat")] = int(curProvider.cat)
- if curProvider.has_option("subtitle"):
+ if hasattr(curProvider, "subtitle"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_subtitle")] = int(curProvider.subtitle)
- if curProvider.has_option("cookies"):
+ if hasattr(curProvider, "cookies"):
new_config[curProvider.get_id().upper()][curProvider.get_id("_cookies")] = curProvider.cookies
new_config.update(
@@ -1662,8 +1658,6 @@ def save_config():
"addic7ed_password": helpers.encrypt(settings.ADDIC7ED_PASS, settings.ENCRYPTION_VERSION),
"itasa_username": settings.ITASA_USER,
"itasa_password": helpers.encrypt(settings.ITASA_PASS, settings.ENCRYPTION_VERSION),
- "legendastv_username": settings.LEGENDASTV_USER,
- "legendastv_password": helpers.encrypt(settings.LEGENDASTV_PASS, settings.ENCRYPTION_VERSION),
"opensubtitles_username": settings.OPENSUBTITLES_USER,
"opensubtitles_password": helpers.encrypt(settings.OPENSUBTITLES_PASS, settings.ENCRYPTION_VERSION),
"subscenter_username": settings.SUBSCENTER_USER,
@@ -1701,9 +1695,9 @@ def launchBrowser(protocol="http", startPort=None, web_root="/"):
browserURL = f"{protocol}://localhost:{startPort:d}{web_root}/home/"
try:
- webbrowser.open(browserURL, 2, True)
+ webbrowser.open(browserURL, 2)
except Exception:
try:
- webbrowser.open(browserURL, 1, True)
+ webbrowser.open(browserURL, 1)
except Exception:
logger.exception("Unable to launch a browser")
diff --git a/sickchill/tv.py b/sickchill/tv.py
index 8d0cc766c1..cde36974bc 100644
--- a/sickchill/tv.py
+++ b/sickchill/tv.py
@@ -125,18 +125,18 @@ def __init__(self, indexer, indexerid: int, lang=""):
self.lock = threading.Lock()
self.episodes = {}
- self.nextaired = ""
+ self.next_airdate = ""
self.release_groups = None
self._location = ""
self.indexer = indexer
self.indexerid = indexerid
self.lang = lang
- otherShow = Show.find(settings.showList, self.indexerid)
- if otherShow is not None:
+ other_show = Show.find(settings.showList, self.indexerid)
+ if other_show is not None:
raise MultipleShowObjectsException("Can't create a show if it already exists")
- self.loadFromDB()
+ self.load_from_db()
@property
def name(self):
@@ -181,32 +181,36 @@ def network_image_url(self):
def show_image_url(self, which):
return settings.IMAGE_CACHE.image_url(self.indexerid, which)
- def _getLocation(self):
+ @property
+ def location(self):
# no dir check needed if missing show dirs are created during post-processing
if settings.CREATE_MISSING_SHOW_DIRS or os.path.isdir(self._location):
return self._location
raise ShowDirectoryNotFoundException("Show folder doesn't exist, you shouldn't be using it")
- def _setLocation(self, newLocation):
- logger.debug(f"Setter sets location to {newLocation}")
+ @location.setter
+ def location(self, new_location):
+ logger.debug(f"Setter sets location to {new_location}")
# Don't validate dir if user wants to add shows without creating a dir
- if settings.ADD_SHOWS_WO_DIR or os.path.isdir(newLocation):
- if self._location != newLocation:
+ if settings.ADD_SHOWS_WO_DIR or os.path.isdir(new_location):
+ if self._location != new_location:
self.dirty = True
- self._location = newLocation
+ self._location = new_location
else:
raise NoNFOException("Invalid folder for the show!")
- location = property(_getLocation, _setLocation)
+ @property
+ def get_location(self):
+ return self._location
- def flushEpisodes(self):
- for curSeason in self.episodes:
- self.episodes[curSeason].clear()
+ def flush_episodes(self):
+ for current_season in self.episodes:
+ self.episodes[current_season].clear()
self.episodes.clear()
- def getAllEpisodes(self, season=None, has_location=False):
+ def get_all_episodes(self, season=None, has_location=False):
# detect multi-episodes
sql_selection = "SELECT season, episode, "
sql_selection += "(SELECT COUNT (*) FROM tv_episodes WHERE showid = tve.showid "
@@ -228,7 +232,7 @@ def getAllEpisodes(self, season=None, has_location=False):
ep_list = []
for cur_result in results:
- cur_ep = self.getEpisode(cur_result["season"], cur_result["episode"])
+ cur_ep = self.get_episode(cur_result["season"], cur_result["episode"])
if not cur_ep:
continue
@@ -241,14 +245,14 @@ def getAllEpisodes(self, season=None, has_location=False):
[self.indexerid, cur_ep.season, cur_ep.location, cur_ep.episode],
)
for cur_related_ep in related_eps_result:
- related_ep = self.getEpisode(cur_related_ep["season"], cur_related_ep["episode"])
+ related_ep = self.get_episode(cur_related_ep["season"], cur_related_ep["episode"])
if related_ep and related_ep not in cur_ep.related_episodes:
cur_ep.related_episodes.append(related_ep)
ep_list.append(cur_ep)
return ep_list
- def getEpisode(self, season=None, episode=None, ep_file=None, noCreate=False, absolute_number=None) -> Union["TVEpisode", None]:
+ def get_episode(self, season=None, episode=None, ep_file=None, nfo_create=False, absolute_number=None) -> Union["TVEpisode", None]:
season = try_int(season, None)
episode = try_int(episode, None)
absolute_number = try_int(absolute_number, None)
@@ -278,7 +282,7 @@ def getEpisode(self, season=None, episode=None, ep_file=None, noCreate=False, ab
self.episodes[season] = {}
if not self.episodes[season].get(episode):
- if noCreate:
+ if nfo_create:
return None
if ep_file:
@@ -334,7 +338,7 @@ def should_update(self, update_date=datetime.date.today()):
return False
- def writeShowNFO(self):
+ def write_show_nfo(self):
result = False
if not os.path.isdir(self._location):
@@ -347,19 +351,19 @@ def writeShowNFO(self):
return result
- def writeMetadata(self, show_only=False):
+ def write_metadata(self, show_only=False):
if not os.path.isdir(self._location):
logger.info(f"{self.indexerid}: Show dir doesn't exist, skipping NFO generation")
return
- self.getImages()
+ self.get_images()
- self.writeShowNFO()
+ self.write_show_nfo()
if not show_only:
- self.writeEpisodeNFOs()
+ self.write_episode_nfos()
- def writeEpisodeNFOs(self):
+ def write_episode_nfos(self):
if not os.path.isdir(self._location):
logger.info(f"{self.indexerid}: Show dir doesn't exist, skipping NFO generation")
return
@@ -371,13 +375,13 @@ def writeEpisodeNFOs(self):
for epResult in sql_results:
logger.debug("{id}: Retrieving/creating episode {ep}".format(id=self.indexerid, ep=episode_num(epResult["season"], epResult["episode"])))
- curEp = self.getEpisode(epResult["season"], epResult["episode"])
- if not curEp:
+ current_episode = self.get_episode(epResult["season"], epResult["episode"])
+ if not current_episode:
continue
- curEp.createMetaFiles()
+ current_episode.create_meta_files()
- def updateMetadata(self):
+ def update_metadata(self):
if not os.path.isdir(self._location):
logger.info(f"{self.indexerid}: Show dir doesn't exist, skipping NFO generation")
return
@@ -394,7 +398,7 @@ def updateMetadata(self):
return result
- def loadEpisodesFromDir(self):
+ def load_episodes_from_dir(self):
"""Find all media files in the show folder and create episodes"""
if not os.path.isdir(self._location):
@@ -409,9 +413,9 @@ def loadEpisodesFromDir(self):
sql_l = []
for media_file in media_files:
logger.debug("{tvdbid}: Creating episode from {filename}".format(tvdbid=str(self.indexerid), filename=os.path.basename(media_file)))
- curEpisode = None
+ current_episode = None
try:
- curEpisode = self.makeEpFromFile(media_file)
+ current_episode = self.make_ep_from_file(media_file)
except (ShowNotFoundException, EpisodeNotFoundException) as error:
media_file_name = os.path.basename(media_file)
logger.error(f"Episode {media_file_name} returned an exception: {error}")
@@ -419,41 +423,41 @@ def loadEpisodesFromDir(self):
except EpisodeDeletedException:
logger.debug("The episode deleted itself when I tried making an object for it")
- if curEpisode is None:
+ if current_episode is None:
continue
# see if we should save the release name in the db
- ep_filename = os.path.basename(curEpisode.location)
+ ep_filename = os.path.basename(current_episode.location)
ep_filename = os.path.splitext(ep_filename)[0]
try:
- parse_result = NameParser(False, showObj=self, tryIndexers=True).parse(ep_filename)
+ parse_result = NameParser(False, show_object=self, try_indexers=True).parse(ep_filename)
except (InvalidNameException, InvalidShowException):
parse_result = None
if " " not in ep_filename and parse_result and parse_result.release_group:
logger.debug(f"Name {ep_filename} gave release group of {parse_result.release_group}, seems valid")
- curEpisode.release_name = ep_filename
- curEpisode.release_group = parse_result.release_group
+ current_episode.release_name = ep_filename
+ current_episode.release_group = parse_result.release_group
# store the reference in the show
- if curEpisode is not None:
+ if current_episode is not None:
if self.subtitles:
try:
- curEpisode.refreshSubtitles()
+ current_episode.refresh_subtitles()
except Exception:
logger.error(f"{self.indexerid}: Could not refresh subtitles")
logger.debug(traceback.format_exc())
- sql_l.append(curEpisode.get_sql())
+ sql_l.append(current_episode.get_sql())
if sql_l:
main_db_con = db.DBConnection()
main_db_con.mass_action(sql_l)
- def loadEpisodesFromDB(self):
+ def load_episodes_from_db(self):
logger.debug("Loading all episodes from the database")
- scannedEps = {}
+ scanned_episodes = {}
try:
main_db_con = db.DBConnection()
@@ -461,56 +465,52 @@ def loadEpisodesFromDB(self):
sql_results = main_db_con.select(sql, [self.indexerid])
except OperationalError as error:
logger.error(f"Could not load episodes from the DB. Error: {error}")
- return scannedEps
+ return scanned_episodes
- curShowid = None
- curShowName = None
+ showid = None
+ show_name = None
- for curResult in sql_results:
- curSeason = int(curResult["season"])
- curEpisode = int(curResult["episode"])
- curShowid = int(curResult["showid"])
- curShowName = str(curResult["show_name"])
+ for result in sql_results:
+ season = int(result["season"])
+ episode = int(result["episode"])
+ showid = int(result["showid"])
+ show_name = str(result["show_name"])
- if curSeason not in scannedEps:
- logger.debug("{id}: Not curSeason in scannedEps".format(id=curShowid))
- scannedEps[curSeason] = {}
+ if season not in scanned_episodes:
+ logger.debug(f"{showid}: season not in scanned_episodes")
+ scanned_episodes[season] = {}
- logger.debug("{id}: Loading {show} {ep} from the DB".format(id=curShowid, show=curShowName, ep=episode_num(curSeason, curEpisode)))
+ logger.debug(f"{showid}: Loading {show_name} {episode_num(season, episode)} from the DB")
try:
- curEp = self.getEpisode(curSeason, curEpisode)
- if not curEp:
+ episode_object = self.get_episode(season, episode)
+ if not episode_object:
raise EpisodeNotFoundException
- curEp.loadFromDB(curSeason, curEpisode)
- scannedEps[curSeason][curEpisode] = True
+ episode_object.load_from_db(season, episode)
+ scanned_episodes[season][episode] = True
except EpisodeDeletedException:
- logger.debug(
- "{id}: Tried loading {show} {ep} from the DB that should have been deleted, skipping it".format(
- id=curShowid, show=curShowName, ep=episode_num(curSeason, curEpisode)
- )
- )
+ logger.debug(f"{showid}: Tried loading {show_name} {episode_num(season, episode)} from the DB that should have been deleted, skipping it")
continue
- if curShowName and curShowid:
- logger.debug("{id}: Finished loading all episodes for {show} from the DB".format(show=curShowName, id=curShowid))
+ if show_name and showid:
+ logger.debug(f"{showid}: Finished loading all episodes for {show_name} from the DB")
- return scannedEps
+ return scanned_episodes
- def loadEpisodesFromIndexer(self, force_all: bool = False):
+ def load_episodes_from_indexer(self, force_all: bool = False):
logger.debug(_("{show_id}: Loading all episodes from {indexer_name}...").format(show_id=self.indexerid, indexer_name=self.indexer_name))
- scannedEps = {}
+ scanned_episodes = {}
- for series_episode in self.idxr.episodes(self):
- if series_episode["airedSeason"] not in scannedEps:
- scannedEps[series_episode["airedSeason"]] = {}
+ for indexer_episode in self.idxr.episodes(self):
+ if indexer_episode["airedSeason"] not in scanned_episodes:
+ scanned_episodes[indexer_episode["airedSeason"]] = {}
- season_episode = series_episode["airedSeason"], series_episode["airedEpisodeNumber"]
+ season_episode = indexer_episode["airedSeason"], indexer_episode["airedEpisodeNumber"]
try:
- show_episode = self.getEpisode(*season_episode)
- if not show_episode:
+ episode = self.get_episode(*season_episode)
+ if not episode:
raise EpisodeNotFoundException
except EpisodeNotFoundException:
logger.info(
@@ -521,24 +521,24 @@ def loadEpisodesFromIndexer(self, force_all: bool = False):
continue
else:
try:
- with show_episode.lock:
- show_episode.load_from_indexer(series_episode["airedSeason"], series_episode["airedEpisodeNumber"], force_all=force_all)
- show_episode.saveToDB()
- # sql_l.append(show_episode.get_sql())
+ with episode.lock:
+ episode.load_from_indexer(indexer_episode["airedSeason"], indexer_episode["airedEpisodeNumber"], force_all=force_all)
+ episode.save_to_db()
+ # sql_l.append(episode.get_sql())
except EpisodeDeletedException:
logger.info("The episode was deleted, skipping the rest of the load")
continue
- scannedEps[series_episode["airedSeason"]][series_episode["airedEpisodeNumber"]] = True
+ scanned_episodes[indexer_episode["airedSeason"]][indexer_episode["airedEpisodeNumber"]] = True
# Done updating save last update date
self.last_update_indexer = datetime.datetime.now().toordinal()
- self.saveToDB()
+ self.save_to_db()
- return scannedEps
+ return scanned_episodes
- def getImages(self):
+ def get_images(self):
fanart_result = poster_result = banner_result = False
season_posters_result = season_banners_result = season_all_poster_result = season_all_banner_result = False
@@ -562,7 +562,7 @@ def getImages(self):
or season_all_banner_result
)
- def makeEpFromFile(self, filepath):
+ def make_ep_from_file(self, filepath):
"""make a TVEpisode object from a media file"""
if not os.path.isfile(filepath):
@@ -572,7 +572,9 @@ def makeEpFromFile(self, filepath):
logger.debug(f"{self.indexerid}: Creating episode object from {filepath}")
try:
- parse_result = NameParser(showObj=self, tryIndexers=True, parse_method=("normal", "anime")[self.is_anime]).parse(filepath, True, True)
+ parse_result = NameParser(show_object=self, try_indexers=True, parse_method=("normal", "anime")[self.is_anime]).parse(
+ filepath, skip_scene_detection=True
+ )
except (InvalidNameException, InvalidShowException) as error:
logger.debug(f"{self.indexerid}: {error}")
return None
@@ -585,20 +587,20 @@ def makeEpFromFile(self, filepath):
# for now let's assume that any episode in the show dir belongs to that show
season = parse_result.season_number if parse_result.season_number is not None else 1
- rootEp = None
+ root_episode = None
sql_l = []
- for current_ep in episodes:
- logger.debug(f"{self.indexerid}: {filepath} parsed to {self.name} {episode_num(season, current_ep)}")
+ for current_episode in episodes:
+ logger.debug(f"{self.indexerid}: {filepath} parsed to {self.name} {episode_num(season, current_episode)}")
- checkQualityAgain = False
+ check_quality_again = False
same_file = False
- curEp = self.getEpisode(season, current_ep)
- if not curEp:
+ episode = self.get_episode(season, current_episode)
+ if not episode:
try:
- curEp = self.getEpisode(season, current_ep, filepath)
- if not curEp:
+ episode = self.get_episode(season, current_episode, filepath)
+ if not episode:
raise EpisodeNotFoundException
except EpisodeNotFoundException:
logger.error(f"{self.indexerid}: Unable to figure out what this file is, skipping {filepath}")
@@ -606,90 +608,92 @@ def makeEpFromFile(self, filepath):
else:
# if there is a new file associated with this ep then re-check the quality
- if curEp.location and os.path.normpath(curEp.location) != os.path.normpath(filepath):
+ if episode.location and os.path.normpath(episode.location) != os.path.normpath(filepath):
logger.debug(
f"{self.indexerid}: The old episode had a different file associated with it, "
f"re-checking the quality using the new filename {filepath}"
)
- checkQualityAgain = True
+ check_quality_again = True
- with curEp.lock:
- old_size = curEp.file_size
- curEp.location = filepath
+ with episode.lock:
+ old_size = episode.file_size
+ episode.location = filepath
# if the sizes are the same then it's probably the same file
- same_file = old_size and curEp.file_size == old_size
- curEp.checkForMetaFiles()
+ same_file = old_size and episode.file_size == old_size
+ episode.check_for_meta_files()
- if rootEp is None:
- rootEp = curEp
+ if root_episode is None:
+ root_episode = episode
else:
- if curEp not in rootEp.related_episodes:
- with rootEp.lock:
- rootEp.related_episodes.append(curEp)
+ if episode not in root_episode.related_episodes:
+ with root_episode.lock:
+ root_episode.related_episodes.append(episode)
# if it's a new file then
if not same_file:
- with curEp.lock:
- curEp.release_name = ""
- curEp.release_group = ""
+ with episode.lock:
+ episode.release_name = ""
+ episode.release_group = ""
- # if they replace a file on me I'll make some attempt at re-checking the quality unless I know it's the same file
- if checkQualityAgain and not same_file:
- newQuality = Quality.nameQuality(filepath, self.is_anime)
- logger.debug(f"{self.indexerid}: Since this file has been renamed, I checked {filepath} and found quality {Quality.qualityStrings[newQuality]}")
+ # if they replace a file on me, I'll make some attempt at re-checking the quality unless I know it's the same file
+ if check_quality_again and not same_file:
+ new_quality = Quality.nameQuality(filepath, self.is_anime)
+ logger.debug(
+ f"{self.indexerid}: Since this file has been renamed, I checked {filepath} and found quality {Quality.qualityStrings[new_quality]}"
+ )
- with curEp.lock:
- curEp.status = Quality.compositeStatus(DOWNLOADED, newQuality)
+ with episode.lock:
+ episode.status = Quality.compositeStatus(DOWNLOADED, new_quality)
# check for status/quality changes as long as it's a new file
- elif not same_file and is_media_file(filepath) and curEp.status not in Quality.DOWNLOADED + Quality.ARCHIVED + [IGNORED]:
- oldStatus, oldQuality = Quality.splitCompositeStatus(curEp.status)
- newQuality = Quality.nameQuality(filepath, self.is_anime)
+ elif not same_file and is_media_file(filepath) and episode.status not in Quality.DOWNLOADED + Quality.ARCHIVED + [IGNORED]:
+ old_status, old_quality = Quality.splitCompositeStatus(episode.status)
+ new_quality = Quality.nameQuality(filepath, self.is_anime)
- newStatus = None
+ new_status = None
# if it was snatched and now exists then set the status correctly
- if oldStatus == SNATCHED and oldQuality <= newQuality:
+ if old_status == SNATCHED and old_quality <= new_quality:
logger.debug(
- f"{self.indexerid}: This ep used to be snatched with quality {Quality.qualityStrings[oldQuality]} but a "
- f"file exists with quality {Quality.qualityStrings[newQuality]} so I'm setting the status to DOWNLOADED"
+ f"{self.indexerid}: This ep used to be snatched with quality {Quality.qualityStrings[old_quality]} but a "
+ f"file exists with quality {Quality.qualityStrings[new_quality]} so I'm setting the status to DOWNLOADED"
)
- newStatus = DOWNLOADED
+ new_status = DOWNLOADED
- # if it was snatched proper and we found a higher quality one then allow the status change
- elif oldStatus == SNATCHED_PROPER and oldQuality < newQuality:
+ # if it was snatched proper, and we found a higher quality one then allow the status change
+ elif old_status == SNATCHED_PROPER and old_quality < new_quality:
logger.debug(
- f"{self.indexerid}: This ep used to be snatched proper with quality {Quality.qualityStrings[oldQuality]} "
- f"but a file exists with quality {Quality.qualityStrings[newQuality]} so I'm setting the status to DOWNLOADED"
+ f"{self.indexerid}: This ep used to be snatched proper with quality {Quality.qualityStrings[old_quality]} "
+ f"but a file exists with quality {Quality.qualityStrings[new_quality]} so I'm setting the status to DOWNLOADED"
)
- newStatus = DOWNLOADED
+ new_status = DOWNLOADED
- elif oldStatus not in (SNATCHED, SNATCHED_PROPER):
- newStatus = DOWNLOADED
+ elif old_status not in (SNATCHED, SNATCHED_PROPER):
+ new_status = DOWNLOADED
- if newStatus is not None:
- with curEp.lock:
+ if new_status is not None:
+ with episode.lock:
logger.debug(
- f"{self.indexerid}: We have an associated file, so setting the status from {curEp.status} "
+ f"{self.indexerid}: We have an associated file, so setting the status from {episode.status} "
f"to DOWNLOADED/{Quality.statusFromName(filepath, anime=self.is_anime)}"
)
- curEp.status = Quality.compositeStatus(newStatus, newQuality)
+ episode.status = Quality.compositeStatus(new_status, new_quality)
- with curEp.lock:
- sql_l.append(curEp.get_sql())
+ with episode.lock:
+ sql_l.append(episode.get_sql())
if sql_l:
main_db_con = db.DBConnection()
main_db_con.mass_action(sql_l)
# creating metafiles on the root should be good enough
- if rootEp:
- with rootEp.lock:
- rootEp.createMetaFiles()
+ if root_episode:
+ with root_episode.lock:
+ root_episode.create_meta_files()
- return rootEp
+ return root_episode
- def loadFromDB(self):
+ def load_from_db(self):
"""Get Indexer information from database"""
main_db_con = db.DBConnection(row_type="dict")
@@ -901,20 +905,20 @@ def load_imdb_info(self):
except (SyntaxError, KeyError):
logger.info("Could not get info from IDMb, pip install lxml")
- def nextEpisode(self):
- curDate = datetime.date.today().toordinal()
- if not self.nextaired or self.nextaired and curDate > self.nextaired:
+ def next_episode(self):
+ current_date = datetime.date.today().toordinal()
+ if not self.next_airdate or self.next_airdate and current_date > try_int(self.next_airdate):
main_db_con = db.DBConnection()
sql_results = main_db_con.select(
"SELECT airdate, season, episode FROM tv_episodes WHERE showid = ? AND airdate >= ? AND status IN (?,?) ORDER BY airdate LIMIT 1",
[self.indexerid, datetime.date.today().toordinal(), UNAIRED, WANTED],
)
- self.nextaired = sql_results[0]["airdate"] if sql_results else ""
+ self.next_airdate = sql_results[0]["airdate"] if sql_results else ""
- return self.nextaired
+ return self.next_airdate
- def deleteShow(self, full=False):
+ def delete_show(self, full=False):
main_db_con = db.DBConnection()
episodes_locations = main_db_con.select("SELECT location FROM tv_episodes WHERE showid = ? AND location != ''", [self.indexerid])
@@ -1021,11 +1025,11 @@ def deleteShow(self, full=False):
logger.debug(f"Removing show: indexerid {self.indexerid}, Title {self.name} from Watchlist")
notifiers.trakt_notifier.update_watchlist(self, update="remove")
- def populateCache(self):
+ def populate_cache(self):
logger.debug(f"Checking & filling cache for show {self.name}")
settings.IMAGE_CACHE.fill_cache(self)
- def refreshDir(self):
+ def refresh_dir(self):
if not os.path.isdir(self._location) and not settings.CREATE_MISSING_SHOW_DIRS:
logger.info(
"Show dir does not exist, and `create missing show dirs` is disabled. Skipping refresh (statuses will not be updated): {}".format(
@@ -1035,7 +1039,7 @@ def refreshDir(self):
return False
# load from dir
- self.loadEpisodesFromDir()
+ self.load_episodes_from_dir()
# run through all locations from DB, check that they exist
logger.debug(f"{self.indexerid}: Loading all episodes with a location from the database")
@@ -1044,29 +1048,29 @@ def refreshDir(self):
sql_results = main_db_con.select("SELECT season, episode, location FROM tv_episodes WHERE showid = ? AND location != ''", [self.indexerid])
sql_l = []
- for ep in sql_results:
- curLoc = os.path.normpath(ep["location"])
- season = int(ep["season"])
- episode = int(ep["episode"])
+ for result in sql_results:
+ current_location = os.path.normpath(result["location"])
+ season = int(result["season"])
+ episode = int(result["episode"])
try:
- curEp = self.getEpisode(season, episode)
- if not curEp:
+ episode_object = self.get_episode(season, episode)
+ if not episode_object:
raise EpisodeDeletedException
except EpisodeDeletedException:
logger.debug("The episode was deleted while we were refreshing it, moving on to the next one")
continue
# if the path doesn't exist or if it's not in our show dir
- if not os.path.isfile(curLoc) or not os.path.normpath(curLoc).startswith(os.path.normpath(self._location)):
+ if not os.path.isfile(current_location) or not os.path.normpath(current_location).startswith(os.path.normpath(self._location)):
# check if downloaded files still exist, update our data if this has changed
if not settings.SKIP_REMOVED_FILES:
- with curEp.lock:
- # if it used to have a file associated with it and it doesn't anymore then set it to oldbeard.EP_DEFAULT_DELETED_STATUS
- if curEp.status in Quality.DOWNLOADED:
+ with episode_object.lock:
+                            # if it used to have a file associated with it and it doesn't anymore, set it to oldbeard.EP_DEFAULT_DELETED_STATUS
+ if episode_object.status in Quality.DOWNLOADED:
if settings.EP_DEFAULT_DELETED_STATUS == ARCHIVED:
- oldStatus_, oldQuality = Quality.splitCompositeStatus(curEp.status)
- new_status = Quality.compositeStatus(ARCHIVED, oldQuality)
+ old_status_, old_quality = Quality.splitCompositeStatus(episode_object.status)
+ new_status = Quality.compositeStatus(ARCHIVED, old_quality)
else:
new_status = settings.EP_DEFAULT_DELETED_STATUS
@@ -1075,19 +1079,19 @@ def refreshDir(self):
id=self.indexerid, ep=episode_num(season, episode), status=statusStrings[new_status]
)
)
- curEp.status = new_status
- curEp.subtitles = []
- curEp.subtitles_searchcount = 0
- curEp.subtitles_lastsearch = str(datetime.datetime.min)
- curEp.location = ""
- curEp.hasnfo = False
- curEp.hastbn = False
- curEp.release_name = ""
- curEp.release_group = ""
-
- sql_l.append(curEp.get_sql())
+ episode_object.status = new_status
+ episode_object.subtitles = []
+ episode_object.subtitles_searchcount = 0
+ episode_object.subtitles_lastsearch = str(datetime.datetime.min)
+ episode_object.location = ""
+ episode_object.has_nfo = False
+ episode_object.has_tbn = False
+ episode_object.release_name = ""
+ episode_object.release_group = ""
+
+ sql_l.append(episode_object.get_sql())
else:
- logger.info("Skipping updating removed file because `skip removed files` is enabled: {}".format(curLoc))
+ logger.info(f"Skipping updating removed file because `skip removed files` is enabled: {current_location}")
if sql_l:
main_db_con = db.DBConnection()
@@ -1101,7 +1105,7 @@ def download_subtitles(self):
logger.debug(f"{self.indexerid}: Downloading subtitles")
try:
- episodes = self.getAllEpisodes(has_location=True)
+ episodes = self.get_all_episodes(has_location=True)
if not episodes:
logger.debug(f"{self.indexerid}: No episodes to download subtitles for {self.name}")
return
@@ -1113,14 +1117,14 @@ def download_subtitles(self):
logger.debug(f"{self.indexerid}: Error occurred when downloading subtitles for {self.name}")
logger.error(traceback.format_exc())
- def saveToDB(self, forceSave=False):
- if not self.dirty and not forceSave:
+ def save_to_db(self):
+ if not self.dirty:
return
logger.debug("{0:d}: Saving to database: {1}".format(self.indexerid, self.name))
- controlValueDict = {"indexer_id": self.indexerid}
- newValueDict = {
+ control_value_dict = {"indexer_id": self.indexerid}
+ new_value_dict = {
"indexer": self.indexer,
"show_name": self.show_name,
"custom_name": self.custom_name,
@@ -1152,13 +1156,13 @@ def saveToDB(self, forceSave=False):
}
main_db_con = db.DBConnection()
- main_db_con.upsert("tv_shows", newValueDict, controlValueDict)
+ main_db_con.upsert("tv_shows", new_value_dict, control_value_dict)
helpers.update_anime_support()
if self.imdb_id and self.imdb_info:
main_db_con = db.DBConnection()
- main_db_con.upsert("imdb_info", self.imdb_info, controlValueDict)
+ main_db_con.upsert("imdb_info", self.imdb_info, control_value_dict)
def __str__(self):
info_list = [
@@ -1184,14 +1188,14 @@ def __str__(self):
return "\n".join(info_list)
@staticmethod
- def qualitiesToString(qualities=None):
+ def qualities_to_string(qualities=None):
return ", ".join([Quality.qualityStrings[quality] for quality in qualities or [] if quality and quality in Quality.qualityStrings]) or "None"
- def wantEpisode(self, season, episode, quality, manualSearch=False, downCurQuality=False):
+ def want_episode(self, season, episode, quality, manual_search=False, down_cur_quality=False):
allowed_qualities, preferred_qualities = Quality.splitQuality(self.quality)
logger.debug(
- f"Any,Best = [ {self.qualitiesToString(allowed_qualities)} ] [ {self.qualitiesToString(preferred_qualities)} ]"
- f" Found = [ {self.qualitiesToString([quality])} ]"
+ f"Any,Best = [ {self.qualities_to_string(allowed_qualities)} ] [ {self.qualities_to_string(preferred_qualities)} ]"
+ f" Found = [ {self.qualities_to_string([quality])} ]"
)
if quality not in allowed_qualities + preferred_qualities or quality is UNKNOWN:
@@ -1213,28 +1217,28 @@ def wantEpisode(self, season, episode, quality, manualSearch=False, downCurQuali
)
return False
- epStatus = int(sql_results[0]["status"])
- epStatus_text = statusStrings[epStatus]
+ ep_status = int(sql_results[0]["status"])
+ ep_status_text = statusStrings[ep_status]
- if epStatus in Quality.ARCHIVED + [UNAIRED, SKIPPED, IGNORED] and not manualSearch:
+ if ep_status in Quality.ARCHIVED + [UNAIRED, SKIPPED, IGNORED] and not manual_search:
logger.debug(
"Existing episode status is '{status}', ignoring found result for {name} {ep} with quality {quality}".format(
- status=epStatus_text, name=self.name, ep=episode_num(season, episode), quality=Quality.qualityStrings[quality]
+ status=ep_status_text, name=self.name, ep=episode_num(season, episode), quality=Quality.qualityStrings[quality]
)
)
return False
- curStatus_, curQuality = Quality.splitCompositeStatus(epStatus)
+ cur_status_, cur_quality = Quality.splitCompositeStatus(ep_status)
- if epStatus in (WANTED, SKIPPED, UNKNOWN, FAILED):
+ if ep_status in (WANTED, SKIPPED, UNKNOWN, FAILED):
logger.debug(
"Existing episode status is '{status}', getting found result for {name} {ep} with quality {quality}".format(
- status=epStatus_text, name=self.name, ep=episode_num(season, episode), quality=Quality.qualityStrings[quality]
+ status=ep_status_text, name=self.name, ep=episode_num(season, episode), quality=Quality.qualityStrings[quality]
)
)
return True
- elif manualSearch:
- if (downCurQuality and quality >= curQuality) or (not downCurQuality and quality != curQuality):
+ elif manual_search:
+ if (down_cur_quality and quality >= cur_quality) or (not down_cur_quality and quality != cur_quality):
logger.debug(
"Usually ignoring found result, but forced search allows the quality,"
" getting found result for {name} {ep} with quality {quality}".format(
@@ -1244,21 +1248,21 @@ def wantEpisode(self, season, episode, quality, manualSearch=False, downCurQuali
return True
if (
- epStatus in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER
+ ep_status in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER
and quality in preferred_qualities
- and (quality > curQuality or curQuality not in preferred_qualities)
+ and (quality > cur_quality or cur_quality not in preferred_qualities)
):
logger.debug(
"Episode already exists with quality {existing_quality} but the found result"
" quality {new_quality} is wanted more, getting found result for {name} {ep}".format(
- existing_quality=Quality.qualityStrings[curQuality],
+ existing_quality=Quality.qualityStrings[cur_quality],
new_quality=Quality.qualityStrings[quality],
name=self.name,
ep=episode_num(season, episode),
)
)
return True
- elif curQuality == Quality.UNKNOWN and manualSearch:
+ elif cur_quality == Quality.UNKNOWN and manual_search:
logger.debug(
"Episode already exists but quality is Unknown, getting found result for {name} {ep} with quality {quality}".format(
name=self.name, ep=episode_num(season, episode), quality=Quality.qualityStrings[quality]
@@ -1269,7 +1273,7 @@ def wantEpisode(self, season, episode, quality, manualSearch=False, downCurQuali
logger.debug(
"Episode already exists with quality {existing_quality} and the found result has same/lower quality,"
" ignoring found result for {name} {ep} with quality {new_quality}".format(
- existing_quality=Quality.qualityStrings[curQuality],
+ existing_quality=Quality.qualityStrings[cur_quality],
name=self.name,
ep=episode_num(season, episode),
new_quality=Quality.qualityStrings[quality],
@@ -1277,7 +1281,7 @@ def wantEpisode(self, season, episode, quality, manualSearch=False, downCurQuali
)
return False
- def getOverview(self, epStatus, backlog=False):
+ def get_overview(self, episode_status, backlog=False):
"""
Get the Overview status from the Episode status
@@ -1285,7 +1289,7 @@ def getOverview(self, epStatus, backlog=False):
return: an Overview status
"""
- ep_status = try_int(epStatus) or UNKNOWN
+ ep_status = try_int(episode_status) or UNKNOWN
if ep_status == WANTED:
return Overview.WANTED
@@ -1314,7 +1318,7 @@ def getOverview(self, epStatus, backlog=False):
else:
return Overview.GOOD
else:
- logger.error(f"Could not parse episode status into a valid overview status: {epStatus}")
+ logger.error(f"Could not parse episode status into a valid overview status: {episode_status}")
def __getstate__(self):
d = dict(self.__dict__)
@@ -1336,8 +1340,8 @@ class TVEpisode(object):
subtitles_searchcount = DirtySetter(0)
subtitles_lastsearch = DirtySetter(str(datetime.datetime.min))
airdate = DirtySetter(datetime.date.min)
- hasnfo = DirtySetter(False)
- hastbn = DirtySetter(False)
+ has_nfo = DirtySetter(False)
+ has_tbn = DirtySetter(False)
status = DirtySetter(UNKNOWN)
indexerid = DirtySetter(0)
file_size = DirtySetter(0)
@@ -1349,8 +1353,8 @@ class TVEpisode(object):
startyear = DirtySetter("")
def __init__(self, show: TVShow, season, episode, ep_file=""):
- self.season = season
- self.episode = episode
+ self.season: int = season
+ self.episode: int = episode
self._location = ep_file
self.startyear = ""
@@ -1360,21 +1364,28 @@ def __init__(self, show: TVShow, season, episode, ep_file=""):
self.show: TVShow = show
self.indexer = self.show.indexer
- self.scene_season = 0
- self.scene_episode = 0
+ self.scene_season: int = 0
+ self.scene_episode: int = 0
self.scene_absolute_number = 0
+ self._release_group: str = ""
+
self.lock = threading.Lock()
- self.specifyEpisode(self.season, self.episode)
+ self.specify_episode(self.season, self.episode)
self.related_episodes = []
- self.checkForMetaFiles()
+ self.check_for_meta_files()
self.wantedQuality = []
- def _set_location(self, new_location):
+ @property
+ def location(self):
+ return self._location
+
+ @location.setter
+ def location(self, new_location):
logger.debug(f"Setter sets location to {new_location}")
# self._location = newLocation
@@ -1388,8 +1399,6 @@ def _set_location(self, new_location):
else:
self.file_size = 0
- location = property(lambda self: self._location, _set_location)
-
@property
def idxr(self):
return self.show.idxr
@@ -1422,11 +1431,11 @@ def min_max_episode_list(self):
return [min(sorted_list), max(sorted_list)]
- def refreshSubtitles(self):
+ def refresh_subtitles(self):
"""Look for subtitles files and refresh the subtitles property"""
self.subtitles, save_subtitles = subtitles.refresh_subtitles(self)
if save_subtitles:
- self.saveToDB()
+ self.save_to_db()
def download_subtitles(self, force_lang=None):
if not os.path.isfile(self.location):
@@ -1453,7 +1462,7 @@ def download_subtitles(self, force_lang=None):
self.subtitles_searchcount += 1 if self.subtitles_searchcount else 1
self.subtitles_lastsearch = datetime.datetime.now().strftime(dateTimeFormat)
- self.saveToDB()
+ self.save_to_db()
if new_subtitles:
subtitle_list = ", ".join([subtitles.name_from_code(code) for code in new_subtitles])
@@ -1471,9 +1480,9 @@ def download_subtitles(self, force_lang=None):
return new_subtitles
- def checkForMetaFiles(self):
- oldhasnfo = self.hasnfo
- oldhastbn = self.hastbn
+ def check_for_meta_files(self):
+ old_has_nfo = self.has_nfo
+ old_has_tbn = self.has_tbn
cur_nfo = False
cur_tbn = False
@@ -1482,36 +1491,36 @@ def checkForMetaFiles(self):
if os.path.isfile(self.location):
for cur_provider in settings.metadata_provider_dict.values():
if cur_provider.episode_metadata:
- new_result = cur_provider._has_episode_metadata(self)
+ new_result = cur_provider.has_episode_metadata(self)
else:
new_result = False
cur_nfo = new_result or cur_nfo
if cur_provider.episode_thumbnails:
- new_result = cur_provider._has_episode_thumb(self)
+ new_result = cur_provider.has_episode_thumb(self)
else:
new_result = False
cur_tbn = new_result or cur_tbn
- self.hasnfo = cur_nfo
- self.hastbn = cur_tbn
+ self.has_nfo = cur_nfo
+ self.has_tbn = cur_tbn
# if either setting has changed return true, if not return false
- return oldhasnfo != self.hasnfo or oldhastbn != self.hastbn
+ return old_has_nfo != self.has_nfo or old_has_tbn != self.has_tbn
- def specifyEpisode(self, season, episode):
- sql_results = self.loadFromDB(season, episode)
+ def specify_episode(self, season, episode):
+ sql_results = self.load_from_db(season, episode)
if not sql_results:
# only load from NFO if we didn't load from DB
if os.path.isfile(self.location):
try:
- self.loadFromNFO(self.location)
+ self.load_from_nfo(self.location)
except NoNFOException:
logger.error(f"{self.show.indexerid}: There was an error loading the NFO for episode {episode_num(season, episode)}")
# if we tried loading it from NFO and didn't find the NFO, try the Indexers
- if not self.hasnfo:
+ if not self.has_nfo:
try:
result = self.load_from_indexer(season, episode)
except EpisodeDeletedException:
@@ -1521,7 +1530,7 @@ def specifyEpisode(self, season, episode):
if not result:
raise EpisodeNotFoundException("Couldn't find episode {ep}".format(ep=episode_num(season, episode)))
- def loadFromDB(self, season, episode):
+ def load_from_db(self, season, episode):
main_db_con = db.DBConnection()
sql = "SELECT * FROM tv_episodes JOIN tv_shows WHERE showid = indexer_id and showid = ? AND season = ? AND episode = ?"
sql_results = main_db_con.select(sql, [self.show.indexerid, season, episode])
@@ -1537,7 +1546,7 @@ def loadFromDB(self, season, episode):
self.season = season
self.episode = episode
- self.absolute_number = try_int(sql_results[0]["absolute_number"], 0)
+ self.absolute_number = try_int(sql_results[0]["absolute_number"])
self.description = sql_results[0]["description"]
if not self.description:
self.description = ""
@@ -1563,9 +1572,9 @@ def loadFromDB(self, season, episode):
sickchill.oldbeard.scene_numbering.xem_refresh(self.show.indexerid, self.show.indexer)
- self.scene_season = try_int(sql_results[0]["scene_season"], 0)
- self.scene_episode = try_int(sql_results[0]["scene_episode"], 0)
- self.scene_absolute_number = try_int(sql_results[0]["scene_absolute_number"], 0)
+ self.scene_season = try_int(sql_results[0]["scene_season"])
+ self.scene_episode = try_int(sql_results[0]["scene_episode"])
+ self.scene_absolute_number = try_int(sql_results[0]["scene_absolute_number"])
if self.scene_absolute_number == 0:
self.scene_absolute_number = sickchill.oldbeard.scene_numbering.get_scene_absolute_numbering(
@@ -1630,7 +1639,7 @@ def load_from_indexer(self, season=None, episode=None, force_all: bool = False):
id=self.show.indexerid, ep=episode_num(season or self.season, episode or self.episode), absolute=indexer_episode["absoluteNumber"]
)
)
- self.absolute_number = try_int(indexer_episode["absoluteNumber"], 0)
+ self.absolute_number = try_int(indexer_episode["absoluteNumber"])
self.season = (season, self.season)[season is None]
self.episode = (episode, self.episode)[season is None]
@@ -1656,16 +1665,16 @@ def load_from_indexer(self, season=None, episode=None, force_all: bool = False):
else:
first_aired = str(self.airdate)
- rawAirdate = [int(x) for x in first_aired.split("-")]
+ raw_airdate = [int(x) for x in first_aired.split("-")]
try:
- self.airdate = datetime.date(rawAirdate[0], rawAirdate[1], rawAirdate[2])
+ self.airdate = datetime.date(raw_airdate[0], raw_airdate[1], raw_airdate[2])
except (ValueError, IndexError, TypeError):
# Changed this to error because it should NEVER happen now
logger.error(f"Malformed air date of {first_aired} retrieved from {self.indexer_name} for ({self.show.name} - {episode_num(season, episode)})")
- # if I'm incomplete on the indexer but I once was complete then just delete myself from the DB for now
+        # if I'm incomplete on the indexer but was once complete, just delete myself from the DB for now
if self.indexerid != -1:
- self.deleteEpisode()
+ self.delete_episode()
return False
if indexer_episode.get("id"):
@@ -1674,11 +1683,11 @@ def load_from_indexer(self, season=None, episode=None, force_all: bool = False):
if not self.indexerid:
logger.error("Failed to retrieve ID from {indexer}".format(indexer=self.indexer_name))
if self.indexerid != -1:
- self.deleteEpisode()
+ self.delete_episode()
return False
- if not os.path.isdir(self.show._location) and not settings.CREATE_MISSING_SHOW_DIRS and not settings.ADD_SHOWS_WO_DIR:
- logger.info(f"The show dir {self.show._location} is missing, not bothering to change the episode statuses since it'd probably be invalid")
+ if not os.path.isdir(self.show.get_location) and not settings.CREATE_MISSING_SHOW_DIRS and not settings.ADD_SHOWS_WO_DIR:
+ logger.info(f"The show dir {self.show.get_location} is missing, not bothering to change the episode statuses since it'd probably be invalid")
return
if self.location:
@@ -1723,8 +1732,8 @@ def load_from_indexer(self, season=None, episode=None, force_all: bool = False):
logger.debug(f"6 Status changes from {self.status} to {UNKNOWN}")
self.status = UNKNOWN
- def loadFromNFO(self, location):
- if not os.path.isdir(self.show._location):
+ def load_from_nfo(self, location):
+ if not os.path.isdir(self.show.get_location):
logger.info(f"{self.show.indexerid}: The show dir is missing, not bothering to try loading the episode NFO")
return
@@ -1737,16 +1746,16 @@ def loadFromNFO(self, location):
logger.debug(f"7 Status changes from {self.status} to {Quality.statusFromName(self.location, anime=self.show.is_anime)}")
self.status = Quality.statusFromName(self.location, anime=self.show.is_anime)
- nfoFile = replace_extension(self.location, "nfo")
- logger.debug(f"{self.show.indexerid}: Using NFO name {nfoFile}")
+ nfo_file = replace_extension(self.location, "nfo")
+ logger.debug(f"{self.show.indexerid}: Using NFO name {nfo_file}")
- if os.path.isfile(nfoFile):
+ if os.path.isfile(nfo_file):
try:
- show_xml = ElementTree.ElementTree(file=nfoFile)
+ show_xml = ElementTree.ElementTree(file=nfo_file)
except (SyntaxError, ValueError) as error:
logger.error(f"Error loading the NFO, backing up the NFO and skipping for now: {error}")
try:
- os.rename(nfoFile, f"{nfoFile}.old")
+ os.rename(nfo_file, f"{nfo_file}.old")
except OSError as error:
logger.error(f"Failed to rename your episode's NFO file - you need to delete it or fix it: {error}")
raise NoNFOException("Error in NFO format")
@@ -1758,8 +1767,10 @@ def loadFromNFO(self, location):
or ep_details.findtext("episode") is None
or int(ep_details.findtext("episode")) != self.episode
):
+ ep_string = episode_num(self.season, self.episode)
+ other_string = episode_num(ep_details.findtext("season"), ep_details.findtext("episode"))
logger.debug(
- f"{self.show.indexerid}: NFO has an |