Skip to content

Commit

Permalink
refactor: lots of refactoring, try to use a set for search strings to…
Browse files Browse the repository at this point in the history
… avoid duplicates, add type checking for search strings, untested (#8704)

Signed-off-by: miigotu <[email protected]>
  • Loading branch information
miigotu authored Feb 2, 2024
1 parent 87eb816 commit 7ae5a29
Show file tree
Hide file tree
Showing 30 changed files with 147 additions and 120 deletions.
2 changes: 1 addition & 1 deletion sickchill/gui/slick/views/config_providers.mako
Original file line number Diff line number Diff line change
Expand Up @@ -529,7 +529,7 @@
<label class="component-title">${_('For Spanish torrents')}</label>
</div>
<div class="col-lg-9 col-md-8 col-sm-7 col-xs-12 component-desc">
<input type="checkbox" name="${provider.get_id("_onlyspasearch")}" id="${provider.get_id("_onlyspasearch")}" ${checked(provider.onlyspasearch)} />
<input type="checkbox" name="${provider.get_id("_onlyspasearch")}" id="${provider.get_id("_onlyspasearch")}" ${checked(provider.only_spanish_search)} />
<label for="${provider.get_id("_onlyspasearch")}">${_('ONLY search on this provider if show info is defined as "Spanish" (avoid provider\'s use for VOS shows)')}</label>
</div>
</div>
Expand Down
4 changes: 2 additions & 2 deletions sickchill/oldbeard/clients/download_station.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,8 +165,8 @@ def _check_destination(self, result: "SearchResult"):
self._set_destination(result, response_json["data"]["default_destination"])
logger.info("Destination set to %s", self._get_destination(result))
except (ValueError, KeyError, JSONDecodeError) as error:
logger.debug("Get DownloadStation default destination error: {0}".format(error))
logger.warning("Could not get share destination from DownloadStation for {}, please set it in the settings", result.result_type)
logger.debug(f"Get DownloadStation default destination error: {error}")
logger.warning(f"Could not get share destination from DownloadStation for {result.result_type}, please set it in the settings")
raise

def _add_torrent_uri(self, result: "SearchResult"):
Expand Down
22 changes: 11 additions & 11 deletions sickchill/oldbeard/clients/generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
from typing import Dict, Iterable, Union
from urllib.parse import urlencode

import bencodepy
import bencode
import requests

from sickchill import logger, settings
Expand Down Expand Up @@ -153,27 +153,27 @@ def _get_torrent_hash(result):
# Remove \n from end if it exists (bytes) primarily for TorrentLeech
result.content = result.content.strip(b"\n")

torrent_bdecode: Union[Iterable, Dict] = bencodepy.decode(result.content)
except (bencodepy.BencodeDecodeError, Exception) as error:
torrent_bdecode: Union[Iterable, Dict] = bencode.decode(result.content)
except (bencode.BencodeDecodeError, Exception) as error:
logger.exception("Unable to bdecode torrent")
logger.info("Error is: {0}".format(error))
logger.info("Torrent bencoded data: {0!r}".format(result.content))
logger.info(f"Error is: {error}")
logger.info(f"Torrent bencoded data: {result.content!r}")
raise

try:
info = torrent_bdecode[b"info"]
except Exception:
logger.exception("Unable to find info field in torrent")
logger.info("Torrent bencoded data: {0!r}".format(result.content))
logger.info(f"Torrent bencoded data: {result.content!r}")
raise

try:
result.hash = sha1(bencodepy.encode(info)).hexdigest()
logger.debug("Result Hash is {0}".format(result.hash))
except (bencodepy.BencodeDecodeError, Exception) as error:
result.hash = sha1(bencode.encode(info)).hexdigest()
logger.debug(f"Result Hash is {result.hash}")
except (bencode.BencodeDecodeError, Exception) as error:
logger.exception("Unable to bencode torrent info")
logger.info("Error is: {0}".format(error))
logger.info("Torrent bencoded data: {0!r}".format(result.content))
logger.info(f"Error is: {error}")
logger.info(f"Torrent bencoded data: {result.content!r}")
raise

return result
Expand Down
1 change: 0 additions & 1 deletion sickchill/oldbeard/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,6 @@
}
)


_context = None


Expand Down
8 changes: 5 additions & 3 deletions sickchill/oldbeard/notifications_queue.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

class NotificationsQueue(generic_queue.GenericQueue):
"""
Queue to handle multiple post processing tasks
Queue to handle multiple post-processing tasks
"""

def __init__(self):
Expand All @@ -28,7 +28,7 @@ def __init__(self):
@property
def is_paused(self):
"""
Shows if the post processing queue is paused
Shows if the post-processing queue is paused
:return: bool
"""
return self.min_priority == generic_queue.QueuePriorities.HIGH
Expand Down Expand Up @@ -160,13 +160,15 @@ def _send_discord(self, webhook: str = None, name: str = None, avatar: str = Non
r.raise_for_status()
except requests.exceptions.ConnectionError as error:
logger.info("Could not reach the webhook url")
logger.debug(f"Error: {error}")
return False
except requests.exceptions.RequestException as error:
if error.response.status_code != 429 or int(error.response.headers.get("X-RateLimit-Remaining")) != 0:
logger.exception(f"RequestException traceback: {traceback.format_exc()}")
raise error

logger.info("Discord rate limiting, retrying after {} seconds".format(error.response.headers.get("X-RateLimit-Reset-After")))
retry_after = error.response.headers.get("X-RateLimit-Reset-After")
logger.info(f"Discord rate limiting, retrying after {retry_after} seconds")
time.sleep(int(error.response.headers.get("X-RateLimit-Reset-After")) + 1)
r = requests.post(discord_webhook, data=message_data, headers=headers)
r.raise_for_status()
Expand Down
4 changes: 2 additions & 2 deletions sickchill/oldbeard/providers/bitcannon.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ def search(self, search_strings):
logger.debug("No data returned from provider")
continue

if not self._check_auth_from_data(parsed_json):
if not self.check_auth_from_data(parsed_json):
return results

for result in parsed_json.pop("torrents", {}):
Expand Down Expand Up @@ -92,7 +92,7 @@ def search(self, search_strings):
return results

@staticmethod
def _check_auth_from_data(data):
def check_auth_from_data(data):
if not all([isinstance(data, dict), data.pop("status", 200) != 401, data.pop("message", "") != "Invalid API key"]):
logger.warning("Invalid api key. Check your settings")
return False
Expand Down
14 changes: 8 additions & 6 deletions sickchill/oldbeard/providers/bjshare.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,9 @@


class Provider(TorrentProvider):
"""BJ-Share Torrent provider."""
"""
BJ-Share Torrent provider.
"""

def __init__(self):
"""Initialize the class."""
Expand Down Expand Up @@ -42,12 +44,12 @@ def __init__(self):
# Cache
self.cache = tvcache.TVCache(self)

# One piece and Boruto is the only animes that i'm aware that is in "absolute" numbering, the problem is that
# One piece and Boruto are the only anime that I'm aware that is in "absolute" numbering, the problem is that
# they include the season (wrong season) and episode as absolute, eg: One Piece - S08E836
# 836 is the latest episode in absolute numbering, that is correct, but S08 is not the current season...
# So for this show, i don't see a other way to make it work...
# So for this show, I don't see another way to make it work...
#
# All others animes that i tested is with correct season and episode set, so i can't remove the season from all
# All other anime that I tested have the correct season and episode set, so I can't remove the season from all
# or will break everything else
#
# In this indexer, it looks that it is added "automatically", so all current and new releases will be broken
Expand Down Expand Up @@ -121,7 +123,7 @@ def _parse(self, data, mode):
:param data: The raw response from a search
:param mode: The current mode used to search, e.g. RSS
:return: A KV with a list of items found and if there's an next page to search
:return: A KV with a list of items found and if there's a next page to search
"""

def process_column_header(td):
Expand Down Expand Up @@ -161,7 +163,7 @@ def process_column_header(td):

if "group" in result_class or "torrent" in result_class:
# get international title if available
title = re.sub(r".* \[(.*?)\](.*)", r"\1\2", title)
title = re.sub(r".* \[(.*?)](.*)", r"\1\2", title)

if "group" in result_class:
group_title = title
Expand Down
26 changes: 15 additions & 11 deletions sickchill/oldbeard/providers/btn.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
import socket
import time
from datetime import datetime
from typing import Dict, Iterable, List, TYPE_CHECKING, Union

import jsonrpclib

Expand All @@ -13,6 +14,9 @@
from sickchill.oldbeard.helpers import sanitizeSceneName
from sickchill.providers.torrent.TorrentProvider import TorrentProvider

if TYPE_CHECKING:
from sickchill.tv import TVEpisode


class Provider(TorrentProvider):
def __init__(self):
Expand All @@ -37,7 +41,7 @@ def _check_auth(self):

return True

def _check_auth_from_data(self, data):
def check_auth_from_data(self, data):
if data is None:
return self._check_auth()

Expand All @@ -64,7 +68,7 @@ def search(self, search_params):
return results

found = {}
if self._check_auth_from_data(data):
if self.check_auth_from_data(data):
if "torrents" in data:
found = data["torrents"]

Expand Down Expand Up @@ -179,39 +183,39 @@ def __add_tvdb_or_name(params, episode):

return search_params

def get_season_search_strings(self, episode: "TVEpisode") -> List[Dict]:
    """
    Build the BTN season-level search parameters for the given episode's season.

    :param episode: episode whose show/season drives the search
    :return: list with a single search-params dict (category + name, plus
             tvdb id or sanitized show name added by __add_tvdb_or_name)
    """
    search_params = {"category": "Season"}

    # Search for entire seasons: no need to do special things for air by date or sports shows
    if self.show.air_by_date or self.show.sports:
        # Search for the year of the air by date show
        search_params["name"] = str(episode.airdate).split("-")[0]
    else:
        # BTN uses the same format for both Anime and TV
        search_params["name"] = "Season " + str(episode.scene_season)

    return self.__add_tvdb_or_name(search_params, episode)

def get_episode_search_strings(self, episode: "TVEpisode", add_string: str = "") -> Union[List[Dict], Iterable]:
    """
    Build the BTN episode-level search parameters for a single episode.

    :param episode: episode to search for; falsy input yields [{}] (no-op search)
    :param add_string: kept for interface compatibility with other providers; unused here
    :return: list with a single search-params dict
    """
    if not episode:
        return [{}]

    search_params = {"category": "Episode"}

    # episode
    if self.show.air_by_date or self.show.sports:
        date_str = str(episode.airdate)

        # BTN uses dots in dates, we just search for the date since that
        # combined with the series identifier should result in just one episode
        search_params["name"] = date_str.replace("-", ".")
    else:
        # BTN uses the same format for both Anime and TV
        # Do a general name search for the episode
        search_params["name"] = episode_num(episode.scene_season, episode.scene_episode)

    # search
    return self.__add_tvdb_or_name(search_params, episode)

def find_propers(self, search_date=None):
results = []
Expand Down
8 changes: 4 additions & 4 deletions sickchill/oldbeard/providers/elitetorrent.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ class Provider(TorrentProvider):
def __init__(self):
super().__init__("EliteTorrent")

self.onlyspasearch = None
self.only_spanish_search = None
self.minseed = 0
self.minleech = 0
self.cache = tvcache.TVCache(self) # Only poll EliteTorrent every 20 minutes max
Expand Down Expand Up @@ -45,7 +45,7 @@ def search(self, search_strings):
logger.debug(_("Search Mode: {mode}").format(mode=mode))

# Only search if user conditions are true
if self.onlyspasearch and lang_info != "es" and mode != "RSS":
if self.only_spanish_search and lang_info != "es" and mode != "RSS":
logger.debug("Show info is not spanish, skipping provider search")
continue

Expand Down Expand Up @@ -96,7 +96,7 @@ def search(self, search_strings):
First encode latin1 and then decode utf8 to remains str
"""
row_title = row.find("a", class_="nombre")["title"]
title = self._processTitle(row_title.encode("latin-1").decode("utf8"))
title = self._process_title(row_title.encode("latin-1").decode("utf8"))

seeders = try_int(row.find("td", class_="semillas").get_text(strip=True))
leechers = try_int(row.find("td", class_="clientes").get_text(strip=True))
Expand Down Expand Up @@ -140,7 +140,7 @@ def search(self, search_strings):
return results

@staticmethod
def _processTitle(title):
def _process_title(title):
# Quality, if no literal is defined it's HDTV
if "calidad" not in title:
title += " HDTV x264"
Expand Down
2 changes: 1 addition & 1 deletion sickchill/oldbeard/providers/hd4free.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ def search(self, search_strings):

error = jdata.get("error")
if error:
logger.debug("{}".format(error))
logger.debug(f"{error}")
return results

try:
Expand Down
25 changes: 15 additions & 10 deletions sickchill/oldbeard/providers/hdbits.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,16 @@
import datetime
import json
from typing import Dict, Iterable, List, TYPE_CHECKING, Union
from urllib.parse import urlencode, urljoin

from sickchill import logger
from sickchill.helper.exceptions import AuthException
from sickchill.oldbeard import classes, tvcache
from sickchill.providers.torrent.TorrentProvider import TorrentProvider

if TYPE_CHECKING:
from sickchill.tv import TVEpisode, TVShow


class Provider(TorrentProvider):
def __init__(self):
Expand All @@ -27,20 +31,20 @@ def _check_auth(self):
return True

@staticmethod
def check_auth_from_data(parsed_json):
    """
    Check the HDBits API response for an authentication failure.

    :param parsed_json: decoded JSON response from the HDBits API
    :return: always True (a status of 5 only logs a warning)
    """
    # NOTE(review): status == 5 means bad credentials, but the method still
    # returns True, so callers proceed anyway — confirm this is intentional.
    if "status" in parsed_json and "message" in parsed_json and parsed_json.get("status") == 5:
        logger.warning("Invalid username or password. Check your settings")

    return True

def get_season_search_strings(self, episode: "TVEpisode") -> Union[List[Dict], List[str]]:
    """
    Build the HDBits season-level search payload for the given episode's season.

    :param episode: episode whose season is searched
    :return: single-element list containing the JSON post-data dict
    """
    season_search_string = [self.make_post_data_json(show=self.show, season=episode)]
    return season_search_string

def get_episode_search_strings(self, episode: "TVEpisode", add_string: str = "") -> Union[List[Dict], Iterable]:
    """
    Build the HDBits episode-level search payload.

    :param episode: episode to search for
    :param add_string: kept for interface compatibility with other providers; unused here
    :return: single-element list containing the JSON post-data dict
    """
    episode_search_string = [self.make_post_data_json(show=self.show, episode=episode)]
    return episode_search_string

def _get_title_and_url(self, item):
Expand All @@ -60,7 +64,7 @@ def search(self, search_params):
if not parsed_json:
return []

if self._check_auth_from_data(parsed_json):
if self.check_auth_from_data(parsed_json):
if parsed_json and "data" in parsed_json:
items = parsed_json["data"]
else:
Expand All @@ -78,7 +82,7 @@ def find_propers(self, search_date=None):
search_terms = [" proper ", " repack "]

for term in search_terms:
for item in self.search(self.make_post_data_JSON(search_term=term)):
for item in self.search(self.make_post_data_json(search_term=term)):
if item["utadded"]:
try:
result_date = datetime.datetime.fromtimestamp(int(item["utadded"]))
Expand All @@ -91,7 +95,8 @@ def find_propers(self, search_date=None):

return results

def make_post_data_JSON(self, show=None, episode=None, season=None, search_term=None):
# noinspection PyTypedDict
def make_post_data_json(self, show: "TVShow" = None, episode: "TVEpisode" = None, season=None, search_term=None):
post_data = {
"username": self.username,
"passkey": self.passkey,
Expand Down Expand Up @@ -138,9 +143,9 @@ def _get_rss_data(self):
results = []

try:
parsed_json = self.provider.get_url(self.provider.urls["rss"], post_data=self.provider.make_post_data_JSON(), returns="json")
parsed_json = self.provider.get_url(self.provider.urls["rss"], post_data=self.provider.make_post_data_json(), returns="json")

if self.provider._check_auth_from_data(parsed_json):
if self.provider.check_auth_from_data(parsed_json):
results = parsed_json["data"]
except Exception:
pass
Expand Down
Loading

0 comments on commit 7ae5a29

Please sign in to comment.