From 9cf3346ed5992ba5ecc88c82530fab91331cc089 Mon Sep 17 00:00:00 2001
From: Goldy <153996346+g0ldyy@users.noreply.github.com>
Date: Wed, 3 Jul 2024 01:20:38 +0200
Subject: [PATCH] feat: many fixes, caching system rework, camelCase to
 snake_case, etc.

---
 comet/api/core.py      |  11 +--
 comet/api/stream.py    | 200 +++++++++++++++++++++--------------------
 comet/main.py          |   8 --
 comet/utils/db.py      |   2 -
 comet/utils/general.py | 160 ++++++++++++++++++++++-----------
 comet/utils/logger.py  |   1 -
 comet/utils/models.py  |   1 -
 7 files changed, 214 insertions(+), 169 deletions(-)

diff --git a/comet/api/core.py b/comet/api/core.py
index 06f8298..57f96e5 100644
--- a/comet/api/core.py
+++ b/comet/api/core.py
@@ -9,30 +9,25 @@
 templates = Jinja2Templates("comet/templates")
 main = APIRouter()
 
-
 @main.get("/", status_code=200)
 async def root():
     return RedirectResponse("/configure")
 
-
 @main.get("/health", status_code=200)
 async def health():
     return {"status": "ok"}
 
-
 indexers = settings.INDEXER_MANAGER_INDEXERS
-
-webConfig = {
+web_config = {
     "indexers": [indexer.replace(" ", "_").lower() for indexer in indexers],
     "languages": [language.replace(" ", "_") for language in RTN.patterns.language_code_mapping.keys()],
-    "resolutions": ["480p", "720p", "1080p", "1440p", "2160p", "2880p", "4320p"]
+    "resolutions": ["360p", "480p", "576p", "720p", "1080p", "1440p", "2160p", "4K", "Unknown"]
 }
 
 @main.get("/configure")
 @main.get("/{b64config}/configure")
 async def configure(request: Request):
-    return templates.TemplateResponse("index.html", {"request": request, "CUSTOM_HEADER_HTML": settings.CUSTOM_HEADER_HTML if settings.CUSTOM_HEADER_HTML and settings.CUSTOM_HEADER_HTML != "None" else "", "webConfig": webConfig})
-
+    return templates.TemplateResponse("index.html", {"request": request, "CUSTOM_HEADER_HTML": settings.CUSTOM_HEADER_HTML if settings.CUSTOM_HEADER_HTML and settings.CUSTOM_HEADER_HTML != "None" else "", "webConfig": web_config})
 
 @main.get("/manifest.json")
 @main.get("/{b64config}/manifest.json")
diff --git a/comet/api/stream.py b/comet/api/stream.py
index ee0621e..99f7885 100644
--- a/comet/api/stream.py
+++ b/comet/api/stream.py
@@ -8,9 +8,9 @@
 from fastapi.responses import RedirectResponse
 from RTN import Torrent, parse, sort_torrents, title_match
 
-from comet.utils.general import (bytesToSize, configChecking,
-                                 generateDownloadLink, getIndexerManager,
-                                 getTorrentHash, isVideo, translate)
+from comet.utils.general import (bytes_to_size, config_check,
+                                 generate_download_link, get_indexer_manager,
+                                 get_torrent_hash, is_video, translate, get_balanced_hashes)
 from comet.utils.logger import logger
 from comet.utils.models import database, rtn, settings
 
@@ -19,7 +19,7 @@
 @streams.get("/stream/{type}/{id}.json")
 @streams.get("/{b64config}/stream/{type}/{id}.json")
 async def stream(request: Request, b64config: str, type: str, id: str):
-    config = configChecking(b64config)
+    config = config_check(b64config)
     if not config:
         return {
             "streams": [
@@ -32,11 +32,11 @@ async def stream(request: Request, b64config: str, type: str, id: str):
         }
 
     async with aiohttp.ClientSession() as session:
-        checkDebrid = await session.get("https://api.real-debrid.com/rest/1.0/user", headers={
+        check_debrid = await session.get("https://api.real-debrid.com/rest/1.0/user", headers={
             "Authorization": f"Bearer {config['debridApiKey']}"
         })
-        checkDebrid = await checkDebrid.text()
-        if not '"type": "premium"' in checkDebrid:
+        check_debrid = await check_debrid.text()
+        if not '"type": "premium"' in check_debrid:
             return {
                 "streams": [
                     {
@@ -56,85 +56,95 @@ async def stream(request: Request, b64config: str, type: str, id: str):
             season = int(info[1])
             episode = int(info[2])
 
-        getMetadata = await session.get(f"https://v3.sg.media-imdb.com/suggestion/a/{id}.json")
-        metadata = await getMetadata.json()
+        get_metadata = await session.get(f"https://v3.sg.media-imdb.com/suggestion/a/{id}.json")
+        metadata = await get_metadata.json()
 
         name = metadata["d"][0]["l"]
         name = translate(name)
 
-        cacheKey = hashlib.md5(json.dumps({"debridService": config["debridService"], "name": name, "season": season, "episode": episode, "indexers": config["indexers"], "resolutions": config["resolutions"], "languages": config["languages"]}).encode("utf-8")).hexdigest()
-        cached = await database.fetch_one(f"SELECT EXISTS (SELECT 1 FROM cache WHERE cacheKey = '{cacheKey}')")
+        cache_key = hashlib.md5(json.dumps({"debridService": config["debridService"], "name": name, "season": season, "episode": episode, "indexers": config["indexers"]}).encode("utf-8")).hexdigest()
+        cached = await database.fetch_one(f"SELECT EXISTS (SELECT 1 FROM cache WHERE cacheKey = '{cache_key}')")
         if cached[0] != 0:
             logger.info(f"Cache found for {name}")
 
-            timestamp = await database.fetch_one(f"SELECT timestamp FROM cache WHERE cacheKey = '{cacheKey}'")
+            timestamp = await database.fetch_one(f"SELECT timestamp FROM cache WHERE cacheKey = '{cache_key}'")
             if timestamp[0] + settings.CACHE_TTL < time.time():
-                await database.execute(f"DELETE FROM cache WHERE cacheKey = '{cacheKey}'")
+                await database.execute(f"DELETE FROM cache WHERE cacheKey = '{cache_key}'")
 
                 logger.info(f"Cache expired for {name}")
             else:
-                sortedRankedFiles = await database.fetch_one(f"SELECT results FROM cache WHERE cacheKey = '{cacheKey}'")
-                sortedRankedFiles = json.loads(sortedRankedFiles[0])
+                sorted_ranked_files = await database.fetch_one(f"SELECT results FROM cache WHERE cacheKey = '{cache_key}'")
+                sorted_ranked_files = json.loads(sorted_ranked_files[0])
+
+                balanced_hashes = await get_balanced_hashes(sorted_ranked_files, config)
 
                 results = []
-                for hash in sortedRankedFiles:
-                    results.append({
-                        "name": f"[RD⚡] Comet {sortedRankedFiles[hash]['data']['resolution'][0] if len(sortedRankedFiles[hash]['data']['resolution']) > 0 else 'Unknown'}",
-                        "title": f"{sortedRankedFiles[hash]['data']['title']}\n💾 {bytesToSize(sortedRankedFiles[hash]['data']['size'])}",
-                        "url": f"{request.url.scheme}://{request.url.netloc}/{b64config}/playback/{hash}/{sortedRankedFiles[hash]['data']['index']}"
-                    })
+                for hash in sorted_ranked_files:
+                    for resolution in balanced_hashes:
+                        if hash in balanced_hashes[resolution]:
+                            results.append({
+                                "name": f"[RD⚡] Comet {sorted_ranked_files[hash]['data']['resolution'][0] if len(sorted_ranked_files[hash]['data']['resolution']) > 0 else 'Unknown'}",
+                                "title": f"{sorted_ranked_files[hash]['data']['title']}\n💾 {bytes_to_size(sorted_ranked_files[hash]['data']['size'])}",
+                                "url": f"{request.url.scheme}://{request.url.netloc}/{b64config}/playback/{hash}/{sorted_ranked_files[hash]['data']['index']}"
+                            })
+
+                            continue
 
-                return {"streams": results}
+                return {
+                    "streams": results
+                }
         else:
             logger.info(f"No cache found for {name} with user configuration")
 
-        indexerManagerType = settings.INDEXER_MANAGER_TYPE
+        indexer_manager_type = settings.INDEXER_MANAGER_TYPE
 
-        logger.info(f"Start of {indexerManagerType} search for {name} with indexers {config['indexers']}")
+        logger.info(f"Start of {indexer_manager_type} search for {name} with indexers {config['indexers']}")
 
         tasks = []
-        tasks.append(getIndexerManager(session, indexerManagerType, config["indexers"], name))
+        tasks.append(get_indexer_manager(session, indexer_manager_type, config["indexers"], name))
         if type == "series":
-            tasks.append(getIndexerManager(session, indexerManagerType, config["indexers"], f"{name} S0{season}E0{episode}"))
-        searchResponses = await asyncio.gather(*tasks)
+            tasks.append(get_indexer_manager(session, indexer_manager_type, config["indexers"], f"{name} S0{season}E0{episode}"))
+        search_response = await asyncio.gather(*tasks)
 
         torrents = []
-        for results in searchResponses:
+        for results in search_response:
             if results == None:
                 continue
 
             for result in results:
                 torrents.append(result)
 
-        logger.info(f"{len(torrents)} torrents found for {name} with {indexerManagerType}")
+        logger.info(f"{len(torrents)} torrents found for {name} with {indexer_manager_type}")
 
-        zileanHashesCount = 0
+        zilean_hashes_count = 0
         try:
             if settings.ZILEAN_URL:
-                getDmm = await session.post(f"{settings.ZILEAN_URL}/dmm/search", json={
+                get_dmm = await session.post(f"{settings.ZILEAN_URL}/dmm/search", json={
                     "queryText": name
                 })
-                getDmm = await getDmm.json()
+                get_dmm = await get_dmm.json()
 
-                if not "status" in getDmm:
-                    for result in getDmm:
-                        zileanHashesCount += 1
+                if not "status" in get_dmm:
+                    for result in get_dmm:
+                        zilean_hashes_count += 1
 
-                        if indexerManagerType == "jackett":
+                        if indexer_manager_type == "jackett":
                             object = {
                                 "Title": result["filename"],
-                                "InfoHash": result["infoHash"]
+                                "InfoHash": result["infoHash"],
+                                "zilean": True
                             }
 
-                        if indexerManagerType == "prowlarr":
+                        if indexer_manager_type == "prowlarr":
                             object = {
                                 "title": result["filename"],
-                                "infoHash": result["infoHash"]
+                                "infoHash": result["infoHash"],
+                                "zilean": True
                             }
 
                         torrents.append(object)
 
-                logger.info(f"{zileanHashesCount} torrents found for {name} with Zilean API")
+                logger.info(f"{zilean_hashes_count} torrents found for {name} with Zilean API")
         except:
             logger.warning(f"Exception while getting torrents for {name} with Zilean API")
@@ -144,36 +154,29 @@ async def stream(request: Request, b64config: str, type: str, id: str):
         tasks = []
         filtered = 0
         for torrent in torrents:
-            parsedTorrent = parse(torrent["Title"] if indexerManagerType == "jackett" else torrent["title"])
-
-            if not title_match(name.lower(), parsedTorrent.parsed_title.lower()):
-                filtered += 1
-                continue
+            # Only title match check if from Zilean
+            if "zilean" in torrent:
+                parsed_torrent = parse(torrent["Title"] if indexer_manager_type == "jackett" else torrent["title"])
+                if not title_match(name.lower(), parsed_torrent.parsed_title.lower()):
+                    filtered += 1
+                    continue
 
-            if not "All" in config["resolutions"] and len(parsedTorrent.resolution) > 0 and parsedTorrent.resolution[0] not in config["resolutions"]:
-                filtered += 1
-                continue
+            tasks.append(get_torrent_hash(session, indexer_manager_type, torrent))
 
-            if not "All" in config["languages"] and not parsedTorrent.is_multi_audio and not any(language.replace("_", " ").capitalize() in parsedTorrent.language for language in config["languages"]):
-                filtered += 1
-                continue
-
-            tasks.append(getTorrentHash(session, indexerManagerType, torrent))
-
-        logger.info(f"{filtered} filtered torrents for {name}")
+        logger.info(f"{filtered} filtered torrents from Zilean API for {name}")
 
-        torrentHashes = await asyncio.gather(*tasks)
-        torrentHashes = list(set([hash for hash in torrentHashes if hash]))
+        torrent_hashes = await asyncio.gather(*tasks)
+        torrent_hashes = list(set([hash for hash in torrent_hashes if hash]))
 
-        logger.info(f"{len(torrentHashes)} info hashes found for {name}")
+        logger.info(f"{len(torrent_hashes)} info hashes found for {name}")
 
-        torrentHashes = list(set([hash for hash in torrentHashes if hash]))
+        torrent_hashes = list(set([hash for hash in torrent_hashes if hash]))
 
-        if len(torrentHashes) == 0:
+        if len(torrent_hashes) == 0:
             return {"streams": []}
 
         tasks = []
-        for hash in torrentHashes:
+        for hash in torrent_hashes:
             tasks.append(session.get(f"https://api.real-debrid.com/rest/1.0/torrents/instantAvailability/{hash}", headers={
                 "Authorization": f"Bearer {config['debridApiKey']}"
             }))
@@ -192,16 +195,16 @@ async def stream(request: Request, b64config: str, type: str, id: str):
             if type == "series":
                 for variants in details["rd"]:
                     for index, file in variants.items():
-                        filename = file["filename"].lower()
+                        filename = file["filename"]
 
-                        if not isVideo(filename):
+                        if not is_video(filename):
                             continue
 
-                        filenameParsed = parse(file["filename"])
-                        if season in filenameParsed.season and episode in filenameParsed.episode:
+                        filename_parsed = parse(filename)
+                        if season in filename_parsed.season and episode in filename_parsed.episode:
                             files[hash] = {
                                 "index": index,
-                                "title": file["filename"],
+                                "title": filename,
                                 "size": file["filesize"]
                             }
@@ -209,49 +212,55 @@ async def stream(request: Request, b64config: str, type: str, id: str):
 
                 for variants in details["rd"]:
                     for index, file in variants.items():
-                        filename = file["filename"].lower()
+                        filename = file["filename"]
 
-                        if not isVideo(filename):
+                        if not is_video(filename):
                             continue
 
                         files[hash] = {
                             "index": index,
-                            "title": file["filename"],
+                            "title": filename,
                             "size": file["filesize"]
                         }
 
-        rankedFiles = set()
+        ranked_files = set()
         for hash in files:
-            rankedFile = rtn.rank(files[hash]["title"], hash)
-            rankedFiles.add(rankedFile)
+            ranked_file = rtn.rank(files[hash]["title"], hash)
+            ranked_files.add(ranked_file)
 
-        sortedRankedFiles = sort_torrents(rankedFiles)
+        sorted_ranked_files = sort_torrents(ranked_files)
 
-        logger.info(f"{len(sortedRankedFiles)} cached files found on Real-Debrid for {name}")
+        logger.info(f"{len(sorted_ranked_files)} cached files found on Real-Debrid for {name}")
 
-        if len(sortedRankedFiles) == 0:
+        if len(sorted_ranked_files) == 0:
             return {"streams": []}
 
-        sortedRankedFiles = {
+        sorted_ranked_files = {
             key: (value.model_dump() if isinstance(value, Torrent) else value)
-            for key, value in sortedRankedFiles.items()
+            for key, value in sorted_ranked_files.items()
         }
 
-        for hash in sortedRankedFiles: # needed for caching
-            sortedRankedFiles[hash]["data"]["title"] = files[hash]["title"]
-            sortedRankedFiles[hash]["data"]["size"] = files[hash]["size"]
-            sortedRankedFiles[hash]["data"]["index"] = files[hash]["index"]
+        for hash in sorted_ranked_files: # needed for caching
+            sorted_ranked_files[hash]["data"]["title"] = files[hash]["title"]
+            sorted_ranked_files[hash]["data"]["size"] = files[hash]["size"]
+            sorted_ranked_files[hash]["data"]["index"] = files[hash]["index"]
 
-        jsonData = json.dumps(sortedRankedFiles).replace("'", "''")
-        await database.execute(f"INSERT OR IGNORE INTO cache (cacheKey, results, timestamp) VALUES ('{cacheKey}', '{jsonData}', {time.time()})")
+        json_data = json.dumps(sorted_ranked_files).replace("'", "''")
+        await database.execute(f"INSERT OR IGNORE INTO cache (cacheKey, results, timestamp) VALUES ('{cache_key}', '{json_data}', {time.time()})")
 
         logger.info(f"Results have been cached for {name}")
+
+        balanced_hashes = await get_balanced_hashes(sorted_ranked_files, config)
 
         results = []
-        for hash in sortedRankedFiles:
-            results.append({
-                "name": f"[RD⚡] Comet {sortedRankedFiles[hash]['data']['resolution'][0] if len(sortedRankedFiles[hash]['data']['resolution']) > 0 else 'Unknown'}",
-                "title": f"{sortedRankedFiles[hash]['data']['title']}\n💾 {bytesToSize(sortedRankedFiles[hash]['data']['size'])}",
-                "url": f"{request.url.scheme}://{request.url.netloc}/{b64config}/playback/{hash}/{sortedRankedFiles[hash]['data']['index']}"
-            })
+        for hash in sorted_ranked_files:
+            for resolution in balanced_hashes:
+                if hash in balanced_hashes[resolution]:
+                    results.append({
+                        "name": f"[RD⚡] Comet {sorted_ranked_files[hash]['data']['resolution'][0] if len(sorted_ranked_files[hash]['data']['resolution']) > 0 else 'Unknown'}",
+                        "title": f"{sorted_ranked_files[hash]['data']['title']}\n💾 {bytes_to_size(sorted_ranked_files[hash]['data']['size'])}",
+                        "url": f"{request.url.scheme}://{request.url.netloc}/{b64config}/playback/{hash}/{sorted_ranked_files[hash]['data']['index']}"
                    })
+
+                    continue
 
         return {
             "streams": results
@@ -259,21 +268,20 @@ async def stream(request: Request, b64config: str, type: str, id: str):
         }
 
 @streams.head("/{b64config}/playback/{hash}/{index}")
 async def playback(b64config: str, hash: str, index: str):
-    config = configChecking(b64config)
+    config = config_check(b64config)
     if not config:
         return
 
-    downloadLink = await generateDownloadLink(config["debridApiKey"], hash, index)
-
-    return RedirectResponse(downloadLink, status_code=302)
+    download_link = await generate_download_link(config["debridApiKey"], hash, index)
+    return RedirectResponse(download_link, status_code=302)
 
 @streams.get("/{b64config}/playback/{hash}/{index}")
 async def playback(b64config: str, hash: str, index: str):
-    config = configChecking(b64config)
+    config = config_check(b64config)
     if not config:
         return
 
-    downloadLink = await generateDownloadLink(config["debridApiKey"], hash, index)
+    download_link = await generate_download_link(config["debridApiKey"], hash, index)
 
-    return RedirectResponse(downloadLink, status_code=302)
\ No newline at end of file
+    return RedirectResponse(download_link, status_code=302)
\ No newline at end of file
diff --git a/comet/main.py b/comet/main.py
index a1275dc..b0e5b2a 100644
--- a/comet/main.py
+++ b/comet/main.py
@@ -19,7 +19,6 @@
 from comet.utils.logger import logger
 from comet.utils.models import settings
 
-
 class LoguruMiddleware(BaseHTTPMiddleware):
     async def dispatch(self, request: Request, call_next):
         start_time = time.time()
@@ -36,7 +35,6 @@ async def dispatch(self, request: Request, call_next):
         )
         return response
 
-
 @asynccontextmanager
 async def lifespan(app: FastAPI):
     await setup_database()
@@ -49,10 +47,6 @@ async def lifespan(app: FastAPI):
     version="1.0.0",
     lifespan=lifespan,
     redoc_url=None,
-    license_info={
-        "name": "GPL-3.0",
-        "url": "https://www.gnu.org/licenses/gpl-3.0.en.html",
-    }
 )
 
 app.add_middleware(LoguruMiddleware)
@@ -69,7 +63,6 @@ async def lifespan(app: FastAPI):
 app.include_router(main)
 app.include_router(streams)
 
-
 class Server(uvicorn.Server):
     def install_signal_handlers(self):
         pass
@@ -118,7 +111,6 @@ def start_log():
     logger.log("COMET", f"Zilean API: {settings.ZILEAN_URL}")
     logger.log("COMET", f"Custom Header HTML Enabled: {bool(settings.CUSTOM_HEADER_HTML)}")
 
-
     with server.run_in_thread():
         start_log()
         try:
diff --git a/comet/utils/db.py b/comet/utils/db.py
index 3a64b83..7ba165d 100644
--- a/comet/utils/db.py
+++ b/comet/utils/db.py
@@ -3,7 +3,6 @@
 from comet.utils.logger import logger
 from comet.utils.models import database, settings
 
-
 async def setup_database():
     """Setup the database by ensuring the directory and file exist, and creating the necessary tables."""
     try:
@@ -19,7 +18,6 @@ async def setup_database():
     except Exception as e:
         logger.error(f"Error setting up the database: {e}")
 
-
 async def teardown_database():
     """Teardown the database by disconnecting."""
     try:
diff --git a/comet/utils/general.py b/comet/utils/general.py
index 2c2eed1..7acaed3 100644
--- a/comet/utils/general.py
+++ b/comet/utils/general.py
@@ -2,17 +2,14 @@
 import hashlib
 import json
 import math
-import os
 import re
-
 import aiohttp
 import bencodepy
-from RTN.patterns import language_code_mapping
 
 from comet.utils.logger import logger
 from comet.utils.models import settings
 
-translationTable = {
+translation_table = {
     "ā": "a", "ă": "a", "ą": "a", "ć": "c", "č": "c", "ç": "c",
     "ĉ": "c", "ċ": "c", "ď": "d", "đ": "d", "è": "e", "é": "e",
     "ê": "e", "ë": "e", "ē": "e", "ĕ": "e", "ę": "e", "ě": "e",
@@ -30,18 +27,16 @@
     "ǜ": "u", "ǹ": "n", "ǻ": "a", "ǽ": "ae", "ǿ": "o"
 }
 
-translationTable = str.maketrans(translationTable)
-infoHashPattern = re.compile(r"\b([a-fA-F0-9]{40})\b")
+translation_table = str.maketrans(translation_table)
+info_hash_pattern = re.compile(r"\b([a-fA-F0-9]{40})\b")
 
 def translate(title: str):
-    return title.translate(translationTable)
-
+    return title.translate(translation_table)
 
-def isVideo(title: str):
+def is_video(title: str):
     return title.endswith(tuple([".mkv", ".mp4", ".avi", ".mov", ".flv", ".wmv", ".webm", ".mpg", ".mpeg", ".m4v", ".3gp", ".3g2", ".ogv", ".ogg", ".drc", ".gif", ".gifv", ".mng", ".avi", ".mov", ".qt", ".wmv", ".yuv", ".rm", ".rmvb", ".asf", ".amv", ".m4p", ".m4v", ".mpg", ".mp2", ".mpeg", ".mpe", ".mpv", ".mpg", ".mpeg", ".m2v", ".m4v", ".svi", ".3gp", ".3g2", ".mxf", ".roq", ".nsv", ".flv", ".f4v", ".f4p", ".f4a", ".f4b"]))
 
-
-def bytesToSize(bytes: int):
+def bytes_to_size(bytes: int):
     sizes = ["Bytes", "KB", "MB", "GB", "TB"]
 
     if bytes == 0:
@@ -51,10 +46,10 @@ def bytesToSize(bytes: int):
 
     return f"{round(bytes / math.pow(1024, i), 2)} {sizes[i]}"
 
-
-def configChecking(b64config: str):
+def config_check(b64config: str):
     try:
         config = json.loads(base64.b64decode(b64config).decode())
+
         if not isinstance(config["debridService"], str) or config["debridService"] not in ["realdebrid"]:
             return False
         if not isinstance(config["debridApiKey"], str):
@@ -72,33 +67,32 @@ def configChecking(b64config: str):
     except:
         return False
 
-
-async def getIndexerManager(session: aiohttp.ClientSession, indexerManagerType: str, indexers: list, query: str):
+async def get_indexer_manager(session: aiohttp.ClientSession, indexer_manager_type: str, indexers: list, query: str):
     try:
         indexers = [indexer.replace("_", " ") for indexer in indexers]
         timeout = aiohttp.ClientTimeout(total=settings.INDEXER_MANAGER_TIMEOUT)
         results = []
 
-        if indexerManagerType == "jackett":
+        if indexer_manager_type == "jackett":
             response = await session.get(f"{settings.INDEXER_MANAGER_URL}/api/v2.0/indexers/all/results?apikey={settings.INDEXER_MANAGER_API_KEY}&Query={query}&Tracker[]={'&Tracker[]='.join(indexer for indexer in indexers)}", timeout=timeout) # &Category[]=2000&Category[]=5000
             response = await response.json()
 
             for result in response["Results"]:
                 results.append(result)
 
-        if indexerManagerType == "prowlarr":
-            getIndexers = await session.get(f"{settings.INDEXER_MANAGER_URL}/api/v1/indexer", headers={
+        if indexer_manager_type == "prowlarr":
+            get_indexers = await session.get(f"{settings.INDEXER_MANAGER_URL}/api/v1/indexer", headers={
                 "X-Api-Key": settings.INDEXER_MANAGER_API_KEY
             })
-            getIndexers = await getIndexers.json()
+            get_indexers = await get_indexers.json()
 
-            indexersId = []
-            for indexer in getIndexers:
+            indexers_id = []
+            for indexer in get_indexers:
                 if indexer["name"].lower() in indexers or indexer["definitionName"].lower() in indexers:
-                    indexersId.append(indexer["id"])
+                    indexers_id.append(indexer["id"])
 
-            response = await session.get(f"{settings.INDEXER_MANAGER_URL}/api/v1/search?query={query}&indexerIds={'&indexerIds='.join(str(indexerId) for indexerId in indexersId)}&type=search", headers={ # &categories=2000&categories=5000
+            response = await session.get(f"{settings.INDEXER_MANAGER_URL}/api/v1/search?query={query}&indexerIds={'&indexerIds='.join(str(indexer_id) for indexer_id in indexers_id)}&type=search", headers={ # &categories=2000&categories=5000
                 "X-Api-Key": settings.INDEXER_MANAGER_API_KEY
             })
             response = await response.json()
@@ -108,32 +102,31 @@ async def getIndexerManager(session: aiohttp.ClientSession, indexerManagerType:
 
         return results
     except Exception as e:
-        logger.warning(f"Exception while getting {indexerManagerType} results for {query} with {indexers}: {e}")
+        logger.warning(f"Exception while getting {indexer_manager_type} results for {query} with {indexers}: {e}")
 
-
-async def getTorrentHash(session: aiohttp.ClientSession, indexerManagerType: str, torrent: dict):
+async def get_torrent_hash(session: aiohttp.ClientSession, indexer_manager_type: str, torrent: dict):
     if "InfoHash" in torrent and torrent["InfoHash"] != None:
         return torrent["InfoHash"]
 
     if "infoHash" in torrent:
         return torrent["infoHash"]
 
-    url = torrent["Link"] if indexerManagerType == "jackett" else torrent["downloadUrl"]
+    url = torrent["Link"] if indexer_manager_type == "jackett" else torrent["downloadUrl"]
 
     try:
         timeout = aiohttp.ClientTimeout(total=settings.GET_TORRENT_TIMEOUT)
         response = await session.get(url, allow_redirects=False, timeout=timeout)
         if response.status == 200:
-            torrentData = await response.read()
-            torrentDict = bencodepy.decode(torrentData)
-            info = bencodepy.encode(torrentDict[b"info"])
+            torrent_data = await response.read()
+            torrent_dict = bencodepy.decode(torrent_data)
+            info = bencodepy.encode(torrent_dict[b"info"])
             hash = hashlib.sha1(info).hexdigest()
         else:
             location = response.headers.get("Location", "")
             if not location:
                 return
 
-            match = infoHashPattern.search(location)
+            match = info_hash_pattern.search(location)
             if not match:
                 return
@@ -142,55 +135,116 @@ async def getTorrentHash(session: aiohttp.ClientSession, indexerManagerType: str
         return hash
     except Exception as e:
         logger.warning(f"Exception while getting torrent info hash for {torrent['indexer'] if 'indexer' in torrent else (torrent['Tracker'] if 'Tracker' in torrent else '')}|{url}: {e}")
-        # logger.warning(f"Exception while getting torrent info hash for {jackettIndexerPattern.findall(url)[0]}|{jackettNamePattern.search(url)[0]}: {e}")
 
+async def get_balanced_hashes(hashes: dict, config: dict):
+    max_results = config["maxResults"]
+    config_resolutions = config["resolutions"]
+    config_languages = config["languages"]
+
+    hashes_by_resolution = {}
+    for hash in hashes:
+        if not "All" in config_languages and not hashes[hash]["data"]["is_multi_audio"] and not any(language.replace("_", " ").capitalize() in hashes[hash]["data"]["language"] for language in config_languages):
+            continue
+
+        resolution = hashes[hash]["data"]["resolution"]
+        if len(resolution) == 0:
+            if not "All" in config_resolutions and not "Unknown" in config_resolutions:
+                continue
+
+            if not "Unknown" in hashes_by_resolution:
+                hashes_by_resolution["Unknown"] = [hash]
+                continue
+
+            hashes_by_resolution["Unknown"].append(hash)
+            continue
+
+        if not "All" in config_resolutions and not resolution[0] in config_resolutions:
+            continue
+
+        if not resolution[0] in hashes_by_resolution:
+            hashes_by_resolution[resolution[0]] = [hash]
+            continue
+
+        hashes_by_resolution[resolution[0]].append(hash)
+
+    if max_results == 0:
+        return hashes_by_resolution
+
+    total_resolutions = len(hashes_by_resolution)
+    hashes_per_resolution = max_results // total_resolutions
+    extra_hashes = max_results % total_resolutions
+
+    balanced_hashes = {}
+    for resolution, hashes in hashes_by_resolution.items():
+        selected_count = hashes_per_resolution
+
+        if extra_hashes > 0:
+            selected_count += 1
+            extra_hashes -= 1
+
+        balanced_hashes[resolution] = hashes[:selected_count]
+
+    selected_total = sum(len(hashes) for hashes in balanced_hashes.values())
+    if selected_total < max_results:
+        missing_hashes = max_results - selected_total
+
+        for resolution, hashes in hashes_by_resolution.items():
+            if missing_hashes <= 0:
+                break
+
+            current_count = len(balanced_hashes[resolution])
+            available_hashes = hashes[current_count:current_count + missing_hashes]
+            balanced_hashes[resolution].extend(available_hashes)
+            missing_hashes -= len(available_hashes)
+
+    return balanced_hashes
 
-async def generateDownloadLink(debridApiKey: str, hash: str, index: str):
+async def generate_download_link(debrid_api_key: str, hash: str, index: str):
     try:
         async with aiohttp.ClientSession() as session:
-            checkBlacklisted = await session.get("https://real-debrid.com/vpn")
-            checkBlacklisted = await checkBlacklisted.text()
+            check_blacklisted = await session.get("https://real-debrid.com/vpn")
+            check_blacklisted = await check_blacklisted.text()
 
             proxy = None
-            if "Your ISP or VPN provider IP address is currently blocked on our website" in checkBlacklisted:
+            if "Your ISP or VPN provider IP address is currently blocked on our website" in check_blacklisted:
                 proxy = settings.DEBRID_PROXY_URL
                 if not proxy:
                     logger.warning(f"Real-Debrid blacklisted server's IP. No proxy found.")
-                    return "https://comet.fast" # TODO: This needs to be handled better
+                    return "https://comet.fast"
                 else:
                     logger.warning(f"Real-Debrid blacklisted server's IP. Switching to proxy {proxy} for {hash}|{index}")
 
-            addMagnet = await session.post(f"https://api.real-debrid.com/rest/1.0/torrents/addMagnet", headers={
-                "Authorization": f"Bearer {debridApiKey}"
+            add_magnet = await session.post(f"https://api.real-debrid.com/rest/1.0/torrents/addMagnet", headers={
+                "Authorization": f"Bearer {debrid_api_key}"
             }, data={
                 "magnet": f"magnet:?xt=urn:btih:{hash}"
             }, proxy=proxy)
-            addMagnet = await addMagnet.json()
+            add_magnet = await add_magnet.json()
 
-            getMagnetInfo = await session.get(addMagnet["uri"], headers={
-                "Authorization": f"Bearer {debridApiKey}"
+            get_magnet_info = await session.get(add_magnet["uri"], headers={
+                "Authorization": f"Bearer {debrid_api_key}"
             }, proxy=proxy)
-            getMagnetInfo = await getMagnetInfo.json()
+            get_magnet_info = await get_magnet_info.json()
 
-            selectFile = await session.post(f"https://api.real-debrid.com/rest/1.0/torrents/selectFiles/{addMagnet['id']}", headers={
-                "Authorization": f"Bearer {debridApiKey}"
+            await session.post(f"https://api.real-debrid.com/rest/1.0/torrents/selectFiles/{add_magnet['id']}", headers={
+                "Authorization": f"Bearer {debrid_api_key}"
             }, data={
                 "files": index
             }, proxy=proxy)
 
-            getMagnetInfo = await session.get(addMagnet["uri"], headers={
-                "Authorization": f"Bearer {debridApiKey}"
+            get_magnet_info = await session.get(add_magnet["uri"], headers={
+                "Authorization": f"Bearer {debrid_api_key}"
            }, proxy=proxy)
-            getMagnetInfo = await getMagnetInfo.json()
+            get_magnet_info = await get_magnet_info.json()
 
-            unrestrictLink = await session.post(f"https://api.real-debrid.com/rest/1.0/unrestrict/link", headers={
-                "Authorization": f"Bearer {debridApiKey}"
+            unrestrict_link = await session.post(f"https://api.real-debrid.com/rest/1.0/unrestrict/link", headers={
+                "Authorization": f"Bearer {debrid_api_key}"
             }, data={
-                "link": getMagnetInfo["links"][0]
+                "link": get_magnet_info["links"][0]
             }, proxy=proxy)
-            unrestrictLink = await unrestrictLink.json()
+            unrestrict_link = await unrestrict_link.json()
 
-            return unrestrictLink["download"]
+            return unrestrict_link["download"]
     except Exception as e:
         logger.warning(f"Exception while getting download link from Real Debrid for {hash}|{index}: {e}")
diff --git a/comet/utils/logger.py b/comet/utils/logger.py
index 642a7cf..37c1c2b 100644
--- a/comet/utils/logger.py
+++ b/comet/utils/logger.py
@@ -2,7 +2,6 @@
 
 from loguru import logger
 
-
 def setupLogger(level: str):
     logger.level("COMET", no=50, icon="🌠", color="")
     logger.level("API", no=40, icon="👾", color="")
diff --git a/comet/utils/models.py b/comet/utils/models.py
index fe94c6e..aba1e5b 100644
--- a/comet/utils/models.py
+++ b/comet/utils/models.py
@@ -28,7 +28,6 @@ class AppSettings(BaseSettings):
     ZILEAN_URL: Optional[str] = None
     CUSTOM_HEADER_HTML: Optional[str] = None
 
-
 class BestOverallRanking(BaseRankingModel):
     uhd: int = 100
     fhd: int = 90
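
Note on the new get_balanced_hashes helper in comet/utils/general.py: after filtering hashes by the user's language and resolution settings, it groups them by resolution and splits maxResults across the groups: max_results // len(groups) per group, with the remainder handed out one per group in iteration order, followed by a backfill pass that tops up from groups with spare hashes when another group came up short. The sketch below is not part of the patch; it reimplements only that quota arithmetic on hypothetical sample data so the distribution can be checked in isolation.

    # Standalone sketch of the quota arithmetic used by get_balanced_hashes.
    # Sample data is hypothetical; only the balancing logic mirrors the patch.

    def balance(hashes_by_resolution: dict[str, list[str]], max_results: int) -> dict[str, list[str]]:
        if max_results == 0:  # the patch treats 0 as "no limit"
            return hashes_by_resolution

        total_resolutions = len(hashes_by_resolution)
        per_resolution = max_results // total_resolutions  # base quota per group
        extra = max_results % total_resolutions            # first groups get one more

        balanced = {}
        for resolution, group in hashes_by_resolution.items():
            count = per_resolution
            if extra > 0:
                count += 1
                extra -= 1
            balanced[resolution] = group[:count]

        # Backfill: groups smaller than their quota leave a shortfall, which is
        # taken from groups that still have unused hashes, in iteration order.
        missing = max_results - sum(len(group) for group in balanced.values())
        for resolution, group in hashes_by_resolution.items():
            if missing <= 0:
                break
            current = len(balanced[resolution])
            taken = group[current:current + missing]
            balanced[resolution].extend(taken)
            missing -= len(taken)

        return balanced

    print(balance({"2160p": ["a"], "1080p": ["b", "c", "d", "e"], "720p": ["f", "g"]}, 5))
    # {'2160p': ['a'], '1080p': ['b', 'c', 'd'], '720p': ['f']}

With max_results = 5 over groups of sizes 1, 4 and 2, the base quotas are 2/2/1; the 2160p group can only supply one hash, so the shortfall of one is backfilled from 1080p, keeping the total at exactly five streams.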