feat: lot of fixes, caching system changed, camel to snake etc...
g0ldyy committed Jul 2, 2024
1 parent e9d90e0 commit 9cf3346
Showing 7 changed files with 214 additions and 169 deletions.
11 changes: 3 additions & 8 deletions comet/api/core.py
@@ -9,30 +9,25 @@
templates = Jinja2Templates("comet/templates")
main = APIRouter()


@main.get("/", status_code=200)
async def root():
return RedirectResponse("/configure")


@main.get("/health", status_code=200)
async def health():
return {"status": "ok"}


indexers = settings.INDEXER_MANAGER_INDEXERS

-webConfig = {
+web_config = {
"indexers": [indexer.replace(" ", "_").lower() for indexer in indexers],
"languages": [language.replace(" ", "_") for language in RTN.patterns.language_code_mapping.keys()],
"resolutions": ["480p", "720p", "1080p", "1440p", "2160p", "2880p", "4320p"]
"resolutions": ["360p", "480p", "576p", "720p", "1080p", "1440p", "2160p", "4K", "Unknown"]
}

@main.get("/configure")
@main.get("/{b64config}/configure")
async def configure(request: Request):
-return templates.TemplateResponse("index.html", {"request": request, "CUSTOM_HEADER_HTML": settings.CUSTOM_HEADER_HTML if settings.CUSTOM_HEADER_HTML and settings.CUSTOM_HEADER_HTML != "None" else "", "webConfig": webConfig})
+return templates.TemplateResponse("index.html", {"request": request, "CUSTOM_HEADER_HTML": settings.CUSTOM_HEADER_HTML if settings.CUSTOM_HEADER_HTML and settings.CUSTOM_HEADER_HTML != "None" else "", "webConfig": web_config})

@main.get("/manifest.json")
@main.get("/{b64config}/manifest.json")
200 changes: 104 additions & 96 deletions comet/api/stream.py

Large diffs are not rendered by default.

8 changes: 0 additions & 8 deletions comet/main.py
@@ -19,7 +19,6 @@
from comet.utils.logger import logger
from comet.utils.models import settings


class LoguruMiddleware(BaseHTTPMiddleware):
async def dispatch(self, request: Request, call_next):
start_time = time.time()
@@ -36,7 +35,6 @@ async def dispatch(self, request: Request, call_next):
)
return response


@asynccontextmanager
async def lifespan(app: FastAPI):
await setup_database()
@@ -49,10 +47,6 @@ async def lifespan(app: FastAPI):
version="1.0.0",
lifespan=lifespan,
redoc_url=None,
-license_info={
-"name": "GPL-3.0",
-"url": "https://www.gnu.org/licenses/gpl-3.0.en.html",
-}
)

app.add_middleware(LoguruMiddleware)
@@ -69,7 +63,6 @@ async def lifespan(app: FastAPI):
app.include_router(main)
app.include_router(streams)


class Server(uvicorn.Server):
def install_signal_handlers(self):
pass
@@ -118,7 +111,6 @@ def start_log():
logger.log("COMET", f"Zilean API: {settings.ZILEAN_URL}")
logger.log("COMET", f"Custom Header HTML Enabled: {bool(settings.CUSTOM_HEADER_HTML)}")


with server.run_in_thread():
start_log()
try:
2 changes: 0 additions & 2 deletions comet/utils/db.py
@@ -3,7 +3,6 @@
from comet.utils.logger import logger
from comet.utils.models import database, settings


async def setup_database():
"""Setup the database by ensuring the directory and file exist, and creating the necessary tables."""
try:
@@ -19,7 +18,6 @@ async def setup_database():
except Exception as e:
logger.error(f"Error setting up the database: {e}")


async def teardown_database():
"""Teardown the database by disconnecting."""
try:
160 changes: 107 additions & 53 deletions comet/utils/general.py
@@ -2,17 +2,14 @@
import hashlib
import json
import math
import os
import re

import aiohttp
import bencodepy
from RTN.patterns import language_code_mapping

from comet.utils.logger import logger
from comet.utils.models import settings

-translationTable = {
+translation_table = {
"ā": "a", "ă": "a", "ą": "a", "ć": "c", "č": "c", "ç": "c",
"ĉ": "c", "ċ": "c", "ď": "d", "đ": "d", "è": "e", "é": "e",
"ê": "e", "ë": "e", "ē": "e", "ĕ": "e", "ę": "e", "ě": "e",
@@ -30,18 +27,16 @@
"ǜ": "u", "ǹ": "n", "ǻ": "a", "ǽ": "ae", "ǿ": "o"
}

-translationTable = str.maketrans(translationTable)
-infoHashPattern = re.compile(r"\b([a-fA-F0-9]{40})\b")
+translation_table = str.maketrans(translation_table)
+info_hash_pattern = re.compile(r"\b([a-fA-F0-9]{40})\b")

def translate(title: str):
-return title.translate(translationTable)
+return title.translate(translation_table)
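
For reference, the renamed helper only folds lowercase diacritics, since every visible key in translation_table is lowercase. An illustrative check (not part of the commit):

assert translate("café crème") == "cafe creme"  # "é" and "è" both map to "e"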

-def isVideo(title: str):
+def is_video(title: str):
return title.endswith(tuple([".mkv", ".mp4", ".avi", ".mov", ".flv", ".wmv", ".webm", ".mpg", ".mpeg", ".m4v", ".3gp", ".3g2", ".ogv", ".ogg", ".drc", ".gif", ".gifv", ".mng", ".avi", ".mov", ".qt", ".wmv", ".yuv", ".rm", ".rmvb", ".asf", ".amv", ".m4p", ".m4v", ".mpg", ".mp2", ".mpeg", ".mpe", ".mpv", ".mpg", ".mpeg", ".m2v", ".m4v", ".svi", ".3gp", ".3g2", ".mxf", ".roq", ".nsv", ".flv", ".f4v", ".f4p", ".f4a", ".f4b"]))


-def bytesToSize(bytes: int):
+def bytes_to_size(bytes: int):
sizes = ["Bytes", "KB", "MB", "GB", "TB"]

if bytes == 0:
@@ -51,10 +46,10 @@ def bytesToSize(bytes: int):

return f"{round(bytes / math.pow(1024, i), 2)} {sizes[i]}"


-def configChecking(b64config: str):
+def config_check(b64config: str):
try:
config = json.loads(base64.b64decode(b64config).decode())

if not isinstance(config["debridService"], str) or config["debridService"] not in ["realdebrid"]:
return False
if not isinstance(config["debridApiKey"], str):
@@ -72,33 +67,32 @@ def configChecking(b64config: str):
except:
return False
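
The b64config path segment validated here (and consumed by the /{b64config}/configure and /{b64config}/manifest.json routes) is a base64-encoded JSON object. A minimal sketch of producing one — key names come from the checks above and from get_balanced_hashes below; the values are illustrative placeholders:

import base64
import json

config = {
    "debridService": "realdebrid",  # the only service accepted by the check above
    "debridApiKey": "YOUR_RD_API_KEY",  # placeholder
    "indexers": ["bitsearch"],  # hypothetical indexer name
    "maxResults": 0,
    "resolutions": ["All"],
    "languages": ["All"],
}
b64config = base64.b64encode(json.dumps(config).encode()).decode()
# config_check(b64config) should now return the decoded config instead of False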


-async def getIndexerManager(session: aiohttp.ClientSession, indexerManagerType: str, indexers: list, query: str):
+async def get_indexer_manager(session: aiohttp.ClientSession, indexer_manager_type: str, indexers: list, query: str):
try:
indexers = [indexer.replace("_", " ") for indexer in indexers]

timeout = aiohttp.ClientTimeout(total=settings.INDEXER_MANAGER_TIMEOUT)
results = []

if indexerManagerType == "jackett":
if indexer_manager_type == "jackett":
response = await session.get(f"{settings.INDEXER_MANAGER_URL}/api/v2.0/indexers/all/results?apikey={settings.INDEXER_MANAGER_API_KEY}&Query={query}&Tracker[]={'&Tracker[]='.join(indexer for indexer in indexers)}", timeout=timeout) # &Category[]=2000&Category[]=5000
response = await response.json()

for result in response["Results"]:
results.append(result)

if indexerManagerType == "prowlarr":
getIndexers = await session.get(f"{settings.INDEXER_MANAGER_URL}/api/v1/indexer", headers={
if indexer_manager_type == "prowlarr":
get_indexers = await session.get(f"{settings.INDEXER_MANAGER_URL}/api/v1/indexer", headers={
"X-Api-Key": settings.INDEXER_MANAGER_API_KEY
})
-getIndexers = await getIndexers.json()
+get_indexers = await get_indexers.json()

-indexersId = []
-for indexer in getIndexers:
+indexers_id = []
+for indexer in get_indexers:
if indexer["name"].lower() in indexers or indexer["definitionName"].lower() in indexers:
-indexersId.append(indexer["id"])
+indexers_id.append(indexer["id"])

response = await session.get(f"{settings.INDEXER_MANAGER_URL}/api/v1/search?query={query}&indexerIds={'&indexerIds='.join(str(indexerId) for indexerId in indexersId)}&type=search", headers={ # &categories=2000&categories=5000
response = await session.get(f"{settings.INDEXER_MANAGER_URL}/api/v1/search?query={query}&indexerIds={'&indexerIds='.join(str(indexer_id) for indexer_id in indexers_id)}&type=search", headers={ # &categories=2000&categories=5000
"X-Api-Key": settings.INDEXER_MANAGER_API_KEY
})
response = await response.json()
@@ -108,32 +102,31 @@ async def getIndexerManager(session: aiohttp.ClientSession, indexerManagerType:

return results
except Exception as e:
logger.warning(f"Exception while getting {indexerManagerType} results for {query} with {indexers}: {e}")
logger.warning(f"Exception while getting {indexer_manager_type} results for {query} with {indexers}: {e}")


-async def getTorrentHash(session: aiohttp.ClientSession, indexerManagerType: str, torrent: dict):
+async def get_torrent_hash(session: aiohttp.ClientSession, indexer_manager_type: str, torrent: dict):
if "InfoHash" in torrent and torrent["InfoHash"] != None:
return torrent["InfoHash"]

if "infoHash" in torrent:
return torrent["infoHash"]

url = torrent["Link"] if indexerManagerType == "jackett" else torrent["downloadUrl"]
url = torrent["Link"] if indexer_manager_type == "jackett" else torrent["downloadUrl"]

try:
timeout = aiohttp.ClientTimeout(total=settings.GET_TORRENT_TIMEOUT)
response = await session.get(url, allow_redirects=False, timeout=timeout)
if response.status == 200:
-torrentData = await response.read()
-torrentDict = bencodepy.decode(torrentData)
-info = bencodepy.encode(torrentDict[b"info"])
+torrent_data = await response.read()
+torrent_dict = bencodepy.decode(torrent_data)
+info = bencodepy.encode(torrent_dict[b"info"])
hash = hashlib.sha1(info).hexdigest()
else:
location = response.headers.get("Location", "")
if not location:
return

-match = infoHashPattern.search(location)
+match = info_hash_pattern.search(location)
if not match:
return

@@ -142,55 +135,116 @@ async def getTorrentHash(session: aiohttp.ClientSession, indexerManagerType: str
return hash
except Exception as e:
logger.warning(f"Exception while getting torrent info hash for {torrent['indexer'] if 'indexer' in torrent else (torrent['Tracker'] if 'Tracker' in torrent else '')}|{url}: {e}")
# logger.warning(f"Exception while getting torrent info hash for {jackettIndexerPattern.findall(url)[0]}|{jackettNamePattern.search(url)[0]}: {e}")

async def get_balanced_hashes(hashes: dict, config: dict):
max_results = config["maxResults"]
config_resolutions = config["resolutions"]
config_languages = config["languages"]

hashes_by_resolution = {}
for hash in hashes:
if not "All" in config_languages and not hashes[hash]["data"]["is_multi_audio"] and not any(language.replace("_", " ").capitalize() in hashes[hash]["data"]["language"] for language in config_languages):
continue

resolution = hashes[hash]["data"]["resolution"]
if len(resolution) == 0:
if not "All" in config_resolutions and not "Unknown" in config_resolutions:
continue

if not "Unknown" in hashes_by_resolution:
hashes_by_resolution["Unknown"] = [hash]
continue

hashes_by_resolution["Unknown"].append(hash)
continue

if not "All" in config_resolutions and not resolution[0] in config_resolutions:
continue

if not resolution[0] in hashes_by_resolution:
hashes_by_resolution[resolution[0]] = [hash]
continue

hashes_by_resolution[resolution[0]].append(hash)

if max_results == 0:
return hashes_by_resolution

total_resolutions = len(hashes_by_resolution)
hashes_per_resolution = max_results // total_resolutions
extra_hashes = max_results % total_resolutions

balanced_hashes = {}
for resolution, hashes in hashes_by_resolution.items():
selected_count = hashes_per_resolution

if extra_hashes > 0:
selected_count += 1
extra_hashes -= 1

balanced_hashes[resolution] = hashes[:selected_count]

selected_total = sum(len(hashes) for hashes in balanced_hashes.values())
if selected_total < max_results:
missing_hashes = max_results - selected_total

for resolution, hashes in hashes_by_resolution.items():
if missing_hashes <= 0:
break

current_count = len(balanced_hashes[resolution])
available_hashes = hashes[current_count:current_count + missing_hashes]
balanced_hashes[resolution].extend(available_hashes)
missing_hashes -= len(available_hashes)

return balanced_hashes
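
get_balanced_hashes buckets hashes by resolution (after the language and resolution filters), gives each bucket max_results // len(buckets) slots plus one extra while the remainder lasts, then tops buckets up if the quota is still unfilled. A toy run, with the hashes shape inferred from the field accesses above:

import asyncio

hashes = {
    "a" * 40: {"data": {"resolution": ["1080p"], "language": ["English"], "is_multi_audio": False}},
    "b" * 40: {"data": {"resolution": ["1080p"], "language": ["English"], "is_multi_audio": False}},
    "c" * 40: {"data": {"resolution": [], "language": ["English"], "is_multi_audio": False}},
}
config = {"maxResults": 2, "resolutions": ["All"], "languages": ["All"]}

balanced = asyncio.run(get_balanced_hashes(hashes, config))
# -> {"1080p": ["aaa…a"], "Unknown": ["ccc…c"]}: two results split evenly across the two buckets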

-async def generateDownloadLink(debridApiKey: str, hash: str, index: str):
+async def generate_download_link(debrid_api_key: str, hash: str, index: str):
try:
async with aiohttp.ClientSession() as session:
-checkBlacklisted = await session.get("https://real-debrid.com/vpn")
-checkBlacklisted = await checkBlacklisted.text()
+check_blacklisted = await session.get("https://real-debrid.com/vpn")
+check_blacklisted = await check_blacklisted.text()

proxy = None
if "Your ISP or VPN provider IP address is currently blocked on our website" in checkBlacklisted:
if "Your ISP or VPN provider IP address is currently blocked on our website" in check_blacklisted:
proxy = settings.DEBRID_PROXY_URL
if not proxy:
logger.warning(f"Real-Debrid blacklisted server's IP. No proxy found.")
return "https://comet.fast" # TODO: This needs to be handled better
return "https://comet.fast"
else:
logger.warning(f"Real-Debrid blacklisted server's IP. Switching to proxy {proxy} for {hash}|{index}")

-addMagnet = await session.post(f"https://api.real-debrid.com/rest/1.0/torrents/addMagnet", headers={
-"Authorization": f"Bearer {debridApiKey}"
+add_magnet = await session.post(f"https://api.real-debrid.com/rest/1.0/torrents/addMagnet", headers={
+"Authorization": f"Bearer {debrid_api_key}"
}, data={
"magnet": f"magnet:?xt=urn:btih:{hash}"
}, proxy=proxy)
-addMagnet = await addMagnet.json()
+add_magnet = await add_magnet.json()

getMagnetInfo = await session.get(addMagnet["uri"], headers={
"Authorization": f"Bearer {debridApiKey}"
get_magnet_info = await session.get(add_magnet["uri"], headers={
"Authorization": f"Bearer {debrid_api_key}"
}, proxy=proxy)
-getMagnetInfo = await getMagnetInfo.json()
+get_magnet_info = await get_magnet_info.json()

-selectFile = await session.post(f"https://api.real-debrid.com/rest/1.0/torrents/selectFiles/{addMagnet['id']}", headers={
-"Authorization": f"Bearer {debridApiKey}"
+await session.post(f"https://api.real-debrid.com/rest/1.0/torrents/selectFiles/{add_magnet['id']}", headers={
+"Authorization": f"Bearer {debrid_api_key}"
}, data={
"files": index
}, proxy=proxy)

getMagnetInfo = await session.get(addMagnet["uri"], headers={
"Authorization": f"Bearer {debridApiKey}"
get_magnet_info = await session.get(add_magnet["uri"], headers={
"Authorization": f"Bearer {debrid_api_key}"
}, proxy=proxy)
-getMagnetInfo = await getMagnetInfo.json()
+get_magnet_info = await get_magnet_info.json()

-unrestrictLink = await session.post(f"https://api.real-debrid.com/rest/1.0/unrestrict/link", headers={
-"Authorization": f"Bearer {debridApiKey}"
+unrestrict_link = await session.post(f"https://api.real-debrid.com/rest/1.0/unrestrict/link", headers={
+"Authorization": f"Bearer {debrid_api_key}"
}, data={
"link": getMagnetInfo["links"][0]
"link": get_magnet_info["links"][0]
}, proxy=proxy)
-unrestrictLink = await unrestrictLink.json()
+unrestrict_link = await unrestrict_link.json()

return unrestrictLink["download"]
return unrestrict_link["download"]
except Exception as e:
logger.warning(f"Exception while getting download link from Real Debrid for {hash}|{index}: {e}")

1 change: 0 additions & 1 deletion comet/utils/logger.py
@@ -2,7 +2,6 @@

from loguru import logger


def setupLogger(level: str):
logger.level("COMET", no=50, icon="🌠", color="<fg #7871d6>")
logger.level("API", no=40, icon="👾", color="<fg #7871d6>")
1 change: 0 additions & 1 deletion comet/utils/models.py
@@ -28,7 +28,6 @@ class AppSettings(BaseSettings):
ZILEAN_URL: Optional[str] = None
CUSTOM_HEADER_HTML: Optional[str] = None


class BestOverallRanking(BaseRankingModel):
uhd: int = 100
fhd: int = 90
