diff --git a/.gitignore b/.gitignore
index 9f5c16d31..ae8c0fb77 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,10 +1,6 @@
 config.py
-*.pyc
-data*
-.vscode
-.idea
-*.json
-*.pickle
+token.pickle
+rclone.conf
 .netrc
 log.txt
 accounts/*
@@ -16,6 +12,5 @@ tokens/*
 list_drives.txt
 shorteners.txt
 cookies.txt
-downloads
+downloads/*
 bot.session*
-rclone.conf
diff --git a/CHANGELOG.md b/CHANGELOG.md
index e0ead33f3..4e8ad25b9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,24 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## v2.0.2 - 2025-01-
+
+### Fixed
+
+- Resolved case sensitivity issue in document type extension checks.
+- Fixed channel-related issues for mirror and leech operations.
+- Addressed the issue where thumbnails were removed after a restart.
+
+## v2.0.1 - 2025-01-20
+
+### Fixed
+
+- Token generation issues caused by the command suffix.
+
+### Removed
+
+- The "refresh status" and "overview status" buttons, simplifying the status interface.
+
 ## v2.0.0 - 2025-01-18
 
 ### Breaking Changes
@@ -21,4 +39,4 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 
 ### Removed
 
-- Removed certain limit-related variables such as `MIRROR LIMIT` and `LEECH LIMIT`.
\ No newline at end of file
+- Removed certain limit-related variables such as `MIRROR LIMIT` and `LEECH LIMIT`.
diff --git a/bot/core/config_manager.py b/bot/core/config_manager.py
index e7aa16bde..c63815e04 100644
--- a/bot/core/config_manager.py
+++ b/bot/core/config_manager.py
@@ -41,9 +41,6 @@ class Config:
     RSS_CHAT = ""
     RSS_DELAY = 600
     RSS_SIZE_LIMIT = 0
-    SEARCH_API_LINK = ""
-    SEARCH_LIMIT = 0
-    SEARCH_PLUGINS: ClassVar[list[str]] = []
     STOP_DUPLICATE = False
     STREAMWISH_API = ""
     SUDO_USERS = ""
diff --git a/bot/core/startup.py b/bot/core/startup.py
index 518ccf93a..250a30bfb 100644
--- a/bot/core/startup.py
+++ b/bot/core/startup.py
@@ -46,6 +46,8 @@ def update_aria2_options():
 
 
 async def load_settings():
+    if await aiopath.exists("Thumbnails"):
+        await rmtree("Thumbnails", ignore_errors=True)
     if not Config.DATABASE_URL:
         return
     await database.connect()
diff --git a/bot/helper/aeon_utils/access_check.py b/bot/helper/aeon_utils/access_check.py
index 8f271f179..dea7ec4e3 100644
--- a/bot/helper/aeon_utils/access_check.py
+++ b/bot/helper/aeon_utils/access_check.py
@@ -19,7 +19,8 @@
 
 async def error_check(message):
     msg, button = [], None
-    user_id = message.from_user.id
+    user = message.from_user or message.sender_chat
+    user_id = user.id
     token_timeout = Config.TOKEN_TIMEOUT
 
     if message.chat.type != message.chat.type.BOT:
@@ -67,7 +68,7 @@ async def error_check(message):
 
     if user_id not in {
         Config.OWNER_ID,
-        1781717085,
+        Config.RSS_CHAT,
         user_data.get(user_id, {}).get("is_sudo"),
     }:
         token_msg, button = await token_check(user_id, button)
diff --git a/bot/helper/aeon_utils/metadata_editor.py b/bot/helper/aeon_utils/metadata_editor.py
index bfb11bc2a..a2557a6d8 100644
--- a/bot/helper/aeon_utils/metadata_editor.py
+++ b/bot/helper/aeon_utils/metadata_editor.py
@@ -159,11 +159,8 @@ async def get_metadata_cmd(file_path, key):
 
 
 # later
-async def add_attachment(file, attachment_path):
-    LOGGER.info(f"Adding photo attachment to file: {file}")
-
+async def get_embed_thumb_cmd(file, attachment_path):
     temp_file = f"{file}.temp.mkv"
-
     attachment_ext = attachment_path.split(".")[-1].lower()
     mime_type = "application/octet-stream"
"application/octet-stream" if attachment_ext in ["jpg", "jpeg"]: @@ -173,7 +170,11 @@ async def add_attachment(file, attachment_path): cmd = [ "xtra", - "-y", + "-hide_banner", + "-loglevel", + "error", + "-progress", + "pipe:1", "-i", file, "-attach", @@ -184,18 +185,9 @@ async def add_attachment(file, attachment_path): "copy", "-map", "0", + "-threads", + f"{max(1, os.cpu_count() // 2)}", temp_file, ] - process = await create_subprocess_exec(*cmd, stderr=PIPE, stdout=PIPE) - stdout, stderr = await process.communicate() - - if process.returncode != 0: - err = stderr.decode().strip() - LOGGER.error(err) - LOGGER.error(f"Error adding photo attachment to file: {file}") - return - - os.replace(temp_file, file) - LOGGER.info(f"Photo attachment added successfully to file: {file}") - return + return cmd, temp_file diff --git a/bot/helper/common.py b/bot/helper/common.py index 23d19c5a6..86e3e7200 100644 --- a/bot/helper/common.py +++ b/bot/helper/common.py @@ -24,7 +24,7 @@ ) from bot.core.aeon_client import TgClient from bot.core.config_manager import Config -from bot.helper.aeon_utils.metadata_editor import get_metadata_cmd, get_watermark_cmd +from bot.helper.aeon_utils.metadata_editor import get_metadata_cmd, get_watermark_cmd, get_embed_thumb_cmd from .ext_utils.bot_utils import get_size_bytes, new_task, sync_to_async from .ext_utils.bulk_links import extract_bulk_links @@ -601,18 +601,14 @@ async def proceed_extract(self, dl_path, gid): if not self.is_file: self.subname = file_ code = await sevenz.extract(f_path, t_path, pswd) - if code == 0: + if code == 0: + for file_ in files: + if is_archive_split(file_) or is_archive(file_): + del_path = ospath.join(dirpath, file_) try: - await remove(f_path) + await remove(del_path) except Exception: self.is_cancelled = True - for file_ in files: - if is_archive_split(file_): - del_path = ospath.join(dirpath, file_) - try: - await remove(del_path) - except Exception: - self.is_cancelled = True return t_path if self.is_file and code == 0 else dl_path async def proceed_ffmpeg(self, dl_path, gid): @@ -1198,3 +1194,66 @@ async def proceed_watermark(self, dl_path, gid): if checked: cpu_eater_lock.release() return dl_path + + async def proceed_embed_thumb(self, dl_path, gid): + thumb = self.e_thumb + ffmpeg = FFMpeg(self) + checked = False + if self.is_file: + if is_mkv(dl_path): + cmd, temp_file = await get_embed_thumb_cmd(dl_path, thumb) + if cmd: + if not checked: + checked = True + async with task_dict_lock: + task_dict[self.mid] = FFmpegStatus( + self, + ffmpeg, + gid, + "E_thumb", + ) + self.progress = False + await cpu_eater_lock.acquire() + self.progress = True + self.subsize = self.size + res = await ffmpeg.metadata_watermark_cmds(cmd, dl_path) + if res: + os.replace(temp_file, dl_path) + else: + for dirpath, _, files in await sync_to_async( + walk, + dl_path, + topdown=False, + ): + for file_ in files: + file_path = ospath.join(dirpath, file_) + if self.is_cancelled: + cpu_eater_lock.release() + return "" + if is_mkv(file_path): + cmd, temp_file = await get_embed_thumb_cmd(file_path, thumb) + if cmd: + if not checked: + checked = True + async with task_dict_lock: + task_dict[self.mid] = FFmpegStatus( + self, + ffmpeg, + gid, + "E_thumb", + ) + self.progress = False + await cpu_eater_lock.acquire() + self.progress = True + LOGGER.info(f"Running cmd for: {file_path}") + self.subsize = await aiopath.getsize(file_path) + self.subname = file_ + res = await ffmpeg.metadata_watermark_cmds( + cmd, + file_path, + ) + if res: + os.replace(temp_file, 
+        if checked:
+            cpu_eater_lock.release()
+        return dl_path
\ No newline at end of file
diff --git a/bot/helper/ext_utils/files_utils.py b/bot/helper/ext_utils/files_utils.py
index cb6432be8..84ba43ca3 100644
--- a/bot/helper/ext_utils/files_utils.py
+++ b/bot/helper/ext_utils/files_utils.py
@@ -100,9 +100,11 @@
     ".crc64",
 ]
 
-FIRST_SPLIT_REGEX = r"(\.|_)part0*1\.rar$|(\.|_)7z\.0*1$|(\.|_)zip\.0*1$|^(?!.*(\.|_)part\d+\.rar$).*\.rar$"
+FIRST_SPLIT_REGEX = (
+    r"\.part0*1\.rar$|\.7z\.0*1$|\.zip\.0*1$|^(?!.*\.part\d+\.rar$).*\.rar$"
+)
 
-SPLIT_REGEX = r"\.r\d+$|\.7z\.\d+$|\.z\d+$|\.zip\.\d+$"
+SPLIT_REGEX = r"\.r\d+$|\.7z\.\d+$|\.z\d+$|\.zip\.\d+$|\.part\d+\.rar$"
 
 
 def is_first_archive_split(file):
@@ -144,8 +146,6 @@ def clean_all():
     try:
         LOGGER.info("Cleaning Download Directory")
         rmtree(Config.DOWNLOAD_DIR, ignore_errors=True)
-        if ospath.exists("Thumbnails"):
-            rmtree("Thumbnails", ignore_errors=True)
     except Exception:
         pass
     makedirs(Config.DOWNLOAD_DIR, exist_ok=True)
@@ -324,7 +324,7 @@ async def _sevenz_progress(self):
             or self._listener.subproc.stdout.at_eof()
         ):
             try:
-                line = await wait_for(self._listener.subproc.stdout.readline(), 5)
+                line = await wait_for(self._listener.subproc.stdout.readline(), 2)
             except Exception:
                 break
             line = line.decode().strip()
diff --git a/bot/helper/ext_utils/media_utils.py b/bot/helper/ext_utils/media_utils.py
index 40a05662d..5021f5138 100644
--- a/bot/helper/ext_utils/media_utils.py
+++ b/bot/helper/ext_utils/media_utils.py
@@ -16,7 +16,7 @@
 from bot.core.config_manager import Config
 
 from .bot_utils import cmd_exec, sync_to_async
-from .files_utils import ARCH_EXT, get_mime_type
+from .files_utils import get_mime_type, is_archive, is_archive_split
 from .status_utils import time_to_seconds
 
 
@@ -34,41 +34,6 @@ async def create_thumb(msg, _id=""):
     return output
 
 
-async def is_multi_streams(path):
-    try:
-        result = await cmd_exec(
-            [
-                "ffprobe",
-                "-hide_banner",
-                "-loglevel",
-                "error",
-                "-print_format",
-                "json",
-                "-show_streams",
-                path,
-            ],
-        )
-    except Exception as e:
-        LOGGER.error(
-            f"Get Video Streams: {e}. Mostly File not found! - File: {path}",
-        )
-        return False
-    if result[0] and result[2] == 0:
-        fields = eval(result[0]).get("streams")
-        if fields is None:
-            LOGGER.error(f"get_video_streams: {result}")
-            return False
-        videos = 0
-        audios = 0
-        for stream in fields:
-            if stream.get("codec_type") == "video":
-                videos += 1
-            elif stream.get("codec_type") == "audio":
-                audios += 1
-        return videos > 1 or audios > 1
-    return False
-
-
 async def get_media_info(path):
     try:
         result = await cmd_exec(
@@ -101,9 +66,10 @@ async def get_media_info(path):
 
 async def get_document_type(path):
     is_video, is_audio, is_image = False, False, False
-    if path.endswith(tuple(ARCH_EXT)) or re_search(
-        r".+(\.|_)(rar|7z|zip|bin)(\.0*\d+)?$",
-        path,
+    if (
+        is_archive(path)
+        or is_archive_split(path)
+        or re_search(r".+(\.|_)(rar|7z|zip|bin)(\.0*\d+)?$", path)
     ):
         return is_video, is_audio, is_image
     mime_type = await sync_to_async(get_mime_type, path)
diff --git a/bot/helper/ext_utils/status_utils.py b/bot/helper/ext_utils/status_utils.py
index 29fa50df9..47c1f402e 100644
--- a/bot/helper/ext_utils/status_utils.py
+++ b/bot/helper/ext_utils/status_utils.py
@@ -31,6 +31,7 @@ class MirrorStatus:
     STATUS_FFMPEG = "FFmpeg"
     STATUS_METADATA = "Metadata"
     STATUS_WATERMARK = "Watermark"
+    STATUS_ETHUMB = "Embed Thumb"
 
 
 STATUSES = {
@@ -199,9 +200,9 @@ async def get_readable_message(sid, is_user, page_no=1, status="All", page_step=
     ):
         tstatus = await sync_to_async(task.status) if status == "All" else status
         if task.listener.is_super_chat:
-            msg += f"{index + start_position}.{tstatus}: "
+            msg += f"{index + start_position}. {tstatus}: "
         else:
-            msg += f"{index + start_position}.{tstatus}: "
+            msg += f"{index + start_position}. {tstatus}: "
         msg += f"{escape(f'{task.name()}')}"
         if task.listener.subname:
             msg += f"\n{task.listener.subname}"
diff --git a/bot/helper/mirror_leech_utils/download_utils/telegram_download.py b/bot/helper/mirror_leech_utils/download_utils/telegram_download.py
index 38963aa3a..ea2769fa4 100644
--- a/bot/helper/mirror_leech_utils/download_utils/telegram_download.py
+++ b/bot/helper/mirror_leech_utils/download_utils/telegram_download.py
@@ -94,11 +94,10 @@ async def _download(self, message, path):
 
     async def add_download(self, message, path, session):
         self.session = session
-        if self.session != TgClient.bot:
-            message = await self.session.get_messages(
-                chat_id=message.chat.id,
-                message_ids=message.id,
-            )
+        message = await self.session.get_messages(
+            chat_id=message.chat.id,
+            message_ids=message.id,
+        )
 
         media = (
             message.document
diff --git a/bot/helper/mirror_leech_utils/status_utils/ffmpeg_status.py b/bot/helper/mirror_leech_utils/status_utils/ffmpeg_status.py
index 50af1b513..8396faf8a 100644
--- a/bot/helper/mirror_leech_utils/status_utils/ffmpeg_status.py
+++ b/bot/helper/mirror_leech_utils/status_utils/ffmpeg_status.py
@@ -47,6 +47,8 @@ def status(self):
             return MirrorStatus.STATUS_METADATA
         if self._cstatus == "Watermark":
             return MirrorStatus.STATUS_WATERMARK
+        if self._cstatus == "E_thumb":
+            return MirrorStatus.STATUS_ETHUMB
         return MirrorStatus.STATUS_FFMPEG
 
     def task(self):
diff --git a/bot/helper/telegram_helper/message_utils.py b/bot/helper/telegram_helper/message_utils.py
index 3abcc5f95..6fce739a7 100644
--- a/bot/helper/telegram_helper/message_utils.py
+++ b/bot/helper/telegram_helper/message_utils.py
@@ -65,9 +65,9 @@ async def send_message(
             parse_mode=parse_mode,
         )
     except FloodWait as f:
+        LOGGER.warning(str(f))
         if not block:
             return message
-        LOGGER.warning(str(f))
        await sleep(f.value * 1.2)
        return await send_message(message, text, buttons, photo, markdown)
     except Exception as e:
@@ -104,9 +104,9 @@ async def edit_message(
             parse_mode=parse_mode,
         )
     except FloodWait as f:
+        LOGGER.warning(str(f))
         if not block:
             return message
-        LOGGER.warning(str(f))
         await sleep(f.value * 1.2)
         return await edit_message(message, text, buttons, photo, markdown)
     except (MessageNotModified, MessageEmpty):
@@ -375,14 +375,14 @@ async def send_status_message(msg, user_id=0):
                     obj.cancel()
                     del intervals["status"][sid]
                 return
-            message = status_dict[sid]["message"]
-            await delete_message(message)
+            old_message = status_dict[sid]["message"]
             message = await send_message(msg, text, buttons, block=False)
             if isinstance(message, str):
                 LOGGER.error(
                     f"Status with id: {sid} haven't been sent. Error: {message}",
                 )
                 return
+            await delete_message(old_message)
             message.text = text
             status_dict[sid].update({"message": message, "time": time()})
         else:
diff --git a/bot/modules/rss.py b/bot/modules/rss.py
index 222802a68..f15f34eb8 100644
--- a/bot/modules/rss.py
+++ b/bot/modules/rss.py
@@ -733,7 +733,7 @@ async def rss_monitor():
                 try:
                     await sleep(10)
                 except Exception:
-                    raise RssShutdownException("Rss Monitor Stopped!")
+                    raise RssShutdownException("Rss Monitor Stopped!") from None
                 try:
                     item_title = rss_d.entries[feed_count]["title"]
                     try:
diff --git a/bot/modules/search.py b/bot/modules/search.py
index fb5cc927a..cd87a507d 100644
--- a/bot/modules/search.py
+++ b/bot/modules/search.py
@@ -5,7 +5,6 @@
 from httpx import AsyncClient
 
 from bot import LOGGER, xnox_client
-from bot.core.config_manager import Config
 from bot.helper.ext_utils.bot_utils import new_task, sync_to_async
 from bot.helper.ext_utils.status_utils import get_readable_file_size
 from bot.helper.ext_utils.telegraph_helper import telegraph
@@ -13,123 +12,76 @@
 from bot.helper.telegram_helper.message_utils import edit_message, send_message
 
 PLUGINS = []
-SITES = None
 TELEGRAPH_LIMIT = 300
+SEARCH_PLUGINS = [
+    "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/piratebay.py",
+    "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/limetorrents.py",
+    "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/torlock.py",
+    "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/torrentscsv.py",
+    "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/eztv.py",
+    "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/torrentproject.py",
+    "https://raw.githubusercontent.com/MaurizioRicci/qBittorrent_search_engines/master/kickass_torrent.py",
+    "https://raw.githubusercontent.com/MaurizioRicci/qBittorrent_search_engines/master/yts_am.py",
+    "https://raw.githubusercontent.com/MadeOfMagicAndWires/qBit-plugins/master/engines/linuxtracker.py",
+    "https://raw.githubusercontent.com/MadeOfMagicAndWires/qBit-plugins/master/engines/nyaasi.py",
+    "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/ettv.py",
+    "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/glotorrents.py",
+    "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/thepiratebay.py",
+    "https://raw.githubusercontent.com/v1k45/1337x-qBittorrent-search-plugin/master/leetx.py",
+    "https://raw.githubusercontent.com/nindogo/qbtSearchScripts/master/magnetdl.py",
+    "https://raw.githubusercontent.com/msagca/qbittorrent_plugins/main/uniondht.py",
"https://raw.githubusercontent.com/khensolomon/leyts/master/yts.py", +] async def initiate_search_tools(): qb_plugins = await sync_to_async(xnox_client.search_plugins) - if Config.SEARCH_PLUGINS: - globals()["PLUGINS"] = [] - if qb_plugins: - names = [plugin["name"] for plugin in qb_plugins] - await sync_to_async( - xnox_client.search_uninstall_plugin, - names=names, - ) + globals()["PLUGINS"] = [] + if qb_plugins: + names = [plugin["name"] for plugin in qb_plugins] await sync_to_async( - xnox_client.search_install_plugin, - Config.SEARCH_PLUGINS, + xnox_client.search_uninstall_plugin, + names=names, ) - elif qb_plugins: - for plugin in qb_plugins: - await sync_to_async( - xnox_client.search_uninstall_plugin, - names=plugin["name"], - ) - globals()["PLUGINS"] = [] - - if Config.SEARCH_API_LINK: - global SITES - try: - async with AsyncClient() as client: - response = await client.get(f"{Config.SEARCH_API_LINK}/api/v1/sites") - data = response.json() - SITES = { - str(site): str(site).capitalize() for site in data["supported_sites"] - } - SITES["all"] = "All" - except Exception as e: - LOGGER.error( - f"{e} Can't fetching sites from SEARCH_API_LINK make sure use latest version of API", - ) - SITES = None + await sync_to_async( + xnox_client.search_install_plugin, + SEARCH_PLUGINS, + ) async def search(key, site, message, method): - if method.startswith("api"): - if method == "apisearch": - LOGGER.info(f"API Searching: {key} from {site}") - if site == "all": - api = f"{Config.SEARCH_API_LINK}/api/v1/all/search?query={key}&limit={Config.SEARCH_LIMIT}" - else: - api = f"{Config.SEARCH_API_LINK}/api/v1/search?site={site}&query={key}&limit={Config.SEARCH_LIMIT}" - elif method == "apitrend": - LOGGER.info(f"API Trending from {site}") - if site == "all": - api = f"{Config.SEARCH_API_LINK}/api/v1/all/trending?limit={Config.SEARCH_LIMIT}" - else: - api = f"{Config.SEARCH_API_LINK}/api/v1/trending?site={site}&limit={Config.SEARCH_LIMIT}" - elif method == "apirecent": - LOGGER.info(f"API Recent from {site}") - if site == "all": - api = f"{Config.SEARCH_API_LINK}/api/v1/all/recent?limit={Config.SEARCH_LIMIT}" - else: - api = f"{Config.SEARCH_API_LINK}/api/v1/recent?site={site}&limit={Config.SEARCH_LIMIT}" - try: - async with AsyncClient() as client: - response = await client.get(api) - search_results = response.json() - if "error" in search_results or search_results["total"] == 0: - await edit_message( - message, - f"No result found for {key}\nTorrent Site:- {SITES.get(site)}", - ) - return - msg = f"Found {min(search_results['total'], TELEGRAPH_LIMIT)}" - if method == "apitrend": - msg += f" trending result(s)\nTorrent Site:- {SITES.get(site)}" - elif method == "apirecent": - msg += f" recent result(s)\nTorrent Site:- {SITES.get(site)}" - else: - msg += f" result(s) for {key}\nTorrent Site:- {SITES.get(site)}" - search_results = search_results["data"] - except Exception as e: - await edit_message(message, str(e)) - return - else: - LOGGER.info(f"PLUGINS Searching: {key} from {site}") - search = await sync_to_async( - xnox_client.search_start, - pattern=key, - plugins=site, - category="all", - ) - search_id = search.id - while True: - result_status = await sync_to_async( - xnox_client.search_status, - search_id=search_id, - ) - status = result_status[0].status - if status != "Running": - break - dict_search_results = await sync_to_async( - xnox_client.search_results, + LOGGER.info(f"PLUGINS Searching: {key} from {site}") + search = await sync_to_async( + xnox_client.search_start, + pattern=key, + 
plugins=site, + category="all", + ) + search_id = search.id + while True: + result_status = await sync_to_async( + xnox_client.search_status, search_id=search_id, - limit=TELEGRAPH_LIMIT, ) - search_results = dict_search_results.results - total_results = dict_search_results.total - if total_results == 0: - await edit_message( - message, - f"No result found for {key}\nTorrent Site:- {site.capitalize()}", - ) - return - msg = f"Found {min(total_results, TELEGRAPH_LIMIT)}" - msg += f" result(s) for {key}\nTorrent Site:- {site.capitalize()}" - await sync_to_async(xnox_client.search_delete, search_id=search_id) + status = result_status[0].status + if status != "Running": + break + dict_search_results = await sync_to_async( + xnox_client.search_results, + search_id=search_id, + limit=TELEGRAPH_LIMIT, + ) + search_results = dict_search_results.results + total_results = dict_search_results.total + if total_results == 0: + await edit_message( + message, + f"No result found for {key}\nTorrent Site:- {site.capitalize()}", + ) + return + msg = f"Found {min(total_results, TELEGRAPH_LIMIT)}" + msg += f" result(s) for {key}\nTorrent Site:- {site.capitalize()}" + await sync_to_async(xnox_client.search_delete, search_id=search_id) link = await get_result(search_results, key, message, method) buttons = ButtonMaker() buttons.url_button("🔎 VIEW", link) @@ -139,62 +91,23 @@ async def search(key, site, message, method): async def get_result(search_results, key, message, method): telegraph_content = [] - if method == "apirecent": - msg = "

-    elif method == "apisearch":
-        msg = f"<h4>API Search Result(s) For {key}</h4>"
-    elif method == "apitrend":
-        msg = "<h4>API Trending Results</h4>"
-    else:
-        msg = f"<h4>PLUGINS Search Result(s) For {key}</h4>"
+    msg = f"<h4>PLUGINS Search Result(s) For {key}</h4>"
     for index, result in enumerate(search_results, start=1):
-        if method.startswith("api"):
-            try:
-                if "name" in result:
-                    msg += f"<code><a href='{result['url']}'>{escape(result['name'])}</a></code><br>"
-                if "torrents" in result:
-                    for subres in result["torrents"]:
-                        msg += f"<b>Quality: </b>{subres['quality']} | <b>Type: </b>{subres['type']} | "
-                        msg += f"<b>Size: </b>{subres['size']}<br>"
-                        if "torrent" in subres:
-                            msg += (
-                                f"<a href='{subres['torrent']}'>Direct Link</a><br>"
-                            )
-                        elif "magnet" in subres:
-                            msg += "<b>Share Magnet to</b> "
-                            msg += f"<a href='http://t.me/share/url?url={subres['magnet']}'>Telegram</a><br>"
-                    msg += "<br>"
-                else:
-                    msg += f"<b>Size: </b>{result['size']}<br>"
-                    with contextlib.suppress(Exception):
-                        msg += f"<b>Seeders: </b>{result['seeders']} | <b>Leechers: </b>{result['leechers']}<br>"
-                    if "torrent" in result:
-                        msg += (
-                            f"<a href='{result['torrent']}'>Direct Link</a><br><br>"
-                        )
-                    elif "magnet" in result:
-                        msg += "<b>Share Magnet to</b> "
-                        msg += f"<a href='http://t.me/share/url?url={result['magnet']}'>Telegram</a><br><br>"
-                    else:
-                        msg += "<br>"
-            except Exception:
-                continue
+        msg += f"<a href='{result.descrLink}'>{escape(result.fileName)}</a><br>"
+        msg += f"<b>Size: </b>{get_readable_file_size(result.fileSize)}<br>"
+        msg += f"<b>Seeders: </b>{result.nbSeeders} | <b>Leechers: </b>{result.nbLeechers}<br>"
+        link = result.fileUrl
+        if link.startswith("magnet:"):
+            msg += f"<b>Share Magnet to</b> <a href='http://t.me/share/url?url={link}'>Telegram</a><br><br>"
         else:
-            msg += f"<a href='{result.descrLink}'>{escape(result.fileName)}</a><br>"
-            msg += f"<b>Size: </b>{get_readable_file_size(result.fileSize)}<br>"
-            msg += f"<b>Seeders: </b>{result.nbSeeders} | <b>Leechers: </b>{result.nbLeechers}<br>"
-            link = result.fileUrl
-            if link.startswith("magnet:"):
-                msg += f"<b>Share Magnet to</b> <a href='http://t.me/share/url?url={link}'>Telegram</a><br><br>"
-            else:
-                msg += f"<a href='{link}'>Direct Link</a><br><br>"
+            msg += f"<a href='{link}'>Direct Link</a><br><br>"
 
-        if len(msg.encode("utf-8")) > 39000:
-            telegraph_content.append(msg)
-            msg = ""
+        if len(msg.encode("utf-8")) > 39000:
+            telegraph_content.append(msg)
+            msg = ""
 
-        if index == TELEGRAPH_LIMIT:
-            break
+        if index == TELEGRAPH_LIMIT:
+            break
 
     if msg != "":
         telegraph_content.append(msg)
@@ -221,14 +134,6 @@ async def get_result(search_results, key, message, method):
     return f"https://telegra.ph/{path[0]}"
 
 
-def api_buttons(user_id, method):
-    buttons = ButtonMaker()
-    for data, name in SITES.items():
-        buttons.data_button(name, f"torser {user_id} {data} {method}")
-    buttons.data_button("Cancel", f"torser {user_id} cancel")
-    return buttons.build_menu(2)
-
-
 async def plugin_buttons(user_id):
     buttons = ButtonMaker()
     if not PLUGINS:
@@ -250,28 +155,8 @@ async def torrent_search(_, message):
     user_id = message.from_user.id
     buttons = ButtonMaker()
     key = message.text.split()
-    if SITES is None and not Config.SEARCH_PLUGINS:
-        await send_message(
-            message,
-            "No API link or search PLUGINS added for this function",
-        )
-    elif len(key) == 1 and SITES is None:
+    if len(key) == 1:
         await send_message(message, "Send a search key along with command")
-    elif len(key) == 1:
-        buttons.data_button("Trending", f"torser {user_id} apitrend")
-        buttons.data_button("Recent", f"torser {user_id} apirecent")
-        buttons.data_button("Cancel", f"torser {user_id} cancel")
-        button = buttons.build_menu(2)
-        await send_message(message, "Send a search key along with command", button)
-    elif SITES is not None and Config.SEARCH_PLUGINS:
-        buttons.data_button("Api", f"torser {user_id} apisearch")
-        buttons.data_button("Plugins", f"torser {user_id} plugin")
-        buttons.data_button("Cancel", f"torser {user_id} cancel")
-        button = buttons.build_menu(2)
-        await send_message(message, "Choose tool to search:", button)
-    elif SITES is not None:
-        button = api_buttons(user_id, "apisearch")
-        await send_message(message, "Choose site to search | API:", button)
     else:
         button = await plugin_buttons(user_id)
         await send_message(message, "Choose site to search | Plugins:", button)
@@ -286,10 +171,6 @@ async def torrent_search_update(_, query):
     data = query.data.split()
     if user_id != int(data[1]):
        await query.answer("Not Yours!", show_alert=True)
-    elif data[2].startswith("api"):
-        await query.answer()
-        button = api_buttons(user_id, data[2])
-        await edit_message(message, "Choose site:", button)
     elif data[2] == "plugin":
         await query.answer()
         button = await plugin_buttons(user_id)
@@ -298,23 +179,7 @@ async def torrent_search_update(_, query):
         await query.answer()
         site = data[2]
         method = data[3]
-        if method.startswith("api"):
-            if key is None:
-                if method == "apirecent":
-                    endpoint = "Recent"
-                elif method == "apitrend":
-                    endpoint = "Trending"
-                await edit_message(
-                    message,
-                    f"Listing {endpoint} Items...\nTorrent Site:- {SITES.get(site)}",
-                )
-            else:
-                await edit_message(
-                    message,
-                    f"Searching for {key}\nTorrent Site:- {SITES.get(site)}",
-                )
-        else:
-            await edit_message(
+        await edit_message(
                 message,
                 f"Searching for {key}\nTorrent Site:- {site.capitalize()}",
             )
diff --git a/bot/modules/status.py b/bot/modules/status.py
index fec35d6d0..e9379eeae 100644
--- a/bot/modules/status.py
+++ b/bot/modules/status.py
@@ -51,19 +51,17 @@ async def task_status(_, message):
 async def status_pages(_, query):
     data = query.data.split()
     key = int(data[1])
+    await query.answer()
     if data[2] in ["nex", "pre"]:
-        await query.answer()
         async with task_dict_lock:
             if data[2] == "nex":
                 status_dict[key]["page_no"] += status_dict[key]["page_step"]
             else:
status_dict[key]["page_no"] -= status_dict[key]["page_step"] elif data[2] == "ps": - await query.answer() async with task_dict_lock: status_dict[key]["page_step"] = int(data[3]) elif data[2] == "st": - await query.answer() async with task_dict_lock: status_dict[key]["status"] = data[3] await update_status_message(key, force=True) diff --git a/config_sample.py b/config_sample.py index 343880dcb..136f1ba3b 100644 --- a/config_sample.py +++ b/config_sample.py @@ -82,26 +82,3 @@ RSS_DELAY = 600 RSS_CHAT = "" RSS_SIZE_LIMIT = 0 - -# Torrent Search -SEARCH_API_LINK = "" -SEARCH_LIMIT = 0 -SEARCH_PLUGINS = [ - "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/piratebay.py", - "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/limetorrents.py", - "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/torlock.py", - "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/torrentscsv.py", - "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/eztv.py", - "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/torrentproject.py", - "https://raw.githubusercontent.com/MaurizioRicci/qBittorrent_search_engines/master/kickass_torrent.py", - "https://raw.githubusercontent.com/MaurizioRicci/qBittorrent_search_engines/master/yts_am.py", - "https://raw.githubusercontent.com/MadeOfMagicAndWires/qBit-plugins/master/engines/linuxtracker.py", - "https://raw.githubusercontent.com/MadeOfMagicAndWires/qBit-plugins/master/engines/nyaasi.py", - "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/ettv.py", - "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/glotorrents.py", - "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/thepiratebay.py", - "https://raw.githubusercontent.com/v1k45/1337x-qBittorrent-search-plugin/master/leetx.py", - "https://raw.githubusercontent.com/nindogo/qbtSearchScripts/master/magnetdl.py", - "https://raw.githubusercontent.com/msagca/qbittorrent_plugins/main/uniondht.py", - "https://raw.githubusercontent.com/khensolomon/leyts/master/yts.py", -] diff --git a/update.py b/update.py index d229d7fad..efbf78dfc 100644 --- a/update.py +++ b/update.py @@ -88,22 +88,18 @@ def format(self, record: LogRecord) -> str: # Fallback to environment variables for DATABASE_URL DATABASE_URL = ( - config_file.get("DATABASE_URL", "").strip() - or os.getenv("DATABASE_URL", "").strip() + config_file.get("DATABASE_URL", "") + or os.getenv("DATABASE_URL", "") ) if DATABASE_URL: try: conn = MongoClient(DATABASE_URL, server_api=ServerApi("1")) db = conn.luna - old_config = db.settings.deployConfig.find_one({"_id": BOT_ID}, {"_id": 0}) config_dict = db.settings.config.find_one({"_id": BOT_ID}) - if ( - (old_config is not None and old_config == config_file) - or old_config is None - ) and config_dict is not None: - config_file["UPSTREAM_REPO"] = config_dict["UPSTREAM_REPO"] - config_file["UPSTREAM_BRANCH"] = config_dict["UPSTREAM_BRANCH"] + if config_dict is not None: + config_file["UPSTREAM_REPO"] = config_dict.get("UPSTREAM_REPO", config_file.get("UPSTREAM_REPO")) + config_file["UPSTREAM_BRANCH"] = config_dict.get("UPSTREAM_BRANCH", config_file.get("UPSTREAM_BRANCH")) conn.close() except Exception as e: log_error(f"Database ERROR: {e}") @@ -113,7 +109,7 @@ def format(self, record: LogRecord) -> str: 
"https://github.com/AeonOrg/Aeon-MLTB", ) -UPSTREAM_BRANCH = config_file.get("UPSTREAM_BRANCH", "") or "main" +UPSTREAM_BRANCH = config_file.get("UPSTREAM_BRANCH", "") or "beta" if UPSTREAM_REPO: if path.exists(".git"):