diff --git a/.env.example b/.env.example
new file mode 100644
index 00000000..312f18ee
--- /dev/null
+++ b/.env.example
@@ -0,0 +1,61 @@
+# Example .env file for ytdlbot configuration
+
+# Number of workers (default is 100)
+WORKERS=100
+
+# Telegram app ID
+APP_ID=
+
+# Telegram app hash
+APP_HASH=
+
+# Telegram bot token
+BOT_TOKEN=
+
+# Owner ID or username
+OWNER=
+
+# List of authorized users (comma-separated)
+AUTHORIZED_USER=
+
+# MySQL Data Source Name
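+# Example (assumed format, adjust to your setup): mysql+pymysql://user:password@127.0.0.1:3306/ytdl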
+MYSQL_DSN=
+
+# Redis host
+REDIS_HOST=
+
+# Enable FFMPEG for video processing (True/False)
+ENABLE_FFMPEG=False
+
+# Desired audio format (e.g., mp3, wav)
+AUDIO_FORMAT=
+
+# Enable Aria2 for downloads (True/False)
+ENABLE_ARIA2=False
+
+# Path to Rclone executable
+RCLONE_PATH=
+
+# Enable VIP features (True/False)
+ENABLE_VIP=False
+
+# Payment provider token
+PROVIDER_TOKEN=
+
+# Free downloads allowed per user per 24-hour period
+FREE_DOWNLOAD=5
+
+# Token price (default: 10 credits for 1 USD)
+TOKEN_PRICE=10
+
+# Rate limit for requests
+RATE_LIMIT=120
+
+# Path for temporary files (ensure the directory exists and is writable)
+TMPFILE_PATH=
+
+# Maximum size for Telegram uploads in MB
+TG_NORMAL_MAX_SIZE=2000
+
+# Maximum URL length in captions
+CAPTION_URL_LENGTH_LIMIT=150
diff --git a/Dockerfile b/Dockerfile
index 737a0c4c..98516054 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,14 +1,16 @@
-FROM python:3.11 as builder
-ADD requirements.txt /tmp/
-RUN apt update && apt install -y git && pip3 install --user -r /tmp/requirements.txt && rm /tmp/requirements.txt
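+# Build stage: install the locked dependencies with PDM (creates /build/.venv)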
+FROM python:3.12-alpine AS pybuilder
+ADD pyproject.toml pdm.lock /build/
+WORKDIR /build
+RUN apk add alpine-sdk python3-dev musl-dev linux-headers
+RUN pip install pdm
+RUN pdm install
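+
+# Runtime stage: ffmpeg/aria2 plus the site-packages copied from the builder's venv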
+FROM python:3.12-alpine AS runner
+WORKDIR /app
-FROM python:3.11-slim
-WORKDIR /ytdlbot/ytdlbot
-ENV TZ=Europe/London
+RUN apk update && apk add --no-cache ffmpeg aria2
+COPY --from=pybuilder /build/.venv/lib/ /usr/local/lib/
+COPY ytdlbot /app
-RUN apt update && apt install -y --no-install-recommends --no-install-suggests ffmpeg vnstat git aria2
-COPY --from=builder /root/.local /usr/local
-COPY . /ytdlbot
-
-CMD ["/usr/local/bin/supervisord", "-c" ,"/ytdlbot/conf/supervisor_main.conf"]
+CMD ["python" ,"main.py"]
diff --git a/Makefile b/Makefile
deleted file mode 100644
index 299f31fa..00000000
--- a/Makefile
+++ /dev/null
@@ -1,52 +0,0 @@
-define NOLOGGING
-
- logging:
- driver: none
-endef
-export NOLOGGING
-
-default:
- docker pull bennythink/ytdlbot
-
-bot:
- make
- docker-compose up -d
- docker system prune -a --volumes -f
-
-worker:
- make
- docker-compose -f worker.yml up -d
- docker system prune -a --volumes -f
- sleep 5
-
-weak-worker:
- make
- docker-compose --compatibility -f worker.yml up -d
- docker system prune -a --volumes -f
- sleep 5
-
-upgrade-all-worker:
- bash upgrade_worker.sh
-
-tag:
- git tag -a v$(shell date "+%Y-%m-%d")_$(shell git rev-parse --short HEAD) -m v$(shell date "+%Y-%m-%d")
- git push --tags
-
-nolog:
- echo "$$NOLOGGING">> worker.yml
-
-flower:
- echo 'import dbm;dbm.open("data/flower","n");exit()'| python3
-
-up:
- docker build -t bennythink/ytdlbot:latest .
- docker-compose -f docker-compose.yml -f worker.yml up -d
-
-ps:
- docker-compose -f docker-compose.yml -f worker.yml ps
-
-down:
- docker-compose -f docker-compose.yml -f worker.yml down
-
-logs:
- docker-compose -f docker-compose.yml -f worker.yml logs -f worker ytdl
\ No newline at end of file
diff --git a/Procfile b/Procfile
deleted file mode 100644
index 8be22ed7..00000000
--- a/Procfile
+++ /dev/null
@@ -1 +0,0 @@
-worker: python ytdlbot/ytdl_bot.py
\ No newline at end of file
diff --git a/README.md b/README.md
index fea9daa5..3c5fbaa1 100644
--- a/README.md
+++ b/README.md
@@ -6,12 +6,10 @@
This Telegram bot allows you to download videos from YouTube and [other supported websites](#supported-websites).
-**Celery mode won't work and I don't know why. So I may shutting down this bot soon.**
-
# Usage
-* EU(recommended): [https://t.me/benny_2ytdlbot](https://t.me/benny_2ytdlbot)
-* Asia:[https://t.me/benny_ytdlbot](https://t.me/benny_ytdlbot)
+* EU🇪🇺: [https://t.me/benny_2ytdlbot](https://t.me/benny_2ytdlbot)
+* Singapore🇸🇬: [https://t.me/benny_ytdlbot](https://t.me/benny_ytdlbot)
* Join Telegram Channel https://t.me/+OGRC8tp9-U9mZDZl for updates.
@@ -30,39 +28,21 @@ Just send a link directly to the bot.
[terabox.txt](https://github.com/ytdl-org/youtube-dl#how-do-i-pass-cookies-to-youtube-dl).
# Features
-
1. fast download and upload.
2. ad-free
3. support progress bar
4. audio conversion
-5. playlist download
-6. payment support: afdian, buy me a coffee, Telegram Payment and Tron(TRX)
-7. different video resolutions
-8. sending as file or streaming as video
-9. celery worker distribution - faster than before. **NOT WORKING**
-10. subscriptions to YouTube Channels
-11. cache mechanism - download once for the same video.
-12. instagram posts(only available for my bot)
-13. 4 GiB file size support with Telegram Premium
-14. History and inline mode support
-15. Supports multiple download engines (yt-dlp, aria2, requests).
-
-> [!NOTE]
-> **For users of [my official bot](https://t.me/benny_ytdlbot)**\
-> Files larger than 2 GiB will be automatically uploaded by me(My Premium Account). By utilizing our service for such downloads, you consent to this process. \
-> That means I know who you are and what you download. \
-> Rest assured that we handle your personal information with the utmost care.
->
+5. different video resolutions
+6. sending as file or streaming as video
+7. cache mechanism - download once for the same video.
+8. Supports multiple download engines (yt-dlp, aria2, requests).
+
+
> ## Limitations
> Due to limitations on servers and bandwidth, there are some restrictions on this free service.
-> * Each user is limited to 10 free downloads per 24-hour period
-> * Maximum of three subscriptions allowed for YouTube channels.
-> * Files bigger than 2 GiB will require at least 1 download token.
->
-> If you need more downloads, you can buy download tokens.
->
-> **Thank you for using the [official bot](https://t.me/benny_ytdlbot).**
-
+> * Each user is limited to 5 free downloads per 24-hour period
+
+
# Screenshots
## Normal download
@@ -73,8 +53,6 @@ Just send a link directly to the bot.
![](assets/instagram.png)
-## celery **NOT WORKING**
-
-![](assets/2.jpeg)
# How to deploy?
@@ -82,43 +60,12 @@ Just send a link directly to the bot.
This bot can be deployed on any platform that supports Python.
## Run natively on your machine
-
-To deploy this bot, follow these steps:
-
-1. Install bot dependencies
- * Install Python 3.10 or a later version, FFmpeg.
- * (optional)Aria2 and add it to the PATH.
-
-2. Clone the code from the repository and cd into it.
- * ```Bash
- git clone https://github.com/tgbot-collection/ytdlbot
- ```
- * ```Bash
- cd ytdlbot/
- ```
-3. Creating a virtual environment and installing required modules in Python.
- * ```Python
- python -m venv venv
- ```
- * ```Bash
- source venv/bin/activate # Linux
- #or
- .\venv\Scripts\activate # Windows
- ```
- * ```Python
- pip install --upgrade pip
- ```
- * ```Python
- pip install -r requirements.txt
- ```
-4. Set the environment variables `TOKEN`, `APP_ID`, `APP_HASH`, and any others that you may need.
- * Change values in ytdlbot/config.py or
- * Use export APP_ID=111 APP_HASH=111 TOKEN=123
-5. Finally, run the bot with
- * ```Python
- python ytdlbot/ytdl_bot.py
- ```
-
+* Install [PDM](https://pdm-project.org/)
+* Run `pdm install` to install the dependencies
+* Copy `.env.example` to `.env` and fill in the required values
+* Start the bot with `python main.py` (see the sketch below)
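+
+A minimal end-to-end session (a sketch: it assumes PDM is on your PATH and that `main.py` sits in the inner `ytdlbot/` directory, as implied by the Dockerfile):
+
+```shell
+git clone https://github.com/tgbot-collection/ytdlbot
+cd ytdlbot
+pdm install                      # install the locked dependencies into .venv
+cp .env.example .env             # set APP_ID, APP_HASH, BOT_TOKEN at minimum
+pdm run python ytdlbot/main.py   # run inside the PDM-managed environment
+```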
+
+
## Docker
One line command to run the bot
@@ -127,169 +74,9 @@ One line command to run the bot
docker run -e APP_ID=111 -e APP_HASH=111 -e TOKEN=370FXI bennythink/ytdlbot
```
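+
+Equivalently, with the variable names from `.env.example` (assuming your image reads the new configuration):
+
+```shell
+docker run --env-file .env bennythink/ytdlbot
+```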
-## Heroku
-
- Deploy to heroku
-
-
-
-If you are having trouble deploying, you can fork the project to your personal account and deploy it from there.
-
-**Starting November 28, 2022, free Heroku Dynos, free Heroku Postgres, and free Heroku Data for Redisยฎ plans will no
-longer be available.**
-[Heroku Announcement](https://devcenter.heroku.com/articles/free-dyno-hours)
-
-
# Complete deployment guide for docker-compose
-* contains every functionality
-* compatible with amd64 and arm64
-
-## 1. get docker-compose.yml
-
-Download `docker-compose.yml` file to a directory
-
-## 2. create data directory
-
-```shell
-mkdir data
-mkdir env
-```
-
-## 3. configuration
-
-### 3.1. set environment variables
-
-```shell
-vim env/ytdl.env
-```
-
-You can configure all the following environment variables:
-
-* WORKERS: workers count for celery **NOT WORKING**
-* PYRO_WORKERS: number of workers for pyrogram, default is 100
-* APP_ID: **REQUIRED**, get it from https://core.telegram.org/
-* APP_HASH: **REQUIRED**
-* TOKEN: **REQUIRED**
-* REDIS: **REQUIRED if you need VIP mode and cache** โ ๏ธ Don't publish your redis server on the internet. โ ๏ธ
-* EXPIRE: token expire time, default: 1 day
-* ENABLE_VIP: enable VIP mode
-* OWNER: owner username
-* AUTHORIZED_USER: only authorized users can use the bot
-* REQUIRED_MEMBERSHIP: group or channel username, user must join this group to use the bot
-* ENABLE_CELERY: celery mode, default: disable **NOT WORKING**
-* BROKER: celery broker, should be redis://redis:6379/0 **NOT WORKING**
-* MYSQL_HOST:MySQL host
-* MYSQL_USER: MySQL username
-* MYSQL_PASS: MySQL password
-* AUDIO_FORMAT: default audio format
-* ARCHIVE_ID: forward all downloads to this group/channel
-* IPv6 = os.getenv("IPv6", False)
-* ENABLE_FFMPEG = os.getenv("ENABLE_FFMPEG", False)
-* PROVIDER_TOKEN: stripe token on Telegram payment
-* PLAYLIST_SUPPORT: download playlist support
-* M3U8_SUPPORT: download m3u8 files support
-* ENABLE_ARIA2: enable aria2c download
-* FREE_DOWNLOAD: free download count per day
-* TOKEN_PRICE: token price per 1 USD
-* GOOGLE_API_KEY: YouTube API key, required for YouTube video subscription.
-* RCLONE_PATH: rclone path to upload files to cloud storage
-* TMPFILE_PATH: tmpfile path(file download path)
-* TRONGRID_KEY: TronGrid key, better use your own key to avoid rate limit
-* TRON_MNEMONIC: Tron mnemonic, the default one is on nile testnet.
-* PREMIUM_USER: premium user ID, it can help you to download files larger than 2 GiB
-
-## 3.2 Set up init data
-
-If you only need basic functionality, you can skip this step.
-
-### 3.2.1 Create MySQL db
-
-Required for VIP(Download token), settings, YouTube subscription.
-
-```shell
-docker-compose up -d
-docker-compose exec mysql bash
-
-mysql -u root -p
-
-> create database ytdl;
-```
-
-### 3.2.2 Setup flower db in `ytdlbot/ytdlbot/data`
-
-Required if you enable celery and want to monitor the workers.
-**NOT WORKING**
-
-```shell
-{} ~ python3
-Python 3.9.9 (main, Nov 21 2021, 03:22:47)
-[Clang 12.0.0 (clang-1200.0.32.29)] on darwin
-Type "help", "copyright", "credits" or "license" for more information.
->>> import dbm;dbm.open("flower","n");exit()
-```
-
-## 3.3 Tidy docker-compose.yml
-
-In `flower` service section, you may want to change your basic authentication username password and publish port.
-
-You can also limit CPU and RAM usage by adding a `deploy` key, use `--compatibility` when deploying.
-
-```docker
- deploy:
- resources:
- limits:
- cpus: '0.5'
- memory: 1500M
-```
-
-## 4. run
-
-### 4.1. standalone mode
-
-If you only want to run the mode without any celery worker and VIP mode, you can just start `ytdl` service
-
-```shell
-docker-compose up -d ytdl
-```
-
-### 4.2 VIP mode
-
-You'll have to start MySQL and redis to support VIP mode, subscription and settings.
-
-```
-docker-compose up -d mysql redis ytdl
-```
-
-### 4.3 Celery worker mode
-
-**NOT WORKING**
-Firstly, set `ENABLE_CELERY` to true. And then, on one machine:
-
-```shell
-docker-compose up -d
-```
-
-On the other machine:
-
-```shell
-docker-compose -f worker.yml up -d
-```
-
-**โ ๏ธ You should not publish Redis directly on the internet. โ ๏ธ**
-
-### 4.4 4 GiB Support
-
-1. Subscribe to Telegram Premium
-2. Setup user id `PREMIUM_USER` in `ytdl.env`
-3. Create session file by running `python premium.py`
-4. Copy the session file `premium.session` to `data` directory
-5. `docker-compose up -d premium`
-
-## kubernetes
-
-refer guide here [kubernetes](k8s.md)
-
+
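+Minimal steps (a sketch; it relies on the service definitions in the bundled docker-compose.yml):
+
+```shell
+cp .env.example .env   # fill in the required values first
+docker-compose up -d   # start the bot together with its companion services
+```
+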
# Command
```
@@ -298,19 +85,10 @@ about - What's this bot?
help - Help
spdl - Download links using the specific-site downloader
ytdl - Download video in group
-leech - Download file using aria2
-direct - Download file using requests
+aria2 - Download file using aria2
settings - Set your preference
-buy - Buy token
-sub - Subscribe to YouTube Channel
unsub - Unsubscribe from YouTube Channel
-sub_count - Check subscription status, owner only.
-uncache - Delete cache for this link, owner only.
-purge - Delete all tasks, owner only.
ping - Ping the Bot
-stats - Bot running status
-show_history - Show download history
-clear_history - Clear download history
```
# Test data
@@ -350,7 +128,6 @@ https://krakenfiles.com/view/oqmSTF0T5t/file.html
https://terabox.com/s/1mpgNshrZVl6KuH717Hs23Q
-
# Donation
@@ -359,7 +136,6 @@ Found this bot useful? You can donate to support the development of this bot.
## Donation Platforms
* [Buy me a coffee](https://www.buymeacoffee.com/bennythink)
-* [Afdian](https://afdian.net/@BennyThink)
* [GitHub Sponsor](https://github.com/sponsors/BennyThink)
## Stripe
diff --git a/app.json b/app.json
deleted file mode 100644
index f5f5fc37..00000000
--- a/app.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
- "name": "YouTube-Downloader",
- "description": "A Telegrambot to download youtube video",
- "repository": "https://github.com/tgbot-collection/ytdlbot",
- "logo": "https://avatars.githubusercontent.com/u/73354211?s=200&v=4",
- "keywords": [
- "telegram",
- "youtube-dl"
- ],
- "env": {
- "TOKEN": {
- "description": "Bot token",
- "value": "token"
- },
- "APP_ID": {
- "description": "APP ID",
- "value": "12345"
- },
- "APP_HASH": {
- "description": "APP HASH",
- "value": "12345abc"
- },
- "OWNER": {
- "description": "Your telegram username",
- "value": "username",
- "required": false
- }
- },
- "formation": {
- "worker": {
- "quantity": 1,
- "size": "eco"
- }
- },
- "buildpacks": [
- {
- "url": "https://github.com/heroku/heroku-buildpack-python.git"
- },
- {
- "url": "https://github.com/jonathanong/heroku-buildpack-ffmpeg-latest.git"
- }
- ]
-}
diff --git a/assets/2.jpeg b/assets/2.jpeg
deleted file mode 100644
index 65c36cd4..00000000
Binary files a/assets/2.jpeg and /dev/null differ
diff --git a/conf/YouTube Download Celery.json b/conf/YouTube Download Celery.json
deleted file mode 100644
index 34393887..00000000
--- a/conf/YouTube Download Celery.json
+++ /dev/null
@@ -1,794 +0,0 @@
-{
- "__inputs": [
- {
- "name": "DS_CELERY",
- "label": "celery",
- "description": "",
- "type": "datasource",
- "pluginId": "influxdb",
- "pluginName": "InfluxDB"
- }
- ],
- "__elements": [],
- "__requires": [
- {
- "type": "grafana",
- "id": "grafana",
- "name": "Grafana",
- "version": "8.3.1"
- },
- {
- "type": "datasource",
- "id": "influxdb",
- "name": "InfluxDB",
- "version": "1.0.0"
- },
- {
- "type": "panel",
- "id": "timeseries",
- "name": "Time series",
- "version": ""
- }
- ],
- "annotations": {
- "list": [
- {
- "builtIn": 1,
- "datasource": "-- Grafana --",
- "enable": true,
- "hide": true,
- "iconColor": "rgba(0, 211, 255, 1)",
- "name": "Annotations & Alerts",
- "target": {
- "limit": 100,
- "matchAny": false,
- "tags": [],
- "type": "dashboard"
- },
- "type": "dashboard"
- }
- ]
- },
- "editable": true,
- "fiscalYearStartMonth": 0,
- "graphTooltip": 0,
- "id": null,
- "iteration": 1644554238421,
- "links": [],
- "liveNow": false,
- "panels": [
- {
- "datasource": {
- "type": "influxdb",
- "uid": "${DS_CELERY}"
- },
- "fieldConfig": {
- "defaults": {
- "color": {
- "mode": "palette-classic"
- },
- "custom": {
- "axisLabel": "",
- "axisPlacement": "auto",
- "barAlignment": 0,
- "drawStyle": "line",
- "fillOpacity": 5,
- "gradientMode": "none",
- "hideFrom": {
- "legend": false,
- "tooltip": false,
- "viz": false
- },
- "lineInterpolation": "linear",
- "lineWidth": 1,
- "pointSize": 5,
- "scaleDistribution": {
- "type": "linear"
- },
- "showPoints": "auto",
- "spanNulls": true,
- "stacking": {
- "group": "A",
- "mode": "none"
- },
- "thresholdsStyle": {
- "mode": "off"
- }
- },
- "mappings": [],
- "thresholds": {
- "mode": "absolute",
- "steps": [
- {
- "color": "green",
- "value": null
- },
- {
- "color": "red",
- "value": 80
- }
- ]
- }
- },
- "overrides": []
- },
- "gridPos": {
- "h": 8,
- "w": 12,
- "x": 0,
- "y": 0
- },
- "id": 2,
- "options": {
- "legend": {
- "calcs": [],
- "displayMode": "list",
- "placement": "bottom"
- },
- "tooltip": {
- "mode": "single"
- }
- },
- "targets": [
- {
- "alias": "Active",
- "groupBy": [
- {
- "params": [
- "$__interval"
- ],
- "type": "time"
- },
- {
- "params": [
- "null"
- ],
- "type": "fill"
- }
- ],
- "measurement": "active",
- "orderByTime": "ASC",
- "policy": "default",
- "query": "SELECT mean(\"active\") FROM \"active\" WHERE $timeFilter GROUP BY time($__interval) ",
- "rawQuery": true,
- "refId": "A",
- "resultFormat": "time_series",
- "select": [
- [
- {
- "params": [
- "active"
- ],
- "type": "field"
- },
- {
- "params": [],
- "type": "mean"
- }
- ]
- ],
- "tags": []
- },
- {
- "alias": "$tag_hostname",
- "hide": false,
- "query": "\nSELECT \nmean(\"active\") AS active\nFROM \"tasks\" WHERE (\"hostname\" =~ /^$hostname$/) AND $timeFilter GROUP BY time($__interval) ,* ORDER BY asc ",
- "rawQuery": true,
- "refId": "B",
- "resultFormat": "time_series"
- }
- ],
- "title": "Active Jobs",
- "type": "timeseries"
- },
- {
- "datasource": {
- "type": "influxdb",
- "uid": "${DS_CELERY}"
- },
- "fieldConfig": {
- "defaults": {
- "color": {
- "mode": "palette-classic"
- },
- "custom": {
- "axisLabel": "",
- "axisPlacement": "auto",
- "barAlignment": 0,
- "drawStyle": "line",
- "fillOpacity": 5,
- "gradientMode": "none",
- "hideFrom": {
- "legend": false,
- "tooltip": false,
- "viz": false
- },
- "lineInterpolation": "smooth",
- "lineWidth": 1,
- "pointSize": 5,
- "scaleDistribution": {
- "type": "linear"
- },
- "showPoints": "auto",
- "spanNulls": true,
- "stacking": {
- "group": "A",
- "mode": "none"
- },
- "thresholdsStyle": {
- "mode": "off"
- }
- },
- "mappings": [],
- "thresholds": {
- "mode": "absolute",
- "steps": [
- {
- "color": "green",
- "value": null
- },
- {
- "color": "red",
- "value": 80
- }
- ]
- },
- "unit": "percent"
- },
- "overrides": []
- },
- "gridPos": {
- "h": 8,
- "w": 12,
- "x": 12,
- "y": 0
- },
- "id": 10,
- "options": {
- "legend": {
- "calcs": [],
- "displayMode": "list",
- "placement": "bottom"
- },
- "tooltip": {
- "mode": "single"
- }
- },
- "targets": [
- {
- "alias": "$col",
- "datasource": {
- "type": "influxdb",
- "uid": "${DS_CELERY}"
- },
- "groupBy": [
- {
- "params": [
- "$__interval"
- ],
- "type": "time"
- },
- {
- "params": [
- "null"
- ],
- "type": "fill"
- }
- ],
- "measurement": "metrics",
- "orderByTime": "ASC",
- "policy": "default",
- "query": "\nSELECT \nmean(\"today_audio_success\")/mean(\"today_audio_request\")*100 as audio_success,\nmean(\"today_video_success\")/mean(\"today_video_request\")*100 as video_success\n\nFROM \"metrics\" WHERE $timeFilter GROUP BY time($__interval), * ORDER BY asc ",
- "rawQuery": true,
- "refId": "A",
- "resultFormat": "time_series",
- "select": [
- [
- {
- "params": [
- "today_audio_success"
- ],
- "type": "field"
- },
- {
- "params": [],
- "type": "mean"
- }
- ]
- ],
- "tags": []
- }
- ],
- "title": "Video & Audio Success Rate",
- "type": "timeseries"
- },
- {
- "datasource": {
- "type": "influxdb",
- "uid": "${DS_CELERY}"
- },
- "fieldConfig": {
- "defaults": {
- "color": {
- "mode": "palette-classic"
- },
- "custom": {
- "axisLabel": "",
- "axisPlacement": "auto",
- "barAlignment": 0,
- "drawStyle": "line",
- "fillOpacity": 5,
- "gradientMode": "none",
- "hideFrom": {
- "legend": false,
- "tooltip": false,
- "viz": false
- },
- "lineInterpolation": "smooth",
- "lineWidth": 1,
- "pointSize": 5,
- "scaleDistribution": {
- "type": "linear"
- },
- "showPoints": "auto",
- "spanNulls": true,
- "stacking": {
- "group": "A",
- "mode": "none"
- },
- "thresholdsStyle": {
- "mode": "off"
- }
- },
- "mappings": [],
- "thresholds": {
- "mode": "absolute",
- "steps": [
- {
- "color": "green",
- "value": null
- },
- {
- "color": "red",
- "value": 80
- }
- ]
- }
- },
- "overrides": []
- },
- "gridPos": {
- "h": 8,
- "w": 12,
- "x": 0,
- "y": 8
- },
- "id": 6,
- "options": {
- "legend": {
- "calcs": [],
- "displayMode": "list",
- "placement": "bottom"
- },
- "tooltip": {
- "mode": "single"
- }
- },
- "targets": [
- {
- "alias": "$tag_hostname:$col",
- "query": "SELECT mean(\"load1\") AS load1,mean(\"load5\") AS load5,mean(\"load15\") AS load15\nFROM \"tasks\" WHERE (\"hostname\" =~ /^$hostname$/) AND $timeFilter GROUP BY time($__interval) ,* ORDER BY asc \n\n",
- "rawQuery": true,
- "refId": "A",
- "resultFormat": "time_series"
- }
- ],
- "title": "Load Average",
- "type": "timeseries"
- },
- {
- "datasource": {
- "type": "influxdb",
- "uid": "${DS_CELERY}"
- },
- "fieldConfig": {
- "defaults": {
- "color": {
- "mode": "palette-classic"
- },
- "custom": {
- "axisLabel": "",
- "axisPlacement": "auto",
- "barAlignment": 0,
- "drawStyle": "line",
- "fillOpacity": 5,
- "gradientMode": "none",
- "hideFrom": {
- "legend": false,
- "tooltip": false,
- "viz": false
- },
- "lineInterpolation": "smooth",
- "lineWidth": 1,
- "pointSize": 5,
- "scaleDistribution": {
- "type": "linear"
- },
- "showPoints": "auto",
- "spanNulls": true,
- "stacking": {
- "group": "A",
- "mode": "none"
- },
- "thresholdsStyle": {
- "mode": "off"
- }
- },
- "mappings": [],
- "thresholds": {
- "mode": "absolute",
- "steps": [
- {
- "color": "green",
- "value": null
- },
- {
- "color": "red",
- "value": 80
- }
- ]
- },
- "unit": "percent"
- },
- "overrides": []
- },
- "gridPos": {
- "h": 8,
- "w": 12,
- "x": 12,
- "y": 8
- },
- "id": 9,
- "options": {
- "legend": {
- "calcs": [],
- "displayMode": "list",
- "placement": "bottom"
- },
- "tooltip": {
- "mode": "single"
- }
- },
- "targets": [
- {
- "alias": "$tag_hostname:$col",
- "datasource": {
- "type": "influxdb",
- "uid": "${DS_CELERY}"
- },
- "groupBy": [
- {
- "params": [
- "$__interval"
- ],
- "type": "time"
- },
- {
- "params": [
- "null"
- ],
- "type": "fill"
- }
- ],
- "measurement": "tasks",
- "orderByTime": "ASC",
- "policy": "default",
- "query": "\nSELECT mean(\"task-succeeded\")/mean(\"task-received\")*100 AS success_rate, mean(\"task-failed\")/mean(\"task-received\")*100 AS fail_rate\n\nFROM \"tasks\" WHERE (\"hostname\" =~ /^$hostname$/) AND $timeFilter GROUP BY time($__interval) ,* ORDER BY asc ",
- "rawQuery": true,
- "refId": "A",
- "resultFormat": "time_series",
- "select": [
- [
- {
- "params": [
- "task-received"
- ],
- "type": "field"
- }
- ]
- ],
- "tags": [
- {
- "key": "hostname",
- "operator": "=~",
- "value": "/^$hostname$/"
- }
- ]
- }
- ],
- "title": "Task Rate",
- "type": "timeseries"
- },
- {
- "datasource": {
- "type": "influxdb",
- "uid": "${DS_CELERY}"
- },
- "fieldConfig": {
- "defaults": {
- "color": {
- "mode": "palette-classic"
- },
- "custom": {
- "axisLabel": "",
- "axisPlacement": "auto",
- "barAlignment": 0,
- "drawStyle": "line",
- "fillOpacity": 5,
- "gradientMode": "none",
- "hideFrom": {
- "legend": false,
- "tooltip": false,
- "viz": false
- },
- "lineInterpolation": "smooth",
- "lineWidth": 1,
- "pointSize": 5,
- "scaleDistribution": {
- "type": "linear"
- },
- "showPoints": "auto",
- "spanNulls": true,
- "stacking": {
- "group": "A",
- "mode": "none"
- },
- "thresholdsStyle": {
- "mode": "off"
- }
- },
- "mappings": [],
- "thresholds": {
- "mode": "absolute",
- "steps": [
- {
- "color": "green",
- "value": null
- },
- {
- "color": "red",
- "value": 80
- }
- ]
- },
- "unit": "none"
- },
- "overrides": []
- },
- "gridPos": {
- "h": 8,
- "w": 12,
- "x": 0,
- "y": 16
- },
- "id": 13,
- "options": {
- "legend": {
- "calcs": [],
- "displayMode": "list",
- "placement": "bottom"
- },
- "tooltip": {
- "mode": "single"
- }
- },
- "targets": [
- {
- "alias": "$tag_hostname:$col",
- "datasource": {
- "type": "influxdb",
- "uid": "${DS_CELERY}"
- },
- "groupBy": [
- {
- "params": [
- "$__interval"
- ],
- "type": "time"
- },
- {
- "params": [
- "null"
- ],
- "type": "fill"
- }
- ],
- "measurement": "tasks",
- "orderByTime": "ASC",
- "policy": "default",
- "query": "\nSELECT mean(\"task-received\") AS received, mean(\"task-started\") AS started,mean(\"task-succeeded\") AS succeeded,mean(\"task-failed\") AS failed\n\nFROM \"tasks\" WHERE (\"hostname\" =~ /^$hostname$/) AND $timeFilter GROUP BY time($__interval) ,* ORDER BY asc ",
- "rawQuery": true,
- "refId": "A",
- "resultFormat": "time_series",
- "select": [
- [
- {
- "params": [
- "task-received"
- ],
- "type": "field"
- }
- ]
- ],
- "tags": [
- {
- "key": "hostname",
- "operator": "=~",
- "value": "/^$hostname$/"
- }
- ]
- }
- ],
- "title": "Task Status",
- "type": "timeseries"
- },
- {
- "datasource": {
- "type": "influxdb",
- "uid": "${DS_CELERY}"
- },
- "fieldConfig": {
- "defaults": {
- "color": {
- "mode": "palette-classic"
- },
- "custom": {
- "axisLabel": "",
- "axisPlacement": "auto",
- "barAlignment": 0,
- "drawStyle": "line",
- "fillOpacity": 5,
- "gradientMode": "none",
- "hideFrom": {
- "legend": false,
- "tooltip": false,
- "viz": false
- },
- "lineInterpolation": "smooth",
- "lineWidth": 1,
- "pointSize": 5,
- "scaleDistribution": {
- "type": "linear"
- },
- "showPoints": "auto",
- "spanNulls": true,
- "stacking": {
- "group": "A",
- "mode": "none"
- },
- "thresholdsStyle": {
- "mode": "off"
- }
- },
- "mappings": [],
- "thresholds": {
- "mode": "absolute",
- "steps": [
- {
- "color": "green",
- "value": null
- },
- {
- "color": "red",
- "value": 80
- }
- ]
- }
- },
- "overrides": []
- },
- "gridPos": {
- "h": 8,
- "w": 12,
- "x": 12,
- "y": 16
- },
- "id": 8,
- "options": {
- "legend": {
- "calcs": [],
- "displayMode": "list",
- "placement": "bottom"
- },
- "tooltip": {
- "mode": "single"
- }
- },
- "targets": [
- {
- "alias": "$col",
- "datasource": {
- "type": "influxdb",
- "uid": "${DS_CELERY}"
- },
- "groupBy": [
- {
- "params": [
- "$__interval"
- ],
- "type": "time"
- },
- {
- "params": [
- "null"
- ],
- "type": "fill"
- }
- ],
- "measurement": "metrics",
- "orderByTime": "ASC",
- "policy": "default",
- "query": "SELECT \nmean(\"today_audio_request\") as audio_request,\nmean(\"today_audio_success\") as audio_success,\n\nmean(\"today_bad_request\") as bad_request,\n\nmean(\"today_video_request\") as video_request,\nmean(\"today_video_success\") as video_success\nFROM \"metrics\" WHERE $timeFilter GROUP BY time($__interval), * ORDER BY asc ",
- "rawQuery": true,
- "refId": "A",
- "resultFormat": "time_series",
- "select": [
- [
- {
- "params": [
- "today_audio_success"
- ],
- "type": "field"
- },
- {
- "params": [],
- "type": "mean"
- }
- ]
- ],
- "tags": []
- }
- ],
- "title": "Video & Audio",
- "type": "timeseries"
- }
- ],
- "refresh": "",
- "schemaVersion": 33,
- "style": "dark",
- "tags": [],
- "templating": {
- "list": [
- {
- "current": {},
- "datasource": {
- "type": "influxdb",
- "uid": "${DS_CELERY}"
- },
- "definition": "show tag values with KEY=\"hostname\"",
- "hide": 0,
- "includeAll": true,
- "label": "hostname",
- "multi": true,
- "name": "hostname",
- "options": [],
- "query": "show tag values with KEY=\"hostname\"",
- "refresh": 1,
- "regex": "",
- "skipUrlSync": false,
- "sort": 1,
- "type": "query"
- }
- ]
- },
- "time": {
- "from": "now-15m",
- "to": "now"
- },
- "timepicker": {},
- "timezone": "",
- "title": "YouTube Download Celery",
- "uid": "9yXGmc1nk",
- "version": 14,
- "weekStart": ""
-}
\ No newline at end of file
diff --git a/conf/supervisor_main.conf b/conf/supervisor_main.conf
deleted file mode 100644
index dbae1442..00000000
--- a/conf/supervisor_main.conf
+++ /dev/null
@@ -1,34 +0,0 @@
-[supervisord]
-nodaemon=true
-logfile=/dev/null
-logfile_maxbytes=0
-user=root
-
-
-[program:vnstat]
-command=vnstatd -n
-autorestart=true
-
-
-[program:ytdl]
-directory=/ytdlbot/ytdlbot/
-command=python ytdl_bot.py
-autorestart=true
-priority=900
-stopasgroup=true
-startsecs = 30
-startretries = 2
-
-redirect_stderr=true
-stdout_logfile_maxbytes = 50MB
-stdout_logfile_backups = 2
-stdout_logfile = /var/log/ytdl.log
-
-[program:log]
-command=tail -f /var/log/ytdl.log
-autorestart=true
-priority=999
-
-redirect_stderr=true
-stdout_logfile=/dev/fd/1
-stdout_logfile_maxbytes=0
diff --git a/conf/supervisor_worker.conf b/conf/supervisor_worker.conf
deleted file mode 100644
index 5ca84301..00000000
--- a/conf/supervisor_worker.conf
+++ /dev/null
@@ -1,33 +0,0 @@
-[supervisord]
-nodaemon=true
-logfile=/dev/null
-logfile_maxbytes=0
-user=root
-
-
-[program:vnstat]
-command=vnstatd -n
-autorestart=true
-
-[program:worker]
-directory=/ytdlbot/ytdlbot/
-command=python tasks.py
-autorestart=true
-priority=900
-stopasgroup=true
-startsecs = 5
-startretries = 100
-
-redirect_stderr=true
-stdout_logfile_maxbytes = 50MB
-stdout_logfile_backups = 2
-stdout_logfile = /var/log/ytdl.log
-
-[program:log]
-command=tail -f /var/log/ytdl.log
-autorestart=true
-priority=999
-
-redirect_stderr=true
-stdout_logfile=/dev/fd/1
-stdout_logfile_maxbytes=0
diff --git a/docker-compose.yml b/docker-compose.yml
index e0b6ec13..1fed036d 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,13 +1,6 @@
version: '3.1'
services:
- socat:
- image: bennythink/socat
- restart: always
- volumes:
- - /var/run/docker.sock:/var/run/docker.sock
- entrypoint: [ "socat", "tcp-listen:2375,fork,reuseaddr","unix-connect:/var/run/docker.sock" ]
-
redis:
image: redis:7-alpine
restart: always
@@ -28,19 +21,7 @@ services:
ytdl:
image: bennythink/ytdlbot
env_file:
- - env/ytdl.env
+ - .env
restart: always
depends_on:
- - socat
- - redis
- volumes:
- - ./data/vnstat/:/var/lib/vnstat/
-
- premium:
- image: bennythink/ytdlbot
- env_file:
- - env/ytdl.env
- restart: always
- volumes:
- - ./data/premium.session:/ytdlbot/ytdlbot/premium.session
- command: [ "/usr/local/bin/python", "/ytdlbot/ytdlbot/premium.py" ]
+ - redis
\ No newline at end of file
diff --git a/k8s.md b/k8s.md
deleted file mode 100644
index 61b0146b..00000000
--- a/k8s.md
+++ /dev/null
@@ -1,200 +0,0 @@
-## Kubernetes
-
-Kubernetes, also known as K8s, is an open-source system for automating deployment, scaling, and management of
-containerized applications
-
-# Complete deployment guide for k8s deloyment
-
-* contains every functionality
-* compatible with amd64, arm64 and armv7l
-
-## First. Get all file in k8s folder
-
-Download `k8s` file to a directory on your k8s server and go to this folder
-
-## 1. Create Redis deloyment
-
-```shell
-kubectl apply -f 01.redis.yml
-```
-
-This command will create ytdl namespace, redis pod and redis service
-
-## 2. Creat MariaDB deloyment
-
-```shell
-kubectl apply -f 02.mariadb.yml
-```
-
-This deloyment will claim 10GB storage from storageClassName: longhorn. Please replace longhorn with your
-storageClassName before apply.
-
-## 3. Set environment variables
-
-Create configMap for env
-
-### 3.1 Edit configmap.yml
-
-```shell
-vim 03.configmap.yml
-```
-
-you can configure all the following environment variables:
-
-* PYRO_WORKERS: number of workers for pyrogram, default is 100
-* WORKERS: workers count for celery
-* APP_ID: **REQUIRED**, get it from https://core.telegram.org/
-* APP_HASH: **REQUIRED**
-* TOKEN: **REQUIRED**
-* REDIS: **REQUIRED if you need VIP mode and cache** โ ๏ธ Don't publish your redis server on the internet. โ ๏ธ
-
-* OWNER: owner username
-* QUOTA: quota in bytes
-* EX: quota expire time
-* MULTIPLY: vip quota comparing to normal quota
-* USD2CNY: exchange rate
-* VIP: VIP mode, default: disable
-* AFD_LINK
-* COFFEE_LINK
-* COFFEE_TOKEN
-* AFD_TOKEN
-* AFD_USER_ID
-
-* AUTHORIZED_USER: users that could use this bot, user_id, separated with `,`
-* REQUIRED_MEMBERSHIP: group or channel username, user must join this group to use the bot. Could be use with
- above `AUTHORIZED_USER`
-
-* ENABLE_CELERY: Distribution mode, default: disable. You'll can setup workers in different locations.
-* ENABLE_FFMPEG: enable ffmpeg so Telegram can stream
-* MYSQL_HOST: you'll have to setup MySQL if you enable VIP mode
-* MYSQL_USER
-* MYSQL_PASS
-* GOOGLE_API_KEY: YouTube API key, required for YouTube video subscription.
-* AUDIO_FORMAT: audio format, default is m4a. You can set to any known and supported format for ffmpeg. For
- example,`mp3`, `flac`, etc. โ ๏ธ m4a is the fastest. Other formats may affect performance.
-* ARCHIVE_ID: group or channel id/username. All downloads will send to this group first and then forward to end user.
- **Inline button will be lost during the forwarding.**
-
-### 3.2 Apply configMap for environment variables
-
-```shell
-kubectl apply -f 03.configmap.yml
-```
-
-## 4. Run Master Celery
-
-```shell
-kubectl apply -f 04.ytdl-master.yml
-```
-
-This deloyment will create ytdl-pvc PersistentVolumeClaim on storageClassName: longhorn. This clain will contain vnstat,
-cookies folder and flower database. Please replace longhorn with your storageClassName before apply
-
-### 4.1 Setup instagram cookies
-
-Required if you want to support instagram.
-
-You can use this extension
-[Get cookies.txt](https://chrome.google.com/webstore/detail/get-cookiestxt/bgaddhkoddajcdgocldbbfleckgcbcid)
-to get instagram cookies
-
-Get pod running ytdl master:
-
-```shell
-kubectl get pods --namespace ytdl
-```
-
-Name should be ytdl-xxxxxxxx
-
-Access to pod
-
-```shell
-kubectl --namespace=ytdl exec --stdin --tty ytdl-xxx -- sh
-```
-
-(replace ytdl-xxx by your pod name)
-
-Go to ytdl-pvc mounted folder
-
-```shell
-cd /ytdlbot/ytdlbot/data/
-vim instagram.com_cookies.txt
-# paste your cookies
-```
-
-## 5. Run Worker Celery
-
-```shell
-kubectl apply -f 05.ytdl-worker.yml
-```
-
-## 6. Run Flower image (OPTIONAL)
-
-### 6.1 Setup flower db
-
-Get pod running ytdl master:
-
-```shell
-kubectl get pods --namespace ytdl
-```
-
-Name should be ytdl-xxxxxxxx
-
-Access to pod
-
-```shell
-kubectl --namespace=ytdl exec --stdin --tty ytdl-xxx -- sh
-```
-
-(replace ytdl-xxx by your pod name)
-
-Go to ytdl-pvc mounted folder
-
-```shel
-cd /var/lib/vnstat/
-```
-
-Create flower database file
-
-```shell
-{} ~ python3
-Python 3.9.9 (main, Nov 21 2021, 03:22:47)
-[Clang 12.0.0 (clang-1200.0.32.29)] on darwin
-Type "help", "copyright", "credits" or "license" for more information.
->>> import dbm;dbm.open("flower","n");exit()
-```
-
-### 6.2 Config Flower Ingress
-
-This step need config ingress from line 51 of file 06.flower.yml with your ingress service. Need for access from
-internet.
-YML file should be adjusted depending on your load balancing, ingress and network system
-
-For active SSL
-
-```yml
-cert-manager.io/cluster-issuer: letsencrypt-prod
-```
-
-Replace nginx by your ingress service
-
-```yml
-ingressClassName: nginx
-```
-
-Add your domain, example
-
-```yml
-tls:
- - hosts:
- - flower.benny.com
- secretName: flower-tls
- rules:
- - host: flower.benny.com
-```
-
-### 6.3 Apply Flower deloyment
-
-```shell
-kubectl apply -f 06.flower.yml
-```
diff --git a/k8s/01.redis.yml b/k8s/01.redis.yml
deleted file mode 100644
index da52fc3c..00000000
--- a/k8s/01.redis.yml
+++ /dev/null
@@ -1,53 +0,0 @@
-apiVersion: v1
-kind: Namespace
-metadata:
- name: ytdl
-
----
-apiVersion: apps/v1
-kind: Deployment
-metadata:
- creationTimestamp: null
- labels:
- ytdl: redis
- name: redis
- namespace: ytdl
-spec:
- replicas: 1
- selector:
- matchLabels:
- ytdl: redis
- strategy: {}
- template:
- metadata:
- creationTimestamp: null
- labels:
- ytdl: redis
- spec:
- containers:
- - image: redis:7-alpine
- name: redis
- ports:
- - containerPort: 6379
- resources: {}
- restartPolicy: Always
-status: {}
-
----
-apiVersion: v1
-kind: Service
-metadata:
- creationTimestamp: null
- labels:
- ytdl: redis
- name: redis
- namespace: ytdl
-spec:
- ports:
- - name: "6379"
- port: 6379
- targetPort: 6379
- selector:
- ytdl: redis
-status:
- loadBalancer: {}
\ No newline at end of file
diff --git a/k8s/02.mariadb.yml b/k8s/02.mariadb.yml
deleted file mode 100644
index c89dbc75..00000000
--- a/k8s/02.mariadb.yml
+++ /dev/null
@@ -1,80 +0,0 @@
-apiVersion: v1
-kind: PersistentVolumeClaim
-metadata:
- creationTimestamp: null
- labels:
- ytdl: mariadb-pvc
- name: mariadb-pvc
- namespace: ytdl
-spec:
- accessModes:
- - ReadWriteOnce
- storageClassName: longhorn
- resources:
- requests:
- storage: 10Gi
-status: {}
-
----
-apiVersion: apps/v1
-kind: Deployment
-metadata:
- annotations:
- creationTimestamp: null
- labels:
- ytdl: mariadb
- name: mariadb
- namespace: ytdl
-spec:
- replicas: 1
- selector:
- matchLabels:
- ytdl: mariadb
- strategy:
- type: Recreate
- template:
- metadata:
- creationTimestamp: null
- labels:
- ytdl: mariadb
- spec:
- containers:
- - env:
- - name: MYSQL_ROOT_PASSWORD
- value: ro0tP4sSworD
- - name: MYSQL_DATABASE
- value: ytdl
- image: mariadb:latest
- name: mariadb
- ports:
- - containerPort: 3306
- resources: {}
- volumeMounts:
- - mountPath: /var/lib/mysql
- name: "mariadb-persistent-storage"
- restartPolicy: Always
- volumes:
- - name: mariadb-persistent-storage
- persistentVolumeClaim:
- claimName: mariadb-pvc
-status: {}
-
----
-apiVersion: v1
-kind: Service
-metadata:
- creationTimestamp: null
- labels:
- ytdl: mariadb
- name: mariadb-svc
- namespace: ytdl
-spec:
- ports:
- - name: "3306"
- port: 3306
- targetPort: 3306
- selector:
- ytdl: mariadb
-status:
- loadBalancer: {}
-
diff --git a/k8s/03.configmap.yml b/k8s/03.configmap.yml
deleted file mode 100644
index 90ec84a8..00000000
--- a/k8s/03.configmap.yml
+++ /dev/null
@@ -1,17 +0,0 @@
-apiVersion: v1
-kind: ConfigMap
-metadata:
- name: ytdlenv
- namespace: ytdl
- annotations:
-data:
- APP_HASH:
- APP_ID:
- TOKEN:
- ARCHIVE_ID:
- ENABLE_CELERY: 'True'
- ENABLE_FFMPEG: 'True'
- MYSQL_HOST: mariadb-svc
- MYSQL_PASS: ro0tP4sSworD
- MYSQL_USER: root
- REDIS: redis
\ No newline at end of file
diff --git a/k8s/04.ytdl-master.yml b/k8s/04.ytdl-master.yml
deleted file mode 100644
index f17579f6..00000000
--- a/k8s/04.ytdl-master.yml
+++ /dev/null
@@ -1,65 +0,0 @@
----
-apiVersion: v1
-kind: PersistentVolumeClaim
-metadata:
- name: ytdl-pvc
- namespace: ytdl
- creationTimestamp: null
- labels:
- ytdl: ytdl-pvc
-spec:
- accessModes:
- - ReadWriteMany
- storageClassName: longhorn
- resources:
- requests:
- storage: 10Gi
-status: {}
-
----
-apiVersion: apps/v1
-kind: Deployment
-metadata:
- name: ytdl
- namespace: ytdl
- creationTimestamp: null
- labels:
- ytdl: ytdl
-spec:
- replicas: 1
- selector:
- matchLabels:
- ytdl: ytdl
- template:
- metadata:
- creationTimestamp: null
- labels:
- ytdl: ytdl
- spec:
- volumes:
- - name: ytdl-pvc
- persistentVolumeClaim:
- claimName: ytdl-pvc
- containers:
- - name: ytdl
- image: bennythink/ytdlbot
- envFrom:
- - configMapRef:
- name: ytdlenv
- resources: {}
- volumeMounts:
- - name: ytdl-pvc
- mountPath: /var/lib/vnstat/
- subPath: vnstat/
- - name: ytdl-pvc
- mountPath: /ytdlbot/ytdlbot/data/
- subPath: data/
- terminationMessagePath: /dev/termination-log
- terminationMessagePolicy: File
- imagePullPolicy: Always
- restartPolicy: Always
- terminationGracePeriodSeconds: 30
- dnsPolicy: ClusterFirst
- securityContext: {}
- schedulerName: default-scheduler
-status: {}
diff --git a/k8s/05.ytdl-worker.yml b/k8s/05.ytdl-worker.yml
deleted file mode 100644
index ca154655..00000000
--- a/k8s/05.ytdl-worker.yml
+++ /dev/null
@@ -1,47 +0,0 @@
-apiVersion: apps/v1
-kind: Deployment
-metadata:
- creationTimestamp: null
- labels:
- ytdl: ytdl-worker
- name: ytdl-worker
- namespace: ytdl
-spec:
- replicas: 4
- selector:
- matchLabels:
- ytdl: ytdl-worker
- template:
- metadata:
- creationTimestamp: null
- labels:
- ytdl: ytdl-worker
- spec:
- volumes:
- - name: ytdl-pvc
- persistentVolumeClaim:
- claimName: ytdl-pvc
- containers:
- - name: ytdl-worker
- image: bennythink/ytdlbot
- args:
- - /usr/local/bin/supervisord
- - '-c'
- - /ytdlbot/conf/supervisor_worker.conf
- envFrom:
- - configMapRef:
- name: ytdlenv
- resources: {}
- volumeMounts:
- - name: ytdl-pvc
- mountPath: /ytdlbot/ytdlbot/data/
- subPath: data/
- terminationMessagePath: /dev/termination-log
- terminationMessagePolicy: File
- imagePullPolicy: Always
- restartPolicy: Always
- terminationGracePeriodSeconds: 30
- dnsPolicy: ClusterFirst
- securityContext: {}
- schedulerName: default-scheduler
-status: {}
diff --git a/k8s/06.flower.yml b/k8s/06.flower.yml
deleted file mode 100644
index e7c01c73..00000000
--- a/k8s/06.flower.yml
+++ /dev/null
@@ -1,101 +0,0 @@
-apiVersion: apps/v1
-kind: Deployment
-metadata:
- creationTimestamp: null
- labels:
- ytdl: flower
- name: flower
- namespace: ytdl
-spec:
- replicas: 1
- selector:
- matchLabels:
- ytdl: flower
- strategy:
- type: Recreate
- template:
- metadata:
- creationTimestamp: null
- labels:
- ytdl: flower
- spec:
- containers:
- - envFrom:
- - configMapRef:
- name: ytdlenv
- args:
- - /usr/local/bin/celery
- - -A
- - flower_tasks
- - flower
- - --basic_auth=bennythink:123456
- - --address=0.0.0.0
- - --persistent
- - --purge_offline_workers=3600
- image: bennythink/ytdlbot
- name: flower
- ports:
- - containerPort: 5555
- resources: {}
- volumeMounts:
- - name: ytdl-pvc
- mountPath: /ytdlbot/ytdlbot/flower
- subPath: vnstat/flower
- restartPolicy: Always
- volumes:
- - name: ytdl-pvc
- persistentVolumeClaim:
- claimName: ytdl-pvc
-status: {}
-
-# THIS IS OPTION IF YOU WANT PUBLIC FLOWER PAGE TO INTERNET.
-# should be adjusted depending on your load balancing system machine
----
-apiVersion: v1
-kind: Service
-metadata:
- creationTimestamp: null
- labels:
- ytdl: flower
- name: flower-svc
- namespace: ytdl
-spec:
- type: NodePort
- ports:
- - name: "5555"
- protocol: TCP
- port: 5555
- targetPort: 5555
- selector:
- ytdl: flower
-status:
- loadBalancer: {}
-
----
-apiVersion: networking.k8s.io/v1
-kind: Ingress
-metadata:
- name: nginx-flower-ingress
- namespace: ytdl
- annotations:
- # cert-manager.io/cluster-issuer: letsencrypt-prod
- nginx.ingress.kubernetes.io/rewrite-target: /
- # nginx.ingress.kubernetes.io/whitelist-source-range: 14.161.27.151 limit by ipaddresss
-
-spec:
- ingressClassName: nginx
- tls:
- - hosts:
- - your-domain
- secretName: flower-tls
- rules:
- - host: your-domain
- http:
- paths:
- - path: /
- pathType: Prefix
- backend:
- service:
- name: flower-svc
- port:
- number: 5555
\ No newline at end of file
diff --git a/pdm.lock b/pdm.lock
new file mode 100644
index 00000000..3bd58083
--- /dev/null
+++ b/pdm.lock
@@ -0,0 +1,719 @@
+# This file is @generated by PDM.
+# It is not intended for manual editing.
+
+[metadata]
+groups = ["default"]
+strategy = ["inherit_metadata"]
+lock_version = "4.5.0"
+content_hash = "sha256:9e796c46fdd2f2190a9a88e31676d4d91a6606563f66e1080d45c23617dbd9c1"
+
+[[metadata.targets]]
+requires_python = ">3.9"
+
+[[package]]
+name = "apscheduler"
+version = "3.11.0"
+requires_python = ">=3.8"
+summary = "In-process task scheduler with Cron-like capabilities"
+groups = ["default"]
+dependencies = [
+ "backports-zoneinfo; python_version < \"3.9\"",
+ "tzlocal>=3.0",
+]
+files = [
+ {file = "APScheduler-3.11.0-py3-none-any.whl", hash = "sha256:fc134ca32e50f5eadcc4938e3a4545ab19131435e851abb40b34d63d5141c6da"},
+ {file = "apscheduler-3.11.0.tar.gz", hash = "sha256:4c622d250b0955a65d5d0eb91c33e6d43fd879834bf541e0a18661ae60460133"},
+]
+
+[[package]]
+name = "async-timeout"
+version = "5.0.1"
+requires_python = ">=3.8"
+summary = "Timeout context manager for asyncio programs"
+groups = ["default"]
+marker = "python_full_version < \"3.11.3\""
+files = [
+ {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"},
+ {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"},
+]
+
+[[package]]
+name = "beautifulsoup4"
+version = "4.12.3"
+requires_python = ">=3.6.0"
+summary = "Screen-scraping library"
+groups = ["default"]
+dependencies = [
+ "soupsieve>1.2",
+]
+files = [
+ {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"},
+ {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"},
+]
+
+[[package]]
+name = "black"
+version = "24.10.0"
+requires_python = ">=3.9"
+summary = "The uncompromising code formatter."
+groups = ["default"]
+dependencies = [
+ "click>=8.0.0",
+ "mypy-extensions>=0.4.3",
+ "packaging>=22.0",
+ "pathspec>=0.9.0",
+ "platformdirs>=2",
+ "tomli>=1.1.0; python_version < \"3.11\"",
+ "typing-extensions>=4.0.1; python_version < \"3.11\"",
+]
+files = [
+ {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"},
+ {file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"},
+ {file = "black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f"},
+ {file = "black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e"},
+ {file = "black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad"},
+ {file = "black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50"},
+ {file = "black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392"},
+ {file = "black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175"},
+ {file = "black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3"},
+ {file = "black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65"},
+ {file = "black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f"},
+ {file = "black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8"},
+ {file = "black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981"},
+ {file = "black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b"},
+ {file = "black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2"},
+ {file = "black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b"},
+ {file = "black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd"},
+ {file = "black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f"},
+ {file = "black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800"},
+ {file = "black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7"},
+ {file = "black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d"},
+ {file = "black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875"},
+]
+
+[[package]]
+name = "certifi"
+version = "2024.8.30"
+requires_python = ">=3.6"
+summary = "Python package for providing Mozilla's CA Bundle."
+groups = ["default"]
+files = [
+ {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"},
+ {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"},
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.4.0"
+requires_python = ">=3.7.0"
+summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+groups = ["default"]
+files = [
+ {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"},
+ {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"},
+ {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"},
+]
+
+[[package]]
+name = "click"
+version = "8.1.7"
+requires_python = ">=3.7"
+summary = "Composable command line interface toolkit"
+groups = ["default"]
+dependencies = [
+ "colorama; platform_system == \"Windows\"",
+ "importlib-metadata; python_version < \"3.8\"",
+]
+files = [
+ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
+ {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
+]
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+summary = "Cross-platform colored terminal text."
+groups = ["default"]
+marker = "platform_system == \"Windows\""
+files = [
+ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "fakeredis"
+version = "2.26.1"
+requires_python = "<4.0,>=3.7"
+summary = "Python implementation of redis API, can be used for testing purposes."
+groups = ["default"]
+dependencies = [
+ "redis>=4.3; python_full_version > \"3.8.0\"",
+ "redis>=4; python_version < \"3.8\"",
+ "sortedcontainers<3,>=2",
+ "typing-extensions<5.0,>=4.7; python_version < \"3.11\"",
+]
+files = [
+ {file = "fakeredis-2.26.1-py3-none-any.whl", hash = "sha256:68a5615d7ef2529094d6958677e30a6d30d544e203a5ab852985c19d7ad57e32"},
+ {file = "fakeredis-2.26.1.tar.gz", hash = "sha256:69f4daafe763c8014a6dbf44a17559c46643c95447b3594b3975251a171b806d"},
+]
+
+[[package]]
+name = "ffmpeg-python"
+version = "0.2.0"
+summary = "Python bindings for FFmpeg - with complex filtering support"
+groups = ["default"]
+dependencies = [
+ "future",
+]
+files = [
+ {file = "ffmpeg-python-0.2.0.tar.gz", hash = "sha256:65225db34627c578ef0e11c8b1eb528bb35e024752f6f10b78c011f6f64c4127"},
+ {file = "ffmpeg_python-0.2.0-py3-none-any.whl", hash = "sha256:ac441a0404e053f8b6a1113a77c0f452f1cfc62f6344a769475ffdc0f56c23c5"},
+]
+
+[[package]]
+name = "ffpb"
+version = "0.4.1"
+requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+summary = "A progress bar for ffmpeg. Yay !"
+groups = ["default"]
+dependencies = [
+ "tqdm~=4.25",
+]
+files = [
+ {file = "ffpb-0.4.1-py2.py3-none-any.whl", hash = "sha256:0e3e2962f4812e39f29649f09785e7cd877ea7f0e14e84d17918c33618647321"},
+ {file = "ffpb-0.4.1.tar.gz", hash = "sha256:ede56a6cba4c1d2d6c070daf612e1c4edc957679e49c6b4423cd7dd159577e59"},
+]
+
+[[package]]
+name = "filetype"
+version = "1.2.0"
+summary = "Infer file type and MIME type of any file/buffer. No external dependencies."
+groups = ["default"]
+files = [
+ {file = "filetype-1.2.0-py2.py3-none-any.whl", hash = "sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25"},
+ {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"},
+]
+
+[[package]]
+name = "future"
+version = "1.0.0"
+requires_python = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+summary = "Clean single-source support for Python 3 and 2"
+groups = ["default"]
+files = [
+ {file = "future-1.0.0-py3-none-any.whl", hash = "sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216"},
+ {file = "future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05"},
+]
+
+[[package]]
+name = "greenlet"
+version = "3.1.1"
+requires_python = ">=3.7"
+summary = "Lightweight in-process concurrent programming"
+groups = ["default"]
+marker = "(platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") and python_version < \"3.13\""
+files = [
+ {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"},
+ {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"},
+ {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"},
+ {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"},
+ {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"},
+ {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"},
+ {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"},
+ {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"},
+ {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"},
+ {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"},
+ {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"},
+ {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"},
+ {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"},
+ {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"},
+ {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"},
+ {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"},
+ {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"},
+ {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"},
+ {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"},
+ {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"},
+ {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"},
+ {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"},
+ {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"},
+ {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"},
+ {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"},
+ {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"},
+ {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"},
+ {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"},
+ {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"},
+ {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"},
+ {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"},
+ {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"},
+ {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"},
+ {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"},
+ {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"},
+ {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"},
+ {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"},
+ {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"},
+ {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"},
+ {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"},
+ {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"},
+ {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"},
+ {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"},
+ {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"},
+ {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"},
+ {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"},
+ {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"},
+ {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"},
+ {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"},
+ {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"},
+ {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"},
+ {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"},
+ {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"},
+ {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"},
+]
+
+[[package]]
+name = "idna"
+version = "3.10"
+requires_python = ">=3.6"
+summary = "Internationalized Domain Names in Applications (IDNA)"
+groups = ["default"]
+files = [
+ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
+ {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
+]
+
+[[package]]
+name = "mypy-extensions"
+version = "1.0.0"
+requires_python = ">=3.5"
+summary = "Type system extensions for programs checked with the mypy type checker."
+groups = ["default"]
+files = [
+ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
+ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
+]
+
+[[package]]
+name = "packaging"
+version = "24.2"
+requires_python = ">=3.8"
+summary = "Core utilities for Python packages"
+groups = ["default"]
+files = [
+ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"},
+ {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"},
+]
+
+[[package]]
+name = "pathspec"
+version = "0.12.1"
+requires_python = ">=3.8"
+summary = "Utility library for gitignore style pattern matching of file paths."
+groups = ["default"]
+files = [
+ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
+ {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
+]
+
+[[package]]
+name = "platformdirs"
+version = "4.3.6"
+requires_python = ">=3.8"
+summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
+groups = ["default"]
+files = [
+ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"},
+ {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"},
+]
+
+[[package]]
+name = "psutil"
+version = "6.1.0"
+requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+summary = "Cross-platform lib for process and system monitoring in Python."
+groups = ["default"]
+files = [
+ {file = "psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688"},
+ {file = "psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e"},
+ {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38"},
+ {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b"},
+ {file = "psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a"},
+ {file = "psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e"},
+ {file = "psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be"},
+ {file = "psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a"},
+]
+
+[[package]]
+name = "pyaes"
+version = "1.6.1"
+summary = "Pure-Python Implementation of the AES block-cipher and common modes of operation"
+groups = ["default"]
+files = [
+ {file = "pyaes-1.6.1.tar.gz", hash = "sha256:02c1b1405c38d3c370b085fb952dd8bea3fadcee6411ad99f312cc129c536d8f"},
+]
+
+[[package]]
+name = "pymysql"
+version = "1.1.1"
+requires_python = ">=3.7"
+summary = "Pure Python MySQL Driver"
+groups = ["default"]
+files = [
+ {file = "PyMySQL-1.1.1-py3-none-any.whl", hash = "sha256:4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c"},
+ {file = "pymysql-1.1.1.tar.gz", hash = "sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0"},
+]
+
+[[package]]
+name = "pyrogram"
+version = "2.1.33"
+requires_python = "~=3.8"
+git = "https://github.com/KurimuzonAkuma/pyrogram"
+ref = "9ec94e01e8a9d3653d587d39e06856da4a030fbc"
+revision = "9ec94e01e8a9d3653d587d39e06856da4a030fbc"
+summary = "Elegant, modern and asynchronous Telegram MTProto API framework in Python for users and bots"
+groups = ["default"]
+dependencies = [
+ "pyaes==1.6.1",
+ "pysocks==1.7.1",
+]
+
+[[package]]
+name = "pysocks"
+version = "1.7.1"
+requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+summary = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information."
+groups = ["default"]
+files = [
+ {file = "PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5"},
+ {file = "PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0"},
+]
+
+[[package]]
+name = "python-dotenv"
+version = "1.0.1"
+requires_python = ">=3.8"
+summary = "Read key-value pairs from a .env file and set them as environment variables"
+groups = ["default"]
+files = [
+ {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"},
+ {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"},
+]
+
+[[package]]
+name = "redis"
+version = "5.2.0"
+requires_python = ">=3.8"
+summary = "Python client for Redis database and key-value store"
+groups = ["default"]
+dependencies = [
+ "async-timeout>=4.0.3; python_full_version < \"3.11.3\"",
+]
+files = [
+ {file = "redis-5.2.0-py3-none-any.whl", hash = "sha256:ae174f2bb3b1bf2b09d54bf3e51fbc1469cf6c10aa03e21141f51969801a7897"},
+ {file = "redis-5.2.0.tar.gz", hash = "sha256:0b1087665a771b1ff2e003aa5bdd354f15a70c9e25d5a7dbf9c722c16528a7b0"},
+]
+
+[[package]]
+name = "requests"
+version = "2.32.3"
+requires_python = ">=3.8"
+summary = "Python HTTP for Humans."
+groups = ["default"]
+dependencies = [
+ "certifi>=2017.4.17",
+ "charset-normalizer<4,>=2",
+ "idna<4,>=2.5",
+ "urllib3<3,>=1.21.1",
+]
+files = [
+ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
+ {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
+]
+
+[[package]]
+name = "sortedcontainers"
+version = "2.4.0"
+summary = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set"
+groups = ["default"]
+files = [
+ {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"},
+ {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"},
+]
+
+[[package]]
+name = "soupsieve"
+version = "2.6"
+requires_python = ">=3.8"
+summary = "A modern CSS selector implementation for Beautiful Soup."
+groups = ["default"]
+files = [
+ {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"},
+ {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"},
+]
+
+[[package]]
+name = "sqlalchemy"
+version = "2.0.36"
+requires_python = ">=3.7"
+summary = "Database Abstraction Library"
+groups = ["default"]
+dependencies = [
+ "greenlet!=0.4.17; (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") and python_version < \"3.13\"",
+ "importlib-metadata; python_version < \"3.8\"",
+ "typing-extensions>=4.6.0",
+]
+files = [
+ {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"},
+ {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"},
+ {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"},
+ {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"},
+ {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"},
+ {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"},
+ {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"},
+ {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"},
+ {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"},
+ {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"},
+ {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"},
+ {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"},
+ {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"},
+ {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"},
+ {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"},
+ {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"},
+ {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"},
+ {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"},
+ {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"},
+ {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"},
+ {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"},
+ {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"},
+ {file = "SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"},
+ {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"},
+ {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"},
+ {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"},
+ {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"},
+ {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"},
+ {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"},
+ {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"},
+ {file = "SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"},
+ {file = "SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"},
+ {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"},
+ {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"},
+ {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"},
+ {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"},
+ {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"},
+ {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"},
+ {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"},
+ {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = "sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"},
+ {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"},
+ {file = "sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"},
+]
+
+[[package]]
+name = "tgcrypto"
+version = "1.2.5"
+requires_python = "~=3.7"
+summary = "Fast and Portable Cryptography Extension Library for Pyrogram"
+groups = ["default"]
+files = [
+ {file = "TgCrypto-1.2.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4507102377002966f35f2481830b7529e00c9bbff8c7d1e09634f984af801675"},
+ {file = "TgCrypto-1.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:38fe25c0d79b41d7a89caba2a78dea0358e17ca73b033cefd16abed680685829"},
+ {file = "TgCrypto-1.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c035bf8ef89846f67e77e82ea85c089b6ea30631b32e8ac1a6511b9be52ab065"},
+ {file = "TgCrypto-1.2.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f594e2680daf20dbac6bf56862f567ddc3cc8d6a19757ed07faa8320ff7acee4"},
+ {file = "TgCrypto-1.2.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8723a16076e229ffdf537fdb5e638227d10f44ca43e6939db1eab524de6eaed7"},
+ {file = "TgCrypto-1.2.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c1c8d974b8b2d7132364b6f0f6712b92bfe47ab9c5dcee25c70327ff68d22d95"},
+ {file = "TgCrypto-1.2.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89d9c143a1fcdb2562a4aa887152abbe9253e1979d7bebef2b489148e0bbe086"},
+ {file = "TgCrypto-1.2.5-cp310-cp310-win32.whl", hash = "sha256:aa4bc1d11d4a90811c162abd45a5981f171679d1b5bd0322cd7ccd16447366a2"},
+ {file = "TgCrypto-1.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:39145103614c5e38fe938549742d355920f4a0778fa8259eb69c0c85ba4b1d28"},
+ {file = "TgCrypto-1.2.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:59597cdb1c87eb1184088563d20b42a8f2e431e9334fed64926079044ad2a4af"},
+ {file = "TgCrypto-1.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1283337ae75b02406dd700377b8b783e70033b548492517df6e6c4156b0ed69c"},
+ {file = "TgCrypto-1.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1735437df0023a40e5fdd95e6b09ce806ec8f2cd2f8879023818840dfae60cab"},
+ {file = "TgCrypto-1.2.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfa17a20206532c6d2442c9d7a7f6434120bd75896ad9a3e9b9277477afa084f"},
+ {file = "TgCrypto-1.2.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48da3674474839e5619e7430ff1f98aed9f55369f3cfaef7f65511852869572e"},
+ {file = "TgCrypto-1.2.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b49e982e5b156be821a5235bd9102c00dc506a58607e2c8bd50ac872724a951f"},
+ {file = "TgCrypto-1.2.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9d9f13586065a6d86d05c16409054033a84be208acee29b49f6f194e27b08642"},
+ {file = "TgCrypto-1.2.5-cp311-cp311-win32.whl", hash = "sha256:10dd3870aecb1a783c6eafd3b164b2149dbc93a9ee13feb7e6f5c58f87c24cd0"},
+ {file = "TgCrypto-1.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:a1beec47d6af8b509af7cf266e30f7703208076076594714005b42d2c25225b3"},
+ {file = "TgCrypto-1.2.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7885a75db09ce8bdba42d2c332085bfe314f232541a729808c7507ffa261ff9a"},
+ {file = "TgCrypto-1.2.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0d28aa317364a5c27317fe97a48267aa1c65c9aaf589909e97489ebe82a714e3"},
+ {file = "TgCrypto-1.2.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:940974e19044dc65bcf7b9c5255173b896dff010142f3833047dc55d59cde21c"},
+ {file = "TgCrypto-1.2.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:457c657dd10ffb4bbbb007132a0f6a7bee5080176a98c51f285fedf636b624cb"},
+ {file = "TgCrypto-1.2.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:539bdc6b9239fb6a6b134591a998dc7f50d4dcc4fed861f80540682acc0c3802"},
+ {file = "TgCrypto-1.2.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4d70d5517d64ca952896b726d22c8a66594e6f6259ee2cb4fa134c02d0e8c3e0"},
+ {file = "TgCrypto-1.2.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:90b6337d3ae4348ed14f89dd2ebf7011fa63d67a48c8a98d955a1e392176c60a"},
+ {file = "TgCrypto-1.2.5-cp39-cp39-win32.whl", hash = "sha256:37c4b9be82716fbc6d2b123caef448eee28683888803db075d842327766f7624"},
+ {file = "TgCrypto-1.2.5-cp39-cp39-win_amd64.whl", hash = "sha256:6e96b3a478fae977228c5750194c20a18cde402bbbea6593de424f84d4a8893b"},
+ {file = "TgCrypto-1.2.5-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9b0a088ff2e05b6bbe891da936f62b99bd85202b2b9f4f57f71a408490dd518c"},
+ {file = "TgCrypto-1.2.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f245895c7d518342089d15b5dca3cee9ffa5a0f3534db9d5a930f6a27dff4adf"},
+ {file = "TgCrypto-1.2.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7dbf607d645c39a577a0f8571039d11ddd2dcdf9656465be75f9e0f540472444"},
+ {file = "TgCrypto-1.2.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6b0c2dc84e632ce7b3d0b767cfe20967e557ad7d71ea5dbd7df2dd544323181"},
+ {file = "TgCrypto-1.2.5.tar.gz", hash = "sha256:9bc2cac6fb9a12ef5b08f3dd500174fe374d89b660cce981f57e3138559cb682"},
+]
+
+[[package]]
+name = "token-bucket"
+version = "0.3.0"
+requires_python = ">=3.5"
+summary = "Very fast implementation of the token bucket algorithm."
+groups = ["default"]
+files = [
+ {file = "token_bucket-0.3.0-py2.py3-none-any.whl", hash = "sha256:6df24309e3cf5b808ae5ef714a3191ec5b54f48c34ef959e4882eef140703369"},
+ {file = "token_bucket-0.3.0.tar.gz", hash = "sha256:979571c99db2ff9e651f2b2146a62b2ebadf7de6c217a8781698282976cb675f"},
+]
+
+[[package]]
+name = "tomli"
+version = "2.1.0"
+requires_python = ">=3.8"
+summary = "A lil' TOML parser"
+groups = ["default"]
+marker = "python_version < \"3.11\""
+files = [
+ {file = "tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391"},
+ {file = "tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8"},
+]
+
+[[package]]
+name = "tqdm"
+version = "4.67.1"
+requires_python = ">=3.7"
+summary = "Fast, Extensible Progress Meter"
+groups = ["default"]
+dependencies = [
+ "colorama; platform_system == \"Windows\"",
+]
+files = [
+ {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"},
+ {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"},
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.12.2"
+requires_python = ">=3.8"
+summary = "Backported and Experimental Type Hints for Python 3.8+"
+groups = ["default"]
+files = [
+ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
+ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
+]
+
+[[package]]
+name = "tzdata"
+version = "2024.2"
+requires_python = ">=2"
+summary = "Provider of IANA time zone data"
+groups = ["default"]
+marker = "platform_system == \"Windows\""
+files = [
+ {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"},
+ {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"},
+]
+
+[[package]]
+name = "tzlocal"
+version = "5.2"
+requires_python = ">=3.8"
+summary = "tzinfo object for the local timezone"
+groups = ["default"]
+dependencies = [
+ "backports-zoneinfo; python_version < \"3.9\"",
+ "tzdata; platform_system == \"Windows\"",
+]
+files = [
+ {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"},
+ {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"},
+]
+
+[[package]]
+name = "urllib3"
+version = "2.2.3"
+requires_python = ">=3.8"
+summary = "HTTP library with thread-safe connection pooling, file post, and more."
+groups = ["default"]
+files = [
+ {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"},
+ {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"},
+]
+
+[[package]]
+name = "yt-dlp"
+version = "2024.12.3"
+requires_python = ">=3.9"
+summary = "A feature-rich command-line audio/video downloader"
+groups = ["default"]
+files = [
+ {file = "yt_dlp-2024.12.3-py3-none-any.whl", hash = "sha256:a6b32ea879ce3f95b47b9b57948b755b4d61f3700d4fc24602b17537ddf0cf90"},
+ {file = "yt_dlp-2024.12.3.tar.gz", hash = "sha256:35abff51c5762033103f2330ba0a8a1f48c4388a413a2d8cdc9b84642fe8edd4"},
+]
diff --git a/pre-push.py b/pre-push.py
new file mode 100755
index 00000000..17f6e49b
--- /dev/null
+++ b/pre-push.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python3
+# coding: utf-8
+
+# ytdlbot - pre-push.py
+# for dependabot
+
+import tomllib
+import subprocess
+
+
+with open("pyproject.toml", "rb") as file:
+ config = tomllib.load(file)
+
+with open("requirements.txt", "w") as file:
+    for item in config["project"]["dependencies"]:
+        # direct references look like "name @ git+...": keep only the URL part for requirements.txt
+        if " " in item:
+            item = item.split()[-1]
+        file.write(f"{item}\n")
+
+# commit with amend
+subprocess.run(["git", "add", "requirements.txt"], check=True)
+subprocess.run(["git", "commit", "--amend", "--no-edit"], check=True)
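+
+# To wire it up as an actual pre-push hook (illustrative, assumes the default repo layout):
+#   ln -s ../../pre-push.py .git/hooks/pre-push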
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 00000000..36e52f47
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,15 @@
+[project]
+name = "ytdlbot"
+version = "1.0.0"
+description = "Telegram bot that downloads videos from YouTube and other websites"
+authors = [
+ {name = "Benny", email = "benny.think@gmail.com"},
+]
+dependencies = [
+    "Pyrogram @ git+https://github.com/KurimuzonAkuma/pyrogram@9ec94e01e8a9d3653d587d39e06856da4a030fbc",
+    "tgcrypto>=1.2.5",
+    "yt-dlp==2024.12.3",
+    "APScheduler>=3.11.0",
+    "ffmpeg-python>=0.2.0",
+    "PyMySQL>=1.1.1",
+    "filetype>=1.2.0",
+    "beautifulsoup4>=4.12.3",
+    "fakeredis>=2.26.1",
+    "redis>=5.2.0",
+    "requests>=2.32.3",
+    "tqdm>=4.67.1",
+    "token-bucket>=0.3.0",
+    "python-dotenv>=1.0.1",
+    "black>=24.10.0",
+    "sqlalchemy>=2.0.36",
+    "psutil>=6.1.0",
+    "ffpb>=0.4.1",
+]
+requires-python = ">=3.9"
+readme = "README.md"
+license = {text = "Apache-2.0"}
+
+
+[tool.pdm]
+distribution = false
diff --git a/requirements.txt b/requirements.txt
index 86a856d6..d72d3048 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,29 +1,18 @@
-git+https://github.com/KurimuzonAkuma/pyrogram
-tgcrypto==1.2.5
-yt-dlp==2024.8.6
-APScheduler==3.10.4
-beautifultable==1.1.0
-ffmpeg-python==0.2.0
-PyMySQL==1.1.1
-celery==5.4.0
-filetype==1.2.0
-flower==2.0.1
-psutil==6.0.0
-influxdb==5.3.2
-beautifulsoup4==4.12.3
-fakeredis==2.24.1
-supervisor==4.2.5
-tgbot-ping==1.0.7
-redis==5.0.8
-requests==2.32.3
-tqdm==4.66.5
-requests-toolbelt==1.0.0
-ffpb==0.4.1
-youtube-search-python==1.6.6
-token-bucket==0.3.0
-coloredlogs==15.0.1
-tronpy==0.5.0
-mnemonic==0.21
-qrcode==7.4.2
-blinker==1.8.2
-flask===3.0.3
+git+https://github.com/KurimuzonAkuma/pyrogram@9ec94e01e8a9d3653d587d39e06856da4a030fbc
+tgcrypto>=1.2.5
+yt-dlp==2024.12.3
+APScheduler>=3.11.0
+ffmpeg-python>=0.2.0
+PyMySQL>=1.1.1
+filetype>=1.2.0
+beautifulsoup4>=4.12.3
+fakeredis>=2.26.1
+redis>=5.2.0
+requests>=2.32.3
+tqdm>=4.67.1
+token-bucket>=0.3.0
+python-dotenv>=1.0.1
+black>=24.10.0
+sqlalchemy>=2.0.36
+psutil>=6.1.0
+ffpb>=0.4.1
diff --git a/scripts/low_id.sh b/scripts/low_id.sh
deleted file mode 100644
index 16fc2cef..00000000
--- a/scripts/low_id.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-export PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin:/usr/local/go/bin:/opt/bin
-
-# Check the logs for the given string
-if docker-compose logs --tail=100 ytdl | grep -q "The msg_id is too low"; then
- # If the string is found, stop the ytdl service
- echo "ytdl service stopped due to 'The msg_id is too low' found in logs."
- docker-compose stop ytdl && docker-compose rm ytdl && docker-compose up -d
-
-else
- echo "String not found in logs."
-fi
diff --git a/scripts/migrate_to_mysql.py b/scripts/migrate_to_mysql.py
deleted file mode 100644
index 436fb78a..00000000
--- a/scripts/migrate_to_mysql.py
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/local/bin/python3
-# coding: utf-8
-
-# ytdlbot - migrate_to_mysql.py
-# 12/29/21 15:28
-#
-
-__author__ = "Benny "
-
-import sqlite3
-
-import pymysql
-
-mysql_con = pymysql.connect(host='localhost', user='root', passwd='root', db='vip', charset='utf8mb4')
-sqlite_con = sqlite3.connect('vip.sqlite')
-
-vips = sqlite_con.execute('SELECT * FROM VIP').fetchall()
-
-for vip in vips:
- mysql_con.cursor().execute('INSERT INTO vip VALUES (%s, %s, %s, %s, %s, %s)', vip)
-
-settings = sqlite_con.execute('SELECT * FROM settings').fetchall()
-
-for setting in settings:
- mysql_con.cursor().execute("INSERT INTO settings VALUES (%s,%s,%s)", setting)
-
-mysql_con.commit()
diff --git a/scripts/start.sh b/scripts/start.sh
deleted file mode 100644
index 242f207f..00000000
--- a/scripts/start.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-docker run -d --restart unless-stopped --name ytdl \
- --net host \
- -e TOKEN=12345 \
- -e APP_ID=123123 \
- -e APP_HASH=4990 \
- -e ENABLE_CELERY=True \
- -e REDIS=192.168.6.1 \
- -e MYSQL_HOST=192.168.6.1 \
- -e WORKERS=4 \
- -e VIP=True \
- -e CUSTOM_TEXT=#StandWithUkraine \
- bennythink/ytdlbot \
- /usr/local/bin/supervisord -c "/ytdlbot/conf/supervisor_worker.conf"
diff --git a/scripts/transfer.py b/scripts/transfer.py
deleted file mode 100644
index 2ace122f..00000000
--- a/scripts/transfer.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python3
-# coding: utf-8
-
-# ytdlbot - transfer.py
-# 2023-12-07 18:21
-from tronpy import Tron
-from tronpy.hdwallet import seed_from_mnemonic, key_from_seed
-from tronpy.keys import PrivateKey
-
-mnemonic = "web horse smile ramp olive slush blue property world physical donkey pumpkin"
-
-client = Tron(network="nile")
-
-from_ = client.generate_address_from_mnemonic(mnemonic, account_path="m/44'/195'/0'/0/0")["base58check_address"]
-balance = client.get_account_balance(from_)
-print("my addr: ", from_, "balance: ", balance)
-to = input("to: ")
-amount = int(input("amount in TRX: "))
-
-
-def mnemonic_to_private_key():
- seed = seed_from_mnemonic(mnemonic, passphrase="")
- private_key = key_from_seed(seed, account_path="m/44'/195'/0'/0/0")
- return PrivateKey(private_key)
-
-
-t = client.trx.transfer(from_, to, amount * 1_000_000).build().sign(mnemonic_to_private_key()).broadcast()
-
-print(t.wait())
diff --git a/src/config/__init__.py b/src/config/__init__.py
new file mode 100644
index 00000000..5dcf0ac9
--- /dev/null
+++ b/src/config/__init__.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python3
+# coding: utf-8
+
+# ytdlbot - __init__.py
+
+import logging
+
+from dotenv import load_dotenv
+
+load_dotenv()
+
+from config.config import *
+from config.constant import *
+
+logging.basicConfig(
+ level=logging.INFO,
+ format="[%(asctime)s %(filename)s:%(lineno)d %(levelname).1s] %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
diff --git a/src/config/config.py b/src/config/config.py
new file mode 100644
index 00000000..9bd0a4b4
--- /dev/null
+++ b/src/config/config.py
@@ -0,0 +1,41 @@
+#!/usr/local/bin/python3
+# coding: utf-8
+
+# ytdlbot - config.py
+# 8/28/21 15:01
+#
+
+__author__ = "Benny <benny.think@gmail.com>"
+
+import os
+
+# general settings
+WORKERS: int = int(os.getenv("WORKERS", 100))
+APP_ID: int = int(os.getenv("APP_ID"))
+APP_HASH = os.getenv("APP_HASH")
+BOT_TOKEN = os.getenv("BOT_TOKEN")
+OWNER = os.getenv("OWNER")
+# db settings
+AUTHORIZED_USER: str = os.getenv("AUTHORIZED_USER", "")
+MYSQL_DSN = os.getenv("MYSQL_DSN")
+REDIS_HOST = os.getenv("REDIS_HOST")
+ENABLE_FFMPEG = os.getenv("ENABLE_FFMPEG", "False").lower() in ("true", "1", "yes")
+AUDIO_FORMAT = os.getenv("AUDIO_FORMAT")
+ENABLE_ARIA2 = os.getenv("ENABLE_ARIA2", "False").lower() in ("true", "1", "yes")
+RCLONE_PATH = os.getenv("RCLONE_PATH")
+
+# payment settings
+ENABLE_VIP = os.getenv("ENABLE_VIP", "False").lower() in ("true", "1", "yes")
+PROVIDER_TOKEN = os.getenv("PROVIDER_TOKEN")
+FREE_DOWNLOAD = int(os.getenv("FREE_DOWNLOAD", 5))
+TOKEN_PRICE = int(os.getenv("TOKEN_PRICE", 10))  # 1 USD = 10 credits
+
+# For advance users
+# Please do not change, if you don't know what these are.
+TG_NORMAL_MAX_SIZE = 2000 * 1024 * 1024
+CAPTION_URL_LENGTH_LIMIT = 150
+
+RATE_LIMIT = int(os.getenv("RATE_LIMIT", 120))
+# Temporary file (download) path. If unset, this is None and the system's default temp path is used.
+# Please ensure the directory exists and is writable.
+TMPFILE_PATH = os.getenv("TMPFILE_PATH")
diff --git a/src/config/constant.py b/src/config/constant.py
new file mode 100644
index 00000000..81eac3a0
--- /dev/null
+++ b/src/config/constant.py
@@ -0,0 +1,52 @@
+#!/usr/local/bin/python3
+# coding: utf-8
+
+# ytdlbot - constant.py
+# 8/16/21 16:59
+#
+
+__author__ = "Benny <benny.think@gmail.com>"
+
+import typing
+
+from pyrogram import Client, types
+
+
+class BotText:
+
+ start = """
+    Welcome to the YouTube Download bot. Type /help for more information.
+    EU🇪🇺: @benny_2ytdlbot
+    SG🇸🇬: @benny_ytdlbot
+
+ Join https://t.me/+OGRC8tp9-U9mZDZl for updates."""
+
+ help = """
+1. For YouTube and any other website supported by yt-dlp, just send the link and the bot will download the video and send it to you.
+
+2. For specific links use `/spdl {URL}`. More info at https://github.com/SanujaNS/ytdlbot-telegram#supported-websites
+
+3. If the bot doesn't work, try again or join https://t.me/+OGRC8tp9-U9mZDZl for updates.
+
+4. Want to deploy it yourself?\nHere's the source code: https://github.com/tgbot-collection/ytdlbot
+ """
+
+ about = "YouTube Downloader by @BennyThink.\n\nOpen source on GitHub: https://github.com/tgbot-collection/ytdlbot"
+
+ settings = """
+Please choose the preferred format and video quality for your video. These settings only **apply to YouTube videos**.
+High: 1080P
+Medium: 720P
+Low: 480P
+
+If you choose to send the video as a document, the Telegram client will not be able to stream it.
+
+Your current settings:
+Video quality: %s
+Sending type: %s
+"""
+
+
+class Types:
+ Message = typing.Union[types.Message, typing.Coroutine]
+ Client = Client
diff --git a/src/database/__init__.py b/src/database/__init__.py
new file mode 100644
index 00000000..77daf4c9
--- /dev/null
+++ b/src/database/__init__.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python3
+# coding: utf-8
+
+# ytdlbot - __init__.py
+
+from database.cache import Redis
diff --git a/src/database/cache.py b/src/database/cache.py
new file mode 100644
index 00000000..5fca7a0c
--- /dev/null
+++ b/src/database/cache.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python3
+# coding: utf-8
+
+# ytdlbot - cache.py
+
+
+import logging
+
+import fakeredis
+import redis
+
+from config import REDIS_HOST
+
+
+class Redis:
+ def __init__(self):
+ try:
+ self.r = redis.StrictRedis(host=REDIS_HOST, db=1, decode_responses=True)
+ self.r.ping()
+ except Exception:
+ logging.warning("Redis connection failed, using fake redis instead.")
+ self.r = fakeredis.FakeStrictRedis(host=REDIS_HOST, db=1, decode_responses=True)
+
+ def __del__(self):
+ self.r.close()
+
+ def add_send_cache(self, link: str, file_id: str, _type: str):
+ # one link might have multiple files, so we use hset
+ self.r.hset(link, mapping={"file_id": file_id, "type": _type})
+
+ def get_send_cache(self, link: str):
+ return self.r.hgetall(link)
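+
+
+# Illustrative round trip, assuming a reachable Redis (or the fakeredis fallback):
+#   r = Redis()
+#   r.add_send_cache("https://example.com/v", file_id="BAACAgU...", _type="video")
+#   r.get_send_cache("https://example.com/v")  # -> {"file_id": "BAACAgU...", "type": "video"}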
diff --git a/src/database/model.py b/src/database/model.py
new file mode 100644
index 00000000..d9b48299
--- /dev/null
+++ b/src/database/model.py
@@ -0,0 +1,163 @@
+#!/usr/bin/env python3
+# coding: utf-8
+import math
+import os
+from contextlib import contextmanager
+from typing import Literal
+
+from sqlalchemy import Column, Enum, Float, ForeignKey, Integer, String, create_engine
+from sqlalchemy.dialects.mysql import JSON
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import relationship, sessionmaker
+
+from config import ENABLE_VIP, FREE_DOWNLOAD, MYSQL_DSN
+
+# ytdlbot - model.py
+
+
+Base = declarative_base()
+
+
+class User(Base):
+ __tablename__ = "users"
+
+ id = Column(Integer, primary_key=True, autoincrement=True)
+ user_id = Column(Integer, unique=True, nullable=False) # telegram user id
+ free = Column(Integer, default=FREE_DOWNLOAD)
+ paid = Column(Integer, default=0)
+ config = Column(JSON)
+
+ settings = relationship("Setting", back_populates="user", cascade="all, delete-orphan")
+ payments = relationship("Payment", back_populates="user", cascade="all, delete-orphan")
+
+
+class Setting(Base):
+ __tablename__ = "settings"
+
+ id = Column(Integer, primary_key=True, autoincrement=True)
+ download = Column(Enum("high", "medium", "low", "audio", "custom"), nullable=False, default="high")
+ upload = Column(Enum("video", "audio", "document"), nullable=False, default="video")
+ user_id = Column(Integer, ForeignKey("users.id"), nullable=False)
+
+ user = relationship("User", back_populates="settings")
+
+
+class Payment(Base):
+ __tablename__ = "payments"
+
+ id = Column(Integer, primary_key=True, autoincrement=True)
+ method = Column(String(50), nullable=False)
+ amount = Column(Float, nullable=False)
+ status = Column(Enum("pending", "completed", "failed", "refunded"), nullable=False)
+ user_id = Column(Integer, ForeignKey("users.id"), nullable=False)
+
+ user = relationship("User", back_populates="payments")
+
+
+def create_session():
+    engine = create_engine(
+        MYSQL_DSN or "mysql+pymysql://root:root@localhost/ytdlbot",  # falls back to a local development DSN
+ pool_size=50,
+ max_overflow=100,
+ pool_timeout=30,
+ pool_recycle=1800,
+ )
+ Base.metadata.create_all(engine)
+ return sessionmaker(bind=engine)
+
+
+SessionFactory = create_session()
+
+
+@contextmanager
+def session_manager():
+ s = SessionFactory()
+ try:
+ yield s
+ s.commit()
+ except Exception as e:
+ s.rollback()
+ raise
+ finally:
+ s.close()
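+
+# Usage sketch (illustrative): the helpers below all follow this pattern;
+# the context manager commits on success and rolls back on any exception.
+#   with session_manager() as session:
+#       session.add(User(user_id=123))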
+
+
+def get_download_settings(uid) -> Literal["high", "medium", "low", "audio", "custom"]:
+ with session_manager() as session:
+ data = session.query(Setting).filter(Setting.user_id == uid).first()
+ if data:
+ return data.download
+ return "high"
+
+
+def get_upload_settings(uid) -> Literal["video", "audio", "document"]:
+ with session_manager() as session:
+ data = session.query(Setting).filter(Setting.user_id == uid).first()
+ if data:
+ return data.upload
+ return "video"
+
+
+def set_user_settings(uid: int, key: str, value: str):
+ # set download or upload settings
+ with session_manager() as session:
+ # upsert
+ setting = session.query(Setting).filter(Setting.user_id == uid).first()
+ if setting:
+ setattr(setting, key, value)
+ else:
+ session.add(Setting(user_id=uid, **{key: value}))
+
+
+def get_free_quota(uid: int):
+ with session_manager() as session:
+ data = session.query(User).filter(User.user_id == uid).first()
+ if data:
+ return data.free
+ return FREE_DOWNLOAD
+
+
+def get_paid_quota(uid: int):
+    if ENABLE_VIP:
+ with session_manager() as session:
+ data = session.query(User).filter(User.user_id == uid).first()
+ if data:
+ return data.paid
+
+ return 0
+
+ return math.inf
+
+
+def reset_free_quota(uid: int):
+ with session_manager() as session:
+ data = session.query(User).filter(User.user_id == uid).first()
+ if data:
+            data.free = FREE_DOWNLOAD
+
+
+def add_paid_quota(uid: int, amount: int):
+ with session_manager() as session:
+ data = session.query(User).filter(User.user_id == uid).first()
+ if data:
+ data.paid += amount
+
+
+def use_quota(uid: int):
+ # use free first, then paid
+ with session_manager() as session:
+ user = session.query(User).filter(User.user_id == uid).first()
+ if user:
+ if user.free > 0:
+ user.free -= 1
+ elif user.paid > 0:
+ user.paid -= 1
+ else:
+ raise Exception("Quota exhausted")
+
+
+def init_user(uid: int):
+ with session_manager() as session:
+ user = session.query(User).filter(User.user_id == uid).first()
+ if not user:
+ session.add(User(user_id=uid))
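+
+
+# Illustrative call order: init_user(uid) once per new user, then use_quota(uid)
+# before each download; use_quota raises once both free and paid credits reach zero.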
diff --git a/src/engine/__init__.py b/src/engine/__init__.py
new file mode 100644
index 00000000..0fc8b94a
--- /dev/null
+++ b/src/engine/__init__.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python3
+# coding: utf-8
+
+# ytdlbot - __init__.py
+
+from urllib.parse import urlparse
+
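+# instagram, pixeldrain and krakenfiles used below are the per-site downloaders;
+# they are assumed to be provided by sibling modules of this engine package.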
+
+def special_download_entrance(url: str, tempdir: str, bm, **kwargs) -> list:
+ """Specific link downloader"""
+    domain = urlparse(url).hostname or ""  # hostname is None for malformed URLs
+ if "youtube.com" in domain or "youtu.be" in domain:
+        raise ValueError("ERROR: YouTube links are handled directly - just send the link instead of using /spdl.")
+ elif "www.instagram.com" in domain:
+ return instagram(url, tempdir, bm, **kwargs)
+ elif "pixeldrain.com" in domain:
+ return pixeldrain(url, tempdir, bm, **kwargs)
+ elif "krakenfiles.com" in domain:
+ return krakenfiles(url, tempdir, bm, **kwargs)
+ else:
+ raise ValueError(f"Invalid URL: No specific link function found for {url}")
diff --git a/src/engine/base.py b/src/engine/base.py
new file mode 100644
index 00000000..a209dae7
--- /dev/null
+++ b/src/engine/base.py
@@ -0,0 +1,243 @@
+#!/usr/bin/env python3
+# coding: utf-8
+
+# ytdlbot - base.py
+
+import logging
+import re
+import tempfile
+import uuid
+from abc import ABC, abstractmethod
+from io import StringIO
+from pathlib import Path
+from types import SimpleNamespace
+
+import ffmpeg
+import filetype
+from helper import debounce, sizeof_fmt
+from pyrogram import enums, types
+from tqdm import tqdm
+
+from config import TG_NORMAL_MAX_SIZE, Types
+from database import Redis
+from database.model import (
+ get_download_settings,
+ get_free_quota,
+ get_paid_quota,
+ get_upload_settings,
+ use_quota,
+)
+
+
+def record_usage(func):
+    def wrapper(self: "BaseDownloader", *args, **kwargs):
+        free, paid = get_free_quota(self._user_id), get_paid_quota(self._user_id)
+        if free + paid <= 0:
+            raise Exception("Usage limit exceeded")
+        # check cache first: resend cached file ids instead of downloading again
+        result = None
+        if caches := self.get_cache_fileid():
+            for fid, _type in caches.items():
+                self._methods[_type](self._user_id, fid)
+        else:
+            result = func(self, *args, **kwargs)  # Call the original method
+            use_quota(self._user_id)
+ return result
+
+ return wrapper
+
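+# Usage note: record_usage is meant to wrap BaseDownloader methods that consume quota
+# (see _upload below): cached file_ids short-circuit the real work, and one quota unit
+# is charged only when the wrapped method actually runs.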
+
+def generate_input_media(file_paths: list, cap: str) -> list:
+ input_media = []
+ for path in file_paths:
+ mime = filetype.guess_mime(path)
+ if "video" in mime:
+ input_media.append(types.InputMediaVideo(media=path))
+ elif "image" in mime:
+ input_media.append(types.InputMediaPhoto(media=path))
+ elif "audio" in mime:
+ input_media.append(types.InputMediaAudio(media=path))
+ else:
+ input_media.append(types.InputMediaDocument(media=path))
+
+ input_media[0].caption = cap
+ return input_media
+
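+# Illustrative example: generate_input_media(["a.mp4", "b.jpg"], "hi") yields
+# [InputMediaVideo("a.mp4"), InputMediaPhoto("b.jpg")] with "hi" as the album caption.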
+
+class BaseDownloader(ABC):
+ def __init__(self, client: Types.Client, url: str, user_id: int, _id: int):
+ self._client = client
+ self._url = url
+ self._user_id = user_id
+ self._id = _id
+ self._tempdir = tempfile.TemporaryDirectory(prefix="ytdl-")
+ self._bot_msg: Types.Message = self._client.get_messages(self._user_id, self._id)
+ self._redis = Redis()
+
+ def __del__(self):
+ self._tempdir.cleanup()
+
+ @staticmethod
+ def __remove_bash_color(text):
+ return re.sub(r"\u001b|\[0;94m|\u001b\[0m|\[0;32m|\[0m|\[0;33m", "", text)
+
+ @staticmethod
+ def __tqdm_progress(desc, total, finished, speed="", eta=""):
+ def more(title, initial):
+ if initial:
+ return f"{title} {initial}"
+ else:
+ return ""
+
+ f = StringIO()
+ tqdm(
+ total=total,
+ initial=finished,
+ file=f,
+ ascii=False,
+ unit_scale=True,
+ ncols=30,
+ bar_format="{l_bar}{bar} |{n_fmt}/{total_fmt} ",
+ )
+ raw_output = f.getvalue()
+ tqdm_output = raw_output.split("|")
+ progress = f"`[{tqdm_output[1]}]`"
+ detail = tqdm_output[2].replace("[A", "")
+ text = f"""
+ {desc}
+
+ {progress}
+ {detail}
+ {more("Speed:", speed)}
+ {more("ETA:", eta)}
+ """
+ f.close()
+ return text
+
+ def download_hook(self, d: dict):
+ if d["status"] == "downloading":
+ downloaded = d.get("downloaded_bytes", 0)
+ total = d.get("total_bytes") or d.get("total_bytes_estimate", 0)
+
+ if total > TG_NORMAL_MAX_SIZE:
+ msg = f"Your download file size {sizeof_fmt(total)} is too large for Telegram."
+ raise Exception(msg)
+
+ # percent = remove_bash_color(d.get("_percent_str", "N/A"))
+ speed = self.__remove_bash_color(d.get("_speed_str", "N/A"))
+ eta = self.__remove_bash_color(d.get("_eta_str", d.get("eta")))
+ text = self.__tqdm_progress("Downloading...", total, downloaded, speed, eta)
+            # edit_text is debounced (see @debounce below), so rapid updates are throttled
+            self.edit_text(text)
+
+    def upload_hook(self, current, total):
+        text = self.__tqdm_progress("Uploading...", total, current)
+        self.edit_text(text)
+
+    @debounce(5)
+    def edit_text(self, text: str, **kwargs):
+        self._bot_msg.edit_text(text, **kwargs)
+
+    def get_cache_fileid(self):
+        unique = self._url + get_download_settings(self._user_id)
+        return self._redis.get_send_cache(unique)
+
+ @abstractmethod
+ def _setup_formats(self) -> list | None:
+ pass
+
+ @abstractmethod
+ def _download(self, formats) -> list:
+ # responsible for get format and download it
+ pass
+
+ @property
+ def _methods(self):
+ return {
+ "document": self._client.send_document,
+ "audio": self._client.send_audio,
+ "video": self._client.send_video,
+ "animation": self._client.send_animation,
+ "photo": self._client.send_photo,
+ }
+
+ def send_something(self, *, chat_id, files, _type, caption=None, thumb=None, **kwargs):
+ self._client.send_chat_action(chat_id, enums.ChatAction.UPLOAD_DOCUMENT)
+ if len(files) > 1:
+ inputs = generate_input_media(files, caption)
+ return self._client.send_media_group(chat_id, inputs)[0]
+ else:
+ return self._methods[_type](
+ chat_id,
+ files[0],
+ caption=caption,
+ thumb=thumb,
+                progress=self.upload_hook,
+ **kwargs,
+ )
+
+ @staticmethod
+ def get_metadata(files):
+ video_path = files[0]
+ width = height = duration = 0
+ try:
+ video_streams = ffmpeg.probe(video_path, select_streams="v")
+ for item in video_streams.get("streams", []):
+ height = item["height"]
+ width = item["width"]
+ duration = int(float(video_streams["format"]["duration"]))
+ except Exception as e:
+ logging.error(e)
+ try:
+            thumb = Path(video_path).parent.joinpath(f"{uuid.uuid4().hex}-thumbnail.png").as_posix()
+            # A thumbnail's width and height should not exceed 320 pixels.
+            ffmpeg.input(video_path, ss=duration / 2).filter(
+                "scale",
+                "if(gt(iw,ih),300,-1)",  # if width > height, scale width to 300 and height auto
+                "if(gt(iw,ih),-1,300)",  # otherwise scale height to 300 and width auto
+            ).output(thumb, vframes=1).run()
+ except ffmpeg._run.Error:
+ thumb = None
+
+ return dict(height=height, width=width, duration=duration, thumb=thumb)
+
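+    # Illustrative return value of get_metadata (field names are real, numbers made up):
+    # {"height": 1080, "width": 1920, "duration": 123, "thumb": ".../xxx-thumbnail.png"}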
+ @record_usage
+ def _upload(self):
+ upload = get_upload_settings(self._user_id)
+ chat_id = self._bot_msg.chat.id
+ files = list(Path(self._tempdir.name).glob("*"))
+
+ success = SimpleNamespace(document=None, video=None, audio=None, animation=None, photo=None)
+ if upload == "document":
+ thumb = self.get_metadata(files)["thumb"]
+ success = self.send_something(
+ chat_id=chat_id,
+ files=files,
+ _type="document",
+ thumb=thumb,
+ force_document=True,
+ )
+ elif upload == "audio":
+ success = self.send_something(chat_id=chat_id, files=files, _type="audio")
+ elif upload == "video":
+ methods = {"video": self.get_metadata(files)["thumb"], "animation": None, "photo": None}
+ for method, thumb in methods.items():
+ try:
+ success = self.send_something(chat_id=chat_id, files=files, _type=method, thumb=thumb)
+ break
+                except Exception:
+                    logging.error("Failed to send as %s, trying the next type", method)
+ else:
+ logging.error("Unknown upload settings")
+ return
+
+        # unique key is link + download format
+        unique = self._url + get_download_settings(self._user_id)
+ obj = success.document or success.video or success.audio or success.animation or success.photo
+ self._redis.add_send_cache(unique, getattr(obj, "file_id", None), upload)
+ return success
+
+ @abstractmethod
+ def start(self):
+ pass
diff --git a/src/engine/direct.py b/src/engine/direct.py
new file mode 100644
index 00000000..187fb08f
--- /dev/null
+++ b/src/engine/direct.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python3
+# coding: utf-8
+
+# ytdlbot - direct.py
+
+import logging
+import subprocess
+from pathlib import Path
+from uuid import uuid4
+
+import filetype
+import requests
+from base import BaseDownloader
+
+from config import ENABLE_ARIA2
+
+
+class DirectDownloader(BaseDownloader):
+
+ def _setup_formats(self) -> list | None:
+ # direct download doesn't need to setup formats
+ pass
+
+    def _requests_download(self):
+        response = requests.get(self._url, stream=True)
+        response.raise_for_status()
+        file = Path(self._tempdir.name).joinpath(uuid4().hex)
+
+        with open(file, "wb") as f:
+            for chunk in response.iter_content(chunk_size=8192):
+                f.write(chunk)
+
+        # guess the extension after the bytes are on disk, then rename accordingly
+        ext = filetype.guess_extension(file.as_posix())
+        if ext is not None:
+            file = file.rename(file.with_suffix(f".{ext}"))
+        return [file.as_posix()]
+
+ def _aria2_download(self):
+ ua = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36"
+        # Download using aria2c
+        try:
+            self._bot_msg.edit_text("Aria2 download starting...")
+            # Command to download the link using aria2c
+            command = [
+                "aria2c",
+                "-U",
+                ua,
+                "--max-tries=5",
+                "--console-log-level=warn",
+                "-d",
+                self._tempdir.name,
+                self._url,
+            ]
+            # Run the command using subprocess.Popen
+            process = subprocess.Popen(command, bufsize=0, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+            max_iterations = 100  # a reasonable upper bound on status lines to read
+            iteration = 0
+            while process.poll() is None and iteration < max_iterations:
+                line: str = process.stdout.readline().decode("utf-8")
+                if line.startswith("[#"):
+                    line = line.strip()
+                    self.edit_text(f"Aria2 downloading... \n\n`{line}`", disable_web_page_preview=True)
+                    break
+                iteration += 1
+
+            if iteration >= max_iterations:
+                self.edit_text("Download exceeded the maximum number of iterations. Please try again later.", disable_web_page_preview=True)
+            # wait for aria2c to finish before touching the files
+            process.wait()
+        except Exception as e:
+            self.edit_text(f"Download failed!\n\n`{e}`", disable_web_page_preview=True)
+            return
+        # Get the filename and extension after the download finishes
+        file: Path = next(Path(self._tempdir.name).glob("*"))
+        filename = file.name
+        logging.info("Downloaded file %s", filename)
+        self.edit_text("Download complete", disable_web_page_preview=True)
+        ext = filetype.guess_extension(file.as_posix())
+        # Rename the file if it lacks the proper extension
+        if ext is not None and not filename.endswith(ext):
+            file = file.rename(file.with_suffix(f".{ext}"))
+        # Return the downloaded file path for upload
+        return [file.as_posix()]
+
+ def _download(self, formats=None) -> list:
+ if ENABLE_ARIA2:
+ return self._aria2_download()
+ return self._requests_download()
+
+ def start(self):
+ self._download()
+ self._upload()
diff --git a/src/engine/generic.py b/src/engine/generic.py
new file mode 100644
index 00000000..bdd13fbd
--- /dev/null
+++ b/src/engine/generic.py
@@ -0,0 +1,92 @@
+#!/usr/bin/env python3
+# coding: utf-8
+
+# ytdlbot - generic.py
+
+from pathlib import Path
+
+import yt_dlp
+from base import BaseDownloader
+from pyrogram import types
+
+from database.model import get_download_settings
+
+
+class YoutubeDownload(BaseDownloader):
+
+ def _setup_formats(self) -> list | None:
+ download = get_download_settings(self._user_id)
+ formats = []
+ # "high", "medium", "low", "audio", "custom"
+ if download == "custom":
+ # get format from ytdlp, send inlinekeyboard button to user so they can choose
+ # another callback will be triggered to download the video
+ available_options = {
+ "480P": "best[height<=480]",
+ "720P": "best[height<=720]",
+ "1080P": "best[height<=1080]",
+ }
+ markup, temp_row = [], []
+ for quality, data in available_options.items():
+ temp_row.append(types.InlineKeyboardButton(quality, callback_data=data))
+ if len(temp_row) == 3: # Add a row every 3 buttons
+ markup.append(temp_row)
+ temp_row = []
+ # Add any remaining buttons as the last row
+ if temp_row:
+ markup.append(temp_row)
+ self._bot_msg.edit_text("Choose the format", reply_markup=types.InlineKeyboardMarkup(markup))
+ return None
+ if download == "audio":
+ # download audio only
+ formats.append("bestaudio")
+ elif download == "high":
+ # default config
+ formats.extend(
+ [
+ # webm , vp9 and av01 are not streamable on telegram, so we'll extract only mp4
+ "bestvideo[ext=mp4][vcodec!*=av01][vcodec!*=vp09]+bestaudio[ext=m4a]/bestvideo+bestaudio",
+ "bestvideo[vcodec^=avc]+bestaudio[acodec^=mp4a]/best[vcodec^=avc]/best",
+ None,
+ ]
+ )
+
+ elif download == "medium":
+ # download medium quality video
+ formats.append("medium")
+ elif download == "low":
+ # download low quality video
+ formats.append("worst")
+ return formats
+
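+    # The strings above are standard yt-dlp format selectors: "best[height<=480]" picks
+    # the best single file capped at 480p, and "a+b/c" means "merge a and b, else c".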
+ def _download(self, formats) -> list:
+        output = Path(self._tempdir.name, "%(title).70s.%(ext)s").as_posix()
+ ydl_opts = {
+ "progress_hooks": [lambda d: self.download_hook(d)],
+ "outtmpl": output,
+ "restrictfilenames": False,
+ "quiet": True,
+ }
+
+ if self._url.startswith("https://drive.google.com"):
+ # Always use the `source` format for Google Drive URLs.
+ formats = ["source"] + formats
+
+        files = None
+        for f in formats:
+            ydl_opts["format"] = f
+            try:
+                with yt_dlp.YoutubeDL(ydl_opts) as ydl:
+                    ydl.download([self._url])
+                files = list(Path(self._tempdir.name).glob("*"))
+                break
+            except yt_dlp.utils.DownloadError:
+                # try the next format in the fallback chain
+                continue
+
+        return files
+
+    def start(self, formats=None):
+        # formats, if given, is the user's choice from the custom-quality keyboard
+        if formats is not None:
+            default_formats = formats
+        else:
+            default_formats = self._setup_formats()
+            if default_formats is None:
+                # custom mode: the quality keyboard was sent, wait for the callback
+                return
+        self._download(default_formats)
+        self._upload()
diff --git a/src/engine/helper.py b/src/engine/helper.py
new file mode 100644
index 00000000..0ea80a73
--- /dev/null
+++ b/src/engine/helper.py
@@ -0,0 +1,139 @@
+#!/usr/bin/env python3
+# coding: utf-8
+
+# ytdlbot - helper.py
+
+import functools
+import logging
+import os
+import pathlib
+import subprocess
+import threading
+import time
+
+import ffmpeg
+import ffpb
+
+from config import AUDIO_FORMAT, CAPTION_URL_LENGTH_LIMIT, TG_NORMAL_MAX_SIZE
+from utils import shorten_url, sizeof_fmt
+
+
+def debounce(wait_seconds):
+ """
+    Thread-safe debounce decorator keyed on (chat.id, id) of the target message; the first
+    argument may be the message itself or an object exposing it via `._bot_msg` (BaseDownloader).
+    The function only runs if it hasn't run for the same key in the last `wait_seconds`.
+ """
+
+ def decorator(func):
+ last_called = {}
+ lock = threading.Lock()
+
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ nonlocal last_called
+ now = time.time()
+
+            # the first argument is either the message itself or a BaseDownloader
+            # instance holding the message in ._bot_msg
+            bot_msg = getattr(args[0], "_bot_msg", args[0])
+            key = (bot_msg.chat.id, bot_msg.id)
+
+ with lock:
+ if key not in last_called or now - last_called[key] >= wait_seconds:
+ last_called[key] = now
+ return func(*args, **kwargs)
+
+ return wrapper
+
+ return decorator
+
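+# Example: an edit_text method decorated with @debounce(5) silently drops repeat
+# calls for the same message arriving within 5 seconds of the last accepted one.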
+
+def get_caption(url, video_path):
+    if isinstance(video_path, pathlib.Path):
+        from base import BaseDownloader  # deferred import to avoid a circular dependency
+
+        meta = BaseDownloader.get_metadata([video_path])
+        file_name = video_path.name
+        file_size = sizeof_fmt(os.stat(video_path).st_size)
+    else:
+        file_name = getattr(video_path, "file_name", "")
+        file_size = sizeof_fmt(getattr(video_path, "file_size", 0))
+ meta = dict(
+ width=getattr(video_path, "width", 0),
+ height=getattr(video_path, "height", 0),
+ duration=getattr(video_path, "duration", 0),
+ thumb=getattr(video_path, "thumb", None),
+ )
+
+ # Shorten the URL if necessary
+ try:
+ if len(url) > CAPTION_URL_LENGTH_LIMIT:
+ url_for_cap = shorten_url(url, CAPTION_URL_LENGTH_LIMIT)
+ else:
+ url_for_cap = url
+ except Exception as e:
+ logging.warning(f"Error shortening URL: {e}")
+ url_for_cap = url
+
+    cap = f"{file_name}\n\n{url_for_cap}\n\nInfo: {meta['width']}x{meta['height']} {file_size}\t{meta['duration']}s\n"
+ return cap
+
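+# convert_audio_format below calls run_ffmpeg_progressbar, which is not defined in
+# this file; the following is a minimal sketch assuming the imported ffpb package is
+# meant to drive a progress bar for the ffmpeg command (the bm argument is kept for
+# interface compatibility and unused here).
+def run_ffmpeg_progressbar(cmd: list, bm=None):
+    # ffpb.main expects the ffmpeg arguments without the leading "ffmpeg" itself
+    ffpb.main(argv=[str(c) for c in cmd[1:]])
+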
+
+def convert_audio_format(video_paths: list, bm):
+ # 1. file is audio, default format
+ # 2. file is video, default format
+ # 3. non default format
+
+ for path in video_paths:
+ streams = ffmpeg.probe(path)["streams"]
+ if AUDIO_FORMAT is None and len(streams) == 1 and streams[0]["codec_type"] == "audio":
+ logging.info("%s is audio, default format, no need to convert", path)
+ elif AUDIO_FORMAT is None and len(streams) >= 2:
+ logging.info("%s is video, default format, need to extract audio", path)
+ audio_stream = {"codec_name": "m4a"}
+ for stream in streams:
+ if stream["codec_type"] == "audio":
+ audio_stream = stream
+ break
+ ext = audio_stream["codec_name"]
+ new_path = path.with_suffix(f".{ext}")
+ run_ffmpeg_progressbar(["ffmpeg", "-y", "-i", path, "-vn", "-acodec", "copy", new_path], bm)
+ path.unlink()
+ index = video_paths.index(path)
+ video_paths[index] = new_path
+ else:
+ logging.info("Not default format, converting %s to %s", path, AUDIO_FORMAT)
+ new_path = path.with_suffix(f".{AUDIO_FORMAT}")
+ run_ffmpeg_progressbar(["ffmpeg", "-y", "-i", path, new_path], bm)
+ path.unlink()
+ index = video_paths.index(path)
+ video_paths[index] = new_path
+
+
+def split_large_video(video_paths: list):
+ original_video = None
+ split = False
+ for original_video in video_paths:
+ size = os.stat(original_video).st_size
+ if size > TG_NORMAL_MAX_SIZE:
+ split = True
+ logging.warning("file is too large %s, splitting...", size)
+ subprocess.check_output(f"sh split-video.sh {original_video} {TG_NORMAL_MAX_SIZE * 0.95} ".split())
+ os.remove(original_video)
+
+    if split and original_video:
+        return [i for i in pathlib.Path(original_video).parent.glob("*")]
+    return video_paths
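+# NOTE (assumption): split-video.sh writes the resulting chunks next to the original
+# file, which is why the pieces are collected by globbing the parent directory.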
diff --git a/src/engine/instagram.py b/src/engine/instagram.py
new file mode 100644
index 00000000..3921a7f2
--- /dev/null
+++ b/src/engine/instagram.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python3
+# coding: utf-8
+
+# ytdlbot - instagram.py
+
+
+import pathlib
+import re
+
+import filetype
+import requests
+from base import BaseDownloader
+
+
+class InstagramDownload(BaseDownloader):
+ def extract_code(self):
+ # Regular expression patterns
+ patterns = [
+ # Instagram stories highlights
+ r"/stories/highlights/([a-zA-Z0-9_-]+)/",
+ # Posts
+ r"/p/([a-zA-Z0-9_-]+)/",
+ # Reels
+ r"/reel/([a-zA-Z0-9_-]+)/",
+ # TV
+ r"/tv/([a-zA-Z0-9_-]+)/",
+ # Threads post (both with @username and without)
+ r"(?:https?://)?(?:www\.)?(?:threads\.net)(?:/[@\w.]+)?(?:/post)?/([\w-]+)(?:/?\?.*)?$",
+ ]
+
+ for pattern in patterns:
+ match = re.search(pattern, self._url)
+ if match:
+ if pattern == patterns[0]: # Check if it's the stories highlights pattern
+ # Return the URL as it is
+ return self._url
+ else:
+ # Return the code part (first group)
+ return match.group(1)
+
+ return None
+
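+    # Example with a hypothetical code: "https://www.instagram.com/p/Cxyz12/" -> "Cxyz12";
+    # a /stories/highlights/ link is returned unchanged as the full URL.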
+ def _setup_formats(self) -> list | None:
+ pass
+
+    def _download(self, formats):
+        # http://instagram:15000 is assumed to be the project's Instagram resolver
+        # side-service, which returns direct media links for the post
+        resp = requests.get(f"http://instagram:15000/?url={self._url}").json()
+        code = self.extract_code()
+        counter = 1
+        video_paths = []
+        if url_results := resp.get("data"):
+            for link in url_results:
+                req = requests.get(link, stream=True)
+                length = int(req.headers.get("content-length", 0))
+                save_path = pathlib.Path(self._tempdir.name, f"{code}_{counter}")
+                downloaded = 0
+                with open(save_path, "wb") as fp:
+                    for chunk in req.iter_content(chunk_size=4096):
+                        fp.write(chunk)
+                        downloaded += len(chunk)
+                        self.edit_text(f"Downloading {save_path.name}: {downloaded}/{length} bytes")
+                # guess the extension from the downloaded bytes and rename
+                if ext := filetype.guess_extension(save_path.as_posix()):
+                    save_path = save_path.rename(save_path.with_suffix(f".{ext}"))
+                video_paths.append(save_path)
+                counter += 1
+
+        return video_paths
+
+    def start(self):
+        self._download(None)
+        self._upload()
diff --git a/src/engine/krakenfiles.py b/src/engine/krakenfiles.py
new file mode 100644
index 00000000..1ac4dfcb
--- /dev/null
+++ b/src/engine/krakenfiles.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python3
+# coding: utf-8
+
+# ytdlbot - krakenfiles.py
+
+__author__ = "SanujaNS "
+
+import requests
+from bs4 import BeautifulSoup
+
+
+def krakenfiles(url: str, tempdir: str, bm, **kwargs):
+ resp = requests.get(url)
+ html = resp.content
+ soup = BeautifulSoup(html, "html.parser")
+ link_parts = []
+ token_parts = []
+ for form_tag in soup.find_all("form"):
+ action = form_tag.get("action")
+ if action and "krakenfiles.com" in action:
+ link_parts.append(action)
+ input_tag = form_tag.find("input", {"name": "token"})
+ if input_tag:
+ value = input_tag.get("value")
+ token_parts.append(value)
+ for link_part, token_part in zip(link_parts, token_parts):
+ link = f"https:{link_part}"
+ data = {"token": token_part}
+ response = requests.post(link, data=data)
+ json_data = response.json()
+ url = json_data["url"]
+ return sp_ytdl_download(url, tempdir, bm, **kwargs)
diff --git a/src/engine/pixeldrain.py b/src/engine/pixeldrain.py
new file mode 100644
index 00000000..14210341
--- /dev/null
+++ b/src/engine/pixeldrain.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python3
+# coding: utf-8
+
+# ytdlbot - pixeldrain.py
+
+__author__ = "SanujaNS "
+
+import re
+
+
+def pixeldrain(url: str, tempdir: str, bm, **kwargs):
+ user_page_url_regex = r"https://pixeldrain.com/u/(\w+)"
+ match = re.match(user_page_url_regex, url)
+ if match:
+ url = "https://pixeldrain.com/api/file/{}?engine".format(match.group(1))
+ return sp_ytdl_download(url, tempdir, bm, **kwargs)
+ else:
+ return url
diff --git a/src/main.py b/src/main.py
new file mode 100644
index 00000000..53ed3a7c
--- /dev/null
+++ b/src/main.py
@@ -0,0 +1,401 @@
+#!/usr/local/bin/python3
+# coding: utf-8
+
+# ytdlbot - main.py
+# 8/14/21 14:37
+#
+
+__author__ = "Benny "
+
+import contextlib
+import logging
+import re
+import tempfile
+import threading
+import time
+import typing
+from io import BytesIO
+from typing import Any
+
+import psutil
+import pyrogram.errors
+import yt_dlp
+from apscheduler.schedulers.background import BackgroundScheduler
+from pyrogram import Client, enums, filters, types
+from pyrogram.raw import functions
+from pyrogram.raw import types as raw_types
+
+from config import (
+ APP_HASH,
+ APP_ID,
+ AUTHORIZED_USER,
+ BOT_TOKEN,
+ ENABLE_ARIA2,
+ ENABLE_FFMPEG,
+ ENABLE_VIP,
+ OWNER,
+ PROVIDER_TOKEN,
+ TOKEN_PRICE,
+ BotText,
+)
+from database.model import init_user, set_user_settings
+from utils import extract_url_and_name, sizeof_fmt, timeof_fmt
+
+logging.info("Authorized users are %s", AUTHORIZED_USER)
+logging.getLogger("apscheduler.executors.default").propagate = False
+
+
+def create_app(name: str, workers: int = 32) -> Client:
+ return Client(
+ name,
+ APP_ID,
+ APP_HASH,
+ bot_token=BOT_TOKEN,
+ workers=workers,
+ # max_concurrent_transmissions=max(1, WORKERS // 2),
+ # https://github.com/pyrogram/pyrogram/issues/1225#issuecomment-1446595489
+ )
+
+
+app = create_app("main")
+
+
+def private_use(func):
+ def wrapper(client: Client, message: types.Message):
+ chat_id = getattr(message.from_user, "id", None)
+
+ # message type check
+ if message.chat.type != enums.ChatType.PRIVATE and not getattr(message, "text", "").lower().startswith("/ytdl"):
+            logging.debug("%s, it's annoying me...", message.text)
+ return
+
+ # authorized users check
+ if AUTHORIZED_USER:
+ users = [int(i) for i in AUTHORIZED_USER.split(",")]
+ else:
+ users = []
+
+ if users and chat_id and chat_id not in users:
+ message.reply_text("BotText.private", quote=True)
+ return
+
+ return func(client, message)
+
+ return wrapper
+
+
+@app.on_message(filters.command(["start"]))
+def start_handler(client: Client, message: types.Message):
+ from_id = message.chat.id
+ init_user(from_id)
+ logging.info("%s welcome to youtube-dl bot!", message.from_user.id)
+ client.send_chat_action(from_id, enums.ChatAction.TYPING)
+ client.send_message(message.chat.id, BotText.start, disable_web_page_preview=True)
+
+
+@app.on_message(filters.command(["help"]))
+def help_handler(client: Client, message: types.Message):
+ chat_id = message.chat.id
+ init_user(chat_id)
+ client.send_chat_action(chat_id, enums.ChatAction.TYPING)
+ client.send_message(chat_id, BotText.help, disable_web_page_preview=True)
+
+
+@app.on_message(filters.command(["about"]))
+def about_handler(client: Client, message: types.Message):
+ chat_id = message.chat.id
+ init_user(chat_id)
+ client.send_chat_action(chat_id, enums.ChatAction.TYPING)
+ client.send_message(chat_id, BotText.about)
+
+
+@app.on_message(filters.command(["ping"]))
+def ping_handler(client: Client, message: types.Message):
+ chat_id = message.chat.id
+ init_user(chat_id)
+ client.send_chat_action(chat_id, enums.ChatAction.TYPING)
+
+    def send_message_and_measure_ping():
+        start_time = int(round(time.time() * 1000))
+        reply: types.Message | typing.Any = client.send_message(chat_id, "Starting Ping...")
+        end_time = int(round(time.time() * 1000))
+        ping_time = end_time - start_time
+        message.reply_text(f"Ping: {ping_time} ms", quote=True)
+        time.sleep(0.5)
+        client.edit_message_text(chat_id=reply.chat.id, message_id=reply.id, text="Ping Calculation Complete.")
+        time.sleep(1)
+        client.delete_messages(chat_id=reply.chat.id, message_ids=reply.id)
+
+ thread = threading.Thread(target=send_message_and_measure_ping)
+ thread.start()
+
+
+@app.on_message(filters.command(["stats"]))
+def stats_handler(client: Client, message: types.Message):
+ chat_id = message.chat.id
+ init_user(chat_id)
+ client.send_chat_action(chat_id, enums.ChatAction.TYPING)
+ cpu_usage = psutil.cpu_percent()
+ total, used, free, disk = psutil.disk_usage("/")
+ swap = psutil.swap_memory()
+ memory = psutil.virtual_memory()
+ boot_time = psutil.boot_time()
+
+    owner_stats = (
+        "\n\n┬─────「 Stats 」─────┬\n\n"
+        f"╭🖥️ **CPU Usage »** __{cpu_usage}%__\n"
+        f"│💾 **RAM Usage »** __{memory.percent}%__\n"
+        f"╰🗄️ **DISK Usage »** __{disk}%__\n\n"
+        f"╭📤Upload: {sizeof_fmt(psutil.net_io_counters().bytes_sent)}\n"
+        f"╰📥Download: {sizeof_fmt(psutil.net_io_counters().bytes_recv)}\n\n\n"
+        f"Memory Total: {sizeof_fmt(memory.total)}\n"
+        f"Memory Free: {sizeof_fmt(memory.available)}\n"
+        f"Memory Used: {sizeof_fmt(memory.used)}\n"
+        f"SWAP Total: {sizeof_fmt(swap.total)} | SWAP Usage: {swap.percent}%\n\n"
+        f"Total Disk Space: {sizeof_fmt(total)}\n"
+        f"Used: {sizeof_fmt(used)} | Free: {sizeof_fmt(free)}\n\n"
+        f"Physical Cores: {psutil.cpu_count(logical=False)}\n"
+        f"Total Cores: {psutil.cpu_count(logical=True)}\n\n"
+        f"🤖Bot Uptime: {timeof_fmt(time.time() - botStartTime)}\n"
+        f"⏲️OS Uptime: {timeof_fmt(time.time() - boot_time)}\n"
+    )
+
+    user_stats = (
+        "\n\n┬─────「 Stats 」─────┬\n\n"
+        f"╭🖥️ **CPU Usage »** __{cpu_usage}%__\n"
+        f"│💾 **RAM Usage »** __{memory.percent}%__\n"
+        f"╰🗄️ **DISK Usage »** __{disk}%__\n\n"
+        f"╭📤Upload: {sizeof_fmt(psutil.net_io_counters().bytes_sent)}\n"
+        f"╰📥Download: {sizeof_fmt(psutil.net_io_counters().bytes_recv)}\n\n\n"
+        f"Memory Total: {sizeof_fmt(memory.total)}\n"
+        f"Memory Free: {sizeof_fmt(memory.available)}\n"
+        f"Memory Used: {sizeof_fmt(memory.used)}\n"
+        f"Total Disk Space: {sizeof_fmt(total)}\n"
+        f"Used: {sizeof_fmt(used)} | Free: {sizeof_fmt(free)}\n\n"
+        f"🤖Bot Uptime: {timeof_fmt(time.time() - botStartTime)}\n"
+    )
+
+ if message.chat.username == OWNER:
+ message.reply_text(owner_stats, quote=True)
+ else:
+ message.reply_text(user_stats, quote=True)
+
+
+@app.on_message(filters.command(["settings"]))
+def settings_handler(client: Client, message: types.Message):
+ chat_id = message.chat.id
+ init_user(chat_id)
+ client.send_chat_action(chat_id, enums.ChatAction.TYPING)
+ markup = types.InlineKeyboardMarkup(
+ [
+ [ # First row
+ types.InlineKeyboardButton("send as document", callback_data="document"),
+ types.InlineKeyboardButton("send as video", callback_data="video"),
+ types.InlineKeyboardButton("send as audio", callback_data="audio"),
+ ],
+ [ # second row
+ types.InlineKeyboardButton("High Quality", callback_data="high"),
+ types.InlineKeyboardButton("Medium Quality", callback_data="medium"),
+ types.InlineKeyboardButton("Low Quality", callback_data="low"),
+ ],
+ ]
+ )
+
+ client.send_message(chat_id, BotText.settings.format("a", "b"), reply_markup=markup)
+
+
+@app.on_callback_query(filters.regex(r"bot-payments-.*"))
+def bot_payment_btn_callback(client: Client, callback_query: types.CallbackQuery):
+ callback_query.answer("Generating invoice...")
+ chat_id = callback_query.message.chat.id
+ client.send_chat_action(chat_id, enums.ChatAction.TYPING)
+
+ data = callback_query.data
+ price = int(data.split("-")[-1])
+ payload = f"{chat_id}-buy"
+    invoice = generate_invoice(price, f"Buy {TOKEN_PRICE} download tokens", "Pay by card", payload)
+ app.invoke(
+ functions.messages.SendMedia(
+ peer=(raw_types.InputPeerUser(user_id=chat_id, access_hash=0)),
+ media=invoice,
+ random_id=app.rnd_id(),
+ message="Buy more engine token",
+ )
+ )
+
+
+def generate_invoice(amount: int, title: str, description: str, payload: str):
+ invoice = raw_types.input_media_invoice.InputMediaInvoice(
+ invoice=raw_types.invoice.Invoice(
+ currency="USD", prices=[raw_types.LabeledPrice(label="price", amount=amount)]
+ ),
+ title=title,
+ description=description,
+ provider=PROVIDER_TOKEN,
+ provider_data=raw_types.DataJSON(data="{}"),
+ payload=payload.encode(),
+ start_param=payload,
+ )
+ return invoice
+
+
+def link_checker(url: str) -> str:
+ if url.startswith("https://www.instagram.com"):
+ return ""
+ ytdl = yt_dlp.YoutubeDL()
+ if re.findall(r"^https://www\.youtube\.com/channel/", url) or "list" in url:
+ # TODO maybe using ytdl.extract_info
+ return "Playlist or channel links are disabled."
+
+ if re.findall(r"m3u8|\.m3u8|\.m3u$", url.lower()):
+ return "m3u8 links are not supported."
+
+ with contextlib.suppress(yt_dlp.utils.DownloadError):
+ if ytdl.extract_info(url, download=False).get("live_status") == "is_live":
+ return "Live stream links are disabled. Please engine it after the stream ends."
+
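+# link_checker returns a human-readable rejection reason for unsupported links and
+# implicitly returns None when the URL is fine.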
+
+@app.on_message(filters.command(["spdl"]))
+def spdl_handler(client: Client, message: types.Message):
+ chat_id = message.from_user.id
+ init_user(chat_id)
+ client.send_chat_action(chat_id, enums.ChatAction.TYPING)
+ message_text = message.text
+ url, new_name = extract_url_and_name(message_text)
+ logging.info("spdl start %s", url)
+ if url is None or not re.findall(r"^https?://", url.lower()):
+ message.reply_text("Something wrong ๐ค.\nCheck your URL and send me again.", quote=True)
+ return
+
+ bot_msg = message.reply_text("Request received.", quote=True)
+ spdl_download_entrance(client, bot_msg, url)
+
+
+@app.on_message(filters.command(["leech"]))
+def leech_handler(client: Client, message: types.Message):
+ if not ENABLE_ARIA2:
+ message.reply_text("Aria2 Not Enabled.", quote=True)
+ return
+
+ chat_id = message.from_user.id
+ client.send_chat_action(chat_id, enums.ChatAction.TYPING)
+ message_text = message.text
+ url, new_name = extract_url_and_name(message_text)
+ logging.info("leech using aria2 start %s", url)
+ if url is None or not re.findall(r"^https?://", url.lower()):
+ message.reply_text("Send me a correct LINK.", quote=True)
+ return
+
+ bot_msg = message.reply_text("Request received.", quote=True)
+ leech_download_entrance(client, bot_msg, url)
+
+
+@app.on_message(filters.command(["ytdl"]))
+def ytdl_handler(client: Client, message: types.Message):
+ # for group
+ chat_id = message.from_user.id
+ init_user(chat_id)
+ client.send_chat_action(chat_id, enums.ChatAction.TYPING)
+ message_text = message.text
+ url, new_name = extract_url_and_name(message_text)
+ logging.info("ytdl start %s", url)
+ if url is None or not re.findall(r"^https?://", url.lower()):
+ message.reply_text("Something wrong ๐ค.\nCheck your URL and send me again.", quote=True)
+ return
+
+ bot_msg = message.reply_text("Request received.", quote=True)
+ ytdl_download_entrance(client, bot_msg, url)
+
+
+@app.on_message(filters.incoming & filters.text)
+@private_use
+def download_handler(client: Client, message: types.Message):
+ chat_id = message.from_user.id
+ init_user(chat_id)
+ client.send_chat_action(chat_id, enums.ChatAction.TYPING)
+ url = message.text
+ logging.info("start %s", url)
+ # TODO check link
+
+ try:
+ # raise pyrogram.errors.exceptions.FloodWait(10)
+ bot_msg: types.Message | Any = message.reply_text("Acked", quote=True)
+ except pyrogram.errors.Flood as e:
+ f = BytesIO()
+ f.write(str(e).encode())
+ f.write(b"Your job will be done soon. Just wait!")
+ f.name = "Please don't flood me.txt"
+        bot_msg = message.reply_document(
+            f, caption=f"Flood wait! Please wait {e.value} seconds. Your job will start automatically.", quote=True
+        )
+ f.close()
+        client.send_message(OWNER, f"Flood wait! {e.value} seconds....")
+ time.sleep(e.value)
+
+ client.send_chat_action(chat_id, enums.ChatAction.UPLOAD_VIDEO)
+ bot_msg.chat = message.chat
+ ytdl_download_entrance(client, bot_msg, url)
+
+
+@app.on_callback_query(filters.regex(r"document|video|audio"))
+def send_method_callback(client: Client, callback_query: types.CallbackQuery):
+ chat_id = callback_query.message.chat.id
+ data = callback_query.data
+ logging.info("Setting %s file type to %s", chat_id, data)
+ callback_query.answer(f"Your send type was set to {callback_query.data}")
+
+
+@app.on_callback_query(filters.regex(r"high|medium|low"))
+def download_resolution_callback(client: Client, callback_query: types.CallbackQuery):
+ chat_id = callback_query.message.chat.id
+ data = callback_query.data
+ logging.info("Setting %s file type to %s", chat_id, data)
+ callback_query.answer(f"Your default engine quality was set to {callback_query.data}")
+
+
+@app.on_callback_query(filters.regex(r"convert"))
+def audio_callback(client: Client, callback_query: types.CallbackQuery):
+ if not ENABLE_FFMPEG:
+ callback_query.answer("Request rejected.")
+ callback_query.message.reply_text("Audio conversion is disabled now.")
+ return
+
+ callback_query.answer(f"Converting to audio...please wait patiently")
+ audio_entrance(client, callback_query.message)
+
+
+@app.on_raw_update()
+def raw_update(client: Client, update, users, chats):
+ action = getattr(getattr(update, "message", None), "action", None)
+ if update.QUALNAME == "types.UpdateBotPrecheckoutQuery":
+ client.invoke(
+ functions.messages.SetBotPrecheckoutResults(
+ query_id=update.query_id,
+ success=True,
+ )
+ )
+ elif action and action.QUALNAME == "types.MessageActionPaymentSentMe":
+ logging.info("Payment received. %s", action)
+ uid = update.message.peer_id.user_id
+ amount = action.total_amount / 100
+ client.send_message(uid, f"Thank you {uid}. Payment received: {amount} {action.currency}")
+
+
+if __name__ == "__main__":
+ botStartTime = time.time()
+ scheduler = BackgroundScheduler(timezone="Europe/London")
+ # scheduler.add_job( reset_today, "cron", hour=0, minute=0)
+ scheduler.start()
+ banner = f"""
+โ โ โโโ โ โโโ โ โ
+โโ โโโ โ โ โ โ โ โโโ โโโ โ โ โโโ โ โ โโโ โ โโโ โโโ โโโ
+ โ โ โ โ โ โ โ โ โ โ โโ โ โ โ โ โโโ โ โ โ โ โ โโโ โ โ
+ โ โโ โโโ โ โโโ โโ โโโ โโ โโ โโ โ โ โ โโ โโโ โโโ
+
+By @BennyThink, mode: {ENABLE_VIP}
+ """
+ print(banner)
+ app.run()
diff --git a/src/utils/__init__.py b/src/utils/__init__.py
new file mode 100644
index 00000000..105d5d2e
--- /dev/null
+++ b/src/utils/__init__.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python3
+# coding: utf-8
+
+# ytdlbot - __init__.py
+
+
+import pathlib
+import re
+import shutil
+import tempfile
+import time
+from urllib.parse import quote_plus
+
+from config import TMPFILE_PATH
+
+
+def sizeof_fmt(num: int, suffix="B"):
+ for unit in ["", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"]:
+ if abs(num) < 1024.0:
+ return "%3.1f%s%s" % (num, unit, suffix)
+ num /= 1024.0
+ return "%.1f%s%s" % (num, "Yi", suffix)
+
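+# Example: sizeof_fmt(2048) -> "2.0KiB", sizeof_fmt(3 * 1024**3) -> "3.0GiB"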
+
+def timeof_fmt(seconds: int | float):
+ periods = [("d", 86400), ("h", 3600), ("m", 60), ("s", 1)]
+ result = ""
+ for period_name, period_seconds in periods:
+ if seconds >= period_seconds:
+ period_value, seconds = divmod(seconds, period_seconds)
+ result += f"{int(period_value)}{period_name}"
+ return result
+
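+# Example: timeof_fmt(90061) -> "1d1h1m1s"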
+
+def is_youtube(url: str):
+    return url.startswith("https://www.youtube.com/") or url.startswith("https://youtu.be/")
+
+
+def adjust_formats(formats):
+ # high: best quality 1080P, 2K, 4K, 8K
+ # medium: 720P
+ # low: 480P
+
+ mapping = {"high": [], "medium": [720], "low": [480]}
+ # formats.insert(0, f"bestvideo[ext=mp4][height={m}]+bestaudio[ext=m4a]")
+ # formats.insert(1, f"bestvideo[vcodec^=avc][height={m}]+bestaudio[acodec^=mp4a]/best[vcodec^=avc]/best")
+ #
+ # if settings[2] == "audio":
+ # formats.insert(0, "bestaudio[ext=m4a]")
+ #
+ # if settings[2] == "document":
+ # formats.insert(0, None)
+
+
+def current_time(ts=None):
+ return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(ts))
+
+
+def clean_tempfile():
+ patterns = ["ytdl*", "spdl*", "leech*", "direct*"]
+ temp_path = pathlib.Path(TMPFILE_PATH or tempfile.gettempdir())
+
+ for pattern in patterns:
+ for item in temp_path.glob(pattern):
+ if time.time() - item.stat().st_ctime > 3600:
+ shutil.rmtree(item, ignore_errors=True)
+
+
+def shorten_url(url, limit):
+    # Shorten a URL by truncating it to the given length.
+    return url[: limit - 3] + "..."
+
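+# Example: shorten_url("https://example.com/very/long/path", 15) -> "https://exam..."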
+
+def extract_filename(response):
+ try:
+ content_disposition = response.headers.get("content-disposition")
+ if content_disposition:
+ filename = re.findall("filename=(.+)", content_disposition)[0]
+ return filename
+ except (TypeError, IndexError):
+ pass # Handle potential exceptions during extraction
+
+ # Fallback if Content-Disposition header is missing
+ filename = response.url.rsplit("/")[-1]
+ if not filename:
+ filename = quote_plus(response.url)
+ return filename
+
+
+def extract_url_and_name(message_text):
+ # Regular expression to match the URL
+ url_pattern = r"(https?://[^\s]+)"
+ # Regular expression to match the new name after '-n'
+ name_pattern = r"-n\s+(.+)$"
+
+ # Find the URL in the message_text
+ url_match = re.search(url_pattern, message_text)
+ url = url_match.group(0) if url_match else None
+
+ # Find the new name in the message_text
+ name_match = re.search(name_pattern, message_text)
+ new_name = name_match.group(1) if name_match else None
+
+ return url, new_name
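+
+# Example: extract_url_and_name("/ytdl https://example.com/v.mp4 -n My Video")
+# returns ("https://example.com/v.mp4", "My Video").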
diff --git a/worker.yml b/worker.yml
deleted file mode 100644
index 990e2453..00000000
--- a/worker.yml
+++ /dev/null
@@ -1,15 +0,0 @@
-version: '3.1'
-
-services:
- worker:
- image: bennythink/ytdlbot
- env_file:
- - env/ytdl.env
- restart: always
- command: [ "/usr/local/bin/supervisord", "-c" ,"/ytdlbot/conf/supervisor_worker.conf" ]
-# network_mode: "host"
-# deploy:
-# resources:
-# limits:
-# cpus: '0.3'
-# memory: 1500M
diff --git a/ytdlbot/channel.py b/ytdlbot/channel.py
deleted file mode 100644
index fd70a1ec..00000000
--- a/ytdlbot/channel.py
+++ /dev/null
@@ -1,181 +0,0 @@
-#!/usr/bin/env python3
-# coding: utf-8
-import http
-import logging
-import os
-import re
-
-import requests
-from bs4 import BeautifulSoup
-
-from config import ENABLE_VIP
-from limit import Payment
-
-
-class Channel(Payment):
- def subscribe_channel(self, user_id: int, share_link: str) -> str:
- if not re.findall(r"youtube\.com|youtu\.be", share_link):
- raise ValueError("Is this a valid YouTube Channel link?")
- if ENABLE_VIP:
- self.cur.execute("select count(user_id) from subscribe where user_id=%s", (user_id,))
- usage = int(self.cur.fetchone()[0])
- if usage >= 10:
- logging.warning("User %s has subscribed %s channels", user_id, usage)
- return "You have subscribed too many channels. Maximum 5 channels."
-
- data = self.get_channel_info(share_link)
- channel_id = data["channel_id"]
-
- self.cur.execute("select user_id from subscribe where user_id=%s and channel_id=%s", (user_id, channel_id))
- if self.cur.fetchall():
- raise ValueError("You have already subscribed this channel.")
-
- self.cur.execute(
- "INSERT IGNORE INTO channel values"
- "(%(link)s,%(title)s,%(description)s,%(channel_id)s,%(playlist)s,%(last_video)s)",
- data,
- )
- self.cur.execute("INSERT INTO subscribe values(%s,%s, NULL)", (user_id, channel_id))
- self.con.commit()
- logging.info("User %s subscribed channel %s", user_id, data["title"])
- return "Subscribed to {}".format(data["title"])
-
- def unsubscribe_channel(self, user_id: int, channel_id: str) -> int:
- affected_rows = self.cur.execute(
- "DELETE FROM subscribe WHERE user_id=%s AND channel_id=%s", (user_id, channel_id)
- )
- self.con.commit()
- logging.info("User %s tried to unsubscribe channel %s", user_id, channel_id)
- return affected_rows
-
- @staticmethod
- def extract_canonical_link(url: str) -> str:
- # canonic link works for many websites. It will strip out unnecessary stuff
- props = ["canonical", "alternate", "shortlinkUrl"]
- headers = {
- "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.163 Safari/537.36"
- }
- cookie = {"CONSENT": "PENDING+197"}
- # send head request first
- r = requests.head(url, headers=headers, allow_redirects=True, cookies=cookie)
- if r.status_code != http.HTTPStatus.METHOD_NOT_ALLOWED and "text/html" not in r.headers.get("content-type", ""):
- # get content-type, if it's not text/html, there's no need to issue a GET request
- logging.warning("%s Content-type is not text/html, no need to GET for extract_canonical_link", url)
- return url
-
- html_doc = requests.get(url, headers=headers, cookies=cookie, timeout=5).text
- soup = BeautifulSoup(html_doc, "html.parser")
- for prop in props:
- element = soup.find(lambda tag: tag.name == "link" and tag.get("rel") == ["prop"])
- try:
- href = element["href"]
- if href not in ["null", "", None, "https://consent.youtube.com/m"]:
- return href
- except Exception as e:
- logging.debug("Canonical exception %s %s e", url, e)
-
- return url
-
- def get_channel_info(self, url: str) -> dict:
- api_key = os.getenv("GOOGLE_API_KEY")
- canonical_link = self.extract_canonical_link(url)
- try:
- channel_id = canonical_link.split("youtube.com/channel/")[1]
- except IndexError:
- channel_id = canonical_link.split("/")[-1]
- channel_api = (
- f"https://www.googleapis.com/youtube/v3/channels?part=snippet,contentDetails&id={channel_id}&key={api_key}"
- )
-
- data = requests.get(channel_api).json()
- snippet = data["items"][0]["snippet"]
- title = snippet["title"]
- description = snippet["description"]
- playlist = data["items"][0]["contentDetails"]["relatedPlaylists"]["uploads"]
-
- return {
- "link": url,
- "title": title,
- "description": description,
- "channel_id": channel_id,
- "playlist": playlist,
- "last_video": self.get_latest_video(playlist),
- }
-
- @staticmethod
- def get_latest_video(playlist_id: str) -> str:
- api_key = os.getenv("GOOGLE_API_KEY")
- video_api = (
- f"https://www.googleapis.com/youtube/v3/playlistItems?part=snippet&maxResults=1&"
- f"playlistId={playlist_id}&key={api_key}"
- )
- data = requests.get(video_api).json()
- video_id = data["items"][0]["snippet"]["resourceId"]["videoId"]
- logging.info(f"Latest video %s from %s", video_id, data["items"][0]["snippet"]["channelTitle"])
- return f"https://www.youtube.com/watch?v={video_id}"
-
- def has_newer_update(self, channel_id: str) -> str:
- self.cur.execute("SELECT playlist,latest_video FROM channel WHERE channel_id=%s", (channel_id,))
- data = self.cur.fetchone()
- playlist_id = data[0]
- old_video = data[1]
- newest_video = self.get_latest_video(playlist_id)
- if old_video != newest_video:
- logging.info("Newer update found for %s %s", channel_id, newest_video)
- self.cur.execute("UPDATE channel SET latest_video=%s WHERE channel_id=%s", (newest_video, channel_id))
- self.con.commit()
- return newest_video
-
- def get_user_subscription(self, user_id: int) -> str:
- self.cur.execute(
- """
- select title, link, channel.channel_id from channel, subscribe
- where subscribe.user_id = %s and channel.channel_id = subscribe.channel_id
- """,
- (user_id,),
- )
- data = self.cur.fetchall()
- text = ""
- for item in data:
- text += "[{}]({}) `{}\n`".format(*item)
- return text
-
- def group_subscriber(self) -> dict:
- # {"channel_id": [user_id, user_id, ...]}
- self.cur.execute("select * from subscribe where is_valid=1")
- data = self.cur.fetchall()
- group = {}
- for item in data:
- group.setdefault(item[1], []).append(item[0])
- logging.info("Checking periodic subscriber...")
- return group
-
- def deactivate_user_subscription(self, user_id: int):
- self.cur.execute("UPDATE subscribe set is_valid=0 WHERE user_id=%s", (user_id,))
- self.con.commit()
-
- def sub_count(self) -> str:
- sql = """
- select user_id, channel.title, channel.link
- from subscribe, channel where subscribe.channel_id = channel.channel_id
- """
- self.cur.execute(sql)
- data = self.cur.fetchall()
- text = f"Total {len(data)} subscriptions found.\n\n"
- for item in data:
- text += "{} ==> [{}]({})\n".format(*item)
- return text
-
- def del_cache(self, user_link: str) -> int:
- unique = self.extract_canonical_link(user_link)
- caches = self.r.hgetall("cache")
- count = 0
- for key in caches:
- if key.startswith(unique):
- count += self.del_send_cache(key)
- return count
-
-
-if __name__ == "__main__":
- s = Channel.extract_canonical_link("https://www.youtube.com/shorts/KkbYbknjPBM")
- print(s)
diff --git a/ytdlbot/client_init.py b/ytdlbot/client_init.py
deleted file mode 100644
index 7702cd51..00000000
--- a/ytdlbot/client_init.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/local/bin/python3
-# coding: utf-8
-
-# ytdlbot - client_init.py
-# 12/29/21 16:20
-#
-
-__author__ = "Benny "
-
-from pyrogram import Client
-
-from config import APP_HASH, APP_ID, PYRO_WORKERS, TOKEN, IPv6
-
-
-def create_app(name: str, workers: int = PYRO_WORKERS) -> Client:
- return Client(
- name,
- APP_ID,
- APP_HASH,
- bot_token=TOKEN,
- workers=workers,
- ipv6=IPv6,
- # max_concurrent_transmissions=max(1, WORKERS // 2),
- # https://github.com/pyrogram/pyrogram/issues/1225#issuecomment-1446595489
- )
diff --git a/ytdlbot/config.py b/ytdlbot/config.py
deleted file mode 100644
index 6bec4bb2..00000000
--- a/ytdlbot/config.py
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/usr/local/bin/python3
-# coding: utf-8
-
-# ytdlbot - config.py
-# 8/28/21 15:01
-#
-
-__author__ = "Benny "
-
-import os
-
-from blinker import signal
-
-# general settings
-WORKERS: int = int(os.getenv("WORKERS", 10))
-PYRO_WORKERS: int = int(os.getenv("PYRO_WORKERS", 100))
-APP_ID: int = int(os.getenv("APP_ID", 198214))
-APP_HASH = os.getenv("APP_HASH", "1234b90")
-TOKEN = os.getenv("TOKEN", "1234")
-
-REDIS = os.getenv("REDIS", "redis")
-
-ENABLE_VIP = os.getenv("VIP", False)
-OWNER = os.getenv("OWNER", "BennyThink")
-
-# limitation settings
-AUTHORIZED_USER: str = os.getenv("AUTHORIZED_USER", "")
-# membership requires: the format could be username(without @ sign)/chat_id of channel or group.
-# You need to add the bot to this group/channel as admin
-REQUIRED_MEMBERSHIP: str = os.getenv("REQUIRED_MEMBERSHIP", "")
-
-# celery related
-ENABLE_CELERY = os.getenv("ENABLE_CELERY", False)
-BROKER = os.getenv("BROKER", f"redis://{REDIS}:6379/1")
-
-MYSQL_HOST = os.getenv("MYSQL_HOST", "mysql")
-MYSQL_USER = os.getenv("MYSQL_USER", "root")
-MYSQL_PASS = os.getenv("MYSQL_PASS", "root")
-
-ARCHIVE_ID = os.getenv("ARCHIVE_ID")
-
-ENABLE_FFMPEG = os.getenv("ENABLE_FFMPEG", False)
-AUDIO_FORMAT = os.getenv("AUDIO_FORMAT")
-
-PLAYLIST_SUPPORT = os.getenv("PLAYLIST_SUPPORT", False)
-M3U8_SUPPORT = os.getenv("M3U8_SUPPORT", False)
-ENABLE_ARIA2 = os.getenv("ENABLE_ARIA2", False)
-
-RCLONE_PATH = os.getenv("RCLONE")
-
-# payment settings
-AFD_LINK = os.getenv("AFD_LINK", "https://afdian.net/@BennyThink")
-COFFEE_LINK = os.getenv("COFFEE_LINK", "https://www.buymeacoffee.com/bennythink")
-COFFEE_TOKEN = os.getenv("COFFEE_TOKEN")
-AFD_TOKEN = os.getenv("AFD_TOKEN")
-AFD_USER_ID = os.getenv("AFD_USER_ID")
-PROVIDER_TOKEN = os.getenv("PROVIDER_TOKEN") or "1234"
-FREE_DOWNLOAD = os.getenv("FREE_DOWNLOAD", 10)
-EXPIRE = 24 * 3600
-TOKEN_PRICE = os.getenv("BUY_UNIT", 20) # one USD=20 credits
-TRONGRID_KEY = os.getenv("TRONGRID_KEY", "").split(",")
-# the default mnemonic is for nile testnet
-TRON_MNEMONIC = os.getenv("TRON_MNEMONIC", "cram floor today legend service drill pitch leaf car govern harvest soda")
-TRX_SIGNAL = signal("trx_received")
-
-PREMIUM_USER = int(os.getenv("PREMIUM_USER", "0"))
-
-# For advance users
-# Please do not change, if you don't know what these are.
-TG_PREMIUM_MAX_SIZE = 4000 * 1024 * 1024
-TG_NORMAL_MAX_SIZE = 2000 * 1024 * 1024
-CAPTION_URL_LENGTH_LIMIT = 150
-IPv6 = os.getenv("IPv6", False)
-RATE_LIMIT = os.getenv("RATE_LIMIT", 120)
-# This will set the value for the tmpfile path(download path). If not, will return None and use systemโs default path.
-# Please ensure that the directory exists and you have necessary permissions to write to it.
-TMPFILE_PATH = os.getenv("TMPFILE_PATH")
-
-
-class FileTooBig(Exception):
- pass
diff --git a/ytdlbot/constant.py b/ytdlbot/constant.py
deleted file mode 100644
index d412653d..00000000
--- a/ytdlbot/constant.py
+++ /dev/null
@@ -1,132 +0,0 @@
-#!/usr/local/bin/python3
-# coding: utf-8
-
-# ytdlbot - constant.py
-# 8/16/21 16:59
-#
-
-__author__ = "Benny "
-
-import os
-
-from config import (
- AFD_LINK,
- COFFEE_LINK,
- ENABLE_CELERY,
- FREE_DOWNLOAD,
- REQUIRED_MEMBERSHIP,
- TOKEN_PRICE,
-)
-from database import InfluxDB
-from utils import get_func_queue
-
-
-class BotText:
- start = """
- Welcome to YouTube Download bot. Type /help for more information. Recommend to use EU Bot
- EU🇪🇺: @benny_2ytdlbot
- SG🇸🇬：@benny_ytdlbot
-
- Join https://t.me/+OGRC8tp9-U9mZDZl for updates."""
-
- help = """
-1. For YouTube and any websites supported by yt-dlp, just send the link and we will download and send it to you.
-
-2. For specific links use `/spdl {URL}`. More info at https://github.com/SanujaNS/ytdlbot-telegram#supported-websites
-
-3. If the bot doesn't work, try again or join https://t.me/+OGRC8tp9-U9mZDZl for updates.
-
-4. Wanna deploy it yourself?\nHere's the source code: https://github.com/tgbot-collection/ytdlbot
- """
-
- about = "YouTube Downloader by @BennyThink.\n\nOpen source on GitHub: https://github.com/tgbot-collection/ytdlbot"
-
- buy = f"""
-**Terms:**
-1. You can use this bot to download video for {FREE_DOWNLOAD} times within a 24-hour period.
-
-2. You can buy additional download tokens, valid permanently.
-
-3. Refunds are possible, contact me if you need that @BennyThink
-
-4. Download for paid user will be automatically changed to Local mode to avoid queuing.
-
-5. Paid user can download files larger than 2GB.
-
-**Price:**
-valid permanently
-1. 1 USD == {TOKEN_PRICE} tokens
-2. 7 CNY == {TOKEN_PRICE} tokens
-3. 10 TRX == {TOKEN_PRICE} tokens
-
-**Payment options:**
-Pay any amount you want. For example you can send 20 TRX for {TOKEN_PRICE * 2} tokens.
-1. AFDIAN(AliPay, WeChat Pay and PayPal): {AFD_LINK}
-2. Buy me a coffee: {COFFEE_LINK}
-3. Telegram Bot Payment(Stripe), please click Bot Payment button.
-4. TRON(TRX), please click TRON(TRX) button.
-
-**After payment:**
-1. Afdian: attach order number with /redeem command (e.g., `/redeem 123456`).
-2. Buy Me a Coffee: attach email with /redeem command (e.g., `/redeem 123@x.com`). **Use different email each time.**
-3. Telegram Payment & Tron(TRX): automatically activated within 60s. Check /start to see your balance.
-
-Want to buy more token with Telegram payment? Let's say 100? Here you go! `/buy 123`
- """
-
- private = "This bot is for private use"
-
- membership_require = f"You need to join this group or channel to use this bot\n\nhttps://t.me/{REQUIRED_MEMBERSHIP}"
-
- settings = """
-Please choose the preferred format and video quality for your video. These settings only **apply to YouTube videos**.
-
-High quality is recommended. Medium quality aims to 720P, while low quality is 480P.
-
-If you choose to send the video as a document, it will not be possible to stream it.
-
-Your current settings:
-Video quality: **{0}**
-Sending format: **{1}**
-"""
- custom_text = os.getenv("CUSTOM_TEXT", "")
-
- premium_warning = """
- Your file is too big, do you want me to try to send it as premium user?
- This is an experimental feature so you can only use it once per day.
- Also, the premium user will know who you are and what you are downloading.
- You may be banned if you abuse this feature.
- """
-
- @staticmethod
- def get_receive_link_text() -> str:
- reserved = get_func_queue("reserved")
- if ENABLE_CELERY and reserved:
- text = f"Your tasks was added to the reserved queue {reserved}. Processing...\n\n"
- else:
- text = "Your task was added to active queue.\nProcessing...\n\n"
-
- return text
-
- @staticmethod
- def ping_worker() -> str:
- from tasks import app as celery_app
-
- workers = InfluxDB().extract_dashboard_data()
-        # [{'celery@BennyのMBP': 'abc'}, {'celery@BennyのMBP': 'abc'}]
- response = celery_app.control.broadcast("ping_revision", reply=True)
- revision = {}
- for item in response:
- revision.update(item)
-
- text = ""
- for worker in workers:
- fields = worker["fields"]
- hostname = worker["tags"]["hostname"]
-            status = {True: "✅"}.get(fields["status"], "❌")
- active = fields["active"]
- load = "{},{},{}".format(fields["load1"], fields["load5"], fields["load15"])
- rev = revision.get(hostname, "")
- text += f"{status}{hostname} **{active}** {load} {rev}\n"
-
- return text
diff --git a/ytdlbot/database.py b/ytdlbot/database.py
deleted file mode 100644
index 6e3356d8..00000000
--- a/ytdlbot/database.py
+++ /dev/null
@@ -1,418 +0,0 @@
-#!/usr/local/bin/python3
-# coding: utf-8
-
-# ytdlbot - database.py
-# 12/7/21 16:57
-#
-
-__author__ = "Benny "
-
-import base64
-import contextlib
-import datetime
-import logging
-import os
-import re
-import sqlite3
-import subprocess
-import time
-from io import BytesIO
-
-import fakeredis
-import pymysql
-import redis
-import requests
-from beautifultable import BeautifulTable
-from influxdb import InfluxDBClient
-
-from config import MYSQL_HOST, MYSQL_PASS, MYSQL_USER, REDIS
-
-init_con = sqlite3.connect(":memory:", check_same_thread=False)
-
-
-class FakeMySQL:
- @staticmethod
- def cursor() -> "Cursor":
- return Cursor()
-
- def commit(self):
- pass
-
- def close(self):
- pass
-
- def ping(self, reconnect):
- pass
-
-
-class Cursor:
- def __init__(self):
- self.con = init_con
- self.cur = self.con.cursor()
-
- def execute(self, *args, **kwargs):
- sql = self.sub(args[0])
- new_args = (sql,) + args[1:]
- with contextlib.suppress(sqlite3.OperationalError):
- return self.cur.execute(*new_args, **kwargs)
-
- def fetchall(self):
- return self.cur.fetchall()
-
- def fetchone(self):
- return self.cur.fetchone()
-
- @staticmethod
- def sub(sql):
- sql = re.sub(r"CHARSET.*|charset.*", "", sql, re.IGNORECASE)
- sql = sql.replace("%s", "?")
- return sql
-
-
-class Redis:
- def __init__(self):
- db = 1
- try:
- self.r = redis.StrictRedis(host=REDIS, db=db, decode_responses=True)
- self.r.ping()
- except Exception:
- logging.warning("Redis connection failed, using fake redis instead.")
- self.r = fakeredis.FakeStrictRedis(host=REDIS, db=db, decode_responses=True)
-
- db_banner = "=" * 20 + "DB data" + "=" * 20
- quota_banner = "=" * 20 + "Celery" + "=" * 20
- metrics_banner = "=" * 20 + "Metrics" + "=" * 20
- usage_banner = "=" * 20 + "Usage" + "=" * 20
- vnstat_banner = "=" * 20 + "vnstat" + "=" * 20
- self.final_text = f"""
-{db_banner}
-%s
-
-
-{vnstat_banner}
-%s
-
-
-{quota_banner}
-%s
-
-
-{metrics_banner}
-%s
-
-
-{usage_banner}
-%s
- """
- super().__init__()
-
- def __del__(self):
- self.r.close()
-
- def update_metrics(self, metrics: str):
- logging.info(f"Setting metrics: {metrics}")
- all_ = f"all_{metrics}"
- today = f"today_{metrics}"
- self.r.hincrby("metrics", all_)
- self.r.hincrby("metrics", today)
-
- @staticmethod
- def generate_table(header, all_data: list):
- table = BeautifulTable()
- for data in all_data:
- table.rows.append(data)
- table.columns.header = header
- table.rows.header = [str(i) for i in range(1, len(all_data) + 1)]
- return table
-
- def show_usage(self):
- db = MySQL()
- db.cur.execute("select user_id,payment_amount,old_user,token from payment")
- data = db.cur.fetchall()
- fd = []
- for item in data:
- fd.append([item[0], item[1], item[2], item[3]])
- db_text = self.generate_table(["ID", "pay amount", "old user", "token"], fd)
-
- fd = []
- hash_keys = self.r.hgetall("metrics")
- for key, value in hash_keys.items():
- if re.findall(r"^today|all", key):
- fd.append([key, value])
- fd.sort(key=lambda x: x[0])
- metrics_text = self.generate_table(["name", "count"], fd)
-
- fd = []
- for key, value in hash_keys.items():
- if re.findall(r"\d+", key):
- fd.append([key, value])
- fd.sort(key=lambda x: int(x[-1]), reverse=True)
- usage_text = self.generate_table(["UserID", "count"], fd)
-
- worker_data = InfluxDB.get_worker_data()
- fd = []
- for item in worker_data["data"]:
- fd.append(
- [
- item.get("hostname", 0),
- item.get("status", 0),
- item.get("active", 0),
- item.get("processed", 0),
- item.get("task-failed", 0),
- item.get("task-succeeded", 0),
- ",".join(str(i) for i in item.get("loadavg", [])),
- ]
- )
-
- worker_text = self.generate_table(
- ["worker name", "status", "active", "processed", "failed", "succeeded", "Load Average"], fd
- )
-
- # vnstat
- if os.uname().sysname == "Darwin":
- cmd = "/opt/homebrew/bin/vnstat -i en0".split()
- else:
- cmd = "/usr/bin/vnstat -i eth0".split()
- vnstat_text = subprocess.check_output(cmd).decode("u8")
- return self.final_text % (db_text, vnstat_text, worker_text, metrics_text, usage_text)
-
- def reset_today(self):
- pairs = self.r.hgetall("metrics")
- for k in pairs:
- if k.startswith("today"):
- self.r.hdel("metrics", k)
-
- self.r.delete("premium")
-
- def user_count(self, user_id):
- self.r.hincrby("metrics", user_id)
-
- def generate_file(self):
- text = self.show_usage()
- file = BytesIO()
- file.write(text.encode("u8"))
- date = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
- file.name = f"{date}.txt"
- return file
-
- def add_send_cache(self, unique: str, file_id: str):
- self.r.hset("cache", unique, file_id)
-
- def get_send_cache(self, unique) -> str:
- return self.r.hget("cache", unique)
-
- def del_send_cache(self, unique):
- return self.r.hdel("cache", unique)
-
-
-class MySQL:
- vip_sql = """
- CREATE TABLE if not exists `payment`
- (
- `user_id` bigint NOT NULL,
- `payment_amount` float DEFAULT NULL,
- `payment_id` varchar(256) DEFAULT NULL,
- `old_user` tinyint(1) DEFAULT NULL,
- `token` int DEFAULT NULL,
- UNIQUE KEY `payment_id` (`payment_id`)
- ) CHARSET = utf8mb4
- """
-
- settings_sql = """
- create table if not exists settings
- (
- user_id bigint not null,
- resolution varchar(128) null,
- method varchar(64) null,
- mode varchar(32) default 'Celery' null,
- history varchar(10) default 'OFF' null,
- constraint settings_pk
- primary key (user_id)
- );
- """
-
- channel_sql = """
- create table if not exists channel
- (
- link varchar(256) null,
- title varchar(256) null,
- description text null,
- channel_id varchar(256),
- playlist varchar(256) null,
- latest_video varchar(256) null,
- constraint channel_pk
- primary key (channel_id)
- ) CHARSET=utf8mb4;
- """
-
- subscribe_sql = """
- create table if not exists subscribe
- (
- user_id bigint null,
- channel_id varchar(256) null,
- is_valid boolean default 1 null
- ) CHARSET=utf8mb4;
- """
- history_sql = """
- create table if not exists history
- (
- user_id bigint null,
- link varchar(256) null,
- title varchar(512) null
- ) CHARSET=utf8mb4;
- """
-
- def __init__(self):
- try:
- self.con = pymysql.connect(
- host=MYSQL_HOST, user=MYSQL_USER, passwd=MYSQL_PASS, db="ytdl", charset="utf8mb4"
- )
- self.con.ping(reconnect=True)
- except Exception:
- logging.warning("MySQL connection failed, using fake mysql instead.")
- self.con = FakeMySQL()
-
- self.con.ping(reconnect=True)
- self.cur = self.con.cursor()
- self.init_db()
- super().__init__()
-
- def init_db(self):
- self.cur.execute(self.vip_sql)
- self.cur.execute(self.settings_sql)
- self.cur.execute(self.channel_sql)
- self.cur.execute(self.subscribe_sql)
- self.cur.execute(self.history_sql)
- self.con.commit()
-
- def __del__(self):
- self.con.close()
-
- def get_user_settings(self, user_id: int) -> tuple:
- self.cur.execute("SELECT * FROM settings WHERE user_id = %s", (user_id,))
- data = self.cur.fetchone()
- if data is None:
- return 100, "high", "video", "Celery", "OFF"
- return data
-
- def set_user_settings(self, user_id: int, field: str, value: str):
- cur = self.con.cursor()
- cur.execute("SELECT * FROM settings WHERE user_id = %s", (user_id,))
- data = cur.fetchone()
- if data is None:
- resolution = method = ""
- if field == "resolution":
- method = "video"
- resolution = value
- if field == "method":
- method = value
- resolution = "high"
- cur.execute("INSERT INTO settings VALUES (%s,%s,%s,%s,%s)", (user_id, resolution, method, "Celery", "OFF"))
- else:
- cur.execute(f"UPDATE settings SET {field} =%s WHERE user_id = %s", (value, user_id))
- self.con.commit()
-
- def show_history(self, user_id: int):
- self.cur.execute("SELECT link,title FROM history WHERE user_id = %s", (user_id,))
- data = self.cur.fetchall()
- return "\n".join([f"{i[0]} {i[1]}" for i in data])
-
- def clear_history(self, user_id: int):
- self.cur.execute("DELETE FROM history WHERE user_id = %s", (user_id,))
- self.con.commit()
-
- def add_history(self, user_id: int, link: str, title: str):
- self.cur.execute("INSERT INTO history VALUES (%s,%s,%s)", (user_id, link, title))
- self.con.commit()
-
- def search_history(self, user_id: int, kw: str):
- self.cur.execute("SELECT * FROM history WHERE user_id = %s AND title like %s", (user_id, f"%{kw}%"))
- data = self.cur.fetchall()
- if data:
- return data
- return None
-
-
-class InfluxDB:
- def __init__(self):
- self.client = InfluxDBClient(
- host=os.getenv("INFLUX_HOST"),
- path=os.getenv("INFLUX_PATH"),
- port=443,
- username="nova",
- password=os.getenv("INFLUX_PASS"),
- database="celery",
- ssl=True,
- verify_ssl=True,
- )
- self.data = None
-
- def __del__(self):
- self.client.close()
-
- @staticmethod
- def get_worker_data() -> dict:
- username = os.getenv("FLOWER_USERNAME", "benny")
- password = os.getenv("FLOWER_PASSWORD", "123456abc")
- token = base64.b64encode(f"{username}:{password}".encode()).decode()
- headers = {"Authorization": f"Basic {token}"}
- r = requests.get("https://celery.dmesg.app/workers?json=1", headers=headers)
- if r.status_code != 200:
- return dict(data=[])
- return r.json()
-
- def extract_dashboard_data(self):
- self.data = self.get_worker_data()
- json_body = []
- for worker in self.data["data"]:
- load1, load5, load15 = worker["loadavg"]
- t = {
- "measurement": "tasks",
- "tags": {
- "hostname": worker["hostname"],
- },
- "time": datetime.datetime.utcnow(),
- "fields": {
- "task-received": worker.get("task-received", 0),
- "task-started": worker.get("task-started", 0),
- "task-succeeded": worker.get("task-succeeded", 0),
- "task-failed": worker.get("task-failed", 0),
- "active": worker.get("active", 0),
- "status": worker.get("status", False),
- "load1": load1,
- "load5": load5,
- "load15": load15,
- },
- }
- json_body.append(t)
- return json_body
-
- def __fill_worker_data(self):
- json_body = self.extract_dashboard_data()
- self.client.write_points(json_body)
-
- def __fill_overall_data(self):
- active = sum([i["active"] for i in self.data["data"]])
- json_body = [{"measurement": "active", "time": datetime.datetime.utcnow(), "fields": {"active": active}}]
- self.client.write_points(json_body)
-
- def __fill_redis_metrics(self):
- json_body = [{"measurement": "metrics", "time": datetime.datetime.utcnow(), "fields": {}}]
- r = Redis().r
- hash_keys = r.hgetall("metrics")
- for key, value in hash_keys.items():
- if re.findall(r"^today", key):
- json_body[0]["fields"][key] = int(value)
-
- self.client.write_points(json_body)
-
- def collect_data(self):
- if os.getenv("INFLUX_HOST") is None:
- return
-
- with contextlib.suppress(Exception):
- self.data = self.get_worker_data()
- self.__fill_worker_data()
- self.__fill_overall_data()
- self.__fill_redis_metrics()
- logging.debug("InfluxDB data was collected.")
diff --git a/ytdlbot/downloader.py b/ytdlbot/downloader.py
deleted file mode 100644
index 6844d5d6..00000000
--- a/ytdlbot/downloader.py
+++ /dev/null
@@ -1,297 +0,0 @@
-#!/usr/local/bin/python3
-# coding: utf-8
-
-# ytdlbot - downloader.py
-# 8/14/21 16:53
-#
-
-__author__ = "Benny "
-
-import functools
-import logging
-import os
-import pathlib
-import re
-import subprocess
-import threading
-import time
-import traceback
-from io import StringIO
-from unittest.mock import MagicMock
-
-import ffmpeg
-import ffpb
-import filetype
-import yt_dlp as ytdl
-from pyrogram import types
-from tqdm import tqdm
-
-from config import (
- AUDIO_FORMAT,
- ENABLE_ARIA2,
- ENABLE_FFMPEG,
- PREMIUM_USER,
- TG_NORMAL_MAX_SIZE,
- TG_PREMIUM_MAX_SIZE,
- FileTooBig,
- IPv6,
-)
-from limit import Payment
-from utils import adjust_formats, apply_log_formatter, current_time, sizeof_fmt
-
-apply_log_formatter()
-
-
-def debounce(wait_seconds):
- """
- Thread-safe debounce decorator for functions that take a message with chat.id and msg.id attributes.
- The function will only be called if it hasn't been called with the same chat.id and msg.id in the last 'wait_seconds'.
- """
-
- def decorator(func):
- last_called = {}
- lock = threading.Lock()
-
- @functools.wraps(func)
- def wrapper(*args, **kwargs):
- nonlocal last_called
- now = time.time()
-
- # Assuming the first argument is the message object with chat.id and msg.id
- bot_msg = args[0]
- key = (bot_msg.chat.id, bot_msg.id)
-
- with lock:
- if key not in last_called or now - last_called[key] >= wait_seconds:
- last_called[key] = now
- return func(*args, **kwargs)
-
- return wrapper
-
- return decorator
-
-
-@debounce(5)
-def edit_text(bot_msg: types.Message, text: str):
- bot_msg.edit_text(text)
-
-
-def tqdm_progress(desc, total, finished, speed="", eta=""):
- def more(title, initial):
- if initial:
- return f"{title} {initial}"
- else:
- return ""
-
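-    # render a text progress bar by pointing tqdm at a StringIO buffer instead
-    # of a terminal, then slice the pieces we need out of its output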
- f = StringIO()
- tqdm(
- total=total,
- initial=finished,
- file=f,
- ascii=False,
- unit_scale=True,
- ncols=30,
- bar_format="{l_bar}{bar} |{n_fmt}/{total_fmt} ",
- )
- raw_output = f.getvalue()
- tqdm_output = raw_output.split("|")
- progress = f"`[{tqdm_output[1]}]`"
- detail = tqdm_output[2].replace("[A", "")
- text = f"""
-{desc}
-
-{progress}
-{detail}
-{more("Speed:", speed)}
-{more("ETA:", eta)}
- """
- f.close()
- return text
-
-
-def remove_bash_color(text):
- return re.sub(r"\u001b|\[0;94m|\u001b\[0m|\[0;32m|\[0m|\[0;33m", "", text)
-
-
-def download_hook(d: dict, bot_msg):
- if d["status"] == "downloading":
- downloaded = d.get("downloaded_bytes", 0)
- total = d.get("total_bytes") or d.get("total_bytes_estimate", 0)
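-        # beyond the premium cap the file can never be delivered; between the
-        # normal and premium caps, FileTooBig lets a premium account take over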
- if total > TG_PREMIUM_MAX_SIZE:
- raise Exception(f"There's no way to handle a file of {sizeof_fmt(total)}.")
- if total > TG_NORMAL_MAX_SIZE:
- msg = f"Your download file size {sizeof_fmt(total)} is too large for Telegram."
- if PREMIUM_USER:
- raise FileTooBig(msg)
- else:
- raise Exception(msg)
-
- # percent = remove_bash_color(d.get("_percent_str", "N/A"))
- speed = remove_bash_color(d.get("_speed_str", "N/A"))
- eta = remove_bash_color(d.get("_eta_str", d.get("eta")))
- text = tqdm_progress("Downloading...", total, downloaded, speed, eta)
- # debounce in here
- edit_text(bot_msg, text)
-
-
-def upload_hook(current, total, bot_msg):
- text = tqdm_progress("Uploading...", total, current)
- edit_text(bot_msg, text)
-
-
-def convert_to_mp4(video_paths: list, bot_msg):
- default_type = ["video/x-flv", "video/webm"]
- # all_converted = []
- for path in video_paths:
- # if we can't guess file type, we assume it's video/mp4
- mime = getattr(filetype.guess(path), "mime", "video/mp4")
- if mime in default_type:
- if not can_convert_mp4(path, bot_msg.chat.id):
- logging.warning("Conversion abort for %s", bot_msg.chat.id)
- bot_msg._client.send_message(bot_msg.chat.id, "Can't convert your video. ffmpeg has been disabled.")
- break
- edit_text(bot_msg, f"{current_time()}: Converting {path.name} to mp4. Please wait.")
- new_file_path = path.with_suffix(".mp4")
- logging.info("Detected %s, converting to mp4...", mime)
- run_ffmpeg_progressbar(["ffmpeg", "-y", "-i", path, new_file_path], bot_msg)
- index = video_paths.index(path)
- video_paths[index] = new_file_path
-
-
-class ProgressBar(tqdm):
- b = None
-
- def __init__(self, *args, **kwargs):
- super().__init__(*args, **kwargs)
- self.bot_msg = self.b
-
- def update(self, n=1):
- super().update(n)
- t = tqdm_progress("Converting...", self.total, self.n)
- edit_text(self.bot_msg, t)
-
-
-def run_ffmpeg_progressbar(cmd_list: list, bm):
- cmd_list = cmd_list.copy()[1:]
- ProgressBar.b = bm
- ffpb.main(cmd_list, tqdm=ProgressBar)
-
-
-def can_convert_mp4(video_path, uid):
-    return bool(ENABLE_FFMPEG)
-
-
-def ytdl_download(url: str, tempdir: str, bm, **kwargs) -> list:
- payment = Payment()
- chat_id = bm.chat.id
- hijack = kwargs.get("hijack")
- output = pathlib.Path(tempdir, "%(title).70s.%(ext)s").as_posix()
- ydl_opts = {
- "progress_hooks": [lambda d: download_hook(d, bm)],
- "outtmpl": output,
- "restrictfilenames": False,
- "quiet": True,
- }
- if ENABLE_ARIA2:
- ydl_opts["external_downloader"] = "aria2c"
- ydl_opts["external_downloader_args"] = [
- "--min-split-size=1M",
- "--max-connection-per-server=16",
- "--max-concurrent-downloads=16",
- "--split=16",
- ]
- if url.startswith("https://drive.google.com"):
- # Always use the `source` format for Google Drive URLs.
- formats = ["source"]
- else:
- # Use the default formats for other URLs.
- formats = [
- # webm , vp9 and av01 are not streamable on telegram, so we'll extract only mp4
- "bestvideo[ext=mp4][vcodec!*=av01][vcodec!*=vp09]+bestaudio[ext=m4a]/bestvideo+bestaudio",
- "bestvideo[vcodec^=avc]+bestaudio[acodec^=mp4a]/best[vcodec^=avc]/best",
- None,
- ]
- # This method will alter formats if necessary
- adjust_formats(chat_id, url, formats, hijack)
- address = ["::", "0.0.0.0"] if IPv6 else [None]
- error = None
- video_paths = None
- for format_ in formats:
- ydl_opts["format"] = format_
- for addr in address:
- # IPv6 goes first in each format
- ydl_opts["source_address"] = addr
- try:
- logging.info("Downloading for %s with format %s", url, format_)
- with ytdl.YoutubeDL(ydl_opts) as ydl:
- ydl.download([url])
- video_paths = list(pathlib.Path(tempdir).glob("*"))
- break
- except FileTooBig as e:
- raise e
- except Exception:
- error = traceback.format_exc()
- logging.error("Download failed for %s - %s, try another way", format_, url)
- if error is None:
- break
-
- if not video_paths:
- raise Exception(error)
-
- # convert format if necessary
- settings = payment.get_user_settings(chat_id)
- if settings[2] == "video" or isinstance(settings[2], MagicMock):
- # only convert if send type is video
- convert_to_mp4(video_paths, bm)
- if settings[2] == "audio" or hijack == "bestaudio[ext=m4a]":
- convert_audio_format(video_paths, bm)
- # split_large_video(video_paths)
- return video_paths
-
-
-def convert_audio_format(video_paths: list, bm):
- # 1. file is audio, default format
- # 2. file is video, default format
- # 3. non default format
-
- for path in video_paths:
- streams = ffmpeg.probe(path)["streams"]
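-        # a single stream means the file is already audio-only; two or more
-        # streams mean it is a video whose audio track is stream-copied below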
- if AUDIO_FORMAT is None and len(streams) == 1 and streams[0]["codec_type"] == "audio":
- logging.info("%s is audio, default format, no need to convert", path)
- elif AUDIO_FORMAT is None and len(streams) >= 2:
- logging.info("%s is video, default format, need to extract audio", path)
- audio_stream = {"codec_name": "m4a"}
- for stream in streams:
- if stream["codec_type"] == "audio":
- audio_stream = stream
- break
- ext = audio_stream["codec_name"]
- new_path = path.with_suffix(f".{ext}")
- run_ffmpeg_progressbar(["ffmpeg", "-y", "-i", path, "-vn", "-acodec", "copy", new_path], bm)
- path.unlink()
- index = video_paths.index(path)
- video_paths[index] = new_path
- else:
- logging.info("Not default format, converting %s to %s", path, AUDIO_FORMAT)
- new_path = path.with_suffix(f".{AUDIO_FORMAT}")
- run_ffmpeg_progressbar(["ffmpeg", "-y", "-i", path, new_path], bm)
- path.unlink()
- index = video_paths.index(path)
- video_paths[index] = new_path
-
-
-def split_large_video(video_paths: list):
- original_video = None
- split = False
- for original_video in video_paths:
- size = os.stat(original_video).st_size
- if size > TG_NORMAL_MAX_SIZE:
- split = True
- logging.warning("file is too large %s, splitting...", size)
- subprocess.check_output(f"sh split-video.sh {original_video} {TG_NORMAL_MAX_SIZE * 0.95} ".split())
- os.remove(original_video)
-
- if split and original_video:
- return [i for i in pathlib.Path(original_video).parent.glob("*")]
diff --git a/ytdlbot/flower_tasks.py b/ytdlbot/flower_tasks.py
deleted file mode 100644
index c791421a..00000000
--- a/ytdlbot/flower_tasks.py
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/local/bin/python3
-# coding: utf-8
-
-# ytdlbot - flower_tasks.py
-# 1/2/22 10:17
-#
-
-__author__ = "Benny "
-
-from celery import Celery
-
-from config import BROKER
-
-app = Celery("tasks", broker=BROKER, timezone="Europe/London")
diff --git a/ytdlbot/keep_alive.py b/ytdlbot/keep_alive.py
deleted file mode 100644
index 9cf90068..00000000
--- a/ytdlbot/keep_alive.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python3
-# coding: utf-8
-
-# ytdlbot - keep_alive.py
-# 2024-01-22 17:59
-
-import time
-
-while True:
- print("I'm still alive")
- time.sleep(10)
diff --git a/ytdlbot/limit.py b/ytdlbot/limit.py
deleted file mode 100644
index 819978db..00000000
--- a/ytdlbot/limit.py
+++ /dev/null
@@ -1,260 +0,0 @@
-#!/usr/local/bin/python3
-# coding: utf-8
-
-# ytdlbot - limit.py
-# 8/15/21 18:23
-#
-
-__author__ = "Benny "
-
-import hashlib
-import logging
-import time
-
-import requests
-from tronpy import Tron
-from tronpy.exceptions import TransactionError, ValidationError
-from tronpy.hdwallet import key_from_seed, seed_from_mnemonic
-from tronpy.keys import PrivateKey
-from tronpy.providers import HTTPProvider
-
-from config import (
- AFD_TOKEN,
- AFD_USER_ID,
- COFFEE_TOKEN,
- EXPIRE,
- FREE_DOWNLOAD,
- OWNER,
- TOKEN_PRICE,
- TRON_MNEMONIC,
- TRONGRID_KEY,
- TRX_SIGNAL,
-)
-from database import MySQL, Redis
-from utils import apply_log_formatter, current_time
-
-apply_log_formatter()
-
-
-class BuyMeACoffee:
- def __init__(self):
- self._token = COFFEE_TOKEN
- self._url = "https://developers.buymeacoffee.com/api/v1/supporters"
- self._data = []
-
- def _get_data(self, url):
- d = requests.get(url, headers={"Authorization": f"Bearer {self._token}"}).json()
- self._data.extend(d["data"])
- next_page = d["next_page_url"]
- if next_page:
- self._get_data(next_page)
-
- def _get_bmac_status(self, email: str) -> dict:
- self._get_data(self._url)
- for user in self._data:
- if user["payer_email"] == email or user["support_email"] == email:
- return user
- return {}
-
-    def get_user_payment(self, email: str) -> (float, str):
- order = self._get_bmac_status(email)
- price = float(order.get("support_coffee_price", 0))
- cups = float(order.get("support_coffees", 1))
- amount = price * cups
- return amount, email
-
-
-class Afdian:
- def __init__(self):
- self._token = AFD_TOKEN
- self._user_id = AFD_USER_ID
- self._url = "https://afdian.net/api/open/query-order"
-
- def _generate_signature(self):
- data = {
- "user_id": self._user_id,
- "params": '{"x":0}',
- "ts": int(time.time()),
- }
- sign_text = "{token}params{params}ts{ts}user_id{user_id}".format(
- token=self._token, params=data["params"], ts=data["ts"], user_id=data["user_id"]
- )
-
- md5 = hashlib.md5(sign_text.encode("u8"))
- md5 = md5.hexdigest()
- data["sign"] = md5
-
- return data
-
- def _get_afdian_status(self, trade_no: str) -> dict:
- req_data = self._generate_signature()
- data = requests.post(self._url, json=req_data).json()
- # latest 50
- for order in data["data"]["list"]:
- if order["out_trade_no"] == trade_no:
- return order
-
- return {}
-
-    def get_user_payment(self, trade_no: str) -> (float, str):
- order = self._get_afdian_status(trade_no)
- amount = float(order.get("show_amount", 0))
- # convert to USD
- return amount / 7, trade_no
-
-
-class TronTrx:
- def __init__(self):
- if TRON_MNEMONIC == "cram floor today legend service drill pitch leaf car govern harvest soda":
- logging.warning("Using nile testnet")
- provider = HTTPProvider(endpoint_uri="https://nile.trongrid.io")
- network = "nile"
- else:
- provider = HTTPProvider(api_key=TRONGRID_KEY)
- network = "mainnet"
- self.client = Tron(provider, network=network)
-
- def central_transfer(self, from_, index, amount: int):
- logging.info("Generated key with index %s", index)
- seed = seed_from_mnemonic(TRON_MNEMONIC, passphrase="")
- key = PrivateKey(key_from_seed(seed, account_path=f"m/44'/195'/1'/0/{index}"))
- central = self.central_wallet()
- logging.info("Transfer %s TRX from %s to %s", amount, from_, central)
- try:
- self.client.trx.transfer(from_, central, amount).build().sign(key).broadcast()
- except (TransactionError, ValidationError):
- logging.error("Failed to transfer %s TRX to %s. Lower and try again.", amount, from_)
- if amount > 1_100_000:
- # 1.1 trx transfer fee
- self.client.trx.transfer(from_, central, amount - 1_100_000).build().sign(key).broadcast()
-
- def central_wallet(self):
- wallet = self.client.generate_address_from_mnemonic(TRON_MNEMONIC, account_path="m/44'/195'/0'/0/0")
- return wallet["base58check_address"]
-
- def get_payment_address(self, user_id):
- # payment_id is like tron,0,TN8Mn9KKv3cSrKyrt6Xx5L18nmezbpiW31,index where 0 means unpaid
- db = MySQL()
- con = db.con
- cur = db.cur
- cur.execute("select user_id from payment where payment_id like 'tron,%'")
- data = cur.fetchall()
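-        # each user gets a fresh deposit address; the next HD derivation index
-        # is simply the count of existing tron payment rows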
- index = len(data)
- path = f"m/44'/195'/1'/0/{index}"
- logging.info("Generating address for user %s with path %s", user_id, path)
- addr = self.client.generate_address_from_mnemonic(TRON_MNEMONIC, account_path=path)["base58check_address"]
- # add row in db, unpaid
- cur.execute("insert into payment values (%s,%s,%s,%s,%s)", (user_id, 0, f"tron,0,{addr},{index}", 0, 0))
- con.commit()
- return addr
-
- def check_payment(self):
- db = MySQL()
- con = db.con
- cur = db.cur
-
- cur.execute("select user_id, payment_id from payment where payment_id like 'tron,0,T%'")
- data = cur.fetchall()
- for row in data:
- logging.info("Checking user payment %s", row)
- user_id = row[0]
- addr, index = row[1].split(",")[2:]
- try:
- balance = self.client.get_account_balance(addr)
-            except Exception:
- balance = 0
- if balance:
- logging.info("User %s has %s TRX", user_id, balance)
- # paid, calc token count
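-                # balance is in TRX; dividing by 10 appears to treat 10 TRX as
-                # one USD-equivalent unit before applying TOKEN_PRICE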
- token_count = int(balance / 10 * TOKEN_PRICE)
- cur.execute(
- "update payment set token=%s,payment_id=%s where user_id=%s and payment_id like %s",
- (token_count, f"tron,1,{addr},{index}", user_id, f"tron,%{addr}%"),
- )
- cur.execute("UPDATE settings SET mode='Local' WHERE user_id=%s", (user_id,))
- con.commit()
- self.central_transfer(addr, index, int(balance * 1_000_000))
- logging.debug("Dispatch signal now....")
- TRX_SIGNAL.send("cron", user_id=user_id, text=f"{balance} TRX received, {token_count} tokens added.")
-
-
-class Payment(Redis, MySQL):
- def check_old_user(self, user_id: int) -> tuple:
- self.cur.execute("SELECT * FROM payment WHERE user_id=%s AND old_user=1", (user_id,))
- data = self.cur.fetchone()
- return data
-
- def get_pay_token(self, user_id: int) -> int:
- self.cur.execute("SELECT token, old_user FROM payment WHERE user_id=%s", (user_id,))
- data = self.cur.fetchall() or [(0, False)]
- number = sum([i[0] for i in data if i[0]])
- if number == 0 and data[0][1] != 1:
- # not old user, no token
- logging.warning("User %s has no token, set download mode to Celery", user_id)
- # change download mode to Celery
- self.set_user_settings(user_id, "mode", "Celery")
- return number
-
- def get_free_token(self, user_id: int) -> int:
- if self.r.exists(user_id):
- return int(self.r.get(user_id))
- else:
- # set and return
- self.r.set(user_id, FREE_DOWNLOAD, ex=EXPIRE)
- return FREE_DOWNLOAD
-
- def get_token(self, user_id: int):
- ttl = self.r.ttl(user_id)
- return self.get_free_token(user_id), self.get_pay_token(user_id), current_time(time.time() + ttl)
-
- def use_free_token(self, user_id: int):
- if self.r.exists(user_id):
- self.r.decr(user_id, 1)
- else:
- # first time download
- self.r.set(user_id, 5 - 1, ex=EXPIRE)
-
- def use_pay_token(self, user_id: int):
- # a user may pay multiple times, so we'll need to filter the first payment with valid token
- self.cur.execute("SELECT payment_id FROM payment WHERE user_id=%s AND token>0", (user_id,))
- data = self.cur.fetchone()
- payment_id = data[0]
- logging.info("User %s use pay token with payment_id %s", user_id, payment_id)
- self.cur.execute("UPDATE payment SET token=token-1 WHERE payment_id=%s", (payment_id,))
- self.con.commit()
-
- def use_token(self, user_id: int):
- free = self.get_free_token(user_id)
- if free > 0:
- self.use_free_token(user_id)
- else:
- self.use_pay_token(user_id)
-
- def add_pay_user(self, pay_data: list):
- self.cur.execute("INSERT INTO payment VALUES (%s,%s,%s,%s,%s)", pay_data)
- self.set_user_settings(pay_data[0], "mode", "Local")
- self.con.commit()
-
- def verify_payment(self, user_id: int, unique: str) -> str:
- pay = BuyMeACoffee() if "@" in unique else Afdian()
- self.cur.execute("SELECT * FROM payment WHERE payment_id=%s ", (unique,))
- data = self.cur.fetchone()
- if data:
- # TODO what if a user pay twice with the same email address?
- return (
- f"Failed. Payment has been verified by other users. Please contact @{OWNER} if you have any questions."
- )
-
- amount, pay_id = pay.get_user_payment(unique)
- logging.info("User %s paid %s, identifier is %s", user_id, amount, unique)
- # amount is already in USD
- if amount == 0:
- return "Payment not found. Please check your payment ID or email address"
- self.add_pay_user([user_id, amount, pay_id, 0, amount * TOKEN_PRICE])
- return "Thanks! Your payment has been verified. /start to get your token details"
-
-
-if __name__ == "__main__":
- a = TronTrx()
- # a.central_wallet()
- a.check_payment()
diff --git a/ytdlbot/premium.py b/ytdlbot/premium.py
deleted file mode 100644
index b6365e80..00000000
--- a/ytdlbot/premium.py
+++ /dev/null
@@ -1,106 +0,0 @@
-#!/usr/bin/env python3
-# coding: utf-8
-
-# ytdlbot - premium.py
-# 2023-12-20 17:53
-
-import json
-import logging
-import pathlib
-import tempfile
-
-import yt_dlp
-from pyrogram import Client, filters, types
-
-from config import APP_HASH, APP_ID, PYRO_WORKERS, TOKEN
-from limit import Payment, Redis
-from utils import apply_log_formatter, sizeof_fmt
-
-apply_log_formatter()
-app = Client("premium", APP_ID, APP_HASH, workers=PYRO_WORKERS)
-
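-# bot tokens look like "<numeric_id>:<secret>", so the prefix doubles as the bot's user id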
-BOT_ID = int(TOKEN.split(":")[0])
-
-
-def download_hook(d: dict):
- downloaded = d.get("downloaded_bytes", 0)
- total = d.get("total_bytes") or d.get("total_bytes_estimate", 0)
-    logging.info("Downloaded %s/%s, %.2f%% complete", downloaded, total, downloaded / total * 100 if total else 0)
-
-
-async def upload_hook(current, total):
- logging.info("Uploaded %s/%s, %.2f%% complete", current, total, current / total * 100)
-
-
-@app.on_message(filters.user(BOT_ID) & filters.incoming)
-async def hello(client: Client, message: types.Message):
- text = message.text
- try:
- data = json.loads(text)
- except json.decoder.JSONDecodeError:
- return
- url = data["url"]
- user_id = data["user_id"]
-
- redis = Redis()
- redis.r.hset("premium", user_id, 1)
- tempdir = tempfile.TemporaryDirectory(prefix="ytdl-")
- output = pathlib.Path(tempdir.name, "%(title).70s.%(ext)s").as_posix()
- ydl_opts = {"restrictfilenames": False, "quiet": True, "outtmpl": output, "progress_hooks": [download_hook]}
- formats = [
- # webm , vp9 and av01 are not streamable on telegram, so we'll extract only mp4
- "bestvideo[ext=mp4][vcodec!*=av01][vcodec!*=vp09]+bestaudio[ext=m4a]/bestvideo+bestaudio",
- "bestvideo[vcodec^=avc]+bestaudio[acodec^=mp4a]/best[vcodec^=avc]/best",
- None,
- ]
-
- for f in formats:
- ydl_opts["format"] = f
- logging.info("Downloading BIG FILE for %s with format %s", url, f)
- try:
- with yt_dlp.YoutubeDL(ydl_opts) as ydl:
- ydl.download([url])
- break
- except Exception as e:
- logging.error("Download failed for %s: %s", url, e)
-
- payment = Payment()
- settings = payment.get_user_settings(user_id)
- video_path = next(pathlib.Path(tempdir.name).glob("*"))
- logging.info("Final filesize is %s", sizeof_fmt(video_path.stat().st_size))
- caption = "Powered by @benny_ytdlbot "
- if settings[2] == "audio":
- logging.info("Sending as audio")
- await client.send_audio(
- BOT_ID,
- video_path.as_posix(),
- caption=caption,
- file_name=f"{user_id}.mp3",
- progress=upload_hook,
- )
- elif settings[2] == "document":
- logging.info("Sending as document")
- await client.send_document(
- BOT_ID,
- video_path.as_posix(),
- caption=caption,
- file_name=f"{user_id}.mp4",
- progress=upload_hook,
- )
- else:
- logging.info("Sending as video")
- await client.send_video(
- BOT_ID,
- video_path.as_posix(),
- caption=caption,
- supports_streaming=True,
- file_name=f"{user_id}.mp4",
- progress=upload_hook,
- )
-
- tempdir.cleanup()
- logging.info("Finished sending %s", url)
-
-
-if __name__ == "__main__":
- app.run()
diff --git a/ytdlbot/sp_downloader.py b/ytdlbot/sp_downloader.py
deleted file mode 100644
index 18355549..00000000
--- a/ytdlbot/sp_downloader.py
+++ /dev/null
@@ -1,244 +0,0 @@
-#!/usr/local/bin/python3
-# coding: utf-8
-
-# ytdlbot - sp_downloader.py
-# 3/16/24 16:32
-#
-
-__author__ = "SanujaNS "
-
-import functools
-import os
-import json
-import logging
-import pathlib
-import re
-import traceback
-from urllib.parse import urlparse, parse_qs
-
-from pyrogram import types
-from tqdm import tqdm
-import filetype
-import requests
-from bs4 import BeautifulSoup
-import yt_dlp as ytdl
-
-from config import (
- PREMIUM_USER,
- TG_NORMAL_MAX_SIZE,
- TG_PREMIUM_MAX_SIZE,
- FileTooBig,
- IPv6,
-)
-from downloader import (
- edit_text,
- remove_bash_color,
- ProgressBar,
- tqdm_progress,
- download_hook,
- upload_hook,
-)
-from limit import Payment
-from utils import sizeof_fmt, parse_cookie_file, extract_code_from_instagram_url
-
-
-def sp_dl(url: str, tempdir: str, bm, **kwargs) -> list:
- """Specific link downloader"""
- domain = urlparse(url).hostname
- if "youtube.com" in domain or "youtu.be" in domain:
-        raise ValueError("ERROR: This bot handles YouTube natively; just send the YouTube link directly.")
- elif "www.instagram.com" in domain:
- return instagram(url, tempdir, bm, **kwargs)
- elif "pixeldrain.com" in domain:
- return pixeldrain(url, tempdir, bm, **kwargs)
- elif "krakenfiles.com" in domain:
- return krakenfiles(url, tempdir, bm, **kwargs)
- elif any(
- x in domain
- for x in [
- "terabox.com",
- "nephobox.com",
- "4funbox.com",
- "mirrobox.com",
- "momerybox.com",
- "teraboxapp.com",
- "1024tera.com",
- "terabox.app",
- "gibibox.com",
- "goaibox.com",
- ]
- ):
- return terabox(url, tempdir, bm, **kwargs)
- else:
- raise ValueError(f"Invalid URL: No specific link function found for {url}")
-
- return []
-
-
-def sp_ytdl_download(url: str, tempdir: str, bm, filename=None, **kwargs) -> list:
- payment = Payment()
- chat_id = bm.chat.id
- if filename:
- output = pathlib.Path(tempdir, filename).as_posix()
- else:
- output = pathlib.Path(tempdir, "%(title).70s.%(ext)s").as_posix()
- ydl_opts = {
- "progress_hooks": [lambda d: download_hook(d, bm)],
- "outtmpl": output,
- "restrictfilenames": False,
- "quiet": True,
- "format": None,
- }
-
- address = ["::", "0.0.0.0"] if IPv6 else [None]
- error = None
- video_paths = None
- for addr in address:
- ydl_opts["source_address"] = addr
- try:
- logging.info("Downloading %s", url)
- with ytdl.YoutubeDL(ydl_opts) as ydl:
- ydl.download([url])
- video_paths = list(pathlib.Path(tempdir).glob("*"))
- break
- except FileTooBig as e:
- raise e
- except Exception:
- error = traceback.format_exc()
-            logging.error("Download failed for %s", url)
-
- if not video_paths:
- raise Exception(error)
-
- return video_paths
-
-
-def instagram(url: str, tempdir: str, bm, **kwargs):
- resp = requests.get(f"http://192.168.6.1:15000/?url={url}").json()
- code = extract_code_from_instagram_url(url)
- counter = 1
- video_paths = []
- if url_results := resp.get("data"):
- for link in url_results:
- req = requests.get(link, stream=True)
-            length = int(req.headers.get("content-length", 0))
- content = req.content
- ext = filetype.guess_extension(content)
- filename = f"{code}_{counter}.{ext}"
- save_path = pathlib.Path(tempdir, filename)
- chunk_size = 4096
- downloaded = 0
- for chunk in req.iter_content(chunk_size):
- text = tqdm_progress(f"Downloading: {filename}", length, downloaded)
- edit_text(bm, text)
- with open(save_path, "ab") as fp:
- fp.write(chunk)
- downloaded += len(chunk)
- video_paths.append(save_path)
- counter += 1
-
- return video_paths
-
-
-def pixeldrain(url: str, tempdir: str, bm, **kwargs):
- user_page_url_regex = r"https://pixeldrain.com/u/(\w+)"
- match = re.match(user_page_url_regex, url)
- if match:
- url = "https://pixeldrain.com/api/file/{}?download".format(match.group(1))
- return sp_ytdl_download(url, tempdir, bm, **kwargs)
- else:
- return url
-
-
-def krakenfiles(url: str, tempdir: str, bm, **kwargs):
- resp = requests.get(url)
- html = resp.content
- soup = BeautifulSoup(html, "html.parser")
- link_parts = []
- token_parts = []
- for form_tag in soup.find_all("form"):
- action = form_tag.get("action")
- if action and "krakenfiles.com" in action:
- link_parts.append(action)
- input_tag = form_tag.find("input", {"name": "token"})
- if input_tag:
- value = input_tag.get("value")
- token_parts.append(value)
- for link_part, token_part in zip(link_parts, token_parts):
- link = f"https:{link_part}"
-        data = {"token": token_part}
- response = requests.post(link, data=data)
- json_data = response.json()
- url = json_data["url"]
- return sp_ytdl_download(url, tempdir, bm, **kwargs)
-
-
-def find_between(s, start, end):
- return (s.split(start))[1].split(end)[0]
-
-def terabox(url: str, tempdir: str, bm, **kwargs):
- cookies_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), "terabox.txt")
- cookies = parse_cookie_file(cookies_file)
-
- headers = {
- "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
- "Accept-Encoding": "gzip, deflate, br",
- "Accept-Language": "en-US,en;q=0.9,hi;q=0.8",
- "Connection": "keep-alive",
- "DNT": "1",
- "Host": "www.terabox.app",
- "Sec-Fetch-Dest": "document",
- "Sec-Fetch-Mode": "navigate",
- "Sec-Fetch-Site": "none",
- "Sec-Fetch-User": "?1",
- "Upgrade-Insecure-Requests": "1",
- "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36",
- "sec-ch-ua": "'Not A(Brand';v='99', 'Google Chrome';v='121', 'Chromium';v='121'",
- "sec-ch-ua-mobile": "?0",
- "sec-ch-ua-platform": "'Windows'",
- }
-
- session = requests.Session()
- session.headers.update(headers)
- session.cookies.update(cookies)
- temp_req = session.get(url)
- request_url = urlparse(temp_req.url)
- surl = parse_qs(request_url.query).get("surl")
- req = session.get(temp_req.url)
- respo = req.text
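-    # the page embeds these values URL-encoded: fn%28%22...%22%29 decodes to fn("...")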
- js_token = find_between(respo, "fn%28%22", "%22%29")
- logid = find_between(respo, "dp-logid=", "&")
- bdstoken = find_between(respo, 'bdstoken":"', '"')
-
- params = {
- "app_id": "250528",
- "web": "1",
- "channel": "dubox",
- "clienttype": "0",
- "jsToken": js_token,
- "dp-logid": logid,
- "page": "1",
- "num": "20",
- "by": "name",
- "order": "asc",
- "site_referer": temp_req.url,
- "shorturl": surl,
- "root": "1,",
- }
-
- req2 = session.get("https://www.terabox.app/share/list", params=params)
- response_data2 = req2.json()
- file_name = response_data2["list"][0]["server_filename"]
- sizebytes = int(response_data2["list"][0]["size"])
- if sizebytes > 48 * 1024 * 1024:
- direct_link = response_data2["list"][0]["dlink"]
- url = direct_link.replace("d.terabox.app", "d3.terabox.app")
- else:
- direct_link_response = session.head(response_data2["list"][0]["dlink"])
- direct_link_response_headers = direct_link_response.headers
- direct_link = direct_link_response_headers["Location"]
- url = direct_link
-
- return sp_ytdl_download(url, tempdir, bm, filename=file_name, **kwargs)
\ No newline at end of file
diff --git a/ytdlbot/split-video.sh b/ytdlbot/split-video.sh
deleted file mode 100755
index 5265ea19..00000000
--- a/ytdlbot/split-video.sh
+++ /dev/null
@@ -1,60 +0,0 @@
-#!/bin/bash
-# Short script to split videos by filesize using ffmpeg by LukeLR
-
-if [ $# -ne 2 ]; then
- echo 'Illegal number of parameters. Needs 2 parameters:'
- echo 'Usage:'
-    echo './split-video.sh FILE SIZELIMIT "FFMPEG_ARGS"'
- echo
- echo 'Parameters:'
- echo ' - FILE: Name of the video file to split'
- echo ' - SIZELIMIT: Maximum file size of each part (in bytes)'
- echo ' - FFMPEG_ARGS: Additional arguments to pass to each ffmpeg-call'
- echo ' (video format and quality options etc.)'
- exit 1
-fi
-
-FILE="$1"
-SIZELIMIT="$2"
-FFMPEG_ARGS="$3"
-
-# Duration of the source video
-DURATION=$(ffprobe -i "$FILE" -show_entries format=duration -v quiet -of default=noprint_wrappers=1:nokey=1|cut -d. -f1)
-
-# Duration that has been encoded so far
-CUR_DURATION=0
-
-# Filename of the source video (without extension)
-BASENAME="${FILE%.*}"
-
-# Extension for the video parts
-#EXTENSION="${FILE##*.}"
-EXTENSION="mp4"
-
-# Number of the current video part
-i=1
-
-# Filename of the next video part
-NEXTFILENAME="$BASENAME-$i.$EXTENSION"
-
-echo "Duration of source video: $DURATION"
-
-# Until the duration of all partial videos has reached the duration of the source video
-while [[ $CUR_DURATION -lt $DURATION ]]; do
- # Encode next part
- echo ffmpeg -i "$FILE" -ss "$CUR_DURATION" -fs "$SIZELIMIT" $FFMPEG_ARGS "$NEXTFILENAME"
- ffmpeg -ss "$CUR_DURATION" -i "$FILE" -fs "$SIZELIMIT" $FFMPEG_ARGS "$NEXTFILENAME"
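-    # ffmpeg's -fs flag stops writing once the output hits SIZELIMIT bytes, so each
-    # part ends just under the limit and the next pass resumes from CUR_DURATION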
-
- # Duration of the new part
- NEW_DURATION=$(ffprobe -i "$NEXTFILENAME" -show_entries format=duration -v quiet -of default=noprint_wrappers=1:nokey=1|cut -d. -f1)
-
- # Total duration encoded so far
- CUR_DURATION=$((CUR_DURATION + NEW_DURATION))
-
- i=$((i + 1))
-
- echo "Duration of $NEXTFILENAME: $NEW_DURATION"
- echo "Part No. $i starts at $CUR_DURATION"
-
- NEXTFILENAME="$BASENAME-$i.$EXTENSION"
-done
\ No newline at end of file
diff --git a/ytdlbot/tasks.py b/ytdlbot/tasks.py
deleted file mode 100644
index 00a1c379..00000000
--- a/ytdlbot/tasks.py
+++ /dev/null
@@ -1,708 +0,0 @@
-#!/usr/local/bin/python3
-# coding: utf-8
-
-# ytdlbot - tasks.py
-# 12/29/21 14:57
-#
-
-__author__ = "Benny "
-
-import asyncio
-import logging
-import os
-import pathlib
-import re
-import shutil
-import subprocess
-import tempfile
-import threading
-import time
-import traceback
-import typing
-from typing import Any
-from urllib.parse import quote_plus
-
-import filetype
-import psutil
-import pyrogram.errors
-import requests
-from apscheduler.schedulers.background import BackgroundScheduler
-from celery import Celery
-from celery.worker.control import Panel
-from pyrogram import Client, enums, idle, types
-
-from channel import Channel
-from client_init import create_app
-from config import (
- ARCHIVE_ID,
- BROKER,
- ENABLE_CELERY,
- ENABLE_VIP,
- OWNER,
- RATE_LIMIT,
- RCLONE_PATH,
- TMPFILE_PATH,
- WORKERS,
- FileTooBig,
- CAPTION_URL_LENGTH_LIMIT,
-)
-from constant import BotText
-from database import Redis, MySQL
-from downloader import edit_text, tqdm_progress, upload_hook, ytdl_download
-from sp_downloader import sp_dl
-from limit import Payment
-from utils import (
- apply_log_formatter,
- auto_restart,
- customize_logger,
- get_metadata,
- get_revision,
- sizeof_fmt,
- shorten_url,
- extract_filename,
-)
-
-customize_logger(["pyrogram.client", "pyrogram.session.session", "pyrogram.connection.connection"])
-apply_log_formatter()
-bot_text = BotText()
-logging.getLogger("apscheduler.executors.default").propagate = False
-
-app = Celery("tasks", broker=BROKER)
-bot = create_app("tasks")
-channel = Channel()
-
-
-def retrieve_message(chat_id: int, message_id: int) -> types.Message | Any:
- # this should only be called by celery tasks
- try:
- return bot.get_messages(chat_id, message_id)
- except ConnectionError as e:
- logging.critical("BOT IS NOT STARTED YET: %s", e)
- bot.start()
- return bot.get_messages(chat_id, message_id)
-
-
-def premium_button(user_id):
- redis = Redis()
- payment = Payment()
- used = redis.r.hget("premium", user_id)
- ban = redis.r.hget("ban", user_id)
- paid_token = payment.get_pay_token(user_id)
-
- if ban:
- return None
- # vip mode: vip user can use once per day, normal user can't use
- # non vip mode: everyone can use once per day
- if used or (ENABLE_VIP and paid_token == 0):
- return None
-
- markup = types.InlineKeyboardMarkup(
- [
- [
- types.InlineKeyboardButton("Yes", callback_data="premium-yes"),
- types.InlineKeyboardButton("No", callback_data="premium-no"),
- ]
- ]
- )
- return markup
-
-
-@app.task(rate_limit=f"{RATE_LIMIT}/m")
-def ytdl_download_task(chat_id: int, message_id: int, url: str):
- logging.info("YouTube celery tasks started for %s", url)
- bot_msg = retrieve_message(chat_id, message_id)
- try:
- ytdl_normal_download(bot, bot_msg, url)
- except FileTooBig as e:
- # if you can go there, that means you have premium users set up
- logging.warning("Seeking for help from premium user...")
- markup = premium_button(chat_id)
- if markup:
- bot_msg.edit_text(f"{e}\n\n{bot_text.premium_warning}", reply_markup=markup)
- else:
- bot_msg.edit_text(f"{e}\nBig file download is not available now. Please /buy or try again later ")
- except Exception:
- error_msg = traceback.format_exc().split("yt_dlp.utils.DownloadError: ERROR: ")
- if len(error_msg) > 1:
-            bot_msg.edit_text(f"Download failed!❌\n\n`{error_msg[-1]}`", disable_web_page_preview=True)
- else:
-            bot_msg.edit_text(f"Download failed!❌\n\n`{traceback.format_exc()[-2000:]}`", disable_web_page_preview=True)
- logging.info("YouTube celery tasks ended.")
-
-
-@app.task()
-def audio_task(chat_id: int, message_id: int):
- logging.info("Audio celery tasks started for %s-%s", chat_id, message_id)
- bot_msg = retrieve_message(chat_id, message_id)
- normal_audio(bot, bot_msg)
- logging.info("Audio celery tasks ended.")
-
-
-@app.task()
-def direct_download_task(chat_id: int, message_id: int, url: str):
- logging.info("Direct download celery tasks started for %s", url)
- bot_msg = retrieve_message(chat_id, message_id)
- direct_normal_download(bot, bot_msg, url)
- logging.info("Direct download celery tasks ended.")
-
-
-@app.task()
-def leech_download_task(chat_id: int, message_id: int, url: str):
- logging.info("Leech download celery tasks started for %s", url)
- bot_msg = retrieve_message(chat_id, message_id)
- leech_normal_download(bot, bot_msg, url)
- logging.info("Leech download celery tasks ended.")
-
-
-def get_unique_clink(original_url: str, user_id: int):
- payment = Payment()
- settings = payment.get_user_settings(user_id)
- clink = channel.extract_canonical_link(original_url)
- try:
- # different user may have different resolution settings
- unique = "{}?p={}{}".format(clink, *settings[1:])
- except IndexError:
- unique = clink
- return unique
-
-
-def forward_video(client, bot_msg: types.Message | Any, url: str, cached_fid: str):
- res_msg = upload_processor(client, bot_msg, url, cached_fid)
- obj = res_msg.document or res_msg.video or res_msg.audio or res_msg.animation or res_msg.photo
-
- caption, _ = gen_cap(bot_msg, url, obj)
- res_msg.edit_text(caption, reply_markup=gen_video_markup())
-    bot_msg.edit_text("Download success!✅")
- return True
-
-
-def ytdl_download_entrance(client: Client, bot_msg: types.Message, url: str, mode=None):
- # in Local node and forward mode, we pass client from main
- # in celery mode, we need to use our own client called bot
- payment = Payment()
- redis = Redis()
- chat_id = bot_msg.chat.id
- unique = get_unique_clink(url, chat_id)
- cached_fid = redis.get_send_cache(unique)
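-    # a cache hit means this link (with these settings) was uploaded before, so we
-    # resend the stored Telegram file_id instead of downloading again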
-
- try:
- if cached_fid:
- forward_video(client, bot_msg, url, cached_fid)
- redis.update_metrics("cache_hit")
- return
- redis.update_metrics("cache_miss")
- mode = mode or payment.get_user_settings(chat_id)[3]
- if ENABLE_CELERY and mode in [None, "Celery"]:
- # in celery mode, producer has lost control of this task.
- ytdl_download_task.delay(chat_id, bot_msg.id, url)
- else:
- ytdl_normal_download(client, bot_msg, url)
- except FileTooBig as e:
- logging.warning("Seeking for help from premium user...")
- # this is only for normal node. Celery node will need to do it in celery tasks
- markup = premium_button(chat_id)
- if markup:
- bot_msg.edit_text(f"{e}\n\n{bot_text.premium_warning}", reply_markup=markup)
- else:
- bot_msg.edit_text(f"{e}\nBig file download is not available now. Please /buy or try again later ")
- except Exception as e:
- logging.error("Failed to download %s, error: %s", url, e)
- error_msg = traceback.format_exc().split("yt_dlp.utils.DownloadError: ERROR: ")
- if len(error_msg) > 1:
-            bot_msg.edit_text(f"Download failed!❌\n\n`{error_msg[-1]}`", disable_web_page_preview=True)
- else:
-            bot_msg.edit_text(f"Download failed!❌\n\n`{traceback.format_exc()[-2000:]}`", disable_web_page_preview=True)
-
-
-def direct_download_entrance(client: Client, bot_msg: typing.Union[types.Message, typing.Coroutine], url: str, new_name):
- if ENABLE_CELERY:
- direct_normal_download(client, bot_msg, url, new_name)
- # direct_download_task.delay(bot_msg.chat.id, bot_msg.id, url)
- else:
- direct_normal_download(client, bot_msg, url, new_name)
-
-
-def leech_download_entrance(client: Client, bot_msg: typing.Union[types.Message, typing.Coroutine], url: str):
- if ENABLE_CELERY:
- leech_normal_download(client, bot_msg, url)
- # leech_normal_download.delay(bot_msg.chat.id, bot_msg.id, url)
- else:
- leech_normal_download(client, bot_msg, url)
-
-
-def spdl_download_entrance(client: Client, bot_msg: types.Message, url: str, mode=None):
- payment = Payment()
- redis = Redis()
- chat_id = bot_msg.chat.id
- unique = get_unique_clink(url, chat_id)
- cached_fid = redis.get_send_cache(unique)
-
- try:
- if cached_fid:
- forward_video(client, bot_msg, url, cached_fid)
- redis.update_metrics("cache_hit")
- return
- redis.update_metrics("cache_miss")
- mode = mode or payment.get_user_settings(chat_id)[3]
- spdl_normal_download(client, bot_msg, url)
- except FileTooBig as e:
- logging.warning("Seeking for help from premium user...")
- # this is only for normal node. Celery node will need to do it in celery tasks
- markup = premium_button(chat_id)
- if markup:
- bot_msg.edit_text(f"{e}\n\n{bot_text.premium_warning}", reply_markup=markup)
- else:
- bot_msg.edit_text(f"{e}\nBig file download is not available now. Please /buy or try again later ")
- except ValueError as e:
- logging.error("Invalid URL provided: %s", e)
-        bot_msg.edit_text(f"Download failed!❌\n\n{e}", disable_web_page_preview=True)
- except Exception as e:
- logging.error("Failed to download %s, error: %s", url, e)
- error_msg = "Sorry, Something went wrong."
-        bot_msg.edit_text(f"Download failed!❌\n\n`{error_msg}`", disable_web_page_preview=True)
-
-
-def audio_entrance(client: Client, bot_msg: types.Message):
- if ENABLE_CELERY:
- audio_task.delay(bot_msg.chat.id, bot_msg.id)
- else:
- normal_audio(client, bot_msg)
-
-
-def direct_normal_download(client: Client, bot_msg: typing.Union[types.Message, typing.Coroutine], url: str, new_name):
- chat_id = bot_msg.chat.id
- headers = {
- "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36"
- }
- length = 0
-
- req = None
- try:
- req = requests.get(url, headers=headers, stream=True)
- length = int(req.headers.get("content-length"))
- except Exception as e:
-        bot_msg.edit_text(f"Download failed!❌\n\n```{e}```", disable_web_page_preview=True)
- return
-
- if new_name:
- filename = new_name
- else:
- filename = extract_filename(req)
-
- with tempfile.TemporaryDirectory(prefix="ytdl-", dir=TMPFILE_PATH) as f:
- filepath = f"{f}/{filename}"
- # consume the req.content
- downloaded = 0
- for chunk in req.iter_content(1024 * 1024):
- text = tqdm_progress("Downloading...", length, downloaded)
- edit_text(bot_msg, text)
- with open(filepath, "ab") as fp:
- fp.write(chunk)
- downloaded += len(chunk)
- logging.info("Downloaded file %s", filename)
- st_size = os.stat(filepath).st_size
- ext = filetype.guess_extension(filepath)
- # Rename file if it doesn't have extension
- if ext is not None and not filepath.endswith(ext):
- new_filename = f"{filepath}.{ext}"
- os.rename(filepath, new_filename)
- filepath = new_filename
-
- client.send_chat_action(chat_id, enums.ChatAction.UPLOAD_DOCUMENT)
- client.send_document(
- bot_msg.chat.id,
- filepath,
- caption=f"filesize: {sizeof_fmt(st_size)}",
- progress=upload_hook,
- progress_args=(bot_msg,),
- )
-        bot_msg.edit_text("Download success!✅")
-
-
-def leech_normal_download(client: Client, bot_msg: typing.Union[types.Message, typing.Coroutine], url: str):
- chat_id = bot_msg.chat.id
- temp_dir = tempfile.TemporaryDirectory(prefix="leech_dl-", dir=TMPFILE_PATH)
- tempdir = temp_dir.name
- UA = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36"
- response = None
- video_paths = None
- # Download process using aria2c
- try:
-        bot_msg.edit_text("Download starting...", disable_web_page_preview=True)
- # Command to download the link using aria2c
- command = [
- "aria2c",
- "-U",
- UA,
- "--max-tries=5",
- "--console-log-level=warn",
- "-d",
- tempdir,
- url,
- ]
- # Run the command using subprocess.Popen
- process = subprocess.Popen(command, bufsize=0, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- line = ""
- max_iterations = 100 # Set a reasonable maximum number of iterations
- iteration = 0
-
- while process.poll() is None and iteration < max_iterations:
- line = process.stdout.readline().decode("utf-8")
- if line.startswith("[#"):
- line = line.strip()
- bot_msg.edit_text(f"Downloading... \n\n`{line}`", disable_web_page_preview=True)
- break
- iteration += 1
-
- if iteration >= max_iterations:
- bot_msg.edit_text("Something went wrong. Please try again.", disable_web_page_preview=True)
- except Exception as e:
-        bot_msg.edit_text(f"Download failed!❌\n\n`{e}`", disable_web_page_preview=True)
- return
- # Get filename and extension correctly after download
- filepath = list(pathlib.Path(tempdir).glob("*"))
- file_path_obj = filepath[0]
- path_obj = pathlib.Path(file_path_obj)
- filename = path_obj.name
- logging.info("Downloaded file %s", filename)
-    bot_msg.edit_text("Download complete", disable_web_page_preview=True)
- ext = filetype.guess_extension(file_path_obj)
- # Rename file if it doesn't have extension
- if ext is not None and not filename.endswith(ext):
- new_filename = f"{tempdir}/{filename}.{ext}"
- os.rename(file_path_obj, new_filename)
- # Get file path of the downloaded file to upload
- video_paths = list(pathlib.Path(tempdir).glob("*"))
- client.send_chat_action(chat_id, enums.ChatAction.UPLOAD_DOCUMENT)
- upload_processor(client, bot_msg, url, video_paths)
-    bot_msg.edit_text("Download success!✅")
-
-
-def normal_audio(client: Client, bot_msg: typing.Union[types.Message, typing.Coroutine]):
- chat_id = bot_msg.chat.id
- # fn = getattr(bot_msg.video, "file_name", None) or getattr(bot_msg.document, "file_name", None)
- status_msg: typing.Union[types.Message, typing.Coroutine] = bot_msg.reply_text(
- "Converting to audio...please wait patiently", quote=True
- )
- orig_url: str = re.findall(r"https?://.*", bot_msg.caption)[0]
- with tempfile.TemporaryDirectory(prefix="ytdl-", dir=TMPFILE_PATH) as tmp:
- client.send_chat_action(chat_id, enums.ChatAction.RECORD_AUDIO)
- # just try to download the audio using yt-dlp
- filepath = ytdl_download(orig_url, tmp, status_msg, hijack="bestaudio[ext=m4a]")
- status_msg.edit_text("Sending audio now...")
- client.send_chat_action(chat_id, enums.ChatAction.UPLOAD_AUDIO)
- for f in filepath:
- client.send_audio(chat_id, f)
-        status_msg.edit_text("✅ Conversion complete.")
- Redis().update_metrics("audio_success")
-
-
-def ytdl_normal_download(client: Client, bot_msg: types.Message | typing.Any, url: str):
- """
- This function is called by celery task or directly by bot
- :param client: bot client, either from main or bot(celery)
- :param bot_msg: bot message
- :param url: url to download
- """
- chat_id = bot_msg.chat.id
- temp_dir = tempfile.TemporaryDirectory(prefix="ytdl-", dir=TMPFILE_PATH)
-
- video_paths = ytdl_download(url, temp_dir.name, bot_msg)
- logging.info("Download complete.")
- client.send_chat_action(chat_id, enums.ChatAction.UPLOAD_DOCUMENT)
- bot_msg.edit_text("Download complete. Sending now...")
- data = MySQL().get_user_settings(chat_id)
- if data[4] == "ON":
- logging.info("Adding to history...")
- MySQL().add_history(chat_id, url, pathlib.Path(video_paths[0]).name)
- try:
- upload_processor(client, bot_msg, url, video_paths)
- except pyrogram.errors.Flood as e:
- logging.critical("FloodWait from Telegram: %s", e)
- client.send_message(
- chat_id,
- f"I'm being rate limited by Telegram. Your video will come after {e} seconds. Please wait patiently.",
- )
- client.send_message(OWNER, f"CRITICAL INFO: {e}")
- time.sleep(e.value)
- upload_processor(client, bot_msg, url, video_paths)
-
-    bot_msg.edit_text("Download success!✅")
-
- # setup rclone environment var to back up the downloaded file
- if RCLONE_PATH:
- for item in os.listdir(temp_dir.name):
- logging.info("Copying %s to %s", item, RCLONE_PATH)
- shutil.copy(os.path.join(temp_dir.name, item), RCLONE_PATH)
- temp_dir.cleanup()
-
-
-def spdl_normal_download(client: Client, bot_msg: types.Message | typing.Any, url: str):
- chat_id = bot_msg.chat.id
- temp_dir = tempfile.TemporaryDirectory(prefix="spdl-", dir=TMPFILE_PATH)
-
- video_paths = sp_dl(url, temp_dir.name, bot_msg)
- logging.info("Download complete.")
- client.send_chat_action(chat_id, enums.ChatAction.UPLOAD_DOCUMENT)
- bot_msg.edit_text("Download complete. Sending now...")
- data = MySQL().get_user_settings(chat_id)
- if data[4] == "ON":
- logging.info("Adding to history...")
- MySQL().add_history(chat_id, url, pathlib.Path(video_paths[0]).name)
- try:
- upload_processor(client, bot_msg, url, video_paths)
- except pyrogram.errors.Flood as e:
- logging.critical("FloodWait from Telegram: %s", e)
- client.send_message(
- chat_id,
- f"I'm being rate limited by Telegram. Your video will come after {e} seconds. Please wait patiently.",
- )
- client.send_message(OWNER, f"CRITICAL INFO: {e}")
- time.sleep(e.value)
- upload_processor(client, bot_msg, url, video_paths)
-
-    bot_msg.edit_text("Download success!✅")
-
- if RCLONE_PATH:
- for item in os.listdir(temp_dir.name):
- logging.info("Copying %s to %s", item, RCLONE_PATH)
- shutil.copy(os.path.join(temp_dir.name, item), RCLONE_PATH)
- temp_dir.cleanup()
-
-
-def generate_input_media(file_paths: list, cap: str) -> list:
- input_media = []
- for path in file_paths:
- mime = filetype.guess_mime(path)
- if "video" in mime:
- input_media.append(pyrogram.types.InputMediaVideo(media=path))
- elif "image" in mime:
- input_media.append(pyrogram.types.InputMediaPhoto(media=path))
- elif "audio" in mime:
- input_media.append(pyrogram.types.InputMediaAudio(media=path))
- else:
- input_media.append(pyrogram.types.InputMediaDocument(media=path))
-
- input_media[0].caption = cap
- return input_media
-
-
-def upload_processor(client: Client, bot_msg: types.Message, url: str, vp_or_fid: str | list):
- redis = Redis()
- # raise pyrogram.errors.exceptions.FloodWait(13)
- # if is str, it's a file id; else it's a list of paths
- payment = Payment()
- chat_id = bot_msg.chat.id
- markup = gen_video_markup()
- if isinstance(vp_or_fid, list) and len(vp_or_fid) > 1:
- # just generate the first for simplicity, send as media group(2-20)
- cap, meta = gen_cap(bot_msg, url, vp_or_fid[0])
- res_msg: list["types.Message"] | Any = client.send_media_group(chat_id, generate_input_media(vp_or_fid, cap))
- # TODO no cache for now
- return res_msg[0]
- elif isinstance(vp_or_fid, list) and len(vp_or_fid) == 1:
- # normal download, just contains one file in video_paths
- vp_or_fid = vp_or_fid[0]
- cap, meta = gen_cap(bot_msg, url, vp_or_fid)
- else:
- # just a file id as string
- cap, meta = gen_cap(bot_msg, url, vp_or_fid)
-
- settings = payment.get_user_settings(chat_id)
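-    # fresh files are uploaded to the archive channel first and forwarded to the
-    # requesting chat at the end of this function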
- if ARCHIVE_ID and isinstance(vp_or_fid, pathlib.Path):
- chat_id = ARCHIVE_ID
-
- if settings[2] == "document":
- logging.info("Sending as document")
- try:
- # send as document could be sent as video even if it's a document
- res_msg = client.send_document(
- chat_id,
- vp_or_fid,
- caption=cap,
- progress=upload_hook,
- progress_args=(bot_msg,),
- reply_markup=markup,
- thumb=meta["thumb"],
- force_document=True,
- )
- except ValueError:
- logging.error("Retry to send as video")
- res_msg = client.send_video(
- chat_id,
- vp_or_fid,
- supports_streaming=True,
- caption=cap,
- progress=upload_hook,
- progress_args=(bot_msg,),
- reply_markup=markup,
- **meta,
- )
- elif settings[2] == "audio":
- logging.info("Sending as audio")
- res_msg = client.send_audio(
- chat_id,
- vp_or_fid,
- caption=cap,
- progress=upload_hook,
- progress_args=(bot_msg,),
- )
- else:
- # settings==video
- logging.info("Sending as video")
- try:
- res_msg = client.send_video(
- chat_id,
- vp_or_fid,
- supports_streaming=True,
- caption=cap,
- progress=upload_hook,
- progress_args=(bot_msg,),
- reply_markup=markup,
- **meta,
- )
- except Exception:
-            # try to send as animation, then photo
- try:
- logging.warning("Retry to send as animation")
- res_msg = client.send_animation(
- chat_id,
- vp_or_fid,
- caption=cap,
- progress=upload_hook,
- progress_args=(bot_msg,),
- reply_markup=markup,
- **meta,
- )
- except Exception:
- # this is likely a photo
- logging.warning("Retry to send as photo")
- res_msg = client.send_photo(
- chat_id,
- vp_or_fid,
- caption=cap,
- progress=upload_hook,
- progress_args=(bot_msg,),
- )
-
- unique = get_unique_clink(url, bot_msg.chat.id)
- obj = res_msg.document or res_msg.video or res_msg.audio or res_msg.animation or res_msg.photo
- redis.add_send_cache(unique, getattr(obj, "file_id", None))
- redis.update_metrics("video_success")
- if ARCHIVE_ID and isinstance(vp_or_fid, pathlib.Path):
- client.forward_messages(bot_msg.chat.id, ARCHIVE_ID, res_msg.id)
- return res_msg
-
-
-def gen_cap(bm, url, video_path):
- payment = Payment()
- chat_id = bm.chat.id
- user = bm.chat
- try:
-        user_info = "@{}({})-{}".format(user.username or "N/A", (user.first_name or "") + (user.last_name or ""), user.id)
- except Exception:
- user_info = ""
-
- if isinstance(video_path, pathlib.Path):
- meta = get_metadata(video_path)
- file_name = video_path.name
- file_size = sizeof_fmt(os.stat(video_path).st_size)
- else:
- file_name = getattr(video_path, "file_name", "")
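-        # the obfuscated fallback below is (2 << 2) + ((2 << 2) + 1) + (2 << 5) == 81 bytes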
- file_size = sizeof_fmt(getattr(video_path, "file_size", (2 << 2) + ((2 << 2) + 1) + (2 << 5)))
- meta = dict(
- width=getattr(video_path, "width", 0),
- height=getattr(video_path, "height", 0),
- duration=getattr(video_path, "duration", 0),
- thumb=getattr(video_path, "thumb", None),
- )
- free = payment.get_free_token(chat_id)
- pay = payment.get_pay_token(chat_id)
- if ENABLE_VIP:
- remain = f"Download token count: free {free}, pay {pay}"
- else:
- remain = ""
-
- if worker_name := os.getenv("WORKER_NAME"):
- worker = f"Downloaded by {worker_name}"
- else:
- worker = ""
- # Shorten the URL if necessary
- try:
- if len(url) > CAPTION_URL_LENGTH_LIMIT:
- url_for_cap = shorten_url(url, CAPTION_URL_LENGTH_LIMIT)
- else:
- url_for_cap = url
- except Exception as e:
- logging.warning(f"Error shortening URL: {e}")
- url_for_cap = url
-
- cap = (
- f"{user_info}\n{file_name}\n\n{url_for_cap}\n\nInfo: {meta['width']}x{meta['height']} {file_size}\t"
- f"{meta['duration']}s\n{remain}\n{worker}\n{bot_text.custom_text}"
- )
- return cap, meta
-
-
-def gen_video_markup():
- markup = types.InlineKeyboardMarkup(
- [
- [ # First row
- types.InlineKeyboardButton( # Generates a callback query when pressed
- "convert to audio", callback_data="convert"
- )
- ]
- ]
- )
- return markup
-
-
-@Panel.register
-def ping_revision(*args):
- return get_revision()
-
-
-@Panel.register
-def hot_patch(*args):
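-    # Pull the latest code and reinstall dependencies, then kill the process so
-    # the process manager can restart the worker on the patched code.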
- app_path = pathlib.Path().cwd().parent
- logging.info("Hot patching on path %s...", app_path)
-
- pip_install = "pip install -r requirements.txt"
- unset = "git config --unset http.https://github.com/.extraheader"
- pull_unshallow = "git pull origin --unshallow"
- pull = "git pull"
-
- subprocess.call(unset, shell=True, cwd=app_path)
- if subprocess.call(pull_unshallow, shell=True, cwd=app_path) != 0:
- logging.info("Already unshallow, pulling now...")
- subprocess.call(pull, shell=True, cwd=app_path)
-
- logging.info("Code is updated, applying hot patch now...")
- subprocess.call(pip_install, shell=True, cwd=app_path)
- psutil.Process().kill()
-
-
-def purge_tasks():
- count = app.control.purge()
- return f"purged {count} tasks."
-
-
-def run_celery():
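-    # Give this thread an asyncio event loop before the blocking Celery worker
-    # starts; async clients used inside tasks rely on it.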
- loop = asyncio.new_event_loop()
- asyncio.set_event_loop(loop)
- worker_name = os.getenv("WORKER_NAME", "")
- argv = ["-A", "tasks", "worker", "--loglevel=info", "--pool=threads", f"--concurrency={WORKERS}", "-n", worker_name]
- app.worker_main(argv)
-
-
-if __name__ == "__main__":
- print("Bootstrapping Celery worker now.....")
- time.sleep(5)
- threading.Thread(target=run_celery, daemon=True).start()
-
- scheduler = BackgroundScheduler(timezone="Europe/London")
- scheduler.add_job(auto_restart, "interval", seconds=900)
- scheduler.start()
-
- idle()
- bot.stop()
diff --git a/ytdlbot/utils.py b/ytdlbot/utils.py
deleted file mode 100644
index 4ec317d4..00000000
--- a/ytdlbot/utils.py
+++ /dev/null
@@ -1,301 +0,0 @@
-#!/usr/local/bin/python3
-# coding: utf-8
-
-# ytdlbot - utils.py
-# 9/1/21 22:50
-#
-
-__author__ = "Benny "
-
-import contextlib
-import inspect as pyinspect
-import logging
-import os
-import pathlib
-import re
-import shutil
-import subprocess
-import tempfile
-import time
-import uuid
-from http.cookiejar import MozillaCookieJar
-from urllib.parse import quote_plus
-
-import coloredlogs
-import ffmpeg
-import psutil
-
-from config import TMPFILE_PATH
-from flower_tasks import app
-
-inspect = app.control.inspect()
-
-
-def apply_log_formatter():
- coloredlogs.install(
- level=logging.INFO,
- fmt="[%(asctime)s %(filename)s:%(lineno)d %(levelname).1s] %(message)s",
- datefmt="%Y-%m-%d %H:%M:%S",
- )
-
-
-def customize_logger(logger: list):
- for log in logger:
- logging.getLogger(log).setLevel(level=logging.INFO)
-
-
-def sizeof_fmt(num: int, suffix="B"):
- for unit in ["", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"]:
- if abs(num) < 1024.0:
- return "%3.1f%s%s" % (num, unit, suffix)
- num /= 1024.0
- return "%.1f%s%s" % (num, "Yi", suffix)
-
-
-def timeof_fmt(seconds: int):
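-    # Format a duration in seconds as a compact string, e.g. 90061 -> "1d1h1m1s".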
- periods = [("d", 86400), ("h", 3600), ("m", 60), ("s", 1)]
- result = ""
- for period_name, period_seconds in periods:
- if seconds >= period_seconds:
- period_value, seconds = divmod(seconds, period_seconds)
- result += f"{int(period_value)}{period_name}"
- return result
-
-
-def is_youtube(url: str):
-    return url.startswith("https://www.youtube.com/") or url.startswith("https://youtu.be/")
-
-
-def adjust_formats(user_id: int, url: str, formats: list, hijack=None):
- from database import MySQL
-
- # high: best quality 1080P, 2K, 4K, 8K
- # medium: 720P
- # low: 480P
- if hijack:
- formats.insert(0, hijack)
- return
-
- mapping = {"high": [], "medium": [720], "low": [480]}
- settings = MySQL().get_user_settings(user_id)
- if settings and is_youtube(url):
- for m in mapping.get(settings[1], []):
- formats.insert(0, f"bestvideo[ext=mp4][height={m}]+bestaudio[ext=m4a]")
- formats.insert(1, f"bestvideo[vcodec^=avc][height={m}]+bestaudio[acodec^=mp4a]/best[vcodec^=avc]/best")
-
-    if settings and settings[2] == "audio":
-        formats.insert(0, "bestaudio[ext=m4a]")
-
-    if settings and settings[2] == "document":
-        formats.insert(0, None)
-
-
-def get_metadata(video_path):
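-    # Probe the file with ffmpeg for resolution and duration, then grab a
-    # thumbnail frame from the midpoint of the video.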
- width, height, duration = 1280, 720, 0
- try:
- video_streams = ffmpeg.probe(video_path, select_streams="v")
- for item in video_streams.get("streams", []):
- height = item["height"]
- width = item["width"]
- duration = int(float(video_streams["format"]["duration"]))
- except Exception as e:
- logging.error(e)
- try:
-        thumb = pathlib.Path(video_path).parent.joinpath(f"{uuid.uuid4().hex}-thumbnail.png").as_posix()
- ffmpeg.input(video_path, ss=duration / 2).filter("scale", width, -1).output(thumb, vframes=1).run()
- except ffmpeg._run.Error:
- thumb = None
-
- return dict(height=height, width=width, duration=duration, thumb=thumb)
-
-
-def current_time(ts=None):
- return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(ts))
-
-
-def get_revision():
- with contextlib.suppress(subprocess.SubprocessError):
- return subprocess.check_output("git -C ../ rev-parse --short HEAD".split()).decode("u8").replace("\n", "")
- return "unknown"
-
-
-def get_func_queue(func) -> int:
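-    # Sum queued task counts across all Celery workers for the given inspect
-    # method name, e.g. "active" or "reserved".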
- try:
- count = 0
- data = getattr(inspect, func)() or {}
- for _, task in data.items():
- count += len(task)
- return count
- except Exception:
- return 0
-
-
-def tail_log(f, lines=1, _buffer=4098):
- """Tail a file and get X lines from the end"""
- # placeholder for the lines found
- lines_found = []
-
- # block counter will be multiplied by buffer
- # to get the block size from the end
- block_counter = -1
-
- # loop until we find X lines
- while len(lines_found) < lines:
- try:
- f.seek(block_counter * _buffer, os.SEEK_END)
- except IOError: # either file is too small, or too many lines requested
- f.seek(0)
- lines_found = f.readlines()
- break
-
- lines_found = f.readlines()
-
-        # no explicit early-exit check needed here: the while condition above
-        # already stops once enough lines have been found
-
- # decrement the block counter to get the
- # next X bytes
- block_counter -= 1
-
- return lines_found[-lines:]
-
-
-class Detector:
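-    # Each *_detector method scans the recent log tail for a known crash
-    # signature; auto_restart() kills the process when any of them fires.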
- def __init__(self, logs: str):
- self.logs = logs
-
- @staticmethod
- def func_name():
- with contextlib.suppress(Exception):
- return pyinspect.stack()[1][3]
- return "N/A"
-
- def auth_key_detector(self):
- text = "Server sent transport error: 404 (auth key not found)"
- if self.logs.count(text) >= 3:
- logging.critical("auth key not found: %s", self.func_name())
-            for session in pathlib.Path(".").glob("*.session"):
-                session.unlink(missing_ok=True)
- return True
-
- def updates_too_long_detector(self):
-        # If you're seeing this, you have logged in on more than 10 devices and
-        # the earliest session was kicked out. Restarting the program could get you back in.
- indicators = [
- "types.UpdatesTooLong",
- "Got shutdown from remote",
- "Code is updated",
- "OSError: Connection lost",
- "[Errno -3] Try again",
- "MISCONF",
- ]
- for indicator in indicators:
- if indicator in self.logs:
- logging.critical("kick out crash: %s", self.func_name())
- return True
- logging.debug("No crash detected.")
-
- def next_salt_detector(self):
- text = "Next salt in"
- if self.logs.count(text) >= 5:
- logging.critical("Next salt crash: %s", self.func_name())
- return True
-
- def connection_reset_detector(self):
- text = "Send exception: ConnectionResetError Connection lost"
- if self.logs.count(text) >= 5:
- logging.critical("connection lost: %s ", self.func_name())
- return True
-
-
-def auto_restart():
- log_path = "/var/log/ytdl.log"
- if not os.path.exists(log_path):
- return
- with open(log_path) as f:
- logs = "".join(tail_log(f, lines=10))
-
- det = Detector(logs)
- method_list = [getattr(det, func) for func in dir(det) if func.endswith("_detector")]
- for method in method_list:
- if method():
-            logging.critical("%s bye bye world!⚠️", method)
- for item in pathlib.Path(TMPFILE_PATH or tempfile.gettempdir()).glob("ytdl-*"):
- shutil.rmtree(item, ignore_errors=True)
- time.sleep(5)
- psutil.Process().kill()
-
-
-def clean_tempfile():
- patterns = ["ytdl*", "spdl*", "leech*", "direct*"]
- temp_path = pathlib.Path(TMPFILE_PATH or tempfile.gettempdir())
-
- for pattern in patterns:
- for item in temp_path.glob(pattern):
- if time.time() - item.stat().st_ctime > 3600:
- shutil.rmtree(item, ignore_errors=True)
-
-
-def parse_cookie_file(cookiefile):
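-    # Load a Netscape-format cookie file into a plain name -> value dict.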
- jar = MozillaCookieJar(cookiefile)
- jar.load()
- return {cookie.name: cookie.value for cookie in jar}
-
-
-def extract_code_from_instagram_url(url):
-    # Instagram share URL patterns
-    patterns = [
-        r"/p/([a-zA-Z0-9_-]+)/",  # posts
-        r"/reel/([a-zA-Z0-9_-]+)/",  # reels
-    ]
-
- for pattern in patterns:
- match = re.search(pattern, url)
- if match:
- return match.group(1)
-
- return None
-
-
-def shorten_url(url, limit):
-    # Truncate a URL to the given length, ellipsis included.
-    return url[: limit - 3] + "..."
-
-
-def extract_filename(response):
- try:
- content_disposition = response.headers.get("content-disposition")
- if content_disposition:
- filename = re.findall("filename=(.+)", content_disposition)[0]
- return filename
- except (TypeError, IndexError):
- pass # Handle potential exceptions during extraction
-
- # Fallback if Content-Disposition header is missing
- filename = response.url.rsplit("/")[-1]
- if not filename:
- filename = quote_plus(response.url)
- return filename
-
-
-def extract_url_and_name(message_text):
- # Regular expression to match the URL
- url_pattern = r'(https?://[^\s]+)'
- # Regular expression to match the new name after '-n'
- name_pattern = r'-n\s+([^\s]+)'
-
- # Find the URL in the message_text
- url_match = re.search(url_pattern, message_text)
- url = url_match.group(0) if url_match else None
-
- # Find the new name in the message_text
- name_match = re.search(name_pattern, message_text)
- new_name = name_match.group(1) if name_match else None
-
- return url, new_name
-
-
-if __name__ == "__main__":
- auto_restart()
diff --git a/ytdlbot/ytdl_bot.py b/ytdlbot/ytdl_bot.py
deleted file mode 100644
index 7e296743..00000000
--- a/ytdlbot/ytdl_bot.py
+++ /dev/null
@@ -1,771 +0,0 @@
-#!/usr/local/bin/python3
-# coding: utf-8
-
-# ytdlbot - ytdl_bot.py
-# 8/14/21 14:37
-#
-
-__author__ = "Benny "
-
-import contextlib
-import json
-import logging
-import os
-import psutil
-import threading
-import random
-import re
-import tempfile
-import time
-import traceback
-from io import BytesIO
-from typing import Any
-
-import pyrogram.errors
-import qrcode
-import yt_dlp
-from apscheduler.schedulers.background import BackgroundScheduler
-from pyrogram import Client, enums, filters, types
-from pyrogram.errors.exceptions.bad_request_400 import UserNotParticipant
-from pyrogram.raw import functions
-from pyrogram.raw import types as raw_types
-from tgbot_ping import get_runtime
-from youtubesearchpython import VideosSearch
-
-from channel import Channel
-from client_init import create_app
-from config import (
- AUTHORIZED_USER,
- ENABLE_CELERY,
- ENABLE_FFMPEG,
- ENABLE_VIP,
- M3U8_SUPPORT,
- OWNER,
- PLAYLIST_SUPPORT,
- PREMIUM_USER,
- PROVIDER_TOKEN,
- REQUIRED_MEMBERSHIP,
- TOKEN_PRICE,
- TRX_SIGNAL,
- ENABLE_ARIA2,
-)
-from constant import BotText
-from database import InfluxDB, MySQL, Redis
-from limit import Payment, TronTrx
-from tasks import app as celery_app
-from tasks import (
- audio_entrance,
- direct_download_entrance,
- leech_download_entrance,
- hot_patch,
- purge_tasks,
- ytdl_download_entrance,
- spdl_download_entrance,
-)
-from utils import (
- sizeof_fmt,
- timeof_fmt,
- auto_restart,
- clean_tempfile,
- customize_logger,
- get_revision,
- extract_url_and_name
-)
-
-logging.info("Authorized users are %s", AUTHORIZED_USER)
-customize_logger(["pyrogram.client", "pyrogram.session.session", "pyrogram.connection.connection"])
-logging.getLogger("apscheduler.executors.default").propagate = False
-
-app = create_app("main")
-channel = Channel()
-
-
-def private_use(func):
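-    # Decorator: only serve private chats (or /ytdl commands in groups),
-    # authorized users, and members of REQUIRED_MEMBERSHIP when configured.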
- def wrapper(client: Client, message: types.Message):
- chat_id = getattr(message.from_user, "id", None)
-
- # message type check
-        if message.chat.type != enums.ChatType.PRIVATE and not (message.text or "").lower().startswith("/ytdl"):
-            logging.debug("%s, it's annoying me...", message.text)
- return
-
- # authorized users check
- if AUTHORIZED_USER:
- users = [int(i) for i in AUTHORIZED_USER.split(",")]
- else:
- users = []
-
- if users and chat_id and chat_id not in users:
- message.reply_text(BotText.private, quote=True)
- return
-
- if REQUIRED_MEMBERSHIP:
- try:
- member: types.ChatMember | Any = app.get_chat_member(REQUIRED_MEMBERSHIP, chat_id)
- if member.status not in [
- enums.ChatMemberStatus.ADMINISTRATOR,
- enums.ChatMemberStatus.MEMBER,
- enums.ChatMemberStatus.OWNER,
- ]:
- raise UserNotParticipant()
- else:
- logging.info("user %s check passed for group/channel %s.", chat_id, REQUIRED_MEMBERSHIP)
- except UserNotParticipant:
- logging.warning("user %s is not a member of group/channel %s", chat_id, REQUIRED_MEMBERSHIP)
- message.reply_text(BotText.membership_require, quote=True)
- return
-
- return func(client, message)
-
- return wrapper
-
-
-@app.on_message(filters.command(["start"]))
-def start_handler(client: Client, message: types.Message):
- payment = Payment()
- from_id = message.from_user.id
- logging.info("%s welcome to youtube-dl bot!", message.from_user.id)
- client.send_chat_action(from_id, enums.ChatAction.TYPING)
-    payment.check_old_user(from_id)
-    info = ""
-    if ENABLE_VIP:
-        free_token, pay_token, reset = payment.get_token(from_id)
-        info = f"Free token: {free_token}, Pay token: {pay_token}, Reset: {reset}"
- text = f"{BotText.start}\n\n{info}\n{BotText.custom_text}"
- client.send_message(message.chat.id, text, disable_web_page_preview=True)
-
-
-@app.on_message(filters.command(["help"]))
-def help_handler(client: Client, message: types.Message):
- chat_id = message.chat.id
- client.send_chat_action(chat_id, enums.ChatAction.TYPING)
- client.send_message(chat_id, BotText.help, disable_web_page_preview=True)
-
-
-@app.on_message(filters.command(["about"]))
-def about_handler(client: Client, message: types.Message):
- chat_id = message.chat.id
- client.send_chat_action(chat_id, enums.ChatAction.TYPING)
- client.send_message(chat_id, BotText.about)
-
-
-@app.on_message(filters.command(["sub"]))
-def subscribe_handler(client: Client, message: types.Message):
- chat_id = message.chat.id
- client.send_chat_action(chat_id, enums.ChatAction.TYPING)
- if message.text == "/sub":
- result = channel.get_user_subscription(chat_id)
- else:
- link = message.text.split()[1]
- try:
- result = channel.subscribe_channel(chat_id, link)
- except (IndexError, ValueError):
- result = f"Error: \n{traceback.format_exc()}"
- client.send_message(chat_id, result or "You have no subscription.", disable_web_page_preview=True)
-
-
-@app.on_message(filters.command(["unsub"]))
-def unsubscribe_handler(client: Client, message: types.Message):
- chat_id = message.chat.id
- client.send_chat_action(chat_id, enums.ChatAction.TYPING)
- text = message.text.split(" ")
- if len(text) == 1:
- client.send_message(chat_id, "/unsub channel_id", disable_web_page_preview=True)
- return
-
- rows = channel.unsubscribe_channel(chat_id, text[1])
- if rows:
- text = f"Unsubscribed from {text[1]}"
- else:
- text = "Unable to find the channel."
- client.send_message(chat_id, text, disable_web_page_preview=True)
-
-
-@app.on_message(filters.command(["patch"]))
-def patch_handler(client: Client, message: types.Message):
- username = message.from_user.username
- chat_id = message.chat.id
- if username == OWNER:
- celery_app.control.broadcast("hot_patch")
- client.send_chat_action(chat_id, enums.ChatAction.TYPING)
- client.send_message(chat_id, "Oorah!")
- hot_patch()
-
-
-@app.on_message(filters.command(["uncache"]))
-def uncache_handler(client: Client, message: types.Message):
- username = message.from_user.username
- link = message.text.split()[1]
- if username == OWNER:
- count = channel.del_cache(link)
- message.reply_text(f"{count} cache(s) deleted.", quote=True)
-
-
-@app.on_message(filters.command(["purge"]))
-def purge_handler(client: Client, message: types.Message):
- username = message.from_user.username
- if username == OWNER:
- message.reply_text(purge_tasks(), quote=True)
-
-
-@app.on_message(filters.command(["ping"]))
-def ping_handler(client: Client, message: types.Message):
- chat_id = message.chat.id
- client.send_chat_action(chat_id, enums.ChatAction.TYPING)
-
-    def send_message_and_measure_ping():
-        start_time = int(round(time.time() * 1000))
-        reply = client.send_message(chat_id, "Starting Ping...")
-        end_time = int(round(time.time() * 1000))
-        ping_time = end_time - start_time
-        message.reply_text(f"Ping: {ping_time} ms", quote=True)
-        time.sleep(0.5)
-        client.edit_message_text(chat_id=reply.chat.id, message_id=reply.id, text="Ping Calculation Complete.")
-        time.sleep(1)
-        client.delete_messages(chat_id=reply.chat.id, message_ids=reply.id)
-
- thread = threading.Thread(target=send_message_and_measure_ping)
- thread.start()
-
-
-@app.on_message(filters.command(["stats"]))
-def stats_handler(client: Client, message: types.Message):
- chat_id = message.chat.id
- client.send_chat_action(chat_id, enums.ChatAction.TYPING)
- cpu_usage = psutil.cpu_percent()
- total, used, free, disk = psutil.disk_usage("/")
- swap = psutil.swap_memory()
- memory = psutil.virtual_memory()
- boot_time = psutil.boot_time()
-
-    owner_stats = (
-        "\n\n╬─────《 Stats 》─────╬\n\n"
-        f"╭🖥️ **CPU Usage »** __{cpu_usage}%__\n"
-        f"├💾 **RAM Usage »** __{memory.percent}%__\n"
-        f"╰🗄️ **DISK Usage »** __{disk}%__\n\n"
-        f"╭📤Upload: {sizeof_fmt(psutil.net_io_counters().bytes_sent)}\n"
-        f"╰📥Download: {sizeof_fmt(psutil.net_io_counters().bytes_recv)}\n\n\n"
-        f"Memory Total: {sizeof_fmt(memory.total)}\n"
-        f"Memory Free: {sizeof_fmt(memory.available)}\n"
-        f"Memory Used: {sizeof_fmt(memory.used)}\n"
-        f"SWAP Total: {sizeof_fmt(swap.total)} | SWAP Usage: {swap.percent}%\n\n"
-        f"Total Disk Space: {sizeof_fmt(total)}\n"
-        f"Used: {sizeof_fmt(used)} | Free: {sizeof_fmt(free)}\n\n"
-        f"Physical Cores: {psutil.cpu_count(logical=False)}\n"
-        f"Total Cores: {psutil.cpu_count(logical=True)}\n\n"
-        f"🤖Bot Uptime: {timeof_fmt(time.time() - botStartTime)}\n"
-        f"⏲️OS Uptime: {timeof_fmt(time.time() - boot_time)}\n"
-    )
-
-    user_stats = (
-        "\n\n╬─────《 Stats 》─────╬\n\n"
-        f"╭🖥️ **CPU Usage »** __{cpu_usage}%__\n"
-        f"├💾 **RAM Usage »** __{memory.percent}%__\n"
-        f"╰🗄️ **DISK Usage »** __{disk}%__\n\n"
-        f"╭📤Upload: {sizeof_fmt(psutil.net_io_counters().bytes_sent)}\n"
-        f"╰📥Download: {sizeof_fmt(psutil.net_io_counters().bytes_recv)}\n\n\n"
-        f"Memory Total: {sizeof_fmt(memory.total)}\n"
-        f"Memory Free: {sizeof_fmt(memory.available)}\n"
-        f"Memory Used: {sizeof_fmt(memory.used)}\n"
-        f"Total Disk Space: {sizeof_fmt(total)}\n"
-        f"Used: {sizeof_fmt(used)} | Free: {sizeof_fmt(free)}\n\n"
-        f"🤖Bot Uptime: {timeof_fmt(time.time() - botStartTime)}\n"
-    )
-
- if message.chat.username == OWNER:
- message.reply_text(owner_stats, quote=True)
- else:
- message.reply_text(user_stats, quote=True)
-
-
-@app.on_message(filters.command(["sub_count"]))
-def sub_count_handler(client: Client, message: types.Message):
- username = message.from_user.username
- chat_id = message.chat.id
- if username == OWNER:
- with BytesIO() as f:
- f.write(channel.sub_count().encode("u8"))
- f.name = "subscription count.txt"
- client.send_document(chat_id, f)
-
-
-@app.on_message(filters.command(["show_history"]))
-def show_history(client: Client, message: types.Message):
- chat_id = message.chat.id
- client.send_chat_action(chat_id, enums.ChatAction.TYPING)
- data = MySQL().show_history(chat_id)
- if data:
- client.send_message(chat_id, data, disable_web_page_preview=True)
- else:
- client.send_message(chat_id, "No history found.")
-
-
-@app.on_message(filters.command(["clear_history"]))
-def clear_history(client: Client, message: types.Message):
- chat_id = message.chat.id
- MySQL().clear_history(chat_id)
- message.reply_text("History cleared.", quote=True)
-
-
-@app.on_message(filters.command(["settings"]))
-def settings_handler(client: Client, message: types.Message):
- chat_id = message.chat.id
- payment = Payment()
- client.send_chat_action(chat_id, enums.ChatAction.TYPING)
- data = MySQL().get_user_settings(chat_id)
- set_mode = data[3]
- text = {"Local": "Celery", "Celery": "Local"}.get(set_mode, "Local")
- mode_text = f"Download mode: **{set_mode}**\nHistory record: {data[4]}"
- if message.chat.username == OWNER or payment.get_pay_token(chat_id):
- extra = [types.InlineKeyboardButton(f"Change download mode to {text}", callback_data=text)]
- else:
- extra = []
-
- markup = types.InlineKeyboardMarkup(
- [
- [ # First row
- types.InlineKeyboardButton("send as document", callback_data="document"),
- types.InlineKeyboardButton("send as video", callback_data="video"),
- types.InlineKeyboardButton("send as audio", callback_data="audio"),
- ],
- [ # second row
- types.InlineKeyboardButton("High Quality", callback_data="high"),
- types.InlineKeyboardButton("Medium Quality", callback_data="medium"),
- types.InlineKeyboardButton("Low Quality", callback_data="low"),
- ],
- [
- types.InlineKeyboardButton("Toggle History", callback_data=f"history-{data[4]}"),
- ],
- extra,
- ]
- )
-
-    try:
-        client.send_message(chat_id, BotText.settings.format(data[1], data[2]) + mode_text, reply_markup=markup)
-    except Exception:
-        # retry with slightly altered text if Telegram rejects the first attempt
-        client.send_message(
-            chat_id, BotText.settings.format(data[1] + ".", data[2] + ".") + mode_text, reply_markup=markup
-        )
-
-
-@app.on_message(filters.command(["buy"]))
-def buy_handler(client: Client, message: types.Message):
- # process as chat.id, not from_user.id
- chat_id = message.chat.id
- client.send_chat_action(chat_id, enums.ChatAction.TYPING)
- # currency USD
- token_count = message.text.replace("/buy", "").strip()
- if token_count.isdigit():
- price = int(int(token_count) / TOKEN_PRICE * 100)
- else:
- price = 100
-
- markup = types.InlineKeyboardMarkup(
- [
- [
- types.InlineKeyboardButton("Bot Payments", callback_data=f"bot-payments-{price}"),
- types.InlineKeyboardButton("TRON(TRX)", callback_data="tron-trx"),
- ],
- ]
- )
- client.send_message(chat_id, BotText.buy, disable_web_page_preview=True, reply_markup=markup)
-
-
-@app.on_callback_query(filters.regex(r"tron-trx"))
-def tronpayment_btn_callback(client: Client, callback_query: types.CallbackQuery):
- callback_query.answer("Generating QR code...")
- chat_id = callback_query.message.chat.id
- client.send_chat_action(chat_id, enums.ChatAction.TYPING)
-
- addr = TronTrx().get_payment_address(chat_id)
- with BytesIO() as bio:
- qr = qrcode.make(addr)
- qr.save(bio)
- client.send_photo(chat_id, bio, caption=f"Send any amount of TRX to `{addr}`")
-
-
-@app.on_callback_query(filters.regex(r"premium.*"))
-def premium_click(client: Client, callback_query: types.CallbackQuery):
- data = callback_query.data
- if data == "premium-yes":
- callback_query.answer("Seeking premium user...")
- callback_query.message.edit_text("Please wait patiently...no progress bar will be shown.")
- replied = callback_query.message.reply_to_message
- data = {"url": replied.text, "user_id": callback_query.message.chat.id}
- client.send_message(PREMIUM_USER, json.dumps(data), disable_notification=True, disable_web_page_preview=True)
- else:
- callback_query.answer("Cancelled.")
- original_text = callback_query.message.text
- callback_query.message.edit_text(original_text.split("\n")[0])
-
-
-@app.on_callback_query(filters.regex(r"bot-payments-.*"))
-def bot_payment_btn_callback(client: Client, callback_query: types.CallbackQuery):
- callback_query.answer("Generating invoice...")
- chat_id = callback_query.message.chat.id
- client.send_chat_action(chat_id, enums.ChatAction.TYPING)
-
- data = callback_query.data
- price = int(data.split("-")[-1])
- payload = f"{chat_id}-buy"
- invoice = generate_invoice(price, f"Buy {TOKEN_PRICE} download tokens", "Pay by card", payload)
- app.invoke(
- functions.messages.SendMedia(
- peer=(raw_types.InputPeerUser(user_id=chat_id, access_hash=0)),
- media=invoice,
- random_id=app.rnd_id(),
- message="Buy more download token",
- )
- )
-
-
-@app.on_message(filters.command(["redeem"]))
-def redeem_handler(client: Client, message: types.Message):
- payment = Payment()
- chat_id = message.chat.id
- text = message.text.strip()
- unique = text.replace("/redeem", "").strip()
- msg = payment.verify_payment(chat_id, unique)
- message.reply_text(msg, quote=True)
-
-
-@app.on_message(filters.user(PREMIUM_USER) & filters.incoming & filters.caption)
-def premium_forward(client: Client, message: types.Message):
- media = message.video or message.audio or message.document
- target_user = media.file_name.split(".")[0]
- client.forward_messages(target_user, message.chat.id, message.id)
-
-
-@app.on_message(filters.command(["ban"]) & filters.user(PREMIUM_USER))
-def ban_handler(client: Client, message: types.Message):
- replied = message.reply_to_message.text
- user_id = json.loads(replied).get("user_id")
- redis = Redis()
- redis.r.hset("ban", user_id, 1)
- message.reply_text(f"Done, banned {user_id}.", quote=True)
-
-
-def generate_invoice(amount: int, title: str, description: str, payload: str):
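-    # Build a raw InputMediaInvoice for Telegram Bot Payments; amount is in the
-    # smallest currency unit (USD cents here).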
- invoice = raw_types.input_media_invoice.InputMediaInvoice(
- invoice=raw_types.invoice.Invoice(
- currency="USD", prices=[raw_types.LabeledPrice(label="price", amount=amount)]
- ),
- title=title,
- description=description,
- provider=PROVIDER_TOKEN,
- provider_data=raw_types.DataJSON(data="{}"),
- payload=payload.encode(),
- start_param=payload,
- )
- return invoice
-
-
-def link_checker(url: str) -> str:
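-    # Return a human-readable rejection reason for unsupported links
-    # (playlists/channels, m3u8, live streams); a falsy result means allowed.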
- if url.startswith("https://www.instagram.com"):
- return ""
- ytdl = yt_dlp.YoutubeDL()
-
- if not PLAYLIST_SUPPORT and (
- re.findall(r"^https://www\.youtube\.com/channel/", Channel.extract_canonical_link(url)) or "list" in url
- ):
- return "Playlist or channel links are disabled."
-
- if not M3U8_SUPPORT and (re.findall(r"m3u8|\.m3u8|\.m3u$", url.lower())):
- return "m3u8 links are disabled."
-
- with contextlib.suppress(yt_dlp.utils.DownloadError):
- if ytdl.extract_info(url, download=False).get("live_status") == "is_live":
- return "Live stream links are disabled. Please download it after the stream ends."
-
-
-def search_ytb(kw: str):
- videos_search = VideosSearch(kw, limit=10)
- text = ""
- results = videos_search.result()["result"]
-    for index, item in enumerate(results, start=1):
-        title = item.get("title")
-        link = item.get("link")
-        text += f"{index}. {title}\n{link}\n\n"
- return text
-
-
-@app.on_message(filters.command(["spdl"]))
-def spdl_handler(client: Client, message: types.Message):
- redis = Redis()
- chat_id = message.from_user.id
- client.send_chat_action(chat_id, enums.ChatAction.TYPING)
- message_text = message.text
- url, new_name = extract_url_and_name(message_text)
- logging.info("spdl start %s", url)
- if url is None or not re.findall(r"^https?://", url.lower()):
- redis.update_metrics("bad_request")
- message.reply_text("Something wrong ๐ค.\nCheck your URL and send me again.", quote=True)
- return
-
- bot_msg = message.reply_text("Request received.", quote=True)
- redis.update_metrics("spdl_request")
- spdl_download_entrance(client, bot_msg, url)
-
-
-@app.on_message(filters.command(["direct"]))
-def direct_handler(client: Client, message: types.Message):
- redis = Redis()
- chat_id = message.from_user.id
- client.send_chat_action(chat_id, enums.ChatAction.TYPING)
- message_text = message.text
- url, new_name = extract_url_and_name(message_text)
- logging.info("direct start %s", url)
- if url is None or not re.findall(r"^https?://", url.lower()):
- redis.update_metrics("bad_request")
- message.reply_text("Send me a DIRECT LINK.", quote=True)
- return
-
- bot_msg = message.reply_text("Request received.", quote=True)
- redis.update_metrics("direct_request")
- direct_download_entrance(client, bot_msg, url, new_name)
-
-
-@app.on_message(filters.command(["leech"]))
-def leech_handler(client: Client, message: types.Message):
- if not ENABLE_ARIA2:
- message.reply_text("Aria2 Not Enabled.", quote=True)
- return
- redis = Redis()
- chat_id = message.from_user.id
- client.send_chat_action(chat_id, enums.ChatAction.TYPING)
- message_text = message.text
- url, new_name = extract_url_and_name(message_text)
- logging.info("leech using aria2 start %s", url)
- if url is None or not re.findall(r"^https?://", url.lower()):
- redis.update_metrics("bad_request")
- message.reply_text("Send me a correct LINK.", quote=True)
- return
-
- bot_msg = message.reply_text("Request received.", quote=True)
- redis.update_metrics("leech_request")
- leech_download_entrance(client, bot_msg, url)
-
-
-@app.on_message(filters.command(["ytdl"]))
-def ytdl_handler(client: Client, message: types.Message):
- redis = Redis()
- chat_id = message.from_user.id
- client.send_chat_action(chat_id, enums.ChatAction.TYPING)
- message_text = message.text
- url, new_name = extract_url_and_name(message_text)
- logging.info("ytdl start %s", url)
- if url is None or not re.findall(r"^https?://", url.lower()):
- redis.update_metrics("bad_request")
- message.reply_text("Something wrong ๐ค.\nCheck your URL and send me again.", quote=True)
- return
-
- bot_msg = message.reply_text("Request received.", quote=True)
- redis.update_metrics("ytdl_request")
- ytdl_download_entrance(client, bot_msg, url)
-
-
-@app.on_message(filters.incoming & (filters.text | filters.document))
-@private_use
-def download_handler(client: Client, message: types.Message):
- redis = Redis()
- payment = Payment()
- chat_id = message.from_user.id
- client.send_chat_action(chat_id, enums.ChatAction.TYPING)
- redis.user_count(chat_id)
- if message.document:
- with tempfile.NamedTemporaryFile(mode="r+") as tf:
- logging.info("Downloading file to %s", tf.name)
- message.download(tf.name)
-            contents = open(tf.name, "r").read()  # re-open: message.download() wrote to the path, not our handle
- urls = contents.split()
- else:
- urls = [message.text]
- logging.info("start %s", urls)
-
- for url in urls:
- # check url
- if not re.findall(r"^https?://", url.lower()):
- redis.update_metrics("bad_request")
- text = search_ytb(url)
- message.reply_text(text, quote=True, disable_web_page_preview=True)
- return
-
- if text := link_checker(url):
- message.reply_text(text, quote=True)
- redis.update_metrics("reject_link_checker")
- return
-
- # old user is not limited by token
- if ENABLE_VIP and not payment.check_old_user(chat_id):
- free, pay, reset = payment.get_token(chat_id)
- if free + pay <= 0:
- message.reply_text(f"You don't have enough token. Please wait until {reset} or /buy .", quote=True)
- redis.update_metrics("reject_token")
- return
- else:
- payment.use_token(chat_id)
-
- redis.update_metrics("video_request")
-
- text = BotText.get_receive_link_text()
- try:
- bot_msg: types.Message | Any = message.reply_text(text, quote=True)
- except pyrogram.errors.Flood as e:
- f = BytesIO()
- f.write(str(e).encode())
- f.write(b"Your job will be done soon. Just wait! Don't rush.")
- f.name = "Please don't flood me.txt"
- bot_msg = message.reply_document(
-            f, caption=f"Flood wait! Please wait {e} seconds.... Your job will start automatically", quote=True
- )
- f.close()
-        client.send_message(OWNER, f"Flood wait! {e} seconds....")
- time.sleep(e.value)
-
- client.send_chat_action(chat_id, enums.ChatAction.UPLOAD_VIDEO)
- bot_msg.chat = message.chat
- ytdl_download_entrance(client, bot_msg, url)
-
-
-@app.on_callback_query(filters.regex(r"document|video|audio"))
-def send_method_callback(client: Client, callback_query: types.CallbackQuery):
- chat_id = callback_query.message.chat.id
- data = callback_query.data
- logging.info("Setting %s file type to %s", chat_id, data)
- MySQL().set_user_settings(chat_id, "method", data)
- callback_query.answer(f"Your send type was set to {callback_query.data}")
-
-
-@app.on_callback_query(filters.regex(r"high|medium|low"))
-def download_resolution_callback(client: Client, callback_query: types.CallbackQuery):
- chat_id = callback_query.message.chat.id
- data = callback_query.data
-    logging.info("Setting %s default resolution to %s", chat_id, data)
- MySQL().set_user_settings(chat_id, "resolution", data)
- callback_query.answer(f"Your default download quality was set to {callback_query.data}")
-
-
-@app.on_callback_query(filters.regex(r"history.*"))
-def set_history_callback(client: Client, callback_query: types.CallbackQuery):
- chat_id = callback_query.message.chat.id
- data = callback_query.data.split("-")[-1]
-
- r = "OFF" if data == "ON" else "ON"
-    logging.info("Toggling %s history record to %s", chat_id, r)
- MySQL().set_user_settings(chat_id, "history", r)
- callback_query.answer("History setting updated.")
-
-
-@app.on_inline_query()
-def inline_query(client: Client, inline_query: types.InlineQuery):
- kw = inline_query.query
- user_id = inline_query.from_user.id
- data = MySQL().search_history(user_id, kw)
- if data:
- results = [
- types.InlineQueryResultArticle(
- id=str(i),
- title=item[1],
- description=item[2],
- input_message_content=types.InputTextMessageContent(item[1]),
- )
- for i, item in enumerate(data)
- ]
- client.answer_inline_query(inline_query.id, results)
-
-
-@app.on_callback_query(filters.regex(r"convert"))
-def audio_callback(client: Client, callback_query: types.CallbackQuery):
- redis = Redis()
- if not ENABLE_FFMPEG:
- callback_query.answer("Request rejected.")
- callback_query.message.reply_text("Audio conversion is disabled now.")
- return
-
-    callback_query.answer("Converting to audio... please wait patiently")
- redis.update_metrics("audio_request")
- audio_entrance(client, callback_query.message)
-
-
-@app.on_callback_query(filters.regex(r"Local|Celery"))
-def owner_local_callback(client: Client, callback_query: types.CallbackQuery):
- chat_id = callback_query.message.chat.id
- MySQL().set_user_settings(chat_id, "mode", callback_query.data)
- callback_query.answer(f"Download mode was changed to {callback_query.data}")
-
-
-def periodic_sub_check():
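-    # Push new channel uploads to subscribers, deactivating subscriptions of
-    # users who blocked or deleted the bot.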
- exceptions = pyrogram.errors.exceptions
- for cid, uids in channel.group_subscriber().items():
- video_url = channel.has_newer_update(cid)
- if video_url:
- logging.info(f"periodic update:{video_url} - {uids}")
- for uid in uids:
- try:
- app.send_message(uid, f"{video_url} is out. Watch it on YouTube")
- except (exceptions.bad_request_400.PeerIdInvalid, exceptions.bad_request_400.UserIsBlocked) as e:
- logging.warning("User is blocked or deleted. %s", e)
- channel.deactivate_user_subscription(uid)
- except Exception:
- logging.error("Unknown error when sending message to user. %s", traceback.format_exc())
- finally:
- time.sleep(random.random() * 3)
-
-
-@app.on_raw_update()
-def raw_update(client: Client, update, users, chats):
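-    # Low-level payment flow: approve pre-checkout queries, then credit tokens
-    # once Telegram reports the payment as completed.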
- payment = Payment()
- action = getattr(getattr(update, "message", None), "action", None)
- if update.QUALNAME == "types.UpdateBotPrecheckoutQuery":
- client.invoke(
- functions.messages.SetBotPrecheckoutResults(
- query_id=update.query_id,
- success=True,
- )
- )
- elif action and action.QUALNAME == "types.MessageActionPaymentSentMe":
- logging.info("Payment received. %s", action)
- uid = update.message.peer_id.user_id
- amount = action.total_amount / 100
- payment.add_pay_user([uid, amount, action.charge.provider_charge_id, 0, amount * TOKEN_PRICE])
- client.send_message(uid, f"Thank you {uid}. Payment received: {amount} {action.currency}")
-
-
-def trx_notify(_, **kwargs):
- user_id = kwargs.get("user_id")
- text = kwargs.get("text")
- logging.info("Sending trx notification to %s", user_id)
- app.send_message(user_id, text)
-
-
-if __name__ == "__main__":
- botStartTime = time.time()
- MySQL()
- TRX_SIGNAL.connect(trx_notify)
- scheduler = BackgroundScheduler(timezone="Europe/London")
- scheduler.add_job(auto_restart, "interval", seconds=600)
- scheduler.add_job(clean_tempfile, "interval", seconds=120)
- scheduler.add_job(Redis().reset_today, "cron", hour=0, minute=0)
- scheduler.add_job(InfluxDB().collect_data, "interval", seconds=120)
- # scheduler.add_job(TronTrx().check_payment, "interval", seconds=60, max_instances=1)
- # default quota allocation of 10,000 units per day
- # scheduler.add_job(periodic_sub_check, "interval", seconds=3600)
- scheduler.start()
- banner = f"""
-ytdlbot
-
-By @BennyThink, VIP mode: {ENABLE_VIP}, Celery Mode: {ENABLE_CELERY}
-Version: {get_revision()}
- """
- print(banner)
- app.run()