Custom changes to make it work without DB
ragmehos committed Feb 16, 2025
1 parent 7e4d0b9 commit 32551a1
Showing 43 changed files with 995 additions and 251 deletions.
149 changes: 21 additions & 128 deletions .github/workflows/main.yml
@@ -1,67 +1,37 @@
name: MediaFusion CI/CD
name: Deploy Docker image to GitHub Container Registry

on:
  release:
    types: [ created ]
  push:
    branches:
      - prowlarr_custom_deployment

jobs:
  update_version:
  docker:
    runs-on: ubuntu-latest
    outputs:
      previous_version: ${{ steps.extract_versions.outputs.PREVIOUS_VERSION }}
    steps:
      - name: Checkout
      # Checkout the repository
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Extract versions
        id: extract_versions
        run: |
          # Extract current version from release body
          BODY="${{ github.event.release.body }}"
          if ! PREVIOUS_VERSION=$(echo "$BODY" | grep -o '/compare/.*\.\.\.' | sed 's/\/compare\///' | sed 's/\.\.\.//' || true); then
            echo "Error: Failed to extract previous version from release body"
            exit 1
          fi
          echo "PREVIOUS_VERSION=${PREVIOUS_VERSION}" >> "$GITHUB_OUTPUT"
      - name: Update version numbers
        if: "!github.event.release.prerelease"
        run: |
          make update-version VERSION_NEW=${{ github.ref_name }}
      - name: Commit and push version updates
        if: "!github.event.release.prerelease"
        run: |
          git config --local user.email "[email protected]"
          git config --local user.name "GitHub Action"
          git add -A
          git commit -m "chore: update version to ${{ github.ref_name }}"
          git push origin HEAD:main
  mediafusion_docker_build:
    needs: update_version
    if: "!github.event.release.prerelease"
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          ref: main # Use main branch with updated versions

      # Set up QEMU for multi-platform builds
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      # Set up Docker Buildx
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to Docker Hub
      # Log in to GitHub Container Registry (GHCR)
      - name: Log in to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Get short SHA
        id: short_sha
        run: echo "GIT_REV=$(echo ${GITHUB_SHA::7})" >> $GITHUB_ENV

      - name: Build and push
        id: docker_build
@@ -71,84 +41,7 @@ jobs:
          file: ./deployment/Dockerfile
          platforms: linux/amd64,linux/arm64
          push: true
          build-args: VERSION=${{ github.ref_name }}
          build-args: GIT_REV=${{ env.GIT_REV }}
          tags: |
            mhdzumair/mediafusion:${{ github.ref_name }}
            mhdzumair/mediafusion:latest
      - name: Image digest
        run: echo ${{ steps.docker_build.outputs.digest }}

  kodi_build:
    needs: update_version
    if: "!github.event.release.prerelease"
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          ref: main # Use main branch with updated versions

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.8'

      - name: Install Python dependencies
        run: |
          python -m pip install --upgrade pip
          pip install setuptools wheel
      - name: Install required packages
        run: sudo apt-get install -y zip xmlstarlet

      - name: Build addon and repository
        run: |
          make -C kodi
          # Validate build artifacts
          for file in kodi/dist/plugin.video.mediafusion/plugin.video.mediafusion-*.zip kodi/dist/repository.mediafusion/repository.mediafusion-*.zip; do
            if [ ! -f "$file" ]; then
              echo "Error: Build artifact $file not found"
              exit 1
            fi
          done
      - name: Deploy to GitHub Pages
        uses: peaceiris/actions-gh-pages@v4
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: kodi/dist
          enable_jekyll: false
          force_orphan: true

      - name: Upload Release Assets
        uses: softprops/action-gh-release@v2
        with:
          files: |
            kodi/plugin.video.mediafusion-*.zip
            kodi/repository.mediafusion-*.zip
          prerelease: false
          token: ${{ secrets.GITHUB_TOKEN }}

  generate_release_notes:
    needs: update_version
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Install jq
        run: sudo apt-get install -y jq

      - name: Generate Release Notes
        run: |
          make generate-notes VERSION_OLD=${{ needs.update_version.outputs.previous_version }} VERSION_NEW=${{ github.ref_name }} ANTHROPIC_API_KEY=${{ secrets.ANTHROPIC_API_KEY }} > release_notes.md
      - name: Update Release Notes
        uses: softprops/action-gh-release@v2
        with:
          body_path: release_notes.md
          token: ${{ secrets.GITHUB_TOKEN }}
            ghcr.io/ragmehos/mf:v${{ github.ref_name }}
            ghcr.io/ragmehos/mf:latest
2 changes: 1 addition & 1 deletion Procfile
@@ -1 +1 @@
web: uvicorn --no-access-log --host 0.0.0.0 --port $PORT api.main:app
web: uvicorn --no-access-log --host 0.0.0.0 --port $PORT api.main:app
79 changes: 79 additions & 0 deletions api/SeekableHTTPFile.py
@@ -0,0 +1,79 @@
from io import IOBase
import cgi  # note: deprecated since Python 3.11 and removed in 3.13

import requests

# TODO: fallback file name from url


class SeekableHTTPFile(IOBase):
    # a bit based on https://github.com/valgur/pyhttpio
    def __init__(self, url, name=None, requests_session=None, timeout=30):
        IOBase.__init__(self)
        self.url = url
        self.sess = requests_session if requests_session is not None else requests.session()
        self._seekable = False
        self.timeout = timeout
        # Probe the server: a 206 response to a Range request means we can seek.
        f = self.sess.head(url, headers={'Range': 'bytes=0-'}, timeout=timeout)
        if f.status_code == 206 and 'Content-Range' in f.headers:
            self._seekable = True
        self.len = int(f.headers.get("Content-Length", 0))
        self.name = name
        if name is None and "Content-Disposition" in f.headers:
            value, params = cgi.parse_header(f.headers["Content-Disposition"])
            if "filename" in params:
                self.name = params["filename"]
        f.close()
        self._pos = 0
        self._r = self.sess.get(self.url, headers={'Range': 'bytes={}-'.format(self._pos)}, stream=True, timeout=self.timeout)

    def seekable(self):
        return self._seekable

    def __len__(self):
        return self.len

    def tell(self):
        return self._pos

    def readable(self):
        return not self.closed

    def writable(self):
        return False

    def _reopen_stream(self):
        # Restart the underlying HTTP stream at the current position, or from
        # the beginning if the server does not support Range requests.
        if self._r is not None:
            self._r.close()
        if self._seekable:
            self._r = self.sess.get(self.url, headers={'Range': 'bytes={}-'.format(self._pos)}, stream=True, timeout=self.timeout)
        else:
            self._pos = 0
            self._r = self.sess.get(self.url, stream=True, timeout=self.timeout)

    def seek(self, offset, whence=0):
        if not self.seekable():
            raise OSError(f"Not seekable for {self.url}")
        if whence == 0:
            self._pos = 0
        elif whence == 1:
            pass
        elif whence == 2:
            self._pos = self.len
        else:
            raise ValueError(f"Invalid whence: {whence}")
        self._pos += offset
        # Close the open stream; read() reopens it lazily at the new position.
        self._r.close()
        return self._pos

    def read(self, amount=-1):
        print(f"Reading {amount} bytes at pos {self._pos} URL {self.url}")
        if self._r is None or self._r.raw.closed:
            self._reopen_stream()
        if amount < 0:
            content = self._r.raw.read()
        else:
            content = self._r.raw.read(amount)
        self._pos += len(content)
        return content
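
For reference, a minimal usage sketch of the class above (the URL is hypothetical; any server that answers Range requests with 206 Partial Content behaves the same way):

from api.SeekableHTTPFile import SeekableHTTPFile

# Hypothetical URL; the server must honor Range requests for seeking to work.
f = SeekableHTTPFile("https://example.com/big-video.mkv")
if f.seekable():
    f.seek(1024)             # jump past the first KiB
    chunk = f.read(16)       # 16 bytes starting at offset 1024
    print(len(f), f.tell())  # total size, current position (1040)
f.close()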
2 changes: 2 additions & 0 deletions api/__init__.py
@@ -1,4 +1,5 @@
# Desc: Setup the dramatiq broker and middleware.
'''
import dramatiq
from dramatiq.brokers.redis import RedisBroker
from dramatiq.middleware import (
@@ -34,3 +35,4 @@
    Abortable(backend=RedisBackend.from_url(settings.redis_url)),
]
dramatiq.set_broker(redis_broker)
'''
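
With the Redis broker setup above commented out, any module that still declares dramatiq actors will bind to dramatiq's default broker, which expects an external service. One way to keep those declarations import-safe without any backing service is dramatiq's in-memory StubBroker; a sketch under that assumption, not part of this commit:

import dramatiq
from dramatiq.brokers.stub import StubBroker

# Sketch only: an in-memory broker so actor declarations work without Redis.
dramatiq.set_broker(StubBroker())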
71 changes: 68 additions & 3 deletions api/main.py
@@ -21,7 +21,10 @@
from pydantic import ValidationError
from starlette.responses import HTMLResponse

from api import middleware
#from asgiref.wsgi import WsgiToAsgi
#from a2wsgi import WSGIMiddleware

from api import middleware#, offcloud_wsgidav
from api.scheduler import setup_scheduler
from db import crud, database, schemas
from db.config import settings
@@ -123,6 +126,54 @@ async def add_cors_header(request: Request, call_next):
app.mount("/static", StaticFiles(directory="resources"), name="static")


# Wrap wsgidav app with WsgiToAsgi
#wsgi_app1 = WsgiToAsgi(offcloud_wsgidav.setup_wsgi())
#wsgi_app = WSGIMiddleware(offcloud_wsgidav.setup_wsgi(), workers=15)

# Mount the wsgidav app in FastAPI
#app.mount("/webdav", wsgi_app)


@app.on_event("startup")
async def init_server():
    await database.init()
    await torrent.init_best_trackers()


@app.on_event("startup")
async def start_scheduler():
    if settings.disable_all_scheduler:
        logging.info("All Schedulers are disabled. Not setting up any jobs.")
        return

    acquired, lock = await acquire_scheduler_lock(app.state.redis)
    if acquired:
        try:
            scheduler = AsyncIOScheduler()
            setup_scheduler(scheduler)
            scheduler.start()
            app.state.scheduler = scheduler
            app.state.scheduler_lock = lock
            # Run the heartbeat in the background and keep a reference so the
            # task is not garbage-collected; awaiting it here would block
            # startup for as long as the heartbeat keeps running.
            app.state.heartbeat_task = asyncio.create_task(maintain_heartbeat(app.state.redis))
        except Exception as e:
            await release_scheduler_lock(app.state.redis, lock)
            raise e


@app.on_event("shutdown")
async def stop_scheduler():
    if hasattr(app.state, "scheduler"):
        app.state.scheduler.shutdown(wait=False)

    if hasattr(app.state, "scheduler_lock") and app.state.scheduler_lock:
        await release_scheduler_lock(app.state.redis, app.state.scheduler_lock)


@app.on_event("shutdown")
async def shutdown_event():
    await app.state.redis.aclose()


@app.get("/", tags=["home"])
async def get_home(request: Request):
    return TEMPLATES.TemplateResponse(
@@ -614,6 +665,7 @@ async def get_streams(
        return {"streams": []}

    user_ip = await get_user_public_ip(request, user_data)
    response.headers.update(const.NO_CACHE_HEADERS)
    user_feeds = []
    if season is None or episode is None:
        season = episode = 1
@@ -658,8 +710,9 @@
        fetched_streams = await crud.get_movie_streams(
            user_data, secret_str, video_id, user_ip, background_tasks
        )
        fetched_streams.extend(user_feeds)
        #fetched_streams.extend(user_feeds)
    elif catalog_type == "series":
        response.headers.update(const.NO_CACHE_HEADERS)
        fetched_streams = await crud.get_series_streams(
            user_data,
            secret_str,
@@ -669,7 +722,19 @@
            user_ip,
            background_tasks,
        )
        fetched_streams.extend(user_feeds)
        # Prefetch next episode
        logging.info(f"Prefetching next episode {season} {episode+1}")
        background_tasks.add_task(
            crud.get_series_streams,
            user_data,
            secret_str,
            video_id,
            season,
            episode + 1,
            user_ip,
            background_tasks,
        )
        #fetched_streams.extend(user_feeds)
    elif catalog_type == "events":
        fetched_streams = await crud.get_event_streams(video_id, user_data)
        response.headers.update(const.NO_CACHE_HEADERS)
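
The startup and shutdown hooks above call acquire_scheduler_lock, release_scheduler_lock, and maintain_heartbeat, whose implementations are not shown in this diff. A plausible sketch with redis.asyncio (the key name and TTL are assumptions) uses SET with NX and EX so that exactly one instance runs the APScheduler jobs:

import asyncio
import secrets

LOCK_KEY = "scheduler_lock"  # hypothetical key name
LOCK_TTL = 60                # seconds; assumed value


async def acquire_scheduler_lock(redis):
    token = secrets.token_hex(16)
    # SET with nx=True only succeeds for the first instance to arrive.
    acquired = await redis.set(LOCK_KEY, token, nx=True, ex=LOCK_TTL)
    return bool(acquired), (token if acquired else None)


async def release_scheduler_lock(redis, token):
    # Delete the lock only if this instance still owns it (bytes comparison
    # assumes the client was created without decode_responses=True).
    if token and await redis.get(LOCK_KEY) == token.encode():
        await redis.delete(LOCK_KEY)


async def maintain_heartbeat(redis):
    # Refresh the TTL periodically so the lock survives while this instance
    # is alive and expires on its own if the instance dies.
    while True:
        await asyncio.sleep(LOCK_TTL // 2)
        await redis.expire(LOCK_KEY, LOCK_TTL)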