Skip to content

Commit

Permalink
Add some docs n stuff
Browse files Browse the repository at this point in the history
  • Loading branch information
lalalune committed May 18, 2024
1 parent 11f2cfd commit efdc555
Show file tree
Hide file tree
Showing 14 changed files with 213 additions and 218 deletions.
53 changes: 53 additions & 0 deletions .github/workflows/dockerhub.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
# Build and publish the worker Docker image whenever a GitHub release is published.
name: Publish Docker image

on:
  release:
    types: [published]

env:
  REGISTRY: index.docker.io
  IMAGE_NAME: arfx/test-worker

jobs:
  push_to_registry:
    name: Push Docker image to Docker Hub
    runs-on: ubuntu-latest
    permissions:
      packages: write
      contents: read
      attestations: write
      id-token: write
    steps:
      - name: Check out the repo
        uses: actions/checkout@v4

      - name: Log in to Docker Hub
        uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        # NOTE(review): this reads the image name from secrets.DOCKER_IMAGE while
        # the attestation step below uses env.IMAGE_NAME — confirm the two agree,
        # otherwise the attestation will reference a different image than was pushed.
        uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7
        with:
          images: ${{ secrets.DOCKER_IMAGE }}

      - name: Build and push Docker image
        id: push
        uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671
        with:
          context: .
          file: ./Dockerfile
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}

      - name: Generate artifact attestation
        uses: actions/attest-build-provenance@v1
        with:
          # Fixed: expression was missing the space before "}}" (env.IMAGE_NAME}}).
          subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          subject-digest: ${{ steps.push.outputs.digest }}
          push-to-registry: true

28 changes: 28 additions & 0 deletions .github/workflows/mkdocs.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
# Build the mkdocs-material documentation site and publish it to GitHub Pages
# (gh-pages branch) on every push to main.
name: mkdocs
on:
  push:
    branches:
      - main
permissions:
  contents: write
jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Configure Git Credentials
        # gh-deploy commits to the gh-pages branch, so git identity must be set.
        run: |
          git config user.name github-actions[bot]
          git config user.email 41898282+github-actions[bot]@users.noreply.github.com
      - uses: actions/setup-python@v5
        with:
          python-version: 3.x
      # ISO week number as a cache-busting id: the pip cache below rolls over weekly.
      - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV
      - uses: actions/cache@v4
        with:
          key: mkdocs-material-${{ env.cache_id }}
          path: .cache
          restore-keys: |
            mkdocs-material-
      - run: pip install mkdocs-material mkdocstrings mkdocstrings-python
      # --force overwrites the remote gh-pages branch history.
      - run: mkdocs gh-deploy --force
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

A super simple way to distribute rendering tasks across multiple machines.

<img src="resources/logo.png">
<img src="docs/assets/banner.png">

[![Lint and Test](https://github.com/RaccoonResearch/distributaur/actions/workflows/test.yml/badge.svg)](https://github.com/RaccoonResearch/distributaur/actions/workflows/test.yml)
[![PyPI version](https://badge.fury.io/py/distributaur.svg)](https://badge.fury.io/py/distributaur)
Expand Down
114 changes: 0 additions & 114 deletions distributaur/cli.py

This file was deleted.

77 changes: 77 additions & 0 deletions distributaur/task_runner.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
import json
import subprocess
import sys
import os
import ssl
import time
from celery import Celery
from redis import ConnectionPool, Redis

# HACK: globally disables HTTPS certificate verification for this process.
# This affects every https connection made anywhere in the worker, not just
# Redis — a significant security trade-off; confirm it is still required.
ssl._create_default_https_context = ssl._create_unverified_context

from distributaur.utils import get_redis_values

# Shared Redis connection, built from environment-derived settings.
# A connection pool lets the worker reuse sockets across status updates.
redis_url = get_redis_values()
pool = ConnectionPool.from_url(redis_url)
redis_client = Redis(connection_pool=pool)

# Celery application: Redis serves as both the message broker and the
# result backend, so task status can be read back from the same store.
app = Celery("tasks", broker=redis_url, backend=redis_url)

def run_task(task_func):
    """Register *task_func* as a Celery task that tracks its status in Redis.

    The returned Celery task wrapper reports IN_PROGRESS before execution and
    COMPLETE / TIMEOUT / FAILED afterwards via ``update_task_status``. The
    wrapped function receives all original args/kwargs; ``job_id`` is read
    from kwargs for status bookkeeping.

    Bug fix: every ``update_task_status`` call now passes ``job_id`` —
    the original omitted it at four call sites, shifting ``task_id`` into the
    ``status`` parameter so the wrong key/value pair was written to Redis.

    Args:
        task_func: The callable to execute as a Celery task. Its ``__name__``
            becomes the registered task name.

    Returns:
        The Celery task wrapper (use it to dispatch the task).
    """

    @app.task(name=task_func.__name__, acks_late=True, reject_on_worker_lost=True)
    def wrapper(*args, **kwargs):
        job_id = kwargs.get("job_id")
        task_id = wrapper.request.id
        print(f"Starting task {task_id} in job {job_id}")
        update_task_status(job_id, task_id, "IN_PROGRESS")

        timeout = 600  # 10 minutes allowed before execution begins
        task_timeout = 2700  # 45 minutes allowed for the task function itself

        start_time = time.time()
        print(f"Task {task_id} starting.")

        # NOTE(review): start_time is taken immediately above, so this pre-start
        # timeout can only trip if the clock jumps; kept for parity with the
        # original control flow — confirm whether queue wait time was intended.
        if time.time() - start_time > timeout:
            update_task_status(job_id, task_id, "TIMEOUT")
            print(f"Task {task_id} timed out before starting task")
            return

        try:
            task_start_time = time.time()
            print(f"Task {task_id} executing task function.")
            result = task_func(*args, **kwargs)
            print(f"Task {task_id} completed task function.")

            # Post-hoc check: the function already ran to completion; if it
            # overran the budget its result is discarded and marked TIMEOUT.
            elapsed_task_time = time.time() - task_start_time
            if elapsed_task_time > task_timeout:
                update_task_status(job_id, task_id, "TIMEOUT")
                print(
                    f"Task {task_id} timed out after {elapsed_task_time} seconds of execution"
                )
                return

            update_task_status(job_id, task_id, "COMPLETE")
            print(f"Task {task_id} completed successfully")
            return result

        except subprocess.TimeoutExpired:
            # Raised when task_func runs a subprocess with its own timeout.
            update_task_status(job_id, task_id, "TIMEOUT")
            print(f"Task {task_id} timed out after {timeout} seconds")
            return

        except Exception as e:
            update_task_status(job_id, task_id, "FAILED")
            print(f"Task {task_id} failed with error: {str(e)}")
            return

    return wrapper

def update_task_status(job_id, task_id, status):
    """Record *status* for *task_id* under Celery's result-meta key in Redis.

    Writes a JSON payload to the ``celery-task-meta-<task_id>`` key so the
    Celery result backend reflects the given status. *job_id* is used only
    for the log line.
    """
    meta_key = f"celery-task-meta-{task_id}"
    payload = json.dumps({"status": status})
    redis_client.set(meta_key, payload)
    print(f"Updated status for task {task_id} in job {job_id} to {status}")

if __name__ == "__main__":
    # Script entry point: launch an in-process Celery worker for this app.
    print("Starting Celery worker...")
    worker_argv = ["celery", "worker", "--loglevel=info"]
    app.start(argv=worker_argv)
Loading

0 comments on commit efdc555

Please sign in to comment.