diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml new file mode 100644 index 000000000000..73e619b2dfef --- /dev/null +++ b/.github/workflows/build-and-test.yml @@ -0,0 +1,47 @@ +name: Build and Test + +on: ["push"] +jobs: + lint: + runs-on: ubuntu-latest + name: Python Black Lint Check + steps: + - name: Check out repository + uses: actions/checkout@v4.2.2 + + - name: Set up Python + uses: actions/setup-python@v5.3.0 + with: + python-version: 3.8 + + - name: Install Python dependencies + run: pip install black nbqa + + - name: Run Black on Python files + run: black --check $(find . -type f -name "*.py") + build: + uses: ./.github/workflows/docker.yml + with: + registry-dockerhub-enable: ${{ github.event_name != 'pull_request' }} + registry-repo-name: AGiXT + registry-readme: ./docs/README.md + secrets: + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + test-agixt-postgres: + uses: ./.github/workflows/tests.yml + with: + notebook: tests/endpoint-tests.ipynb + image: ${{ needs.build.outputs.primary-image }} + port: "7437" + database-type: "postgresql" + report-name: "agixt-postgres-tests" + needs: build + test-agixt-sqlite: + uses: ./.github/workflows/tests.yml + with: + notebook: tests/endpoint-tests.ipynb + image: ${{ needs.build.outputs.primary-image }} + port: "7437" + database-type: "sqlite" + report-name: "agixt-sqlite-tests" + needs: build \ No newline at end of file diff --git a/.github/workflows/code-style-check.yml b/.github/workflows/code-style-check.yml deleted file mode 100644 index 7faee77eaecc..000000000000 --- a/.github/workflows/code-style-check.yml +++ /dev/null @@ -1,24 +0,0 @@ -name: Code Style Check - -on: - pull_request: - branches: - - main - -jobs: - lint: - runs-on: ubuntu-latest - steps: - - name: Check out repository - uses: actions/checkout@v2 - - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: 3.8 - - - name: Install Python dependencies - run: pip install black nbqa - - - name: Run Black on Python files - run: black --check $(find . 
-type f -name "*.py") diff --git a/.github/workflows/operation-docker-build-publish.yml b/.github/workflows/docker.yml similarity index 83% rename from .github/workflows/operation-docker-build-publish.yml rename to .github/workflows/docker.yml index 72f4f24dbab6..ba38bcebf382 100644 --- a/.github/workflows/operation-docker-build-publish.yml +++ b/.github/workflows/docker.yml @@ -1,4 +1,4 @@ -name: Build and publish docker containers +name: Docker Build and Publish on: workflow_call: @@ -6,10 +6,9 @@ on: tags: type: string default: | - type=schedule - type=ref,event=branch - type=ref,event=pr - type=semver,pattern={{raw}} + type=raw,value={{branch}} + type=semver,pattern={{raw}},enable=${{ github.ref_type == 'tag' }} + type=raw,value=latest,enable=${{ github.ref_type == 'tag' }} type=sha flavor: type: string @@ -42,7 +41,7 @@ on: default: ${{ github.event.repository.name }} registry-dockerhub-enable: type: boolean - default: true + default: false registry-github-enable: type: boolean default: true @@ -54,29 +53,27 @@ on: default: ubuntu-latest platforms: type: string - default: linux/amd64 + default: linux/amd64,linux/arm64/v8 cache-from: type: string default: type=gha cache-to: type: string default: type=gha,mode=max - pre-free-disk-space: - type: boolean - default: false outputs: digest: description: "Digest of docker image" - value: ${{ jobs.build_publish_docker.outputs.digest }} + value: ${{ jobs.build.outputs.digest }} primary-image: description: "Primary full name of pushed docker image" - value: ${{ jobs.build_publish_docker.outputs.primary-image }} + value: ${{ jobs.build.outputs.primary-image }} secrets: DOCKERHUB_TOKEN: required: false jobs: - build_publish_docker: + build: + name: Docker (${{ inputs.registry-github-enable == true && inputs.registry-dockerhub-enable == true && 'GitHub, DockerHub' || inputs.registry-github-enable == true && 'GitHub' || 'DockerHub' }}) runs-on: ${{ inputs.runs-on }} permissions: @@ -88,31 +85,16 @@ jobs: primary-image: ${{ steps.get-primary-image.outputs.primary-image }} steps: - - name: Free Disk Space (Ubuntu) - if: inputs.pre-free-disk-space - uses: jlumbroso/free-disk-space@main - with: - # this might remove tools that are actually needed, - # if set to "true" but frees about 6 GB - tool-cache: false - # all of these default to true, but feel free to set to - # "false" if necessary for your workflow - android: true - dotnet: true - haskell: true - large-packages: true - swap-storage: true - - name: Log in to Docker Hub if: inputs.registry-dockerhub-enable - uses: docker/login-action@v3 + uses: docker/login-action@v3.3.0 with: username: ${{ vars.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Log in to the Github registry if: inputs.registry-github-enable - uses: docker/login-action@v3 + uses: docker/login-action@v3.3.0 with: registry: ghcr.io username: ${{ github.actor }} @@ -125,16 +107,16 @@ jobs: echo "dockerhub-repo=${{ vars.DOCKERHUB_USERNAME }}/${{ inputs.registry-repo-name }}" >> "$GITHUB_ENV" - name: Set up QEMU - uses: docker/setup-qemu-action@v3 + uses: docker/setup-qemu-action@v3.2.0 # Needed for cache layers on github registry - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3.1.0 + uses: docker/setup-buildx-action@v3.7.1 ## Cache based contexts - name: Restore cached context if: inputs.context-cache - uses: actions/cache/restore@v4.0.1 + uses: actions/cache/restore@v4.1.2 with: path: cached-context key: ${{ inputs.context-cache }} @@ -147,13 +129,13 @@ jobs: ## Repo based context (fixes 
submodules etc) - name: Checkout repo - uses: actions/checkout@v4.1.1 + uses: actions/checkout@v4.2.2 if: inputs.context-repository with: fetch-depth: 1 - name: Checkout external context - uses: actions/checkout@v4.1.1 + uses: actions/checkout@v4.2.2 if: inputs.context-repository with: repository: ${{ inputs.context-repository }} @@ -194,7 +176,7 @@ jobs: - name: Build and push Docker images id: dockerBuild - uses: docker/build-push-action@v5.1.0 + uses: docker/build-push-action@v6.9.0 with: platforms: ${{ inputs.platforms }} file: ${{ inputs.dockerfile }} @@ -207,7 +189,7 @@ jobs: cache-to: ${{ inputs.cache-to }} # publish README on docker hub - - uses: actions/checkout@v4.1.1 + - uses: actions/checkout@v4.2.2 with: fetch-depth: 1 diff --git a/.github/workflows/operation-docker-sign.yml b/.github/workflows/operation-docker-sign.yml deleted file mode 100644 index bb15f39c4dd1..000000000000 --- a/.github/workflows/operation-docker-sign.yml +++ /dev/null @@ -1,22 +0,0 @@ -name: Sign off docker containers - -on: - workflow_call: - inputs: - image-name: - required: true - type: string - -jobs: - sign_docker: - runs-on: ubuntu-latest - - permissions: - packages: write - contents: read - - steps: - - - name: Install cosign - if: github.event_name != 'pull_request' - uses: sigstore/cosign-installer@v3 diff --git a/.github/workflows/operation-test-with-jupyter.yml b/.github/workflows/operation-test-with-jupyter.yml deleted file mode 100644 index 51da23276f1f..000000000000 --- a/.github/workflows/operation-test-with-jupyter.yml +++ /dev/null @@ -1,228 +0,0 @@ -name: Run Tests -on: - workflow_call: - inputs: - notebook: - type: string - required: true - description: file to run (ending in .ipynb), can be directory to batch run (without trailing slash) - image: - type: string - required: true - port: - type: string - secondary-image: - type: string - required: false - description: "Secondary image to run" - default: "joshxt/safeexecute:latest" - secondary-image-port: - type: string - required: false - description: "Port for the secondary image" - default: "5432" - port-mapping: - type: string - additional-python-dependencies: - type: string - description: add whatever pip you need here - allow-errors: - type: boolean - description: Fail if there is an error in the execution of the notebook - default: false - additional-args: - type: string - description: additional args for nbconvert - default: "--log-level INFO" - append-logs: - type: boolean - default: false - clone-repo: - type: boolean - default: false - database-type: - type: string - default: "postgresql" - description: "Database type to use" - db-connected: - type: boolean - default: true - description: "If the database is connected" - report-name: - type: string - default: "test-reports" - description: "Name of the report" - auth-schema: - type: string - description: schema to use for authentication - default: company - sendgrid-api-key: - type: string - default: none - description: Optional sendgrid api-key available as os.getenv('SENDGRID_API_KEY') in your notebook - stripe-api-key: - type: string - default: none - description: Optional stripe api-key available as os.getenv('STRIPE_API_KEY') in your notebook - testmail-api-key: - type: string - default: none - description: Optional testmail api-key available as os.getenv('TESTMAIL_API_KEY') in your notebook - testmail-namespace: - type: string - default: none - description: Optional testmail namespace available as os.getenv('TESTMAIL_NAMESPACE') in your notebook - default-email: - type: 
string - default: none - description: Optional default email available as os.getenv('DEFAULT_EMAIL') in your notebook - secrets: - api-key: - description: Optional api-key available as os.getenv('API_KEY') in your notebook - -jobs: - run-tests: - runs-on: ubuntu-latest - outputs: - digest: ${{ steps.dockerBuild.outputs.digest }} - services: - dbservice: - image: postgres:latest - ports: - - 5432 - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: postgres - secondary-service: - image: ${{ inputs.secondary-image }} - ports: - - ${{ inputs.port-mapping || format('{0}:{1}', inputs.secondary-image-port, inputs.secondary-image-port) }} - env: - DB_CONNECTED: ${{ inputs.db-connected }} - DATABASE_TYPE: ${{ inputs.database-type }} - DATABASE_HOST: dbservice - DATABASE_USER: postgres - DATABASE_PASSWORD: postgres - DATABASE_PORT: 5432 - DATABASE_NAME: postgres - SCHEMA: ${{ inputs.auth-schema }} - LOG_LEVEL: INFO - MFA_VERIFY: authenticator - TZ: America/New_York - service-under-test: - image: ${{ inputs.image }} - ports: - - ${{ inputs.port-mapping || format('{0}:{1}', inputs.port, inputs.port) }} - options: >- - --health-cmd "curl -f http://localhost:${{ inputs.port }}" - --health-interval 10s - --health-timeout 60s - --health-retries 5 - --health-start-period 2m - env: - DB_CONNECTED: ${{ inputs.db-connected }} - DATABASE_TYPE: ${{ inputs.database-type }} - DATABASE_HOST: dbservice - DATABASE_USER: postgres - DATABASE_PASSWORD: postgres - DATABASE_PORT: 5432 - DATABASE_NAME: postgres - SCHEMA: ${{ inputs.auth-schema }} - LOG_LEVEL: INFO - SENDGRID_API_KEY: ${{ inputs.sendgrid-api-key }} - TESTMAIL_API_KEY: ${{ inputs.testmail-api-key }} - TESTMAIL_NAMESPACE: ${{ inputs.testmail-namespace }} - DEFAULT_EMAIL: ${{ inputs.default-email }} - DEFAULT_SERVICE: sendgrid - COMPILE_SERVER: http://secondary-service:${{ inputs.secondary-image-port }} - MFA_VERIFY: authenticator - MODE: development - UVICORN_WORKERS: 1 - STRIPE_API_KEY: ${{ inputs.stripe-api-key }} - TZ: America/New_York - steps: - - uses: actions/setup-python@v5 - with: - python-version: "3.10" - - name: Checkout - uses: actions/checkout@v4.1.1 - with: - fetch-depth: 1 - - - name: Install jupyter - run: pip3 install jupyter nbconvert[webpdf] - - name: Update package lists and install jupyter output generation dependencies - run: | - sudo apt-get update - sudo apt-get install --fix-missing -y pandoc texlive-xetex texlive-fonts-recommended texlive-plain-generic - - - name: Clone repository and install package - if: inputs.clone-repo - run: | - git clone https://github.com/${{ github.repository }} /tmp/repo - cd /tmp/repo - pip3 install . 
- - - name: Install additional dependencies for notebooks - if: inputs.additional-python-dependencies - run: pip3 install ${{ inputs.additional-python-dependencies }} - - - name: Set notebook and artifact files - run: | - notebook="${{ inputs.notebook }}" - if ${{ endsWith( inputs.notebook, 'ipynb' ) }} ; then - echo "notebook-file=${notebook}" >> "$GITHUB_ENV" - echo "artifact-file=${notebook%.*}.pdf" >> "$GITHUB_ENV" - else - echo "notebook-file=${notebook}/*.ipynb" >> "$GITHUB_ENV" - echo "artifact-file=${notebook}/*.pdf" >> "$GITHUB_ENV" - fi - - - name: Configure nbconvert args - run: echo "nbconvert-args=--execute ${{ inputs.additional-args }} --to pdf" >> "$GITHUB_ENV" - - - name: Enable switch --allow-errors - if: inputs.allow-errors - run: echo "nbconvert-args=${{ env.nbconvert-args }} --allow-errors" - - - name: Add additional nbconvert args - if: inputs.additional-args - run: echo "nbconvert-args=${{ env.nbconvert-args }} ${{ inputs.additional-args }}" - - - name: Wait for services - run: | - echo "Waiting for services to be ready..." - sleep 60 - - - name: Check secondary service status - run: | - docker exec ${{ job.services.secondary-service.id }} \ - ps aux | grep -v grep | grep -q secondary-service && \ - echo "Secondary service is running" || echo "Secondary service is not running" - - - name: Execute notebook - env: - API_KEY: ${{ secrets.api-key }} - run: python3 -m nbconvert ${{ env.nbconvert-args }} ${{ env.notebook-file }} - - - name: Append test logs - if: inputs.append-logs - run: | - docker logs "${{ job.services.agixt.id }}" > /test-output.log - - - name: Append test logs - if: inputs.append-logs - run: | - echo "artifact-file=${{ env.artifact-file }}\n/test-output.log" >> "$GITHUB_ENV" - - - name: Check service-under-test logs - run: docker logs ${{ job.services.service-under-test.id }} - - - name: Check secondary-service logs - run: docker logs ${{ job.services.secondary-service.id }} - - - uses: actions/upload-artifact@v4 - with: - name: ${{ inputs.report-name }} - path: ${{ env.artifact-file }} diff --git a/.github/workflows/publish-docker-dev.yml b/.github/workflows/publish-docker-dev.yml deleted file mode 100644 index 78e73f55e498..000000000000 --- a/.github/workflows/publish-docker-dev.yml +++ /dev/null @@ -1,67 +0,0 @@ -name: Dev - Build and Test - -on: - push: - branches-ignore: - - main - workflow_dispatch: - -jobs: - build-agixt: - runs-on: ubuntu-latest - outputs: - github_user: ${{ steps.extract_info.outputs.github_user }} - repo_name: ${{ steps.extract_info.outputs.repo_name }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Extract GitHub info and ensure lowercase - id: extract_info - run: | - echo "github_user=$(echo ${{ github.actor }} | tr '[:upper:]' '[:lower:]')" >> $GITHUB_OUTPUT - echo "repo_name=$(echo ${{ github.repository }} | cut -d'/' -f 2 | tr '[:upper:]' '[:lower:]')" >> $GITHUB_OUTPUT - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - - name: Login to GitHub Container Registry - uses: docker/login-action@v3 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Build and push Docker image - uses: docker/build-push-action@v5 - with: - context: . 
- file: ./Dockerfile - platforms: linux/amd64,linux/arm64 - push: true - cache-from: type=gha - cache-to: type=gha,mode=max - tags: | - ghcr.io/${{ steps.extract_info.outputs.github_user }}/${{ steps.extract_info.outputs.repo_name }}:dev - ghcr.io/${{ steps.extract_info.outputs.github_user }}/${{ steps.extract_info.outputs.repo_name }}:${{ github.sha }} - - test-agixt-postgres: - uses: josh-xt/AGiXT/.github/workflows/operation-test-with-jupyter.yml@main - with: - notebook: tests/endpoint-tests.ipynb - image: ghcr.io/${{ needs.build-agixt.outputs.github_user }}/${{ needs.build-agixt.outputs.repo_name }}:${{ github.sha }} - port: "7437" - database-type: "postgresql" - report-name: "agixt-postgres-tests" - additional-python-dependencies: openai requests agixtsdk qrcode==7.4.2 - needs: build-agixt - test-agixt-sqlite: - uses: josh-xt/AGiXT/.github/workflows/operation-test-with-jupyter.yml@main - with: - notebook: tests/endpoint-tests.ipynb - image: ghcr.io/${{ needs.build-agixt.outputs.github_user }}/${{ needs.build-agixt.outputs.repo_name }}:${{ github.sha }} - port: "7437" - database-type: "sqlite" - report-name: "agixt-sqlite-tests" - additional-python-dependencies: openai requests agixtsdk qrcode==7.4.2 - needs: build-agixt \ No newline at end of file diff --git a/.github/workflows/publish-docker.yml b/.github/workflows/publish-docker.yml deleted file mode 100644 index 2cf1a1034268..000000000000 --- a/.github/workflows/publish-docker.yml +++ /dev/null @@ -1,47 +0,0 @@ -name: Publish Docker image - -permissions: - packages: write - contents: read - -on: - push: - branches: [main] - release: - types: [published] - workflow_dispatch: - -jobs: - build-agixt: - uses: josh-xt/AGiXT/.github/workflows/operation-docker-build-publish.yml@main - with: - registry-dockerhub-enable: ${{ github.event_name != 'pull_request' }} - registry-repo-name: AGiXT - registry-readme: ./docs/README.md - tags: | - type=schedule - type=ref,event=branch - type=semver,pattern={{version}} - platforms: linux/amd64,linux/arm64/v8 - secrets: - DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} - test-agixt-postgres: - uses: josh-xt/AGiXT/.github/workflows/operation-test-with-jupyter.yml@main - with: - notebook: tests/endpoint-tests.ipynb - image: ${{ needs.build-agixt.outputs.primary-image }} - port: "7437" - database-type: "postgresql" - report-name: "agixt-postgres-tests" - additional-python-dependencies: openai requests agixtsdk qrcode==7.4.2 - needs: build-agixt - test-agixt-sqlite: - uses: josh-xt/AGiXT/.github/workflows/operation-test-with-jupyter.yml@main - with: - notebook: tests/endpoint-tests.ipynb - image: ${{ needs.build-agixt.outputs.primary-image }} - port: "7437" - database-type: "sqlite" - report-name: "agixt-sqlite-tests" - additional-python-dependencies: openai requests agixtsdk qrcode==7.4.2 - needs: build-agixt \ No newline at end of file diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 000000000000..a3f60e87c9b5 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,182 @@ +name: Run Tests +on: + workflow_call: + inputs: + notebook: + type: string + required: true + description: file to run (ending in .ipynb), can be directory to batch run (without trailing slash) + image: + type: string + required: true + port: + type: string + port-mapping: + type: string + additional-python-dependencies: + type: string + description: add whatever pip you need here + default: "openai requests agixtsdk qrcode==7.4.2" + allow-errors: + type: boolean + description: Fail if 
there is an error in the execution of the notebook + default: false + additional-args: + type: string + description: additional args for nbconvert + default: "--log-level INFO" + append-logs: + type: boolean + default: false + clone-repo: + type: boolean + default: false + database-type: + type: string + default: "postgresql" + description: "Database type to use" + report-name: + type: string + default: "test-reports" + description: "Name of the report" + default-email: + type: string + default: none + description: Optional default email available as os.getenv('DEFAULT_EMAIL') in your notebook + secrets: + api-key: + description: Optional api-key available as os.getenv('API_KEY') in your notebook + DISCORD_WEBHOOK: + description: Optional discord webhook available as os.getenv('DISCORD_WEBHOOK') in your notebook + +jobs: + run-tests: + runs-on: ubuntu-latest + outputs: + digest: ${{ steps.dockerBuild.outputs.digest }} + services: + dbservice: + image: postgres:latest + ports: + - 5432 + env: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: postgres + agixt: + image: ${{ inputs.image }} + ports: + - ${{ inputs.port-mapping || format('{0}:{1}', inputs.port, inputs.port) }} + options: >- + --health-cmd "curl -f http://localhost:${{ inputs.port }}" + --health-interval 10s + --health-timeout 60s + --health-retries 5 + --health-start-period 2m + env: + DB_CONNECTED: "true" + DATABASE_TYPE: ${{ inputs.database-type }} + DATABASE_HOST: dbservice + DATABASE_USER: postgres + DATABASE_PASSWORD: postgres + DATABASE_PORT: 5432 + DATABASE_NAME: postgres + LOG_LEVEL: INFO + AGIXT_API_KEY: just-a-test + AGIXT_URI: http://agixt:7437 + WORKING_DIRECTORY: /agixt/WORKSPACE + TOKENIZERS_PARALLELISM: 'false' + UVICORN_WORKERS: 1 + TZ: America/New_York + steps: + - uses: actions/setup-python@v5.3.0 + with: + python-version: '3.10' + - name: Checkout + uses: actions/checkout@v4.2.2 + with: + fetch-depth: 1 + + - name: Install Python dependencies + run: pip3 install jupyter nbconvert[webpdf] ${{ inputs.additional-python-dependencies }} + + - name: Update package lists and install jupyter output generation dependencies + run: | + sudo apt-get update + sudo apt-get install --fix-missing -y pandoc texlive-xetex texlive-fonts-recommended texlive-plain-generic curl ffmpeg + + - name: Set notebook and artifact files + run: | + notebook="${{ inputs.notebook }}" + if ${{ endsWith( inputs.notebook, 'ipynb' ) }} ; then + echo "notebook-file=${notebook}" >> "$GITHUB_ENV" + echo "artifact-file=${notebook%.*}.pdf" >> "$GITHUB_ENV" + else + echo "notebook-file=${notebook}/*.ipynb" >> "$GITHUB_ENV" + echo "artifact-file=${notebook}/*.pdf" >> "$GITHUB_ENV" + fi + + - name: Check AGiXT logs + run: docker logs ${{ job.services.agixt.id }} --follow & + + - name: Execute notebook + id: execute_notebook + env: + API_KEY: ${{ secrets.api-key }} + run: | + echo "Executing notebook with strict error checking..." + + # First try to execute and generate PDF; capture the exit code without failing the step + STRICT_STATUS=0 + python3 -m nbconvert --execute --log-level INFO --to pdf ${{ env.notebook-file }} || STRICT_STATUS=$? + echo "strict_status=${STRICT_STATUS}" >> $GITHUB_ENV + - name: Rerun allowing errors if strict status is not 0 + if: env.strict_status != '0' + run: | + echo "Executing notebook with error tolerance..." 
+ python3 -m nbconvert --execute --allow-errors --log-level INFO --to pdf ${{ env.notebook-file }} + - name: Send Video to Discord + if: always() + run: | + # Set message based on stored test outcome + BRANCH_NAME=$(echo ${{ github.ref }} | awk -F'/' '{print $NF}') + if [ "${{ github.actor }}" == "Josh-XT" ]; then + DISCORD_NAME="<@381837595522367488>" + elif [ "${{ github.actor }}" == "JamesonRGrieve" ]; then + DISCORD_NAME="<@329145730725838858>" + elif [ "${{ github.actor }}" == "waiscodes" ]; then + DISCORD_NAME="<@670762167037067304>" + elif [ "${{ github.actor }}" == "birdup000" ]; then + DISCORD_NAME="<@856308374567256074>" + elif [ "${{ github.actor }}" == "Nick-XT" ]; then + DISCORD_NAME="<@381908912951001088>" + else + DISCORD_NAME="**${{ github.actor }}**" + fi + + if [ "${{ env.strict_status }}" != "0" ]; then + MESSAGE="❌ **TEST FAILURE**: **${{ inputs.report-name }}** on repository **${{ github.repository }}** branch **$BRANCH_NAME** commit **${{ github.sha }}** by ${DISCORD_NAME} " + else + MESSAGE="✅ Test passed: **${{ inputs.report-name }}** on repository **${{ github.repository }}** branch **$BRANCH_NAME** commit **${{ github.sha }}** by **${{ github.actor }}**" + fi + FILE_TO_SEND="tests/report.mp4" + # If it doesn't exist, send the PDF + if [ ! -f $FILE_TO_SEND ]; then + FILE_TO_SEND="${{ env.artifact-file }}" + fi + echo "Sending Video: $FILE_TO_SEND" + curl -H "Content-Type:multipart/form-data" \ + -F "file=@$FILE_TO_SEND" \ + -F "content=$MESSAGE" \ + "${{ secrets.DISCORD_WEBHOOK }}" + + - uses: actions/upload-artifact@v4.4.3 + if: always() + with: + name: ${{ inputs.report-name }} + path: ${{ env.artifact-file }} + + - name: Exit with test status + if: env.strict_status != '0' + run: exit 1 \ No newline at end of file diff --git a/agixt/TaskMonitor.py b/agixt/TaskMonitor.py index 1adf05cc9928..50bc71d4af75 100644 --- a/agixt/TaskMonitor.py +++ b/agixt/TaskMonitor.py @@ -1,4 +1,4 @@ -import asyncio +import time import logging from DB import get_session, TaskItem, User from Globals import getenv @@ -37,9 +37,8 @@ def impersonate_user(user_id: str): class TaskMonitor: def __init__(self): self.running = False - self.tasks = [] - async def get_all_pending_tasks(self) -> list: + def get_all_pending_tasks(self) -> list: """Get all pending tasks for all users""" session = get_session() now = datetime.now() @@ -57,13 +56,13 @@ async def get_all_pending_tasks(self) -> list: finally: session.close() - async def process_tasks(self): + def process_tasks(self): """Process all pending tasks across users""" while self.running: try: session = get_session() try: - pending_tasks = await self.get_all_pending_tasks() + pending_tasks = self.get_all_pending_tasks() for pending_task in pending_tasks: # Create task manager with impersonated user context logging.info( @@ -90,7 +89,7 @@ async def process_tasks(self): ) try: # Execute single task - await task_manager.execute_pending_tasks() + task_manager.execute_pending_tasks() except Exception as e: logger.error( f"Error processing task {pending_task.id}: {str(e)}" @@ -102,25 +101,18 @@ async def process_tasks(self): session.close() # Wait before next check - await asyncio.sleep(60) + time.sleep(60) except Exception as e: logger.error(f"Error in task processing loop: {str(e)}") - await asyncio.sleep(60) + time.sleep(60) - async def start(self): + def start(self): """Start the task monitoring service""" self.running = True logger.info("Starting task monitor service...") - task = asyncio.create_task(self.process_tasks()) - 
self.tasks.append(task) + self.process_tasks() - async def stop(self): + def stop(self): """Stop the task monitoring service""" self.running = False - await asyncio.gather(*self.tasks) logger.info("Task monitor service stopped.") - - -if __name__ == "__main__": - monitor = TaskMonitor() - asyncio.run(monitor.start()) diff --git a/agixt/app.py b/agixt/app.py index e919a84b703d..156d550204f4 100644 --- a/agixt/app.py +++ b/agixt/app.py @@ -40,7 +40,7 @@ @asynccontextmanager async def lifespan(app: FastAPI): workspace_manager.start_file_watcher() - await task_monitor.start() + task_monitor.start() NGROK_TOKEN = getenv("NGROK_TOKEN") if NGROK_TOKEN: from pyngrok import ngrok @@ -61,7 +61,7 @@ async def lifespan(app: FastAPI): finally: # Shutdown workspace_manager.stop_file_watcher() - await task_monitor.stop() + task_monitor.stop() if NGROK_TOKEN: try: ngrok.kill()
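
The TaskMonitor change above makes `start()` run `process_tasks()` synchronously: it enters the `while self.running` loop and only returns once `running` is cleared, sleeping 60 seconds between polls. Called directly from the FastAPI lifespan (as in the `agixt/app.py` hunk), that loop runs on the caller's thread, so a caller that needs startup to continue would typically hand it to a background thread. Below is a minimal sketch of that pattern, assuming the `TaskMonitor` class from this diff; the `threading` wrapper, import path, and app wiring are illustrative, not part of the PR.

```python
# Hypothetical wiring, not part of this PR: run the now-synchronous
# TaskMonitor on a daemon thread so the ASGI lifespan is not blocked.
import threading
from contextlib import asynccontextmanager

from fastapi import FastAPI
from TaskMonitor import TaskMonitor  # import path assumed from agixt/TaskMonitor.py

task_monitor = TaskMonitor()


@asynccontextmanager
async def lifespan(app: FastAPI):
    # process_tasks() loops with time.sleep(60), so keep it off the event
    # loop thread; a daemon thread will not prevent process shutdown.
    worker = threading.Thread(target=task_monitor.start, daemon=True)
    worker.start()
    try:
        yield
    finally:
        # stop() only clears the running flag; the loop exits after its
        # current iteration (or the daemon thread dies with the process).
        task_monitor.stop()


app = FastAPI(lifespan=lifespan)
```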