Skip to content

Commit

Permalink
Merge pull request #20 from biosimulators/major-bigraph-refactor
Browse files Browse the repository at this point in the history
major refactor for process-bigraph content
  • Loading branch information
AlexPatrie authored Nov 7, 2024
2 parents 4075fc7 + f2f42a4 commit 20487c3
Show file tree
Hide file tree
Showing 215 changed files with 2,127 additions and 1,266 deletions.
4 changes: 3 additions & 1 deletion .dockerignore
Original file line number Diff line number Diff line change
Expand Up @@ -42,4 +42,6 @@ __pycache__/
api/__pycache__
worker/__pycache__
test.ipynb
worker/.composition
worker/.composition
worker/.worker_STABLE
*.egg-info
11 changes: 5 additions & 6 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -21,10 +21,9 @@ jobs:
- name: Install deps and run tests
run: |
pip install --upgrade pip
pip install poetry
poetry env use 3.10
poetry install --only=github
poetry run pytest
pip install -r assets/pipeline/config/requirements.github.ci.txt
cd worker
python -B test_worker.py
build-base-image:
runs-on: ubuntu-latest
Expand Down Expand Up @@ -58,15 +57,15 @@ jobs:
- name: Extract API container version
id: extract_api_version
run: |
API_VERSION=$(python3 .github/parse_container_version.py api)
API_VERSION=$(python -B .github/parse_container_version.py api)
echo "API_VERSION=$API_VERSION" >> $GITHUB_ENV
echo "API Version: $API_VERSION"
# Step 6: Extract worker container version
- name: Extract worker container version
id: extract_worker_version
run: |
WORKER_VERSION=$(python3 .github/parse_container_version.py worker)
WORKER_VERSION=$(python -B .github/parse_container_version.py worker)
echo "WORKER_VERSION=$WORKER_VERSION" >> $GITHUB_ENV
echo "WORKER Version: $WORKER_VERSION"
Expand Down
12 changes: 5 additions & 7 deletions .github/workflows/deploy-microservices.yml
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
name: Deploy Microservices

# on:
# workflow_dispatch:
on:
workflow_run:
workflows: ["Deploy Base Image"]
Expand All @@ -20,15 +18,15 @@ jobs:
- name: Extract API container version
id: extract_api_version
run: |
API_VERSION=$(python3 .github/parse_container_version.py api)
API_VERSION=$(python -B .github/parse_container_version.py api)
echo "API_VERSION=$API_VERSION" >> $GITHUB_ENV
echo "API Version: $API_VERSION"
# Step 6: Extract worker container version
- name: Extract worker container version
id: extract_worker_version
run: |
WORKER_VERSION=$(python3 .github/parse_container_version.py worker)
WORKER_VERSION=$(python -B .github/parse_container_version.py worker)
echo "WORKER_VERSION=$WORKER_VERSION" >> $GITHUB_ENV
echo "WORKER Version: $WORKER_VERSION"
Expand Down Expand Up @@ -59,7 +57,7 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r assets/pipeline/config/requirements.github.txt
pip install -r assets/pipeline/config/requirements.github.cd.txt
- name: Install Docker Compose
run: |
Expand All @@ -76,7 +74,7 @@ jobs:
- name: Deploy API microservice container to GHCR
run: |
rm api/spec/openapi_3_1_0_generated.yaml
python3 api/openapi_spec.py
python -B api/openapi_spec.py
sudo rm -r api/__pycache__
./assets/docker/scripts/push_image.sh api ${{ env.API_VERSION }}
env:
Expand All @@ -88,7 +86,7 @@ jobs:

- name: Deploy worker microservice container to GHCR
run: |
sudo rm -r compose_api/__pycache__
sudo rm -r worker/__pycache__
./assets/docker/scripts/push_image.sh worker ${{ env.WORKER_VERSION }}
# STABLE CONTENT:
# run: |
Expand Down
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -24,3 +24,5 @@ test.ipynb
poetry.lock
*_parsetab.py
condaenv.n_dn60fa.requirements.txt
worker/.worker_STABLE
build
17 changes: 14 additions & 3 deletions Dockerfile-base
Original file line number Diff line number Diff line change
Expand Up @@ -14,13 +14,17 @@ SHELL ["/usr/bin/env", "bash", "-c"]
ENV DEBIAN_FRONTEND=noninteractive \
MONGO_URI="mongodb://mongodb/?retryWrites=true&w=majority&appName=bio-check" \
POETRY_VIRTUALENVS_CREATE=false \
POETRY_NO_INTERACTION=1
POETRY_NO_INTERACTION=1 \
TEST_SBML_FP="test-fixtures/sbml-core/Elowitz-Nature-2000-Repressilator/BIOMD0000000012_url.xml" \
TEST_PSC_FP="/Pysces/psc/BIOMD0000000012_url.xml.psc" \
TEST_OMEX_FP="test-fixtures/sbml-core/Elowitz-Nature-2000-Repressilator.omex"

# copy docker
COPY assets/docker/config/.biosimulations.json /.google/.bio-check.json
COPY assets/docker/config/.pys_usercfg.ini /Pysces/.pys_usercfg.ini
COPY assets/docker/config/.pys_usercfg.ini /root/Pysces/.pys_usercfg.ini
COPY assets/docker/shared.py assets/test_fixtures /app/
COPY assets/docker/shared.py /app/shared.py
COPY assets/test-fixtures /app/test-fixtures
COPY assets/docker/config/environment.base.yml /app/environment.base.yml

# cd /app
Expand All @@ -35,9 +39,16 @@ RUN mkdir -p /Pysces \
&& echo "conda activate server" >> ~/.bashrc \
&& source ~/.bashrc \
&& conda env export --no-builds -f config/environment.base.lock.yml \
&& rm -f environment.base.yml
&& rm -f environment.base.yml \
&& conda clean --all -y


# to run with a local network:
# net=app-net
# docker network create "$net"
# docker run -d --rm --name "$lib" --net "$net" --platform linux/amd64 "$PKG_ROOT"-"$lib":latest
# docker run -it --name "$lib" --net "$net" --platform linux/amd64 "$PKG_ROOT"-"$lib"

# # create conda env from yml and ensure env activation
# RUN conda env create -f config/environment.base.yml -y \
# && rm -f config/environment.base.yml \
Expand Down
20 changes: 12 additions & 8 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@

## **For Developers:**

### This application (`bio_check`) uses a microservices architecture which presents the following libraries:
### This application ("BioCompose") uses a microservices architecture which presents the following libraries:

- `api`: This library handles all requests including saving uploaded files, pending job creation, fetching results, and contains the user-facing endpoints.
- `storage`: This library handles MongoDB configs as well as bucket-like storages for uploaded files.
Expand All @@ -28,8 +28,7 @@ the dependency network required by each simulator. See the following documentati
### Dependency management scopes are handled as follows:

#### _*Locally/Dev*_:
- Python poetry via `pyproject.toml` - the most stable/reproducible method, yet the farthest from what is actually happening in the service containers as they use conda.
- Anaconda via `environment.yml` - the closest to local development at root level which micics what actually happens in the containers (conda deps tend to break more frequently than poetry.)
- Anaconda via `environment.yml` - the closest to local development at root level which mimics what actually happens in the containers (conda deps tend to break more frequently than poetry.)

_*Remotely in microservice containers*_:
- Remote microservice container management is handled by `conda` via `environment.yml` files for the respective containers.
Expand All @@ -46,12 +45,17 @@ _*Remotely in microservice containers*_:
BUCKET_NAME=bio-check-requests-1 # name of the bucket used in this app
5. `cd ..`
6. Pull and run the latest version of Mongo from the Docker Hub. (`docker run -d -it mongo:latest` or similar.)
7. Run one of the following commands based on your preference:
7. Create a conda env from the environment file at the root of this repo:

conda env create -n bio-compose-server-dev -f environment.yml

# OR
poetry env use 3.10 && poetry install
conda env create -f environment.yml -y && conda activate bio-compose-server-dev
8. Install pysces with conda and amici with pip:

conda install -c conda-forge -c pysces pysces
conda run pip3 install biosimulators-amici # installs both biosimulators and amici
9. If using Smoldyn, there is an arm-based Mac installation script in `assets/dev/` called `install-smoldyn-mac-silicon.sh`. So run the following:

sudo chmod +x ./assets/dev/scripts/install-smoldyn-mac-silicon.sh # or whichever method you are using to install
./assets/dev/scripts/install-smoldyn-mac-silicon.sh # conda is configured to install Smoldyn into its environment


## Notes:
Expand Down
11 changes: 6 additions & 5 deletions api/Dockerfile-api
Original file line number Diff line number Diff line change
Expand Up @@ -4,20 +4,21 @@
# FROM ghcr.io/biosimulators/bio-compose-server-base:latest

# TODO: change this back to latest once stable
FROM ghcr.io/biosimulators/bio-compose-server-base:0.0.5-test
FROM ghcr.io/biosimulators/bio-compose-server-base:0.0.5

SHELL ["/usr/bin/env", "bash", "-c"]

COPY ./ .
COPY . .

EXPOSE 3001

RUN source ~/.bashrc \
&& conda env update -n server -f config/environment.api.yml \
&& conda run pip install -e . --root-user-action=ignore \
&& conda env update -n server -f environment.api.yml \
&& conda env export --no-builds > config/environment.api.lock.yml \
&& rm -f service/spec/openapi_3_1_0_generated.yaml \
&& conda run python3 openapi_spec.py \
&& conda clean --all --json -y \
&& rm -f config/environment.api.yml
&& rm -f environment.api.yml

# RUN source ~/.bashrc \
# && poetry install --without=composition,dev,worker --no-cache
Expand Down
67 changes: 0 additions & 67 deletions api/config/env.api.yml

This file was deleted.

3 changes: 1 addition & 2 deletions api/data_model.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,10 @@
# -- api models -- #
from enum import Enum
from typing import List, Optional, Any, Dict, Union

from pydantic import Field
from fastapi.responses import FileResponse

from api.shared_api import BaseModel, Job, JobStatus
from shared_api import BaseModel, Job, JobStatus


# PENDING JOBS:
Expand Down
File renamed without changes.
7 changes: 2 additions & 5 deletions api/log_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,14 +10,11 @@ def start_logging(fname: str):
)


def setup_logging():
def setup_logging(logger: logging.Logger):
# Create a root logger
root_logger = logging.getLogger()
root_logger.setLevel(logging.INFO)

# Create a uvicorn access logger
uvicorn_logger = logging.getLogger("uvicorn.access")

# Create a console handler
console_handler = logging.StreamHandler(stream=sys.stdout)
console_handler.setLevel(logging.INFO)
Expand All @@ -32,4 +29,4 @@ def setup_logging():

# Add the console handler to the root logger and uvicorn logger
root_logger.addHandler(console_handler)
uvicorn_logger.addHandler(console_handler)
logger.addHandler(console_handler)
Loading

0 comments on commit 20487c3

Please sign in to comment.