diff --git a/.github/workflows/backend_check.yml b/.github/workflows/backend_check.yml new file mode 100644 index 00000000..c44c32b3 --- /dev/null +++ b/.github/workflows/backend_check.yml @@ -0,0 +1,76 @@ +name: check backend + +on: [push, pull_request] + +jobs: + build_backend_and_check_codestyle: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: set up python 3.10 + uses: actions/setup-python@v5 + with: + python-version: '3.10' + + - name: setup poetry and install dependencies + run: | + python -m pip install --upgrade pip poetry + python -m poetry lock --no-update + python -m poetry install --with lint --no-interaction + working-directory: backend/df_designer + + - name: build wheel + run: | + python -m poetry build + working-directory: backend/df_designer + + - name: run black codestyle + run: | + python -m poetry run black --line-length=120 --check . + working-directory: backend/df_designer + + - name: run flake8 codestyle + run: | + ls + bash ./bin/run_codestyle.sh DIFF_BRANCH=${BASE_BRANCH} + env: + BASE_BRANCH: ${{github.base_ref}} + + - name: run isort codestyle + run: | + python -m poetry run isort --line-length=120 --diff . + working-directory: backend/df_designer + + test_backend: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: set up python 3.10 + uses: actions/setup-python@v5 + with: + python-version: '3.10' + + - name: setup poetry and install dependencies + run: | + python -m pip install --upgrade pip poetry + python -m poetry lock --no-update + python -m poetry install --with lint --no-interaction + working-directory: backend/df_designer + + - name: build wheel + run: | + python -m poetry build + working-directory: backend/df_designer + + - name: Create new project + run: | + python -m poetry run dflowd init --destination ../../ --no-input --overwrite-if-exists + working-directory: backend/df_designer + + - name: run tests + run: | + python -m poetry install + python -m poetry run pytest ../backend/df_designer/app/tests/ --verbose + working-directory: df_designer_project diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml deleted file mode 100644 index 2c00ffff..00000000 --- a/.github/workflows/ci.yml +++ /dev/null @@ -1,28 +0,0 @@ -name: Code checking. -on: - push: - branches: [feat/backend, dev] - pull_request: - branches: [dev] - -jobs: - check: - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ["3.10", "3.11", "3.12"] - steps: - - uses: actions/checkout@v4 - - name: tests - uses: actions/setup-python@v4 - with: - python-version: ${{matrix.python-version}} - - name: install dependencies - run: | - python -m pip install --upgrade pip - pipx install poetry - poetry install - - name: tests - run: poetry run python -m pytest - - diff --git a/.github/workflows/codestyle.yml b/.github/workflows/codestyle.yml new file mode 100644 index 00000000..21ccc243 --- /dev/null +++ b/.github/workflows/codestyle.yml @@ -0,0 +1,32 @@ +name: codestyle + +on: [push, pull_request] + +jobs: + check_backend_codestyle: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: set up python 3.10 + uses: actions/setup-python@v5 + with: + python-version: '3.10' + + - name: setup poetry and install dependencies + run: | + python -m pip install --upgrade pip poetry + python -m poetry lock --no-update + python -m poetry install --with lint --quiet --no-interaction + working-directory: backend/df_designer + + - name: run black codestyle + run: | + python -m poetry run black --line-length=120 --check . 
+ working-directory: backend/df_designer + + - name: run flake8 codestyle + run: | + bash ./bin/run_codestyle.sh DIFF_BRANCH=${BASE_BRANCH} + env: + BASE_BRANCH: ${{github.base_ref}} diff --git a/.github/workflows/docker_check.yml b/.github/workflows/docker_check.yml new file mode 100644 index 00000000..619353ec --- /dev/null +++ b/.github/workflows/docker_check.yml @@ -0,0 +1,29 @@ +name: Build Docker Images + +on: + push: + branches: + - dev + pull_request: + branches: + - dev + +jobs: + build_images: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: setup poetry and install dependencies + run: | + python -m pip install --upgrade pip poetry + python -m poetry lock --no-update + python -m poetry install --with lint --no-ansi --no-interaction + working-directory: backend/df_designer + + - name: Create new project + run: python -m poetry run dflowd init --destination ../../ --no-input --overwrite-if-exists + working-directory: backend/df_designer + + - name: Build Frontend and Backend Images + run: docker build -f Dockerfile --build-arg PROJECT_DIR=df_designer_project --target=runtime . diff --git a/.github/workflows/frontend_check.yml b/.github/workflows/frontend_check.yml new file mode 100644 index 00000000..6a28422b --- /dev/null +++ b/.github/workflows/frontend_check.yml @@ -0,0 +1,20 @@ +name: front check + +on: [push, pull_request] + +jobs: + build_and_check_frontend: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Install bun + run: npm install -g bun + + - name: Install dependencies + run: bun install + working-directory: frontend + + - name: Build frontend + run: bun run build + working-directory: frontend diff --git a/Dockerfile b/Dockerfile index 71cbee0f..f54b29f4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,25 +1,24 @@ FROM oven/bun:1 as frontend-base FROM frontend-base AS frontend-builder -WORKDIR /src -COPY ./frontend/package.json /src/frontend/package.json -COPY ./frontend/bun.lockb /src/frontend/bun.lockb +WORKDIR /temp +COPY ./frontend/package.json /temp/frontend/package.json +COPY ./frontend/bun.lockb /temp/frontend/bun.lockb -RUN cd /src/frontend && bun install --frozen-lockfile +RUN cd /temp/frontend && bun install --frozen-lockfile # Copy the rest of the application code -COPY ./frontend/ /src/frontend/ -WORKDIR /src/frontend/ +COPY ./frontend/ /temp/frontend/ +WORKDIR /temp/frontend/ RUN bun run build -RUN ls /src/frontend/dist #--------------------------------------------------------- # Use a slim variant to reduce image size where possible FROM python:3.10-slim as backend-builder -WORKDIR /src +WORKDIR /temp ARG PROJECT_DIR # ENV PROJECT_DIR ${PROJECT_DIR} @@ -35,13 +34,13 @@ RUN python3 -m venv $POETRY_VENV \ ENV PATH="${PATH}:${POETRY_VENV}/bin" -COPY ./backend/df_designer /src/backend/df_designer -COPY --from=frontend-builder /src/frontend/dist /src/backend/df_designer/app/static +COPY ./backend/df_designer /temp/backend/df_designer +COPY --from=frontend-builder /temp/frontend/dist /temp/backend/df_designer/app/static -COPY ./${PROJECT_DIR} /src/${PROJECT_DIR} +COPY ./${PROJECT_DIR} /temp/${PROJECT_DIR} # Build the wheel -WORKDIR /src/backend/df_designer +WORKDIR /temp/backend/df_designer RUN poetry build #--------------------------------------------------------- @@ -58,11 +57,11 @@ COPY --from=backend-builder /poetry-venv /poetry-venv ENV PATH="/poetry-venv/bin:$PATH" # Copy only the necessary files -COPY --from=backend-builder /src/backend/df_designer /df_designer -COPY ./${PROJECT_DIR} /${PROJECT_DIR} 
+COPY --from=backend-builder /temp/backend/df_designer /src2/backend/df_designer +COPY ./${PROJECT_DIR} /src2/${PROJECT_DIR} # Install the wheel -WORKDIR /${PROJECT_DIR} +WORKDIR /src2/${PROJECT_DIR} RUN poetry lock --no-update \ && poetry install diff --git a/backend/df_designer/app/api/api_v1/api.py b/backend/df_designer/app/api/api_v1/api.py index 7a75d793..d6e857e6 100644 --- a/backend/df_designer/app/api/api_v1/api.py +++ b/backend/df_designer/app/api/api_v1/api.py @@ -1,9 +1,10 @@ from fastapi import APIRouter -from app.api.api_v1.endpoints import bot, flows +from app.api.api_v1.endpoints import bot, dff_services, flows from app.core.config import settings api_router = APIRouter() api_router.include_router(flows.router, prefix="/".join([settings.API_V1_STR, "flows"]), tags=["flows"]) +api_router.include_router(dff_services.router, prefix="/".join([settings.API_V1_STR, "services"]), tags=["services"]) api_router.include_router(bot.router, prefix="/".join([settings.API_V1_STR, "bot"]), tags=["bot"]) diff --git a/backend/df_designer/app/api/api_v1/endpoints/bot.py b/backend/df_designer/app/api/api_v1/endpoints/bot.py index d84cb29f..2a6bd300 100644 --- a/backend/df_designer/app/api/api_v1/endpoints/bot.py +++ b/backend/df_designer/app/api/api_v1/endpoints/bot.py @@ -1,47 +1,47 @@ import asyncio -from fastapi import APIRouter, HTTPException, Depends, WebSocket, WebSocketException, status, BackgroundTasks from typing import Optional, Union -from app.schemas.preset import Preset -from app.schemas.pagination import Pagination -from app.core.logger_config import get_logger -from app.services.process_manager import ProcessManager, BuildManager, RunManager +from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException, WebSocket, WebSocketException, status + from app.api import deps +from app.core.logger_config import get_logger +from app.schemas.pagination import Pagination +from app.schemas.preset import Preset +from app.services.process_manager import BuildManager, ProcessManager, RunManager from app.services.websocket_manager import WebSocketManager - router = APIRouter() logger = get_logger(__name__) -async def _stop_process( - id_: int, process_manager: ProcessManager, process= "run" -): - if id_ not in process_manager.processes: - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, detail="Process not found. It may have already exited." - ) +async def _stop_process(id_: int, process_manager: ProcessManager, process="run"): try: await process_manager.stop(id_) except (RuntimeError, ProcessLookupError) as e: - raise HTTPException(status_code=404, detail="Process not found. It may have already exited or not started yet. Please check logs.") from e + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Process not found. It may have already exited or not started yet. Please check logs.", + ) from e logger.info("%s process '%s' has stopped", process.capitalize(), id_) return {"status": "ok"} -def _check_process_status(id_: int, process_manager: ProcessManager) -> dict[str, str]: +async def _check_process_status(id_: int, process_manager: ProcessManager) -> dict[str, str]: if id_ not in process_manager.processes: raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, detail="Process not found. It may have already exited.", + status_code=status.HTTP_404_NOT_FOUND, + detail="Process not found. 
It may have already exited.", ) - process_status = process_manager.get_status(id_) + process_status = await process_manager.get_status(id_) return {"status": process_status} @router.post("/build/start", status_code=201) -async def start_build(preset: Preset, background_tasks: BackgroundTasks, build_manager: BuildManager = Depends(deps.get_build_manager)): +async def start_build( + preset: Preset, background_tasks: BackgroundTasks, build_manager: BuildManager = Depends(deps.get_build_manager) +): await asyncio.sleep(preset.wait_time) await build_manager.start(preset) build_id = build_manager.get_last_id() @@ -57,32 +57,37 @@ async def stop_build(*, build_id: int, build_manager: BuildManager = Depends(dep @router.get("/build/status/{build_id}", status_code=200) async def check_build_status(*, build_id: int, build_manager: BuildManager = Depends(deps.get_build_manager)): - return _check_process_status(build_id, build_manager) + return await _check_process_status(build_id, build_manager) @router.get("/builds", response_model=Optional[Union[list, dict]], status_code=200) async def check_build_processes( build_id: Optional[int] = None, build_manager: BuildManager = Depends(deps.get_build_manager), - pagination: Pagination = Depends() + pagination: Pagination = Depends(), ): if build_id is not None: return await build_manager.get_build_info(build_id) else: return await build_manager.get_full_info(offset=pagination.offset(), limit=pagination.limit) + @router.get("/builds/logs/{build_id}", response_model=Optional[list], status_code=200) async def get_build_logs( - build_id: int, - build_manager: BuildManager = Depends(deps.get_build_manager), - pagination: Pagination = Depends() + build_id: int, build_manager: BuildManager = Depends(deps.get_build_manager), pagination: Pagination = Depends() ): if build_id is not None: return await build_manager.fetch_build_logs(build_id, pagination.offset(), pagination.limit) @router.post("/run/start/{build_id}", status_code=201) -async def start_run(*, build_id: int, preset: Preset, background_tasks: BackgroundTasks, run_manager: RunManager = Depends(deps.get_run_manager)): +async def start_run( + *, + build_id: int, + preset: Preset, + background_tasks: BackgroundTasks, + run_manager: RunManager = Depends(deps.get_run_manager) +): await asyncio.sleep(preset.wait_time) await run_manager.start(build_id, preset) run_id = run_manager.get_last_id() @@ -98,14 +103,14 @@ async def stop_run(*, run_id: int, run_manager: RunManager = Depends(deps.get_ru @router.get("/run/status/{run_id}", status_code=200) async def check_run_status(*, run_id: int, run_manager: RunManager = Depends(deps.get_run_manager)): - return _check_process_status(run_id, run_manager) + return await _check_process_status(run_id, run_manager) @router.get("/runs", response_model=Optional[Union[list, dict]], status_code=200) async def check_run_processes( run_id: Optional[int] = None, run_manager: RunManager = Depends(deps.get_run_manager), - pagination: Pagination = Depends() + pagination: Pagination = Depends(), ): if run_id is not None: return await run_manager.get_run_info(run_id) @@ -115,9 +120,7 @@ async def check_run_processes( @router.get("/runs/logs/{run_id}", response_model=Optional[list], status_code=200) async def get_run_logs( - run_id: int, - run_manager: RunManager = Depends(deps.get_run_manager), - pagination: Pagination = Depends() + run_id: int, run_manager: RunManager = Depends(deps.get_run_manager), pagination: Pagination = Depends() ): if run_id is not None: return await 
run_manager.fetch_run_logs(run_id, pagination.offset(), pagination.limit) @@ -147,12 +150,17 @@ async def connect( logger.error("process with run_id '%s' exited or never existed", run_id) raise WebSocketException(code=status.WS_1014_BAD_GATEWAY) - await websocket_manager.connect(websocket) logger.info("Websocket for run process '%s' has been opened", run_id) - output_task = asyncio.create_task(websocket_manager.send_process_output_to_websocket(run_id, run_manager, websocket)) - input_task = asyncio.create_task(websocket_manager.forward_websocket_messages_to_process(run_id, run_manager, websocket)) + await websocket.send_text("Start chatting") + + output_task = asyncio.create_task( + websocket_manager.send_process_output_to_websocket(run_id, run_manager, websocket) + ) + input_task = asyncio.create_task( + websocket_manager.forward_websocket_messages_to_process(run_id, run_manager, websocket) + ) # Wait for either task to finish _, websocket_manager.pending_tasks[websocket] = await asyncio.wait( diff --git a/backend/df_designer/app/api/api_v1/endpoints/dff_services.py b/backend/df_designer/app/api/api_v1/endpoints/dff_services.py new file mode 100644 index 00000000..904a4201 --- /dev/null +++ b/backend/df_designer/app/api/api_v1/endpoints/dff_services.py @@ -0,0 +1,21 @@ +from fastapi import APIRouter, Depends + +from app.api.deps import get_index +from app.core.logger_config import get_logger +from app.services.index import Index + +router = APIRouter() + +logger = get_logger(__name__) + + +@router.get("/search/{service_name}", status_code=200) +async def search_service(service_name: str, index: Index = Depends(get_index)): + response = await index.search_service(service_name) + return response + + +@router.get("/refresh_index", status_code=200) +async def refresh_index(index: Index = Depends(get_index)): + await index.load() + return {"status": "ok"} diff --git a/backend/df_designer/app/api/api_v1/endpoints/flows.py b/backend/df_designer/app/api/api_v1/endpoints/flows.py index 820f54e8..ef7d8654 100644 --- a/backend/df_designer/app/api/api_v1/endpoints/flows.py +++ b/backend/df_designer/app/api/api_v1/endpoints/flows.py @@ -1,9 +1,9 @@ from fastapi import APIRouter from omegaconf import OmegaConf -from app.core.logger_config import get_logger from app.core.config import settings -from app.db.base import write_conf, read_conf +from app.core.logger_config import get_logger +from app.db.base import read_conf, write_conf router = APIRouter() diff --git a/backend/df_designer/app/api/deps.py b/backend/df_designer/app/api/deps.py index e308de30..934e5cd9 100644 --- a/backend/df_designer/app/api/deps.py +++ b/backend/df_designer/app/api/deps.py @@ -1,14 +1,30 @@ +from app.services.index import Index from app.services.process_manager import BuildManager, RunManager from app.services.websocket_manager import WebSocketManager build_manager = BuildManager() + + def get_build_manager() -> BuildManager: return build_manager + run_manager = RunManager() + + def get_run_manager() -> RunManager: return run_manager + websocket_manager = WebSocketManager() + + def get_websocket_manager() -> WebSocketManager: return websocket_manager + + +index = Index() + + +def get_index() -> Index: + return index diff --git a/backend/df_designer/app/cli.py b/backend/df_designer/app/cli.py index 1a947a26..5f609d5d 100644 --- a/backend/df_designer/app/cli.py +++ b/backend/df_designer/app/cli.py @@ -1,12 +1,12 @@ import asyncio -from cookiecutter.main import cookiecutter import json import os -from pathlib import Path 
-import subprocess import sys +from pathlib import Path + import typer import uvicorn +from cookiecutter.main import cookiecutter from app.core.config import settings from app.core.logger_config import get_logger @@ -15,14 +15,19 @@ cli = typer.Typer() -def _execute_command(command_to_run): +async def _execute_command(command_to_run): logger = get_logger(__name__) try: - process = subprocess.run(command_to_run.split(),check=False) + process = await asyncio.create_subprocess_exec(*command_to_run.split()) # Check the return code to determine success if process.returncode == 0: logger.info("Command '%s' executed successfully.", command_to_run) + elif process.returncode is None: + logger.info("Process by command '%s' is running.", command_to_run) + await process.wait() + logger.info("Process ended with return code: %d.", process.returncode) + sys.exit(process.returncode) else: logger.error("Command '%s' failed with return code: %d", command_to_run, process.returncode) sys.exit(process.returncode) @@ -41,44 +46,44 @@ def _execute_command_file(build_id: int, project_dir: str, command_file: str, pr if preset in presets_build_file: command_to_run = presets_build_file[preset]["cmd"] if preset == "success": - command_to_run += f" {build_id}" + command_to_run += f" {build_id} --call_from_open_event_loop True" logger.debug("Executing command for preset '%s': %s", preset, command_to_run) - _execute_command(command_to_run) + asyncio.run(_execute_command(command_to_run)) else: raise ValueError(f"Invalid preset '{preset}'. Preset must be one of {list(presets_build_file.keys())}") @cli.command("build_bot") -def build_bot( - build_id: int, - project_dir: str = settings.work_directory, - preset: str = "success" -): +def build_bot(build_id: int, project_dir: str = settings.work_directory, preset: str = "success"): _execute_command_file(build_id, project_dir, "build.json", preset) @cli.command("build_scenario") -def build_scenario(build_id: int, project_dir: str = "."): - asyncio.run(translator(build_id=build_id, project_dir=project_dir)) +def build_scenario(build_id: int, project_dir: str = ".", call_from_open_event_loop: bool = False): + if call_from_open_event_loop: + loop = asyncio.get_event_loop() + task = loop.create_task(translator(build_id=build_id, project_dir=project_dir)) + loop.run_until_complete(asyncio.wait([task], return_when=asyncio.FIRST_COMPLETED)) + else: + asyncio.run(translator(build_id=build_id, project_dir=project_dir)) + @cli.command("run_bot") -def run_bot( - build_id: int, - project_dir: str = settings.work_directory, - preset: str = "success" -): +def run_bot(build_id: int, project_dir: str = settings.work_directory, preset: str = "success"): _execute_command_file(build_id, project_dir, "run.json", preset) @cli.command("run_scenario") -def run_scenario( - build_id: int, - project_dir: str = "."
-): +def run_scenario(build_id: int, project_dir: str = ".", call_from_open_event_loop: bool = False): script_path = Path(project_dir) / "bot" / "scripts" / f"build_{build_id}.yaml" command_to_run = f"poetry run python {project_dir}/app.py --script-path {script_path}" - _execute_command(command_to_run) + if call_from_open_event_loop: + loop = asyncio.get_event_loop() + task = loop.create_task(_execute_command(command_to_run)) + loop.run_until_complete(asyncio.wait([task], return_when=asyncio.FIRST_COMPLETED)) + else: + asyncio.run(_execute_command(command_to_run)) async def _run_server() -> None: @@ -103,17 +108,21 @@ def run_backend( settings.host, settings.backend_port, reload=settings.conf_reload, - reload_dirs=str(settings.work_directory) + reload_dirs=str(settings.work_directory), ) settings.server = uvicorn.Server(settings.uvicorn_config) settings.server.run() @cli.command("init") -def init(destination: str = settings.work_directory): +def init(destination: str = settings.work_directory, no_input: bool = False, overwrite_if_exists: bool = True): original_dir = os.getcwd() try: os.chdir(destination) - cookiecutter("https://github.com/Ramimashkouk/df_d_template.git") + cookiecutter( + "https://github.com/Ramimashkouk/df_d_template.git", + no_input=no_input, + overwrite_if_exists=overwrite_if_exists, + ) finally: os.chdir(original_dir) diff --git a/backend/df_designer/app/core/config.py b/backend/df_designer/app/core/config.py index 8227ded3..2163b84e 100644 --- a/backend/df_designer/app/core/config.py +++ b/backend/df_designer/app/core/config.py @@ -1,7 +1,8 @@ -from pydantic_settings import BaseSettings from pathlib import Path -from omegaconf import OmegaConf + import uvicorn +from pydantic_settings import BaseSettings + class Settings(BaseSettings): API_V1_STR: str = "/api/v1" @@ -17,12 +18,13 @@ class Settings(BaseSettings): backend_port: int = 8000 ui_port: int = 3000 log_level: str = "debug" - conf_reload: bool = True # Enable auto-reload for development mode + conf_reload: bool = True # Enable auto-reload for development mode builds_path: Path = Path(f"{work_directory}/df_designer/builds.yaml") runs_path: Path = Path(f"{work_directory}/df_designer/runs.yaml") dir_logs: Path = Path(f"{work_directory}/df_designer/logs") - frontend_flows_path : Path = Path(f"{work_directory}/df_designer/frontend_flows.yaml") + frontend_flows_path: Path = Path(f"{work_directory}/df_designer/frontend_flows.yaml") + index_path: Path = Path(f"{work_directory}/bot/custom/.services_index.yaml") uvicorn_config: uvicorn.Config = uvicorn.Config( APP, host, backend_port, log_level=log_level, reload=conf_reload, reload_dirs=[work_directory, str(package_dir)] diff --git a/backend/df_designer/app/core/logger_config.py b/backend/df_designer/app/core/logger_config.py index 01c064cc..f244459d 100644 --- a/backend/df_designer/app/core/logger_config.py +++ b/backend/df_designer/app/core/logger_config.py @@ -1,8 +1,8 @@ -from datetime import datetime import logging +import os +from datetime import datetime from pathlib import Path from typing import Optional -import os from app.core.config import settings @@ -14,9 +14,10 @@ "debug": logging.DEBUG, } -def setup_logging(log_type: str, log_name: str) -> Path: #TODO: rename: setup_detailed_logging + +def setup_logging(log_type: str, log_name: str) -> Path: # TODO: rename: setup_detailed_logging # Ensure log_type is either 'builds' or 'runs' - if log_type not in ['builds', 'runs']: + if log_type not in ["builds", "runs"]: raise ValueError("log_type must be 'builds' or 'runs'")
today_date = datetime.now().strftime("%Y%m%d") @@ -26,15 +27,16 @@ def setup_logging(log_type: str, log_name: str) -> Path: #TODO: rename: setup_de log_file = log_directory / f"{log_name}.log" if not os.path.exists(log_file): - open(log_file, 'w', encoding="UTF-8").close() + open(log_file, "w", encoding="UTF-8").close() return log_file + def get_logger(name, file_handler_path: Optional[Path] = None): if file_handler_path is None: - os.makedirs(settings.dir_logs, exist_ok=True) - file_handler_path = settings.dir_logs/ "logs.log" - if not os.path.exists(file_handler_path): - open(file_handler_path, 'w', encoding="UTF-8").close() + file_handler_path = settings.dir_logs / "logs.log" + file_handler_path.parent.mkdir(parents=True, exist_ok=True) + if not file_handler_path.exists(): + file_handler_path.touch() logger = logging.getLogger(name) logger.propagate = False @@ -45,8 +47,8 @@ def get_logger(name, file_handler_path: Optional[Path] = None): c_handler.setLevel(LOG_LEVELS[settings.log_level]) f_handler.setLevel(LOG_LEVELS[settings.log_level]) - c_format = logging.Formatter('%(name)s - %(levelname)s - %(message)s') - f_format = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') + c_format = logging.Formatter("%(name)s - %(levelname)s - %(message)s") + f_format = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") c_handler.setFormatter(c_format) f_handler.setFormatter(f_format) diff --git a/backend/df_designer/app/db/base.py b/backend/df_designer/app/db/base.py index cd37631a..9328ce5c 100644 --- a/backend/df_designer/app/db/base.py +++ b/backend/df_designer/app/db/base.py @@ -1,25 +1,29 @@ from asyncio import Lock -import aiofiles from pathlib import Path -from omegaconf import OmegaConf from typing import Union +import aiofiles +from omegaconf import OmegaConf + file_lock = Lock() + async def read_conf(path: Path): async with file_lock: async with aiofiles.open(path, "r", encoding="UTF-8") as file: data = await file.read() - omega_data = OmegaConf.create(data) # read from a YAML string + omega_data = OmegaConf.create(data) # read from a YAML string return omega_data + async def write_conf(data: Union[list, dict], path: Path): yaml_conf = OmegaConf.to_yaml(data) async with file_lock: - async with aiofiles.open(path, "w", encoding="UTF-8") as file: #TODO: change to "a" for append + async with aiofiles.open(path, "w", encoding="UTF-8") as file: # TODO: change to "a" for append await file.write(yaml_conf) + async def read_logs(log_file: Path): async with aiofiles.open(log_file, "r", encoding="UTF-8") as file: - logs = [line async for line in file if line.strip()] - return logs \ No newline at end of file + logs = [line async for line in file] + return logs diff --git a/backend/df_designer/app/main.py b/backend/df_designer/app/main.py index ea92f2be..d05b93b0 100644 --- a/backend/df_designer/app/main.py +++ b/backend/df_designer/app/main.py @@ -1,36 +1,52 @@ -from fastapi import FastAPI, APIRouter, Response +from contextlib import asynccontextmanager + +from fastapi import APIRouter, FastAPI, Response from fastapi.middleware.cors import CORSMiddleware -from fastapi.responses import HTMLResponse, FileResponse, RedirectResponse +from fastapi.responses import FileResponse, HTMLResponse, RedirectResponse from app.api.api_v1.api import api_router +from app.api.deps import get_index from app.core.config import settings -app = FastAPI(title="DF Designer") +index_dict = {} + + +@asynccontextmanager +async def lifespan(app: FastAPI): + index_dict["instance"] = 
get_index() + await index_dict["instance"].load() + yield + # Clean up and release the resources + + +app = FastAPI(title="DF Designer", lifespan=lifespan) app.add_middleware( - CORSMiddleware, - allow_origins=["*"], - allow_credentials=True, - allow_methods=["*"], - allow_headers=["*"], + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], ) root_router = APIRouter() + @root_router.get("/app/{path:path}") async def route_static_file(path: str): - if not settings.start_page.exists(): - return HTMLResponse(content="frontend is not built") - file_path = settings.static_files / path.split("/")[-1] - if file_path.suffix in (".js", ".css", ".html"): - return FileResponse(file_path) - return FileResponse(settings.static_files / "index.html") + if not settings.start_page.exists(): + return HTMLResponse(content="frontend is not built") + file_path = settings.static_files / path.split("/")[-1] + if file_path.suffix in (".js", ".css", ".html"): + return FileResponse(file_path) + return FileResponse(settings.static_files / "index.html") @root_router.get("/") async def root() -> Response: - """Redirect '/' to index.html""" - return RedirectResponse(url="/app") + """Redirect '/' to index.html""" + return RedirectResponse(url="/app") + app.include_router(root_router) app.include_router(api_router) diff --git a/backend/df_designer/app/schemas/pagination.py b/backend/df_designer/app/schemas/pagination.py index e31bc241..bb089274 100644 --- a/backend/df_designer/app/schemas/pagination.py +++ b/backend/df_designer/app/schemas/pagination.py @@ -1,5 +1,6 @@ -from pydantic import BaseModel from fastapi import Query +from pydantic import BaseModel + class Pagination(BaseModel): page: int = Query(1, gt=0) diff --git a/backend/df_designer/app/schemas/preset.py b/backend/df_designer/app/schemas/preset.py index 2f6b9cae..0efb5b66 100644 --- a/backend/df_designer/app/schemas/preset.py +++ b/backend/df_designer/app/schemas/preset.py @@ -1,6 +1,8 @@ -from pydantic import BaseModel from typing import Literal +from pydantic import BaseModel + + class Preset(BaseModel): wait_time: float end_status: Literal["success", "failure", "loop"] diff --git a/backend/df_designer/app/services/index.py b/backend/df_designer/app/services/index.py new file mode 100644 index 00000000..defa2fa5 --- /dev/null +++ b/backend/df_designer/app/services/index.py @@ -0,0 +1,100 @@ +import asyncio +from typing import List + +from omegaconf import OmegaConf + +from app.core.config import settings +from app.core.logger_config import get_logger +from app.db.base import read_conf, read_logs, write_conf + + +class Index: + def __init__(self): + self.path = settings.index_path + self.index = {} + self.conditions = [] + self.responses = [] + self.services = [] + self.logger = get_logger(__name__) + + if not self.path.exists(): + self.path.parent.mkdir(parents=True, exist_ok=True) + self.path.touch() + + async def _load_index(self): + db_index = await read_conf(self.path) + index_dict = OmegaConf.to_container(db_index, resolve=True) + self.index = index_dict + self.logger.debug("Index loaded") + + async def _load_conditions(self): + if (path := self.path.parent / "conditions.py").exists(): + self.conditions = await read_logs(path) + self.logger.debug("Conditions loaded") + else: + self.logger.warning("No conditions file found") + + async def _load_responses(self): + if (path := self.path.parent / "responses.py").exists(): + self.responses = await read_logs(path) + self.logger.debug("Responses 
loaded") + else: + self.logger.warning("No responses file found") + + async def _load_services(self): + if (path := self.path.parent / "services.py").exists(): + self.services = await read_logs(path) + self.logger.debug("Services loaded") + else: + self.logger.warning("No services file found") + + def _get_service(self, services_lst: list, lineno: int): + service = [] + func_lines = services_lst[lineno - 1 :] + self.logger.debug("services_lst: %s", services_lst) + for func_lineno, func_line in enumerate(func_lines): + if func_line[:4] == "def " and func_lineno != 0: + break + service.append(func_line) # ?maybe with \n + return service + + async def load(self): + """load index and services into memory""" + await asyncio.gather( + self._load_index(), + self._load_conditions(), + self._load_responses(), + self._load_services(), + ) + self.logger.info("Index and services loaded") + self.logger.debug("Loaded index: %s", self.index) + + def get_services(self): + return self.index + + async def search_service(self, service_name): + if service_name not in self.index: + return [] + type_ = self.index[service_name]["type"] + lineno = int(self.index[service_name]["lineno"]) + + if type_ == "condition": + return self._get_service(self.conditions, lineno) + elif type_ == "response": + return self._get_service(self.responses, lineno) + elif type_ == "service": + return self._get_service(self.services, lineno) + + async def indexit(self, service_name: str, type_, lineno): + self.logger.debug("Indexing '%s'", service_name) + await self.indexit_all([service_name], [type_], [lineno]) + self.logger.info("Indexed '%s'", service_name) + + async def indexit_all(self, services_names: List[str], types: List[str], linenos: List[int]): + for service_name, type_, lineno in zip(services_names, types, linenos): + self.index[service_name] = { + "type": type_, # condition/response/service + "lineno": lineno, + } + + await write_conf(self.index, self.path) # ?to background tasks diff --git a/backend/df_designer/app/services/json_translator.py b/backend/df_designer/app/services/json_translator.py index 4c71b0ca..131b23c5 100644 --- a/backend/df_designer/app/services/json_translator.py +++ b/backend/df_designer/app/services/json_translator.py @@ -1,24 +1,30 @@ from pathlib import Path +from typing import Tuple -from app.db.base import read_conf, write_conf +from app.api.deps import get_index from app.core.logger_config import get_logger +from app.db.base import read_conf, write_conf logger = get_logger(__name__) -async def translator(build_id: int, project_dir: str): - frontend_graph_path = Path(project_dir) / "df_designer" / "frontend_flows.yaml" - script_file = Path(project_dir) / "bot" / "scripts" / f"build_{build_id}.yaml" - custom_dir = "custom" - custom_dir_path = "bot" / Path(custom_dir) - custom_dir_path.mkdir(exist_ok=True, parents=True) - custom_conditions_file = custom_dir_path / "conditions.py" +def get_db_paths(build_id: int, project_dir: Path, custom_dir: str) -> Tuple[Path, Path, Path]: + frontend_graph_path = project_dir / "df_designer" / "frontend_flows.yaml" + custom_conditions_file = project_dir / "bot" / custom_dir / "conditions.py" + script_path = project_dir / "bot" / "scripts" / f"build_{build_id}.yaml" - script = { - "CONFIG": {"custom_dir": "/".join(["..", custom_dir])}, - } - flow_graph = await read_conf(frontend_graph_path) + if not frontend_graph_path.exists(): + raise FileNotFoundError(f"File {frontend_graph_path} doesn't exist") + if not custom_conditions_file.exists(): + raise 
FileNotFoundError(f"File {custom_conditions_file} doesn't exist") + if not script_path.exists(): + script_path.parent.mkdir(parents=True, exist_ok=True) + script_path.touch() + + return frontend_graph_path, script_path, custom_conditions_file + +def organize_graph_according_to_nodes(flow_graph, script): nodes = {} for flow in flow_graph["flows"]: for node in flow.data.nodes: @@ -27,37 +33,147 @@ async def translator(build_id: int, project_dir: str): nodes[node.id] = {"info": node} nodes[node.id]["flow"] = flow.name nodes[node.id]["TRANSITIONS"] = [] - for flow in flow_graph["flows"]: - for edge in flow.data.edges: - if edge.source in nodes and edge.target in nodes: - condition = next(condition for condition in nodes[edge.source]["info"].data.conditions if condition["id"] == edge.sourceHandle) - - custom_conditions = custom_conditions_file.read_text() - custom_conditions_names = [fun.split("(")[0].strip() for fun in custom_conditions.split("def ")[1:]] - if condition.name not in custom_conditions_names: - with open(custom_conditions_file, "a", encoding="UTF-8") as f: - f.write(condition.data.action + "\n") - logger.debug("Writing to %s: %s", custom_conditions_file, condition.name) - - nodes[edge.source]["TRANSITIONS"].append( - { - "lbl": [ - nodes[edge.target]['flow'], - nodes[edge.target]['info'].data.name, - condition.data.priority - ], - "cnd": f"custom_dir.conditions.{condition.name}" - } - ) + return nodes + + +def get_condition(nodes, edge): + try: + return next( + condition + for condition in nodes[edge.source]["info"].data.conditions + if condition["id"] == edge.sourceHandle + ) + except StopIteration: + return None + +def write_conditions_to_file(conditions_lines, custom_conditions_file): + # TODO: make reading and writing conditions async + with open(custom_conditions_file, "w", encoding="UTF-8") as file: + for line in conditions_lines: + file.write(f"{line}\n") + + +def add_transitions(nodes, edge, condition): + nodes[edge.source]["TRANSITIONS"].append( + { + "lbl": [ + nodes[edge.target]["flow"], + nodes[edge.target]["info"].data.name, + condition.data.priority, + ], + "cnd": f"custom_dir.conditions.{condition.name}", + } + ) + + +def fill_nodes_into_script(nodes, script): for _, node in nodes.items(): if node["flow"] not in script: script[node["flow"]] = {} - script[node["flow"]].update({ - node["info"].data.name: { - "RESPONSE": {"dff.Message": {"text": node["info"].data.response}}, - "TRANSITIONS": node["TRANSITIONS"], - }, - }) - - await write_conf(script, script_file) + script[node["flow"]].update( + { + node["info"].data.name: { + "RESPONSE": {"dff.Message": {"text": node["info"].data.response}}, + "TRANSITIONS": node["TRANSITIONS"], + } + } + ) + + +def append_condition(condition, conditions_lines): + condition = "".join([condition.data.python.action + "\n\n"]) + + all_lines = conditions_lines + condition.split("\n") + return all_lines + + +async def _shift_cnds_in_index(index, cnd_strt_lineno, diff_in_lines): + services = index.get_services() + for _, service in services.items(): + if service["type"] == "condition": + if service["lineno"] - 1 > cnd_strt_lineno: # -1 is here to convert from file numeration to list numeration + service["lineno"] += diff_in_lines + + await index.indexit_all( + [service_name for service_name, _ in services.items()], + [service["type"] for _, service in services.items()], + [service["lineno"] for _, service in services.items()], + ) + + +async def replace_condition(condition, conditions_lines, cnd_strt_lineno, index): + cnd_strt_lineno = 
cnd_strt_lineno - 1 # conversion from file numeration to list numeration + all_lines = conditions_lines.copy() + condition = "".join([condition.data.python.action + "\n\n"]) + new_cnd_lines = condition.split("\n") + + old_cnd_lines_num = 0 + for lineno, line in enumerate(all_lines[cnd_strt_lineno:]): + if line[:4] == "def " and lineno != 0: + break + old_cnd_lines_num += 1 + + next_func_location = cnd_strt_lineno + old_cnd_lines_num + + logger.debug("new_cnd_lines\n") + logger.debug(new_cnd_lines) + all_lines = all_lines[:cnd_strt_lineno] + new_cnd_lines + all_lines[next_func_location:] + + diff_in_lines = len(new_cnd_lines) - old_cnd_lines_num + logger.debug("diff_in_lines: %s", diff_in_lines) + logger.debug("cnd_strt_lineno: %s", cnd_strt_lineno) + + await _shift_cnds_in_index(index, cnd_strt_lineno, diff_in_lines) + return all_lines + + +async def translator(build_id: int, project_dir: str, custom_dir: str = "custom"): + index = get_index() + await index.load() + index.logger.debug("Loaded index '%s'", index.index) + + frontend_graph_path, script_path, custom_conditions_file = get_db_paths(build_id, Path(project_dir), custom_dir) + + script = { + "CONFIG": {"custom_dir": "/".join(["..", custom_dir])}, + } + flow_graph = await read_conf(frontend_graph_path) + + nodes = organize_graph_according_to_nodes(flow_graph, script) + + with open(custom_conditions_file, "r", encoding="UTF-8") as file: + conditions_lines = file.readlines() + + for flow in flow_graph["flows"]: + for edge in flow.data.edges: + if edge.source in nodes and edge.target in nodes: + condition = get_condition(nodes, edge) + if condition is None: + logger.error( + "A condition of edge '%s' - '%s' and id of '%s' is not found in the corresponding node", + edge.source, + edge.target, + edge.sourceHandle, + ) + continue + + if condition.name not in (cnd_names := index.index): + logger.debug("Adding condition: %s", condition.name) + cnd_lineno = len(conditions_lines) + conditions_lines = append_condition(condition, conditions_lines) + await index.indexit(condition.name, "condition", cnd_lineno + 1) + else: + logger.debug("Replacing condition: %s", condition.name) + conditions_lines = await replace_condition( + condition, conditions_lines, cnd_names[condition.name]["lineno"], index + ) + + add_transitions(nodes, edge, condition) + else: + logger.error("A node of edge '%s-%s' is not found in nodes", edge.source, edge.target) + + fill_nodes_into_script(nodes, script) + + write_conditions_to_file(conditions_lines, custom_conditions_file) + await write_conf(script, script_path) diff --git a/backend/df_designer/app/services/process.py b/backend/df_designer/app/services/process.py index 3b84fd8d..83bb88e4 100644 --- a/backend/df_designer/app/services/process.py +++ b/backend/df_designer/app/services/process.py @@ -1,14 +1,12 @@ -import aiofiles import asyncio -from datetime import datetime import logging +from datetime import datetime from pathlib import Path from typing import List -from omegaconf import OmegaConf -from app.core.logger_config import get_logger, setup_logging from app.core.config import settings -from app.db.base import write_conf, read_conf +from app.core.logger_config import get_logger, setup_logging +from app.db.base import read_conf, write_conf def _map_to_str(params: dict): @@ -20,29 +18,27 @@ def _map_to_str(params: dict): class Process: - def __init__(self, id_: int, preset_end_status = ""): + def __init__(self, id_: int, preset_end_status=""): self.id: int = id_ self.preset_end_status: str = preset_end_status 
self.status: str = "null" self.timestamp: datetime = datetime.now() self.log_path: Path - self.process: asyncio.subprocess.Process # pylint: disable=no-member #TODO: is naming ok? + self.lock = asyncio.Lock() + self.process: asyncio.subprocess.Process # pylint: disable=no-member #TODO: is naming ok? self.logger: logging.Logger async def start(self, cmd_to_run): - async with aiofiles.open(self.log_path, "a", encoding="UTF-8") as file: #TODO: log to files - self.process = await asyncio.create_subprocess_exec( - *cmd_to_run.split(), - stdout=asyncio.subprocess.PIPE, - stderr=asyncio.subprocess.PIPE, - stdin=asyncio.subprocess.PIPE, - ) + self.process = await asyncio.create_subprocess_exec( + *cmd_to_run.split(), + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + stdin=asyncio.subprocess.PIPE, + ) - def get_full_info(self) -> dict: - self.check_status() - return { - key: getattr(self, key) for key in self.__dict__ if key not in ["process", "logger"] - } + async def get_full_info(self) -> dict: + await self.check_status() + return {key: getattr(self, key) for key in self.__dict__ if key not in ["lock", "process", "logger"]} def set_full_info(self, params_dict): for key, value in params_dict.items(): @@ -53,17 +49,18 @@ async def update_db_info(self): async def periodically_check_status(self): while True: - await self.update_db_info() # check status and update db - self.logger.info("Status of process '%s': %s",self.id, self.status) + await self.update_db_info() # check status and update db + self.logger.info("Status of process '%s': %s", self.id, self.status) if self.status in ["stopped", "completed", "failed"]: break - await asyncio.sleep(2) #TODO: ?sleep time shouldn't be constant + await asyncio.sleep(2) # TODO: ?sleep time shouldn't be constant - def check_status(self) -> str: - """Returns the process status [null, running, completed, failed, stopped]. + async def check_status(self) -> str: + """Returns the process status [null, alive, running, completed, failed, stopped]. - null: When a process is initiated but not started yet. This condition is unusual and typically indicates incorrect usage or a process misuse in backend logic. - - running: returncode is None + - alive: process is alive and ready to communicate + - running: process is still starting up and cannot communicate yet - completed: returncode is 0 - failed: returncode is 1 - stopped: returncode is -15 @@ -71,8 +68,16 @@ def check_status(self) -> str: """ if self.process is None: self.status = "null" + # if process is already alive, don't interrupt potential open channels by checking status periodically. elif self.process.returncode is None: - self.status = "running" + if self.status == "alive": + self.status = "alive" + else: + if await self.is_alive(): + self.status = "alive" + else: + self.status = "running" + elif self.process.returncode == 0: self.status = "completed" elif self.process.returncode == 1: @@ -80,11 +85,19 @@ elif self.process.returncode == -15: self.status = "stopped" else: - self.logger.warning( + self.logger.error( "Unexpected code was returned: '%s'. 
A non-zero return code indicates an error.", - self.process.returncode + self.process.returncode, ) - return str(self.process.returncode) + self.status = f"Exited with return code: {str(self.process.returncode)}" + + if self.status not in ["null", "running", "alive", "stopped"]: + stdout, stderr = await self.process.communicate() + if stdout: + self.logger.info(f"[stdout]\n{stdout.decode()}") + if stderr: + self.logger.error(f"[stderr]\n{stderr.decode()}") + return self.status async def stop(self): @@ -95,22 +108,39 @@ self.logger.debug("Terminating process '%s'", self.id) self.process.terminate() await self.process.wait() + self.logger.debug("Process returncode '%s'", self.process.returncode) + except ProcessLookupError as exc: self.logger.error("Process '%s' not found. It may have already exited.", self.id) raise ProcessLookupError from exc - def read_stdout(self): - if self.process is None: - self.logger.error("Cannot read stdout from a process '%s' that has not started yet.", self.id) - raise RuntimeError + async def read_stdout(self): + async with self.lock: + if self.process is None: + self.logger.error("Cannot read stdout from a process '%s' that has not started yet.", self.id) + raise RuntimeError - return self.process.stdout.readline() + return await self.process.stdout.readline() - def write_stdin(self, message): + async def write_stdin(self, message): if self.process is None: self.logger.error("Cannot write into stdin of a process '%s' that has not started yet.", self.id) raise RuntimeError self.process.stdin.write(message) + await self.process.stdin.drain() + + async def is_alive(self) -> bool: + timeout = 3 + message = b"Hi\n" + try: + # Attempt to write and read from the process with a timeout. + await self.write_stdin(message) + output = await asyncio.wait_for(self.read_stdout(), timeout=timeout) + self.logger.debug("Process output after communication: %s", output.decode()) + return True + except asyncio.exceptions.TimeoutError: + self.logger.debug("Process did not accept input within the timeout period.") + return False class RunProcess(Process): @@ -124,9 +154,10 @@ def __init__(self, id_: int, build_id: int = None, preset_end_status: str = ""): async def update_db_info(self): # save current run info into runs_path + self.logger.info("Updating db run info") runs_conf = await read_conf(settings.runs_path) - run_params = self.get_full_info() + run_params = await self.get_full_info() _map_to_str(run_params) for run in runs_conf: @@ -166,7 +197,7 @@ async def update_db_info(self): # save current build info into builds_path builds_conf = await read_conf(settings.builds_path) - build_params = self.get_full_info() + build_params = await self.get_full_info() _map_to_str(build_params) for build in builds_conf: diff --git a/backend/df_designer/app/services/process_manager.py b/backend/df_designer/app/services/process_manager.py index a6c92268..b2a3fc22 100644 --- a/backend/df_designer/app/services/process_manager.py +++ b/backend/df_designer/app/services/process_manager.py @@ -1,12 +1,13 @@ from pathlib import Path -from typing import List, Type, Optional +from typing import List + from omegaconf import OmegaConf -from app.core.logger_config import get_logger -from app.services.process import BuildProcess, RunProcess -from app.schemas.preset import Preset from app.core.config import settings +from app.core.logger_config import get_logger from app.db.base import read_conf, read_logs +from app.schemas.preset import Preset +from app.services.process import 
BuildProcess, RunProcess logger = get_logger(__name__) @@ -14,12 +15,15 @@ class ProcessManager: def __init__(self): self.processes = {} + self.last_id: int def get_last_id(self): - """Get the process_id of the last started process""" - return list(self.processes.keys())[-1] + return self.last_id async def stop(self, id_): + if id_ not in self.processes: + logger.error("Process with id '%s' not found in recent running processes", id_) + raise ProcessLookupError try: await self.processes[id_].stop() except (RuntimeError, ProcessLookupError) as exc: @@ -28,18 +32,18 @@ async def stop(self, id_): async def check_status(self, id_): await self.processes[id_].periodically_check_status() - def get_status(self, id_): - return self.processes[id_].check_status() + async def get_status(self, id_): + return await self.processes[id_].check_status() async def get_process_info(self, id_: int, path: Path): db_conf = await read_conf(path) conf_dict = OmegaConf.to_container(db_conf, resolve=True) - return next((db_process for db_process in conf_dict if db_process["id"]==id_), None) + return next((db_process for db_process in conf_dict if db_process["id"] == id_), None) async def get_full_info(self, offset: int, limit: int, path: Path) -> List[dict]: db_conf = await read_conf(path) conf_dict = OmegaConf.to_container(db_conf, resolve=True) - return conf_dict[offset:offset+limit] + return conf_dict[offset : offset + limit] async def fetch_process_logs(self, id_: int, offset: int, limit: int, path: Path): process_info = await self.get_process_info(id_, path) @@ -50,26 +54,23 @@ async def fetch_process_logs(self, id_: int, offset: int, limit: int, path: Path log_file = process_info["log_path"] try: logs = await read_logs(log_file) + logs = [log for log in logs if log.strip()] except FileNotFoundError: logger.error("Log file '%s' not found", log_file) return None if offset > len(logs): logger.info("Offset '%s' is out of bounds ('%s' logs found)", offset, len(logs)) - return None + return None # TODO: raise error! logger.info("Returning %s logs", len(logs)) - return logs[offset:offset+limit] - + return logs[offset : offset + limit] class RunManager(ProcessManager): def __init__(self): super().__init__() - def get_last_id(self): - return self.last_id - async def start(self, build_id: int, preset: Preset): cmd_to_run = f"dflowd run_bot {build_id} --preset {preset.end_status}" self.last_id = max([run["id"] for run in await self.get_full_info(0, 10000)]) @@ -77,18 +78,9 @@ async def start(self, build_id: int, preset: Preset): id_ = self.last_id process = RunProcess(id_, build_id, preset.end_status) await process.start(cmd_to_run) + process.logger.debug("Started process. 
status: '%s'", process.process.returncode) self.processes[id_] = process - async def get_min_info(self) -> List[dict]: - runs_conf = await read_conf(settings.runs_path) - minimum_params = ["id", "build_id", "preset_end_status", "status", "timestamp"] - - minimum_info = [] - for run in runs_conf: - minimum_info.append({param: getattr(run, param) for param in minimum_params}) - - return minimum_info - async def get_run_info(self, id_: int): return await super().get_process_info(id_, settings.runs_path) @@ -102,11 +94,7 @@ class BuildManager(ProcessManager): def __init__(self): super().__init__() - def get_last_id(self): - return self.last_id - async def start(self, preset: Preset): - cmd_to_run = f"dflowd build_bot --preset {preset.end_status}" self.last_id = max([build["id"] for build in await self.get_full_info(0, 10000)]) self.last_id += 1 id_ = self.last_id @@ -115,21 +103,6 @@ async def start(self, preset: Preset): await process.start(cmd_to_run) self.processes[id_] = process - async def get_min_info(self) -> List[dict]: - builds_conf = await read_conf(settings.builds_path) - minimum_params = ["id", "preset_end_status", "status", "timestamp", "runs"] - - minimum_info = [] - for build in builds_conf: - info = {} - for param in minimum_params: - if param != "runs": - info.update({param: getattr(build, param)}) - else: - info.update({"run_ids": [run.id for run in build.runs]}) - minimum_info.append(info) - return minimum_info - async def get_build_info(self, id_: int): return await super().get_process_info(id_, settings.builds_path) diff --git a/backend/df_designer/app/services/websocket_manager.py b/backend/df_designer/app/services/websocket_manager.py index 75de6327..ce75a27e 100644 --- a/backend/df_designer/app/services/websocket_manager.py +++ b/backend/df_designer/app/services/websocket_manager.py @@ -1,24 +1,26 @@ import asyncio from asyncio.tasks import Task +from typing import Dict, Set + from fastapi import WebSocket, WebSocketDisconnect -from typing import Optional, Set, Dict from app.core.logger_config import get_logger from app.services.process_manager import ProcessManager logger = get_logger(__name__) + class WebSocketManager: def __init__(self): - self.pending_tasks : Dict[WebSocket, Set[Task]] = dict() + self.pending_tasks: Dict[WebSocket, Set[Task]] = dict() self.active_connections: list[WebSocket] = [] - async def connect(self, websocket: WebSocket): await websocket.accept() self.active_connections.append(websocket) def disconnect(self, websocket: WebSocket): + # TODO: await websocket.close() if websocket in self.pending_tasks: logger.info("Cancelling pending tasks") for task in self.pending_tasks[websocket]: @@ -28,9 +30,11 @@ def disconnect(self, websocket: WebSocket): def check_status(self, websocket: WebSocket): if websocket in self.active_connections: - return websocket ## return Status! + return websocket # return Status! 
- async def send_process_output_to_websocket(self, run_id: int, process_manager: ProcessManager, websocket: WebSocket): + async def send_process_output_to_websocket( + self, run_id: int, process_manager: ProcessManager, websocket: WebSocket + ): """Read and forward process output to the websocket client.""" try: while True: @@ -43,12 +47,16 @@ async def send_process_output_to_websocket(self, run_id: int, process_manager: P except RuntimeError as exc: raise exc - async def forward_websocket_messages_to_process(self, run_id: int, process_manager: ProcessManager, websocket: WebSocket): + async def forward_websocket_messages_to_process( + self, run_id: int, process_manager: ProcessManager, websocket: WebSocket + ): """Listen for messages from the websocket and send them to the subprocess.""" try: while True: user_message = await websocket.receive_text() - process_manager.processes[run_id].write_stdin(user_message.encode() + b'\n') + if not user_message: + break + await process_manager.processes[run_id].write_stdin(user_message.encode() + b"\n") except asyncio.CancelledError: logger.info("Websocket connection is closed") except WebSocketDisconnect: diff --git a/backend/df_designer/app/tests/__init__.py b/backend/df_designer/app/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/df_designer/app/tests/api/test_bot.py b/backend/df_designer/app/tests/api/test_bot.py index e69de29b..f47605de 100644 --- a/backend/df_designer/app/tests/api/test_bot.py +++ b/backend/df_designer/app/tests/api/test_bot.py @@ -0,0 +1,171 @@ +import pytest +from fastapi import BackgroundTasks, HTTPException, WebSocket + +from app.api.api_v1.endpoints.bot import ( + _check_process_status, + _stop_process, + check_build_processes, + check_run_processes, + connect, + get_build_logs, + get_run_logs, + start_build, + start_run, +) +from app.services.process import RunProcess +from app.services.process_manager import BuildManager, RunManager +from app.services.websocket_manager import WebSocketManager + +PROCESS_ID = 0 +RUN_ID = 42 +BUILD_ID = 43 + + +@pytest.mark.parametrize("process_type, process_manager", [("build", BuildManager), ("run", RunManager)]) +@pytest.mark.asyncio +async def test_stop_process_success(mocker, process_type, process_manager): + mock_stop = mocker.AsyncMock() + mocker.patch.object(process_manager, "stop", mock_stop) + + # Call the function under test + await _stop_process(PROCESS_ID, process_manager(), process_type) + + # Assert the stop method was called once with the correct id + mock_stop.assert_awaited_once_with(PROCESS_ID) + + +# TODO: take into consideration the errors when process type is build +@pytest.mark.parametrize("error_type", [RuntimeError, ProcessLookupError]) +@pytest.mark.asyncio +async def test_stop_process_error(mocker, error_type): + mock_stop = mocker.AsyncMock(side_effect=error_type) + mocker.patch.object(RunManager, "stop", mock_stop) + + process_type = "run" + + with pytest.raises(HTTPException) as exc_info: + await _stop_process(PROCESS_ID, RunManager(), process_type) + + # Assert the stop method was called once with the correct id + assert exc_info.value.status_code == 404 + mock_stop.assert_awaited_once_with(PROCESS_ID) + + +# TODO: check the errors +@pytest.mark.asyncio +async def test_check_process_status(mocker): + mocked_process_manager = mocker.MagicMock() + mocker.patch.object(mocked_process_manager, "processes", {PROCESS_ID: mocker.MagicMock()}) + mocker.patch.object(mocked_process_manager, "get_status", 
mocker.AsyncMock(return_value="alive")) + + response = await _check_process_status(PROCESS_ID, mocked_process_manager) + + assert response == {"status": "alive"} + mocked_process_manager.get_status.assert_awaited_once_with(0) + + +@pytest.mark.asyncio +async def test_start_build(mocker): + build_manager = mocker.MagicMock() + preset = mocker.MagicMock() + + start = mocker.AsyncMock() + mocker.patch.multiple( + build_manager, start=start, get_last_id=mocker.MagicMock(return_value=BUILD_ID), check_status=mocker.AsyncMock() + ) + mocker.patch.multiple(preset, wait_time=0, end_status="loop") + + response = await start_build(preset, background_tasks=BackgroundTasks(), build_manager=build_manager) + start.assert_awaited_once_with(preset) + assert response == {"status": "ok", "build_id": BUILD_ID} + + +@pytest.mark.asyncio +async def test_check_build_processes_some_info(mocker, pagination): + build_manager = mocker.MagicMock(spec=BuildManager()) + + await check_build_processes(BUILD_ID, build_manager, pagination) + + build_manager.get_build_info.assert_awaited_once_with(BUILD_ID) + + +@pytest.mark.asyncio +async def test_check_build_processes_all_info(mocker, pagination): + build_id = None + build_manager = mocker.MagicMock(spec=BuildManager()) + + await check_build_processes(build_id, build_manager, pagination) + + build_manager.get_full_info.assert_awaited_once_with(offset=pagination.offset(), limit=pagination.limit) + + +@pytest.mark.asyncio +async def test_get_build_logs(mocker, pagination): + build_manager = mocker.MagicMock(spec=BuildManager()) + + await get_build_logs(BUILD_ID, build_manager, pagination) + + build_manager.fetch_build_logs.assert_awaited_once_with(BUILD_ID, pagination.offset(), pagination.limit) + + +@pytest.mark.asyncio +async def test_start_run(mocker): + run_manager = mocker.MagicMock() + preset = mocker.MagicMock() + + start = mocker.AsyncMock() + mocker.patch.multiple( + run_manager, start=start, get_last_id=mocker.MagicMock(return_value=RUN_ID), check_status=mocker.AsyncMock() + ) + mocker.patch.multiple(preset, wait_time=0, end_status="loop") + + response = await start_run( + build_id=BUILD_ID, preset=preset, background_tasks=BackgroundTasks(), run_manager=run_manager + ) + start.assert_awaited_once_with(BUILD_ID, preset) + assert response == {"status": "ok", "run_id": RUN_ID} + + +@pytest.mark.asyncio +async def test_check_run_processes_some_info(mocker, pagination): + run_manager = mocker.MagicMock(spec=RunManager()) + + await check_run_processes(RUN_ID, run_manager, pagination) + + run_manager.get_run_info.assert_awaited_once_with(RUN_ID) + + +@pytest.mark.asyncio +async def test_check_run_processes_all_info(mocker, pagination): + run_id = None + run_manager = mocker.MagicMock(spec=RunManager()) + + await check_run_processes(run_id, run_manager, pagination) + + run_manager.get_full_info.assert_awaited_once_with(offset=pagination.offset(), limit=pagination.limit) + + +@pytest.mark.asyncio +async def test_get_run_logs(mocker, pagination): + run_manager = mocker.MagicMock(spec=RunManager()) + + await get_run_logs(RUN_ID, run_manager, pagination) + + run_manager.fetch_run_logs.assert_awaited_once_with(RUN_ID, pagination.offset(), pagination.limit) + + +@pytest.mark.asyncio +async def test_connect(mocker): + websocket = mocker.MagicMock(spec=WebSocket) + websocket_manager = mocker.MagicMock(spec=WebSocketManager()) + run_manager = mocker.MagicMock(spec=RunManager()) + run_process = mocker.MagicMock(spec=RunProcess(RUN_ID)) + run_manager.processes = {RUN_ID: 
run_process} + mocker.patch.object(websocket, "query_params", {"run_id": str(RUN_ID)}) + + await connect(websocket, websocket_manager, run_manager) + + websocket_manager.connect.assert_awaited_once_with(websocket) + websocket_manager.send_process_output_to_websocket.assert_awaited_once_with(RUN_ID, run_manager, websocket) + websocket_manager.forward_websocket_messages_to_process.assert_awaited_once_with(RUN_ID, run_manager, websocket) + websocket_manager.disconnect.assert_called_once_with(websocket) diff --git a/backend/df_designer/app/tests/api/test_flows.py b/backend/df_designer/app/tests/api/test_flows.py new file mode 100644 index 00000000..24350633 --- /dev/null +++ b/backend/df_designer/app/tests/api/test_flows.py @@ -0,0 +1,19 @@ +# create test flows function here +import pytest +from omegaconf import OmegaConf + +from app.api.api_v1.endpoints.flows import flows_get, flows_post + + +@pytest.mark.asyncio +async def test_flows_get(mocker): + mocker.patch("app.api.api_v1.endpoints.flows.read_conf", return_value=OmegaConf.create({"foo": "bar"})) + response = await flows_get() + assert response["status"] == "ok" + + +@pytest.mark.asyncio +async def test_flows_post(mocker): + mocker.patch("app.api.api_v1.endpoints.flows.write_conf", return_value={}) + response = await flows_post({"foo": "bar"}) + assert response["status"] == "ok" diff --git a/backend/df_designer/app/tests/conftest.py b/backend/df_designer/app/tests/conftest.py new file mode 100644 index 00000000..22adaaeb --- /dev/null +++ b/backend/df_designer/app/tests/conftest.py @@ -0,0 +1,86 @@ +from contextlib import asynccontextmanager +from typing import Generator + +import httpx +import pytest +from fastapi.testclient import TestClient +from httpx import AsyncClient +from httpx_ws.transport import ASGIWebSocketTransport + +from app.main import app +from app.schemas.pagination import Pagination +from app.schemas.preset import Preset +from app.services.process import RunProcess +from app.services.process_manager import BuildManager, RunManager +from app.services.websocket_manager import WebSocketManager + + +async def start_process(async_client: AsyncClient, endpoint, preset_end_status) -> httpx.Response: + return await async_client.post( + endpoint, + json={"wait_time": 0.1, "end_status": preset_end_status}, + ) + + +@asynccontextmanager +async def override_dependency(mocker_obj, get_manager_func): + process_manager = get_manager_func() + process_manager.check_status = mocker_obj.AsyncMock() + app.dependency_overrides[get_manager_func] = lambda: process_manager + try: + yield process_manager + finally: + for _, process in process_manager.processes.items(): + if process.process.returncode is None: + await process.stop() + app.dependency_overrides = {} + + +@pytest.fixture +def client() -> Generator: + with TestClient(app=app) as client: + yield client + + +@pytest.fixture(scope="session") +def preset() -> Preset: + return Preset( + wait_time=0, + end_status="loop", + ) + + +@pytest.fixture +def pagination() -> Pagination: + return Pagination() + + +@pytest.fixture() +def run_process(): + async def _run_process(cmd_to_run): + process = RunProcess(id_=0) + await process.start(cmd_to_run) + return process + + return _run_process + + +@pytest.fixture() +def run_manager(): + return RunManager() + + +@pytest.fixture() +def build_manager(): + return BuildManager() + + +@pytest.fixture +def websocket_manager(): + return WebSocketManager() + + +@pytest.fixture +async def websocket_client() -> AsyncClient: + async with 
AsyncClient(transport=ASGIWebSocketTransport(app), base_url="http://test") as client:
+        yield client
diff --git a/backend/df_designer/app/tests/e2e/__init__.py b/backend/df_designer/app/tests/e2e/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/backend/df_designer/app/tests/e2e/test.py b/backend/df_designer/app/tests/e2e/test.py
new file mode 100644
index 00000000..5633f377
--- /dev/null
+++ b/backend/df_designer/app/tests/e2e/test.py
@@ -0,0 +1,37 @@
+import pytest
+
+from app.api.deps import get_build_manager, get_run_manager
+from app.core.logger_config import get_logger
+from app.tests.conftest import override_dependency
+
+BUILD_ID = 43
+
+logger = get_logger(__name__)
+
+
+async def _assert_process_status(response, process_manager):
+    assert response.json().get("status") == "ok", "Start process response status is not 'ok'"
+    process_manager.check_status.assert_awaited_once()
+
+
+@pytest.mark.asyncio
+async def test_all(mocker, client):
+    async with override_dependency(mocker, get_build_manager) as process_manager:
+        # `client` is the synchronous TestClient fixture, so call the endpoint directly
+        response = client.post("/api/v1/bot/build/start", json={"wait_time": 0.1, "end_status": "success"})
+        await _assert_process_status(response, process_manager)
+
+    async with override_dependency(mocker, get_run_manager) as process_manager:
+        response = client.post(f"/api/v1/bot/run/start/{BUILD_ID}", json={"wait_time": 0.1, "end_status": "success"})
+        await _assert_process_status(response, process_manager)
+
+    # connect to websocket
+    with client.websocket_connect(f"/api/v1/bot/run/connect?run_id={process_manager.get_last_id()}") as websocket:
+        data = websocket.receive_text()
+        assert data == "Start chatting"
+
+        # Check sending and receiving messages
+        websocket.send_text("Hi")
+        data = websocket.receive_text()
+        assert data
+        logger.debug("Received data: %s", data)
diff --git a/backend/df_designer/app/tests/integration/__init__.py b/backend/df_designer/app/tests/integration/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/backend/df_designer/app/tests/integration/test_api_integration.py b/backend/df_designer/app/tests/integration/test_api_integration.py
new file mode 100644
index 00000000..d71d4c19
--- /dev/null
+++ b/backend/df_designer/app/tests/integration/test_api_integration.py
@@ -0,0 +1,163 @@
+import asyncio
+
+import pytest
+from httpx import ASGITransport, AsyncClient
+
+from app.api.deps import get_build_manager, get_run_manager
+from app.core.logger_config import get_logger
+from app.main import app
+from app.tests.conftest import override_dependency, start_process
+
+logger = get_logger(__name__)
+
+
+async def _assert_process_status(response, process_manager, expected_end_status):
+    assert response.json().get("status") == "ok", "Start process response status is not 'ok'"
+    process_manager.check_status.assert_awaited_once()
+
+    try:
+        await asyncio.wait_for(
+            process_manager.processes[process_manager.last_id].process.wait(), timeout=4
+        )  # TODO: Consider making this timeout configurable
+    except asyncio.exceptions.TimeoutError as exc:
+        if expected_end_status in ["alive", "running"]:
+            logger.debug("Loop process timed out.
Expected behavior.") + else: + logger.debug("Process with expected end status '%s' timed out with status 'running'.", expected_end_status) + raise exc + + process_id = process_manager.last_id + logger.debug("Process id is %s", process_id) + current_status = await process_manager.get_status(process_id) + assert ( + current_status == expected_end_status + ), f"Current process status '{current_status}' did not match the expected '{expected_end_status}'" + + return current_status + + +async def _test_start_process(mocker_obj, get_manager_func, endpoint, preset_end_status, expected_end_status): + async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as async_client: + async with override_dependency(mocker_obj, get_manager_func) as process_manager: + response = await start_process(async_client, endpoint, preset_end_status) + current_status = await _assert_process_status(response, process_manager, expected_end_status) + + if current_status == "running": + process_manager.processes[process_manager.last_id].process.terminate() + await process_manager.processes[process_manager.last_id].process.wait() + + +async def _test_stop_process(mocker, get_manager_func, start_endpoint, stop_endpoint): + async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as async_client: + async with override_dependency(mocker, get_manager_func) as manager: + start_response = await start_process(async_client, start_endpoint, preset_end_status="loop") + assert start_response.status_code == 201 + logger.debug("Processes: %s", manager.processes) + + last_id = manager.get_last_id() + logger.debug("Last id: %s, type: %s", last_id, type(last_id)) + logger.debug("Process status %s", await manager.get_status(last_id)) + + stop_response = await async_client.get(f"{stop_endpoint}/{last_id}") + assert stop_response.status_code == 200 + assert stop_response.json() == {"status": "ok"} + + +# Test flows endpoints and interaction with db (read and write conf) +def test_flows(client): # noqa: F811 + get_response = client.get("/api/v1/flows") + assert get_response.status_code == 200 + data = get_response.json()["data"] + assert "flows" in data + + response = client.post("/api/v1/flows", json=data) + assert response.status_code == 200 + + +# def test_get_build_status(client): +# pass + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "end_status, process_status", [("failure", "failed"), ("loop", "running"), ("success", "completed")] +) +async def test_start_build(mocker, end_status, process_status): + await _test_start_process( + mocker, + get_build_manager, + endpoint="/api/v1/bot/build/start", + preset_end_status=end_status, + expected_end_status=process_status, + ) + + +@pytest.mark.asyncio +async def test_stop_build(mocker): + await _test_stop_process( + mocker, get_build_manager, start_endpoint="/api/v1/bot/build/start", stop_endpoint="/api/v1/bot/build/stop" + ) + + +# def test_get_run_status(client): +# pass + + +# Test processes of various end_status + Test integration with get_status. 
No db interaction (mocked processes)
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+    "end_status, process_status", [("failure", "failed"), ("loop", "running"), ("success", "alive")]
+)
+async def test_start_run(mocker, end_status, process_status):
+    build_id = 43
+    await _test_start_process(
+        mocker,
+        get_run_manager,
+        endpoint=f"/api/v1/bot/run/start/{build_id}",
+        preset_end_status=end_status,
+        expected_end_status=process_status,
+    )
+
+
+@pytest.mark.asyncio
+async def test_stop_run(mocker):
+    build_id = 43
+    await _test_stop_process(
+        mocker,
+        get_run_manager,
+        start_endpoint=f"/api/v1/bot/run/start/{build_id}",
+        stop_endpoint="/api/v1/bot/run/stop",
+    )
+
+
+@pytest.mark.asyncio
+async def test_connect_to_ws(mocker, client):  # noqa: F811
+    build_id = 43
+
+    # Start a process
+    run_manager = get_run_manager()
+    run_manager.check_status = mocker.AsyncMock()
+    app.dependency_overrides[get_run_manager] = lambda: run_manager
+
+    start_response = client.post(
+        f"/api/v1/bot/run/start/{build_id}",
+        json={"wait_time": 0.1, "end_status": "success"},
+    )
+
+    assert start_response.status_code == 201
+    logger.debug("Processes: %s", run_manager.processes)
+
+    # Process status
+    last_id = run_manager.get_last_id()
+    logger.debug("Last id: %s, type: %s", last_id, type(last_id))
+
+    # connect to websocket
+    with client.websocket_connect(f"/api/v1/bot/run/connect?run_id={last_id}") as websocket:
+        data = websocket.receive_text()
+        assert data == "Start chatting"
+
+        # Check sending and receiving messages
+        websocket.send_text("Hi")
+        data = websocket.receive_text()
+        assert data
+        logger.debug("Received data: %s", data)
diff --git a/backend/df_designer/app/tests/services/__init__.py b/backend/df_designer/app/tests/services/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/backend/df_designer/app/tests/services/test_process.py b/backend/df_designer/app/tests/services/test_process.py
new file mode 100644
index 00000000..fd928e2b
--- /dev/null
+++ b/backend/df_designer/app/tests/services/test_process.py
@@ -0,0 +1,54 @@
+import asyncio
+
+import pytest
+
+from app.core.logger_config import get_logger
+
+logger = get_logger(__name__)
+
+
+class TestRunProcess:
+    # def test_update_db_info(self, run_process):
+    #     process = await run_process("echo 'Hello df_designer'")
+    #     process.update_db_info()
+
+    @pytest.mark.asyncio
+    @pytest.mark.parametrize(
+        "cmd_to_run, status",
+        [
+            ("sleep 10000", "running"),
+            ("cat /non_existing_file", "failed"),
+            ("echo Hello df_designer", "completed"),
+        ],
+    )
+    async def test_check_status(self, run_process, cmd_to_run, status):
+        process = await run_process(cmd_to_run)
+        await asyncio.sleep(2)
+        assert await process.check_status() == status
+
+    # def test_periodically_check_status(self, run_process):
+    #     process = await run_process("sleep 10000")
+    #     run_process.periodically_check_status()
+
+    @pytest.mark.asyncio
+    async def test_stop(self, run_process):
+        process = await run_process("sleep 10000")
+        await process.stop()
+        assert process.process.returncode == -15  # negative SIGTERM: the process was terminated
+
+    @pytest.mark.asyncio
+    async def test_read_stdout(self, run_process):
+        process = await run_process("echo Hello df_designer")
+        output = await process.read_stdout()
+        assert output.strip().decode() == "Hello df_designer"
+
+    @pytest.mark.asyncio
+    async def test_write_stdin(self, run_process):
+        process = await run_process("cat")
+        await process.write_stdin(b"DF_Designer team welcome you.\n")
+        output = await process.process.stdout.readline()
+        assert
output.decode().strip() == "DF_Designer team welcome you."
+
+
+# class TestBuildProcess:
+#     pass
diff --git a/backend/df_designer/app/tests/services/test_process_manager.py b/backend/df_designer/app/tests/services/test_process_manager.py
new file mode 100644
index 00000000..9c3884e4
--- /dev/null
+++ b/backend/df_designer/app/tests/services/test_process_manager.py
@@ -0,0 +1,95 @@
+import pytest
+from omegaconf import OmegaConf
+
+from app.core.logger_config import get_logger
+
+logger = get_logger(__name__)
+
+RUN_ID = 42
+BUILD_ID = 43
+
+
+class TestRunManager:
+    @pytest.mark.asyncio
+    async def test_start(self, mocker, preset, run_manager):  # noqa: F811
+        # Mock the RunProcess constructor wherever it is called in
+        # the process_manager module, within the scope of this test function
+        run_process = mocker.patch("app.services.process_manager.RunProcess")
+        run_process_instance = run_process.return_value
+        run_process_instance.start = mocker.AsyncMock()
+
+        await run_manager.start(build_id=BUILD_ID, preset=preset)
+
+        run_process.assert_called_once_with(run_manager.last_id, BUILD_ID, preset.end_status)
+        run_process_instance.start.assert_awaited_once_with(f"dflowd run_bot {BUILD_ID} --preset {preset.end_status}")
+
+        assert run_manager.processes[run_manager.last_id] is run_process_instance
+
+    @pytest.mark.asyncio
+    async def test_stop_success(self, mocker, run_manager):
+        run_manager.processes[RUN_ID] = mocker.MagicMock()
+        run_manager.processes[RUN_ID].stop = mocker.AsyncMock()
+
+        await run_manager.stop(RUN_ID)
+        run_manager.processes[RUN_ID].stop.assert_awaited_once_with()
+
+    @pytest.mark.asyncio
+    async def test_stop_with_error(self, run_manager):
+        with pytest.raises((RuntimeError, ProcessLookupError)):
+            await run_manager.stop(RUN_ID)
+
+    # def test_check_status(self, run_manager, preset):
+    #     pass
+
+    @pytest.mark.asyncio
+    async def test_get_process_info(self, mocker, run_manager):
+        df_conf = OmegaConf.create(
+            f"""
+            - id: {RUN_ID}
+              status: stopped
+            """
+        )
+        df_conf_dict = {
+            "id": RUN_ID,
+            "status": "stopped",
+        }
+
+        read_conf = mocker.patch("app.services.process_manager.read_conf")
+        read_conf.return_value = df_conf
+
+        run_info = await run_manager.get_run_info(RUN_ID)
+        assert run_info == df_conf_dict
+
+    @pytest.mark.asyncio
+    async def test_get_full_info(self, mocker, run_manager):
+        df_conf = OmegaConf.create(
+            f"""
+            - id: {RUN_ID}
+              status: stopped
+            - id: {RUN_ID + 1}
+              status: stopped
+            """
+        )
+        df_conf_dict = {
+            "id": RUN_ID,
+            "status": "stopped",
+        }
+
+        read_conf = mocker.patch("app.services.process_manager.read_conf")
+        read_conf.return_value = df_conf
+
+        run_info = await run_manager.get_full_info(0, 1)
+        assert run_info == [df_conf_dict]
+
+    @pytest.mark.asyncio
+    async def test_fetch_run_logs(self, mocker, run_manager):
+        LOG_PATH = "df_designer/logs/runs/20240425/42_211545.log"
+        run_manager.get_process_info = mocker.AsyncMock(return_value={"id": RUN_ID, "log_path": LOG_PATH})
+
+        read_logs = mocker.patch("app.services.process_manager.read_logs", return_value=["log1", "log2"])
+
+        logs = await run_manager.fetch_run_logs(RUN_ID, 0, 1)
+
+        run_manager.get_process_info.assert_awaited_once()
+        read_logs.assert_awaited_once_with(LOG_PATH)
+        assert logs == ["log1"]
diff --git a/backend/df_designer/app/tests/services/test_websocket_manager.py b/backend/df_designer/app/tests/services/test_websocket_manager.py
new file mode 100644
index 00000000..854b232c
--- /dev/null
+++ b/backend/df_designer/app/tests/services/test_websocket_manager.py
@@ -0,0 +1,60 @@
+import pytest +from fastapi import WebSocket + +from app.services.process import RunProcess +from app.services.process_manager import RunManager + + +class TestWebSocketManager: + @pytest.mark.asyncio + async def test_connect(self, mocker, websocket_manager): + mocked_websocket = mocker.MagicMock(spec=WebSocket) + + await websocket_manager.connect(mocked_websocket) + + mocked_websocket.accept.assert_awaited_once_with() + assert mocked_websocket in websocket_manager.active_connections + + @pytest.mark.asyncio + async def test_disconnect(self, mocker, websocket_manager): + mocked_websocket = mocker.MagicMock(spec=WebSocket) + websocket_manager.active_connections.append(mocked_websocket) + websocket_manager.pending_tasks[mocked_websocket] = set() + + websocket_manager.disconnect(mocked_websocket) + + assert mocked_websocket not in websocket_manager.pending_tasks + assert mocked_websocket not in websocket_manager.active_connections + + @pytest.mark.asyncio + async def test_send_process_output_to_websocket(self, mocker, websocket_manager): + run_id = 42 + awaited_response = "Hello from DF-Designer" + + websocket = mocker.MagicMock(spec=WebSocket) + run_manager = mocker.MagicMock(spec=RunManager()) + run_process = mocker.MagicMock(spec=RunProcess(run_id)) + run_process.read_stdout = mocker.AsyncMock(side_effect=[awaited_response.encode(), None]) + run_manager.processes = {run_id: run_process} + + await websocket_manager.send_process_output_to_websocket(run_id, run_manager, websocket) + + assert run_process.read_stdout.call_count == 2 + websocket.send_text.assert_awaited_once_with(awaited_response) + + @pytest.mark.asyncio + async def test_forward_websocket_messages_to_process(self, mocker, websocket_manager): + run_id = 42 + awaited_message = "Hello from DF-Designer" + + websocket = mocker.MagicMock(spec=WebSocket) + websocket.receive_text = mocker.AsyncMock(side_effect=[awaited_message, None]) + run_manager = mocker.MagicMock(spec=RunManager()) + run_process = mocker.MagicMock(spec=RunProcess(run_id)) + run_process.write_stdin = mocker.AsyncMock() + run_manager.processes = {run_id: run_process} + + await websocket_manager.forward_websocket_messages_to_process(run_id, run_manager, websocket) + + assert websocket.receive_text.await_count == 2 + run_process.write_stdin.assert_called_once_with(awaited_message.encode() + b"\n") diff --git a/backend/df_designer/poetry.lock b/backend/df_designer/poetry.lock index a4ecc2d0..46afb588 100644 --- a/backend/df_designer/poetry.lock +++ b/backend/df_designer/poetry.lock @@ -87,6 +87,40 @@ files = [ [package.dependencies] chardet = ">=3.0.2" +[[package]] +name = "black" +version = "22.12.0" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, + {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, + {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, + {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, + {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, + {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, + {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, + {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, + {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, + {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, + {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, + {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + [[package]] name = "certifi" version = "2024.2.2" @@ -305,23 +339,39 @@ test = ["pytest (>=6)"] [[package]] name = "fastapi" -version = "0.110.0" +version = "0.110.1" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.110.0-py3-none-any.whl", hash = "sha256:87a1f6fb632a218222c5984be540055346a8f5d8a68e8f6fb647b1dc9934de4b"}, - {file = "fastapi-0.110.0.tar.gz", hash = "sha256:266775f0dcc95af9d3ef39bad55cff525329a931d5fd51930aadd4f428bf7ff3"}, + {file = "fastapi-0.110.1-py3-none-any.whl", hash = "sha256:5df913203c482f820d31f48e635e022f8cbfe7350e4830ef05a3163925b1addc"}, + {file = "fastapi-0.110.1.tar.gz", hash = "sha256:6feac43ec359dfe4f45b2c18ec8c94edb8dc2dfc461d417d9e626590c071baad"}, ] [package.dependencies] pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -starlette = ">=0.36.3,<0.37.0" +starlette = ">=0.37.2,<0.38.0" typing-extensions = ">=4.8.0" [package.extras] all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson 
(>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +[[package]] +name = "flake8" +version = "4.0.1" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.6" +files = [ + {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, + {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, +] + +[package.dependencies] +mccabe = ">=0.6.0,<0.7.0" +pycodestyle = ">=2.8.0,<2.9.0" +pyflakes = ">=2.4.0,<2.5.0" + [[package]] name = "h11" version = "0.14.0" @@ -333,6 +383,27 @@ files = [ {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, ] +[[package]] +name = "httpcore" +version = "1.0.5" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.26.0)"] + [[package]] name = "httptools" version = "0.6.1" @@ -381,17 +452,83 @@ files = [ [package.extras] test = ["Cython (>=0.29.24,<0.30.0)"] +[[package]] +name = "httpx" +version = "0.27.0" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, + {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "httpx-ws" +version = "0.6.0" +description = "WebSockets support for HTTPX" +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx_ws-0.6.0-py3-none-any.whl", hash = "sha256:437cfca94519a4e6ae06eb5573192df6c0da85c22b1a19cc1ea0b02b05a51d25"}, + {file = "httpx_ws-0.6.0.tar.gz", hash = "sha256:60218f531fb474a2143af38568f4b7d94ba356780973443365c8e2c87882bb8c"}, +] + +[package.dependencies] +anyio = ">=4" +httpcore = ">=1.0.4" +httpx = ">=0.23.1" +wsproto = "*" + [[package]] name = "idna" -version = "3.6" +version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = 
">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + [[package]] name = "jinja2" version = "3.1.3" @@ -502,6 +639,17 @@ files = [ {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] +[[package]] +name = "mccabe" +version = "0.6.1" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = "*" +files = [ + {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, + {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, +] + [[package]] name = "mdurl" version = "0.1.2" @@ -513,6 +661,17 @@ files = [ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + [[package]] name = "nest-asyncio" version = "1.6.0" @@ -539,20 +698,83 @@ files = [ antlr4-python3-runtime = "==4.9.*" PyYAML = ">=5.1.0" +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pycodestyle" +version = "2.8.0" +description = "Python style guide checker" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, + {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, +] + [[package]] name = "pydantic" -version = "2.6.3" +version = "2.7.0" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.6.3-py3-none-any.whl", hash = "sha256:72c6034df47f46ccdf81869fddb81aade68056003900a8724a4f160700016a2a"}, - {file = "pydantic-2.6.3.tar.gz", hash = "sha256:e07805c4c7f5c6826e33a1d4c9d47950d7eaf34868e2690f8594d2e30241f11f"}, + {file = "pydantic-2.7.0-py3-none-any.whl", hash = "sha256:9dee74a271705f14f9a1567671d144a851c675b072736f0a7b2608fd9e495352"}, + {file = "pydantic-2.7.0.tar.gz", hash = "sha256:b5ecdd42262ca2462e2624793551e80911a1e989f462910bb81aef974b4bb383"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.16.3" +pydantic-core = "2.18.1" typing-extensions = ">=4.6.1" [package.extras] @@ -560,90 +782,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.16.3" -description = "" +version = "2.18.1" +description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"}, - {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"}, - {file = 
"pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"}, - {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"}, - {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, - {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, - {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, - {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, - {file = 
"pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, - {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, - {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, - {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, - {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, - {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"}, - {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"}, - 
{file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"}, - {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"}, - {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"}, - {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"}, - {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, - 
{file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, - {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, + {file = "pydantic_core-2.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ee9cf33e7fe14243f5ca6977658eb7d1042caaa66847daacbd2117adb258b226"}, + {file = "pydantic_core-2.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6b7bbb97d82659ac8b37450c60ff2e9f97e4eb0f8a8a3645a5568b9334b08b50"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df4249b579e75094f7e9bb4bd28231acf55e308bf686b952f43100a5a0be394c"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d0491006a6ad20507aec2be72e7831a42efc93193d2402018007ff827dc62926"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ae80f72bb7a3e397ab37b53a2b49c62cc5496412e71bc4f1277620a7ce3f52b"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:58aca931bef83217fca7a390e0486ae327c4af9c3e941adb75f8772f8eeb03a1"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1be91ad664fc9245404a789d60cba1e91c26b1454ba136d2a1bf0c2ac0c0505a"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:667880321e916a8920ef49f5d50e7983792cf59f3b6079f3c9dac2b88a311d17"}, + {file = "pydantic_core-2.18.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f7054fdc556f5421f01e39cbb767d5ec5c1139ea98c3e5b350e02e62201740c7"}, + {file = "pydantic_core-2.18.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:030e4f9516f9947f38179249778709a460a3adb516bf39b5eb9066fcfe43d0e6"}, + {file = "pydantic_core-2.18.1-cp310-none-win32.whl", hash = "sha256:2e91711e36e229978d92642bfc3546333a9127ecebb3f2761372e096395fc649"}, + {file = "pydantic_core-2.18.1-cp310-none-win_amd64.whl", hash = "sha256:9a29726f91c6cb390b3c2338f0df5cd3e216ad7a938762d11c994bb37552edb0"}, + {file = "pydantic_core-2.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9ece8a49696669d483d206b4474c367852c44815fca23ac4e48b72b339807f80"}, + {file = "pydantic_core-2.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a5d83efc109ceddb99abd2c1316298ced2adb4570410defe766851a804fcd5b"}, + {file = 
"pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7973c381283783cd1043a8c8f61ea5ce7a3a58b0369f0ee0ee975eaf2f2a1b"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:54c7375c62190a7845091f521add19b0f026bcf6ae674bdb89f296972272e86d"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd63cec4e26e790b70544ae5cc48d11b515b09e05fdd5eff12e3195f54b8a586"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:561cf62c8a3498406495cfc49eee086ed2bb186d08bcc65812b75fda42c38294"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68717c38a68e37af87c4da20e08f3e27d7e4212e99e96c3d875fbf3f4812abfc"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d5728e93d28a3c63ee513d9ffbac9c5989de8c76e049dbcb5bfe4b923a9739d"}, + {file = "pydantic_core-2.18.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f0f17814c505f07806e22b28856c59ac80cee7dd0fbb152aed273e116378f519"}, + {file = "pydantic_core-2.18.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d816f44a51ba5175394bc6c7879ca0bd2be560b2c9e9f3411ef3a4cbe644c2e9"}, + {file = "pydantic_core-2.18.1-cp311-none-win32.whl", hash = "sha256:09f03dfc0ef8c22622eaa8608caa4a1e189cfb83ce847045eca34f690895eccb"}, + {file = "pydantic_core-2.18.1-cp311-none-win_amd64.whl", hash = "sha256:27f1009dc292f3b7ca77feb3571c537276b9aad5dd4efb471ac88a8bd09024e9"}, + {file = "pydantic_core-2.18.1-cp311-none-win_arm64.whl", hash = "sha256:48dd883db92e92519201f2b01cafa881e5f7125666141a49ffba8b9facc072b0"}, + {file = "pydantic_core-2.18.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b6b0e4912030c6f28bcb72b9ebe4989d6dc2eebcd2a9cdc35fefc38052dd4fe8"}, + {file = "pydantic_core-2.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f3202a429fe825b699c57892d4371c74cc3456d8d71b7f35d6028c96dfecad31"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3982b0a32d0a88b3907e4b0dc36809fda477f0757c59a505d4e9b455f384b8b"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25595ac311f20e5324d1941909b0d12933f1fd2171075fcff763e90f43e92a0d"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14fe73881cf8e4cbdaded8ca0aa671635b597e42447fec7060d0868b52d074e6"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca976884ce34070799e4dfc6fbd68cb1d181db1eefe4a3a94798ddfb34b8867f"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684d840d2c9ec5de9cb397fcb3f36d5ebb6fa0d94734f9886032dd796c1ead06"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:54764c083bbe0264f0f746cefcded6cb08fbbaaf1ad1d78fb8a4c30cff999a90"}, + {file = "pydantic_core-2.18.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:201713f2f462e5c015b343e86e68bd8a530a4f76609b33d8f0ec65d2b921712a"}, + {file = "pydantic_core-2.18.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd1a9edb9dd9d79fbeac1ea1f9a8dd527a6113b18d2e9bcc0d541d308dae639b"}, + {file = "pydantic_core-2.18.1-cp312-none-win32.whl", hash = 
"sha256:d5e6b7155b8197b329dc787356cfd2684c9d6a6b1a197f6bbf45f5555a98d411"}, + {file = "pydantic_core-2.18.1-cp312-none-win_amd64.whl", hash = "sha256:9376d83d686ec62e8b19c0ac3bf8d28d8a5981d0df290196fb6ef24d8a26f0d6"}, + {file = "pydantic_core-2.18.1-cp312-none-win_arm64.whl", hash = "sha256:c562b49c96906b4029b5685075fe1ebd3b5cc2601dfa0b9e16c2c09d6cbce048"}, + {file = "pydantic_core-2.18.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:3e352f0191d99fe617371096845070dee295444979efb8f27ad941227de6ad09"}, + {file = "pydantic_core-2.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0295d52b012cbe0d3059b1dba99159c3be55e632aae1999ab74ae2bd86a33d7"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56823a92075780582d1ffd4489a2e61d56fd3ebb4b40b713d63f96dd92d28144"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd3f79e17b56741b5177bcc36307750d50ea0698df6aa82f69c7db32d968c1c2"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38a5024de321d672a132b1834a66eeb7931959c59964b777e8f32dbe9523f6b1"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2ce426ee691319d4767748c8e0895cfc56593d725594e415f274059bcf3cb76"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2adaeea59849ec0939af5c5d476935f2bab4b7f0335b0110f0f069a41024278e"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9b6431559676a1079eac0f52d6d0721fb8e3c5ba43c37bc537c8c83724031feb"}, + {file = "pydantic_core-2.18.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:85233abb44bc18d16e72dc05bf13848a36f363f83757541f1a97db2f8d58cfd9"}, + {file = "pydantic_core-2.18.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:641a018af4fe48be57a2b3d7a1f0f5dbca07c1d00951d3d7463f0ac9dac66622"}, + {file = "pydantic_core-2.18.1-cp38-none-win32.whl", hash = "sha256:63d7523cd95d2fde0d28dc42968ac731b5bb1e516cc56b93a50ab293f4daeaad"}, + {file = "pydantic_core-2.18.1-cp38-none-win_amd64.whl", hash = "sha256:907a4d7720abfcb1c81619863efd47c8a85d26a257a2dbebdb87c3b847df0278"}, + {file = "pydantic_core-2.18.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:aad17e462f42ddbef5984d70c40bfc4146c322a2da79715932cd8976317054de"}, + {file = "pydantic_core-2.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:94b9769ba435b598b547c762184bcfc4783d0d4c7771b04a3b45775c3589ca44"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80e0e57cc704a52fb1b48f16d5b2c8818da087dbee6f98d9bf19546930dc64b5"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:76b86e24039c35280ceee6dce7e62945eb93a5175d43689ba98360ab31eebc4a"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a05db5013ec0ca4a32cc6433f53faa2a014ec364031408540ba858c2172bb0"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:250ae39445cb5475e483a36b1061af1bc233de3e9ad0f4f76a71b66231b07f88"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a32204489259786a923e02990249c65b0f17235073149d0033efcebe80095570"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:6395a4435fa26519fd96fdccb77e9d00ddae9dd6c742309bd0b5610609ad7fb2"}, + {file = "pydantic_core-2.18.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2533ad2883f001efa72f3d0e733fb846710c3af6dcdd544fe5bf14fa5fe2d7db"}, + {file = "pydantic_core-2.18.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b560b72ed4816aee52783c66854d96157fd8175631f01ef58e894cc57c84f0f6"}, + {file = "pydantic_core-2.18.1-cp39-none-win32.whl", hash = "sha256:582cf2cead97c9e382a7f4d3b744cf0ef1a6e815e44d3aa81af3ad98762f5a9b"}, + {file = "pydantic_core-2.18.1-cp39-none-win_amd64.whl", hash = "sha256:ca71d501629d1fa50ea7fa3b08ba884fe10cefc559f5c6c8dfe9036c16e8ae89"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e178e5b66a06ec5bf51668ec0d4ac8cfb2bdcb553b2c207d58148340efd00143"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:72722ce529a76a4637a60be18bd789d8fb871e84472490ed7ddff62d5fed620d"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fe0c1ce5b129455e43f941f7a46f61f3d3861e571f2905d55cdbb8b5c6f5e2c"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4284c621f06a72ce2cb55f74ea3150113d926a6eb78ab38340c08f770eb9b4d"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a0c3e718f4e064efde68092d9d974e39572c14e56726ecfaeebbe6544521f47"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2027493cc44c23b598cfaf200936110433d9caa84e2c6cf487a83999638a96ac"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:76909849d1a6bffa5a07742294f3fa1d357dc917cb1fe7b470afbc3a7579d539"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ee7ccc7fb7e921d767f853b47814c3048c7de536663e82fbc37f5eb0d532224b"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ee2794111c188548a4547eccc73a6a8527fe2af6cf25e1a4ebda2fd01cdd2e60"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a139fe9f298dc097349fb4f28c8b81cc7a202dbfba66af0e14be5cfca4ef7ce5"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d074b07a10c391fc5bbdcb37b2f16f20fcd9e51e10d01652ab298c0d07908ee2"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c69567ddbac186e8c0aadc1f324a60a564cfe25e43ef2ce81bcc4b8c3abffbae"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:baf1c7b78cddb5af00971ad5294a4583188bda1495b13760d9f03c9483bb6203"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2684a94fdfd1b146ff10689c6e4e815f6a01141781c493b97342cdc5b06f4d5d"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:73c1bc8a86a5c9e8721a088df234265317692d0b5cd9e86e975ce3bc3db62a59"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e60defc3c15defb70bb38dd605ff7e0fae5f6c9c7cbfe0ad7868582cb7e844a6"}, + {file = "pydantic_core-2.18.1.tar.gz", hash = "sha256:de9d3e8717560eb05e28739d1b35e4eac2e458553a52a301e51352a7ffc86a35"}, ] [package.dependencies] @@ -668,6 +890,17 @@ python-dotenv = ">=0.21.0" toml = ["tomli (>=2.0.1)"] yaml = ["pyyaml 
(>=6.0.1)"] +[[package]] +name = "pyflakes" +version = "2.4.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, + {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, +] + [[package]] name = "pygments" version = "2.17.2" @@ -683,6 +916,63 @@ files = [ plugins = ["importlib-metadata"] windows-terminal = ["colorama (>=0.4.6)"] +[[package]] +name = "pytest" +version = "8.1.1" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.23.6" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-asyncio-0.23.6.tar.gz", hash = "sha256:ffe523a89c1c222598c76856e76852b787504ddb72dd5d9b6617ffa8aa2cde5f"}, + {file = "pytest_asyncio-0.23.6-py3-none-any.whl", hash = "sha256:68516fdd1018ac57b846c9846b954f0393b26f094764a28c955eabb0536a4e8a"}, +] + +[package.dependencies] +pytest = ">=7.0.0,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -851,13 +1141,13 @@ files = [ [[package]] name = "starlette" -version = "0.36.3" +version = "0.37.2" description = "The little ASGI library that shines." 
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "starlette-0.36.3-py3-none-any.whl", hash = "sha256:13d429aa93a61dc40bf503e8c801db1f1bca3dc706b10ef2434a36123568f044"},
-    {file = "starlette-0.36.3.tar.gz", hash = "sha256:90a671733cfb35771d8cc605e0b679d23b992f8dcfad48cc60b38cb29aeb7080"},
+    {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"},
+    {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"},
 ]
 
 [package.dependencies]
@@ -877,15 +1167,26 @@ files = [
     {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"},
 ]
 
+[[package]]
+name = "tomli"
+version = "2.0.1"
+description = "A lil' TOML parser"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
+    {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
+]
+
 [[package]]
 name = "typer"
-version = "0.9.0"
+version = "0.9.4"
 description = "Typer, build great CLIs. Easy to code. Based on Python type hints."
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "typer-0.9.0-py3-none-any.whl", hash = "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee"},
-    {file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"},
+    {file = "typer-0.9.4-py3-none-any.whl", hash = "sha256:aa6c4a4e2329d868b80ecbaf16f807f2b54e192209d7ac9dd42691d63f7a54eb"},
+    {file = "typer-0.9.4.tar.gz", hash = "sha256:f714c2d90afae3a7929fcd72a3abb08df305e1ff61719381384211c4070af57f"},
 ]
 
 [package.dependencies]
@@ -896,7 +1197,7 @@ typing-extensions = ">=3.7.4.3"
 all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
 dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"]
 doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"]
-test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
+test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.971)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
 
 [[package]]
 name = "types-python-dateutil"
@@ -911,13 +1212,13 @@ files = [
 
 [[package]]
 name = "typing-extensions"
-version = "4.10.0"
+version = "4.11.0"
 description = "Backported and Experimental Type Hints for Python 3.8+"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"},
-    {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"},
+    {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"},
+    {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"},
 ]
 
 [[package]]
@@ -939,13 +1240,13 @@ zstd = ["zstandard (>=0.18.0)"]
 
 [[package]]
 name = "uvicorn"
-version = "0.28.0"
+version = "0.28.1"
 description = "The lightning-fast ASGI server."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "uvicorn-0.28.0-py3-none-any.whl", hash = "sha256:6623abbbe6176204a4226e67607b4d52cc60ff62cda0ff177613645cefa2ece1"},
-    {file = "uvicorn-0.28.0.tar.gz", hash = "sha256:cab4473b5d1eaeb5a0f6375ac4bc85007ffc75c3cc1768816d9e5d589857b067"},
+    {file = "uvicorn-0.28.1-py3-none-any.whl", hash = "sha256:5162f6d652f545be91b1feeaee8180774af143965ca9dc8a47ff1dc6bafa4ad5"},
+    {file = "uvicorn-0.28.1.tar.gz", hash = "sha256:08103e79d546b6cf20f67c7e5e434d2cf500a6e29b28773e407250c54fc4fa3c"},
 ]
 
 [package.dependencies]
@@ -1254,7 +1555,21 @@ files = [
     {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"},
 ]
 
+[[package]]
+name = "wsproto"
+version = "1.2.0"
+description = "WebSockets state-machine based protocol implementation"
+optional = false
+python-versions = ">=3.7.0"
+files = [
+    {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"},
+    {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"},
+]
+
+[package.dependencies]
+h11 = ">=0.9.0,<1"
+
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.10"
-content-hash = "c38c78d758710b2271c18b10a032af520ac530928bf102b4dc86907820f63b08"
+content-hash = "e111ae0a81c8a86a628589fb11be3d95e9d7e9a8793658295f0f2ad1c1344167"
diff --git a/backend/df_designer/pyproject.toml b/backend/df_designer/pyproject.toml
index 705df010..b0b0ad28 100644
--- a/backend/df_designer/pyproject.toml
+++ b/backend/df_designer/pyproject.toml
@@ -17,6 +17,19 @@ aiofiles = "^23.2.1"
 cookiecutter = "^2.6.0"
 dff = { git = "https://github.com/deeppavlov/dialog_flow_framework.git", rev = "4b4cf69dd96e1ed21ec3bf9d5950c010616b24cc" }
 omegaconf = "^2.3.0"
+pytest = "^8.1.1"
+pytest-asyncio = "^0.23.6"
+pytest-mock = "^3.14.0"
+httpx = "^0.27.0"
+httpx-ws = "^0.6.0"
 
 [tool.poetry.scripts]
 dflowd = "app.cli:cli"
+
+[tool.poetry.group.lint]
+optional = true
+
+[tool.poetry.group.lint.dependencies]
+isort = "^5"
+black = "^22"
+flake8 = "^4"
diff --git a/bin/run_codestyle.sh b/bin/run_codestyle.sh
new file mode 100644
index 00000000..1ffaf838
--- /dev/null
+++ b/bin/run_codestyle.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+
+pip3 install flake8
+
+for ARGUMENT in "$@"; do
+
+    KEY=$(echo $ARGUMENT | cut -f1 -d=)
+    VALUE=$(echo $ARGUMENT | cut -f2 -d=)
+
+    case "$KEY" in
+    DIFF_BRANCH) DIFF_BRANCH=${VALUE} ;;
+    *) ;;
+    esac
+done
+
+if [[ "$DIFF_BRANCH" == "" ]]; then
+    DIFF_BRANCH="dev"
+fi
+
+res=$(git diff --cached --name-only --diff-filter=ACMR origin/$DIFF_BRANCH | grep \.py\$ | tr -d "[:blank:]")
+if [ -z "$res" ]
+then
+    exit 0
+else
+    flake8 --ignore=E203 --statistics --count --max-line-length 120 $(git diff --cached --name-only --diff-filter=ACMR origin/$DIFF_BRANCH | grep \.py\$)
+fi
diff --git a/compose.yaml b/compose.yaml
index eaf18aa7..9a4b6c76 100644
--- a/compose.yaml
+++ b/compose.yaml
@@ -1,12 +1,15 @@
+volumes:
+  project_data:
+
 services:
   backend:
     build:
       args:
-        PROJECT_DIR: brand_new
+        PROJECT_DIR: df_designer_project
       context: ./
       dockerfile: Dockerfile
     ports:
       - 8000:8000
     volumes:
-      - ./volume:/src/
+      - project_data:/src2/df_designer_project
 version: '3.8'
diff --git a/frontend/src/components/nodes/conditions/Condition.tsx b/frontend/src/components/nodes/conditions/Condition.tsx
index 05be1475..b7bb6de5 100644
--- a/frontend/src/components/nodes/conditions/Condition.tsx
+++ b/frontend/src/components/nodes/conditions/Condition.tsx
@@ -8,7 +8,6 @@ import { CONDITION_LABELS } from "../../../consts"
 import ConditionModal from "../../../modals/ConditionModal/ConditionModal"
 import { useDisclosure } from "@nextui-org/react"
-
 const Condition = ({ data, condition }: NodeComponentConditionType) => {
   const [label, setLabel] = useState(condition.data.transition_type ?? "manual")
   const {