diff --git a/.github/workflows/backend_check.yml b/.github/workflows/backend_check.yml index ce2b77ea..72c3e217 100644 --- a/.github/workflows/backend_check.yml +++ b/.github/workflows/backend_check.yml @@ -75,13 +75,9 @@ jobs: python -m poetry run chatsky.ui init --destination ../ --no-input --overwrite-if-exists working-directory: backend - - name: Install chatsky-ui into new project poetry environment - run: | - ../bin/add_ui_to_toml.sh - working-directory: my_project - - name: run tests run: | - python -m poetry install --no-root - python -m poetry run pytest ../backend/chatsky_ui/tests/ --verbose + poetry install --with dev -C ../backend + POETRY_ENV=$(poetry env info --path -C ../backend) + $POETRY_ENV/bin/pytest ../backend/chatsky_ui/tests/ --verbose working-directory: my_project diff --git a/README.md b/README.md index fce37cbc..c46a7b1c 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,9 @@ # Quick Start ## System Requirements -Ensure you have Python version 3.8.1 or higher installed. +Ensure you have Python version 3.9 or higher installed (excluding 3.9.7). 
## Installation -To install the necessary package, run the following command: +To install the package and necessary dependencies, run the following command: ```bash pip install chatsky-ui ``` diff --git a/backend/chatsky_ui/api/api_v1/endpoints/bot.py b/backend/chatsky_ui/api/api_v1/endpoints/bot.py index 6485548d..24a9d451 100644 --- a/backend/chatsky_ui/api/api_v1/endpoints/bot.py +++ b/backend/chatsky_ui/api/api_v1/endpoints/bot.py @@ -1,15 +1,14 @@ import asyncio from typing import Any, Dict, List, Optional, Union -from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException, WebSocket, WebSocketException, status -from chatsky.messengers.http_interface import HTTP_INTERFACE_PORT +from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException, status +from httpx import AsyncClient from chatsky_ui.api import deps +from chatsky_ui.core.config import settings from chatsky_ui.schemas.pagination import Pagination from chatsky_ui.schemas.preset import Preset -from chatsky_ui.schemas.process_status import Status from chatsky_ui.services.process_manager import BuildManager, ProcessManager, RunManager -from httpx import AsyncClient router = APIRouter() @@ -144,7 +143,7 @@ async def start_run( build_id: int, preset: Preset, background_tasks: BackgroundTasks, - run_manager: RunManager = Depends(deps.get_run_manager) + run_manager: RunManager = Depends(deps.get_run_manager), ) -> Dict[str, Union[str, int]]: """Starts a `run` process with the given preset. 
@@ -243,12 +242,12 @@ async def respond( async with AsyncClient() as client: try: response = await client.post( - f"http://localhost:{HTTP_INTERFACE_PORT}/chat", + f"http://localhost:{settings.chatsky_port}/chat", params={"user_id": user_id, "user_message": user_message}, ) return response.json() except Exception as e: raise HTTPException( status_code=status.HTTP_503_SERVICE_UNAVAILABLE, - detail=f"Please check that service's up and running on the port '{HTTP_INTERFACE_PORT}'.", + detail=f"Please check that service's up and running on the port '{settings.chatsky_port}'.", ) from e diff --git a/backend/chatsky_ui/api/api_v1/endpoints/chatsky_services.py b/backend/chatsky_ui/api/api_v1/endpoints/chatsky_services.py index 42e55214..d5cb6d62 100644 --- a/backend/chatsky_ui/api/api_v1/endpoints/chatsky_services.py +++ b/backend/chatsky_ui/api/api_v1/endpoints/chatsky_services.py @@ -10,7 +10,7 @@ from chatsky_ui.clients.chatsky_client import get_chatsky_conditions from chatsky_ui.core.config import settings from chatsky_ui.schemas.code_snippet import CodeSnippet -from chatsky_ui.services.json_converter_new2.logic_component_converter.service_replacer import get_all_classes +from chatsky_ui.services.json_converter.logic_component_converter.service_replacer import get_all_classes from chatsky_ui.utils.ast_utils import get_imports_from_file router = APIRouter() diff --git a/backend/chatsky_ui/api/api_v1/endpoints/flows.py b/backend/chatsky_ui/api/api_v1/endpoints/flows.py index 490f099a..f54e7d01 100644 --- a/backend/chatsky_ui/api/api_v1/endpoints/flows.py +++ b/backend/chatsky_ui/api/api_v1/endpoints/flows.py @@ -1,16 +1,15 @@ -from typing import Dict, Union, Optional +from pathlib import Path +from typing import Dict, Optional, Union -from fastapi import APIRouter, status, HTTPException -from omegaconf import OmegaConf +from dotenv import set_key +from fastapi import APIRouter, HTTPException, status from git.exc import GitCommandError +from omegaconf import OmegaConf 
from chatsky_ui.core.config import settings +from chatsky_ui.core.logger_config import get_logger from chatsky_ui.db.base import read_conf, write_conf from chatsky_ui.utils.git_cmd import commit_changes, get_repo -from chatsky_ui.core.logger_config import get_logger -from pathlib import Path -from dotenv import set_key - router = APIRouter() diff --git a/backend/chatsky_ui/api/deps.py b/backend/chatsky_ui/api/deps.py index af10d17f..a63efa9f 100644 --- a/backend/chatsky_ui/api/deps.py +++ b/backend/chatsky_ui/api/deps.py @@ -1,6 +1,5 @@ from chatsky_ui.services.process_manager import BuildManager, RunManager - build_manager = BuildManager() diff --git a/backend/chatsky_ui/cli.py b/backend/chatsky_ui/cli.py index 65011566..1132118c 100644 --- a/backend/chatsky_ui/cli.py +++ b/backend/chatsky_ui/cli.py @@ -4,25 +4,20 @@ import string import sys from pathlib import Path +from typing import Optional import nest_asyncio import typer from cookiecutter.main import cookiecutter -from typing_extensions import Annotated from git import Repo -from typing import Optional +from typing_extensions import Annotated # Patch nest_asyncio before importing Chatsky nest_asyncio.apply = lambda: None from chatsky_ui.core.config import app_runner, settings # noqa: E402 from chatsky_ui.core.logger_config import get_logger # noqa: E402 -from chatsky_ui.utils.git_cmd import ( # noqa: E402 - commit_changes, - get_repo, - save_built_script_to_git, - save_frontend_graph_to_git, -) # noqa: E402 +from chatsky_ui.utils.git_cmd import commit_changes # noqa: E402 cli = typer.Typer( help="🚀 Welcome to Chatsky-UI!\n\n" @@ -108,9 +103,7 @@ def build_scenario( raise NotADirectoryError(f"Directory {project_dir} doesn't exist") settings.set_config(work_directory=project_dir) - from chatsky_ui.services.json_converter_new2.pipeline_converter import ( - PipelineConverter, - ) # pylint: disable=C0415 + from chatsky_ui.services.json_converter.pipeline_converter import PipelineConverter # pylint: disable=C0415 
pipeline_converter = PipelineConverter() pipeline_converter( @@ -207,7 +200,6 @@ def init( "https://github.com/deeppavlov/chatsky-ui-template.git", no_input=no_input, overwrite_if_exists=overwrite_if_exists, - checkout="add-test", ) finally: os.chdir(original_dir) diff --git a/backend/chatsky_ui/core/config.py b/backend/chatsky_ui/core/config.py index 76321f5c..a7450f2b 100644 --- a/backend/chatsky_ui/core/config.py +++ b/backend/chatsky_ui/core/config.py @@ -1,13 +1,12 @@ +import logging import os from pathlib import Path +from typing import Dict import uvicorn from dotenv import load_dotenv -import logging -from typing import Dict from omegaconf import DictConfig, OmegaConf - LOG_LEVELS: Dict[str, int] = { "critical": logging.CRITICAL, "error": logging.ERROR, @@ -34,6 +33,7 @@ def __init__(self): self.set_config( host=os.getenv("HOST", "0.0.0.0"), port=os.getenv("PORT", "8000"), + chatsky_port=os.getenv("CHATSKY_PORT", "8020"), log_level=os.getenv("LOG_LEVEL", "info"), conf_reload=os.getenv("CONF_RELOAD", "false"), work_directory=".", @@ -45,7 +45,7 @@ def set_config(self, **kwargs): value = Path(value) elif key == "conf_reload": value = str(value).lower() in ["true", "yes", "t", "y", "1"] - elif key == "port": + elif key in ["port", "chatsky_port"]: value = int(value) setattr(self, key, value) @@ -75,6 +75,7 @@ def save_config(self): "work_directory": str(self.work_directory), "host": self.host, "port": self.port, + "chatsky_port": self.chatsky_port, "log_level": self.log_level, "conf_reload": self.conf_reload, } diff --git a/backend/chatsky_ui/core/logger_config.py b/backend/chatsky_ui/core/logger_config.py index c23ae07b..9ae891cf 100644 --- a/backend/chatsky_ui/core/logger_config.py +++ b/backend/chatsky_ui/core/logger_config.py @@ -3,7 +3,7 @@ from pathlib import Path from typing import Literal, Optional -from chatsky_ui.core.config import settings, LOG_LEVELS +from chatsky_ui.core.config import LOG_LEVELS, settings def setup_logging(log_type: 
Literal["builds", "runs"], id_: int, timestamp: datetime) -> Path: diff --git a/backend/chatsky_ui/main.py b/backend/chatsky_ui/main.py index 706cd5a0..5de32bd9 100644 --- a/backend/chatsky_ui/main.py +++ b/backend/chatsky_ui/main.py @@ -1,3 +1,4 @@ +import signal from contextlib import asynccontextmanager from fastapi import APIRouter, FastAPI, Response @@ -6,15 +7,27 @@ from chatsky_ui import __version__ from chatsky_ui.api.api_v1.api import api_router +from chatsky_ui.api.deps import run_manager from chatsky_ui.core.config import settings +def signal_handler(signum, frame): + # standard (signum, frame) handler signature; flag run processes for graceful shutdown + print("Caught termination signal, shutting down gracefully...") + for process in run_manager.processes.values(): + process.to_be_terminated = True + + @asynccontextmanager async def lifespan(app: FastAPI): if settings.temp_conf.exists(): settings.refresh_work_dir() + signal.signal(signal.SIGINT, signal_handler) yield + settings.temp_conf.unlink(missing_ok=True) + await run_manager.stop_all() + app = FastAPI(title="DF Designer", version=__version__, lifespan=lifespan) diff --git a/backend/chatsky_ui/schemas/front_graph_components/interface.py b/backend/chatsky_ui/schemas/front_graph_components/interface.py index 26ae339d..e21c1d15 100644 --- a/backend/chatsky_ui/schemas/front_graph_components/interface.py +++ b/backend/chatsky_ui/schemas/front_graph_components/interface.py @@ -1,20 +1,18 @@ -from pydantic import Field, model_validator -from typing import Any +import os +from typing import Any, Dict, Optional -from .base_component import BaseComponent -from typing import Optional, Dict from dotenv import load_dotenv -import os +from pydantic import Field, model_validator from chatsky_ui.core.config import settings +from .base_component import BaseComponent + +load_dotenv(os.path.join(settings.work_directory, ".env"), override=True) -load_dotenv(os.path.join(settings.work_directory, ".env")) class Interface(BaseComponent): - model_config = { - "extra": "forbid" } 
+ model_config = {"extra": "forbid"} telegram: Optional[Dict[str, Any]] = Field(default=None) http: Optional[Dict[str, Any]] = Field(default=None) @@ -28,7 +26,6 @@ def check_one_not_none(cls, values): @model_validator(mode="after") def check_telegram_token(cls, values): - load_dotenv(os.path.join(settings.work_directory, ".env")) tg_bot_token = os.getenv("TG_BOT_TOKEN") if values.telegram is not None and not tg_bot_token: raise ValueError("Telegram token must be provided.") diff --git a/backend/chatsky_ui/schemas/front_graph_components/node.py b/backend/chatsky_ui/schemas/front_graph_components/node.py index 4911333c..1f9e56e3 100644 --- a/backend/chatsky_ui/schemas/front_graph_components/node.py +++ b/backend/chatsky_ui/schemas/front_graph_components/node.py @@ -1,4 +1,5 @@ from typing import List + from pydantic import model_validator from .base_component import BaseComponent @@ -23,7 +24,7 @@ class SlotsNode(Node): groups: List[dict] @model_validator(mode="after") - def check_unique_groups_names(cls, values) -> 'SlotsNode': + def check_unique_groups_names(cls, values) -> "SlotsNode": groups_names = [group["name"] for group in values.groups] if len(groups_names) != len(set(groups_names)): raise ValueError(f"Slot groups names should be unique. 
Got duplicates: {groups_names}") diff --git a/backend/chatsky_ui/services/json_converter/base_converter.py b/backend/chatsky_ui/services/json_converter/base_converter.py new file mode 100644 index 00000000..aa5dff17 --- /dev/null +++ b/backend/chatsky_ui/services/json_converter/base_converter.py @@ -0,0 +1,10 @@ +from abc import ABC, abstractmethod + + +class BaseConverter(ABC): + def __call__(self, *args, **kwargs): + return self._convert() + + @abstractmethod + def _convert(self): + raise NotImplementedError diff --git a/backend/chatsky_ui/services/json_converter/consts.py b/backend/chatsky_ui/services/json_converter/consts.py new file mode 100644 index 00000000..388bc405 --- /dev/null +++ b/backend/chatsky_ui/services/json_converter/consts.py @@ -0,0 +1,3 @@ +RESPONSES_FILE = "responses" +CONDITIONS_FILE = "conditions" +CUSTOM_FILE = "custom" diff --git a/backend/chatsky_ui/services/json_converter/flow_converter.py b/backend/chatsky_ui/services/json_converter/flow_converter.py new file mode 100644 index 00000000..ddbb1aae --- /dev/null +++ b/backend/chatsky_ui/services/json_converter/flow_converter.py @@ -0,0 +1,72 @@ +from typing import Any, Dict, List, Tuple + +from ...schemas.front_graph_components.flow import Flow +from .base_converter import BaseConverter +from .node_converter import InfoNodeConverter, LinkNodeConverter + + +class FlowConverter(BaseConverter): + NODE_CONVERTERS = { + "default_node": InfoNodeConverter, + "link_node": LinkNodeConverter, + } + + def __init__(self, flow: Dict[str, Any]): + self._validate_flow(flow) + self.flow = Flow( + name=flow["name"], + nodes=flow["data"]["nodes"], + edges=flow["data"]["edges"], + ) + + def __call__(self, *args, **kwargs): + self.mapped_flows = kwargs["mapped_flows"] + self.slots_conf = kwargs["slots_conf"] + self._integrate_edges_into_nodes() + return super().__call__(*args, **kwargs) + + def _validate_flow(self, flow: Dict[str, Any]): + if "data" not in flow or "nodes" not in flow["data"] or "edges" not 
in flow["data"]: + raise ValueError("Invalid flow structure") + + def _integrate_edges_into_nodes(self): + def _insert_dst_into_condition( + node: Dict[str, Any], condition_id: str, target_node: Tuple[str, str] + ) -> Dict[str, Any]: + for condition in node["data"]["conditions"]: + if condition["id"] == condition_id: + condition["dst"] = target_node + return node + + maped_edges = self._map_edges() + nodes = self.flow.nodes.copy() + for edge in maped_edges: + for idx, node in enumerate(nodes): + if node["id"] == edge["source"]: + nodes[idx] = _insert_dst_into_condition(node, edge["sourceHandle"], edge["target"]) + self.flow.nodes = nodes + + def _map_edges(self) -> List[Dict[str, Any]]: + def _get_flow_and_node_names(target_node): + node_type = target_node["type"] + if node_type == "link_node": # TODO: WHY CONVERTING HERE? + return LinkNodeConverter(target_node)(mapped_flows=self.mapped_flows) + elif node_type == "default_node": + return [self.flow.name, target_node["data"]["name"]] + + edges = self.flow.edges.copy() + for edge in edges: + target_id = edge["target"] + target_node = self.mapped_flows[self.flow.name].get(target_id) + if target_node: + edge["target"] = _get_flow_and_node_names(target_node) + return edges + + def _convert(self) -> Dict[str, Any]: + converted_flow = {self.flow.name: {}} + for node in self.flow.nodes: + if node["type"] == "default_node": + converted_flow[self.flow.name].update( + {node["data"]["name"]: InfoNodeConverter(node)(slots_conf=self.slots_conf)} + ) + return converted_flow diff --git a/backend/chatsky_ui/services/json_converter/interface_converter.py b/backend/chatsky_ui/services/json_converter/interface_converter.py new file mode 100644 index 00000000..380a6dcc --- /dev/null +++ b/backend/chatsky_ui/services/json_converter/interface_converter.py @@ -0,0 +1,15 @@ +from chatsky_ui.core.config import settings + +from ...schemas.front_graph_components.interface import Interface +from .base_converter import BaseConverter + + +class 
InterfaceConverter(BaseConverter): + def __init__(self, interface: dict): + self.interface = Interface(**interface) + + def _convert(self): + if self.interface.http is not None: + return {"chatsky.messengers.HTTPMessengerInterface": {"port": settings.chatsky_port}} + elif self.interface.telegram is not None: + return {"chatsky.messengers.TelegramInterface": {"token": {"external:os.getenv": "TG_BOT_TOKEN"}}} diff --git a/backend/chatsky_ui/services/json_converter/logic_component_converter/condition_converter.py b/backend/chatsky_ui/services/json_converter/logic_component_converter/condition_converter.py new file mode 100644 index 00000000..a59ce3db --- /dev/null +++ b/backend/chatsky_ui/services/json_converter/logic_component_converter/condition_converter.py @@ -0,0 +1,57 @@ +from abc import ABC, abstractmethod + +from ....core.config import settings +from ....schemas.front_graph_components.info_holders.condition import CustomCondition, SlotCondition +from ..base_converter import BaseConverter +from ..consts import CONDITIONS_FILE, CUSTOM_FILE +from .service_replacer import store_custom_service + + +class BadConditionException(Exception): + pass + + +class ConditionConverter(BaseConverter, ABC): + @abstractmethod + def get_pre_transitions(): + raise NotImplementedError + + +class CustomConditionConverter(ConditionConverter): + def __init__(self, condition: dict): + self.condition = None + try: + self.condition = CustomCondition( + name=condition["name"], + code=condition["data"]["python"]["action"], + ) + except KeyError as missing_key: + raise BadConditionException("Missing key in custom condition data") from missing_key + + def _convert(self): + store_custom_service(settings.conditions_path, [self.condition.code]) + custom_cnd = {f"{CUSTOM_FILE}.{CONDITIONS_FILE}.{self.condition.name}": None} + return custom_cnd + + def get_pre_transitions(self): + return {} + + +class SlotConditionConverter(ConditionConverter): + def __init__(self, condition: dict): + 
self.condition = None + try: + self.condition = SlotCondition(slot_id=condition["data"]["slot"], name=condition["name"]) + except KeyError as missing_key: + raise BadConditionException("Missing key in slot condition data") from missing_key + + def __call__(self, *args, **kwargs): + self.slots_conf = kwargs["slots_conf"] + return super().__call__(*args, **kwargs) + + def _convert(self): + return {"chatsky.conditions.slots.SlotsExtracted": self.slots_conf[self.condition.slot_id]} + + def get_pre_transitions(self): + slot_path = self.slots_conf[self.condition.slot_id] # type: ignore + return {slot_path: {"chatsky.processing.slots.Extract": slot_path}} diff --git a/backend/chatsky_ui/services/json_converter/logic_component_converter/response_converter.py b/backend/chatsky_ui/services/json_converter/logic_component_converter/response_converter.py new file mode 100644 index 00000000..cb2624ee --- /dev/null +++ b/backend/chatsky_ui/services/json_converter/logic_component_converter/response_converter.py @@ -0,0 +1,42 @@ +from ....core.config import settings +from ....schemas.front_graph_components.info_holders.response import CustomResponse, TextResponse +from ..base_converter import BaseConverter +from ..consts import CUSTOM_FILE, RESPONSES_FILE +from .service_replacer import store_custom_service + + +class BadResponseException(Exception): + pass + + +class ResponseConverter(BaseConverter): + pass + + +class TextResponseConverter(ResponseConverter): + def __init__(self, response: dict): + try: + self.response = TextResponse( + name=response["name"], + text=next(iter(response["data"]))["text"], + ) + except KeyError as e: + raise BadResponseException("Missing key in text response data") from e + + def _convert(self): + return {"chatsky.Message": {"text": self.response.text}} + + +class CustomResponseConverter(ResponseConverter): + def __init__(self, response: dict): + try: + self.response = CustomResponse( + name=response["name"], + 
code=next(iter(response["data"]))["python"]["action"], + ) + except KeyError as e: + raise BadResponseException("Missing key in custom response data") from e + + def _convert(self): + store_custom_service(settings.responses_path, [self.response.code]) + return {f"{CUSTOM_FILE}.{RESPONSES_FILE}.{self.response.name}": None} diff --git a/backend/chatsky_ui/services/json_converter/logic_component_converter/service_replacer.py b/backend/chatsky_ui/services/json_converter/logic_component_converter/service_replacer.py new file mode 100644 index 00000000..6dee45c1 --- /dev/null +++ b/backend/chatsky_ui/services/json_converter/logic_component_converter/service_replacer.py @@ -0,0 +1,82 @@ +import ast +from ast import NodeTransformer +from pathlib import Path +from typing import Dict, List + +from chatsky_ui.core.logger_config import get_logger + + +class ServiceReplacer(NodeTransformer): + def __init__(self, new_services: List[str]): + self.new_services_classes = self._get_classes_def(new_services) + self._logger = None + + @property + def logger(self): + if self._logger is None: + raise ValueError("Logger has not been configured. 
Call set_logger() first.") + return self._logger + + def set_logger(self): + self._logger = get_logger(__name__) + + def _get_classes_def(self, services_code: List[str]) -> Dict[str, ast.ClassDef]: + classes: Dict[str, ast.ClassDef] = {} + for idx, parsed_code in enumerate(ast.parse(service_code) for service_code in services_code): + classes.update(self._extract_class_defs(parsed_code, services_code[idx])) + return classes + + def _extract_class_defs(self, parsed_code: ast.Module, service_code: str): + classes = {} + for node in parsed_code.body: + if isinstance(node, ast.ClassDef): + classes[node.name] = node + else: + self.logger.error("No class definition found in new_service: %s", service_code) + return classes + + def visit_ClassDef(self, node: ast.ClassDef) -> ast.ClassDef: + self.logger.debug("Visiting class '%s' and comparing with: %s", node.name, self.new_services_classes.keys()) + if node.name in self.new_services_classes: + return self._get_class_def(node) + return node + + def _get_class_def(self, node: ast.ClassDef) -> ast.ClassDef: + service = self.new_services_classes[node.name] + del self.new_services_classes[node.name] + self.logger.info("Updating class '%s'", node.name) + return service + + def generic_visit(self, node: ast.AST): + super().generic_visit(node) + if isinstance(node, ast.Module) and self.new_services_classes: + self._append_new_services(node) + return node + + def _append_new_services(self, node: ast.Module): + self.logger.info("Services not found, appending new services: %s", list(self.new_services_classes.keys())) + for _, service in self.new_services_classes.items(): + node.body.append(service) + + +def store_custom_service(services_path: Path, services: List[str]): + with open(services_path, "r", encoding="UTF-8") as file: + conditions_tree = ast.parse(file.read()) + + replacer = ServiceReplacer(services) + replacer.set_logger() + replacer.visit(conditions_tree) + + with open(services_path, "w") as file: + 
file.write(ast.unparse(conditions_tree)) + + +def get_all_classes(services_path): + with open(services_path, "r", encoding="UTF-8") as file: + conditions_tree = ast.parse(file.read()) + + return [ + {"name": node.name, "body": ast.unparse(node)} + for node in conditions_tree.body + if isinstance(node, ast.ClassDef) + ] diff --git a/backend/chatsky_ui/services/json_converter/node_converter.py b/backend/chatsky_ui/services/json_converter/node_converter.py new file mode 100644 index 00000000..9656479b --- /dev/null +++ b/backend/chatsky_ui/services/json_converter/node_converter.py @@ -0,0 +1,84 @@ +from chatsky import PRE_RESPONSE, PRE_TRANSITION, RESPONSE, TRANSITIONS + +from ...schemas.front_graph_components.node import InfoNode, LinkNode +from .base_converter import BaseConverter +from .logic_component_converter.condition_converter import CustomConditionConverter, SlotConditionConverter +from .logic_component_converter.response_converter import CustomResponseConverter, TextResponseConverter + + +class NodeConverter(BaseConverter): + RESPONSE_CONVERTER = { + "text": TextResponseConverter, + "python": CustomResponseConverter, + } + CONDITION_CONVERTER = { + "python": CustomConditionConverter, + "slot": SlotConditionConverter, + } + + def __init__(self, config: dict): + pass + + +class InfoNodeConverter(NodeConverter): + MAP_TR2CHATSKY = { + "start": "dst.Start", + "fallback": "dst.Fallback", + "previous": "dst.Previous", + "repeat": "dst.Current", + } + + def __init__(self, node: dict): + self.node = InfoNode( + id=node["id"], + name=node["data"]["name"], + response=node["data"]["response"], + conditions=node["data"]["conditions"], + ) + + def __call__(self, *args, **kwargs): + self.slots_conf = kwargs["slots_conf"] + return super().__call__(*args, **kwargs) + + def _convert(self): + condition_converters = [ + self.CONDITION_CONVERTER[condition["type"]](condition) for condition in self.node.conditions + ] + return { + RESPONSE: 
self.RESPONSE_CONVERTER[self.node.response["type"]](self.node.response)(), + TRANSITIONS: [ + { + "dst": condition["dst"] + if "dst" in condition and condition["data"]["transition_type"] == "manual" + else self.MAP_TR2CHATSKY[condition["data"]["transition_type"]], + "priority": condition["data"]["priority"], + "cnd": converter(slots_conf=self.slots_conf), + } + for condition, converter in zip(self.node.conditions, condition_converters) + ], + PRE_TRANSITION: { + key: value + for converter in condition_converters + for key, value in converter.get_pre_transitions().items() + }, + PRE_RESPONSE: {"fill": {"chatsky.processing.FillTemplate": None}}, + } + + +class LinkNodeConverter(NodeConverter): + def __init__(self, config: dict): + self.node = LinkNode( + id=config["id"], + target_flow_name=config["data"]["transition"]["target_flow"], + target_node_id=config["data"]["transition"]["target_node"], + ) + + def __call__(self, *args, **kwargs): + self.mapped_flows = kwargs["mapped_flows"] + return super().__call__(*args, **kwargs) + + def _convert(self): + return [ + self.node.target_flow_name, + self.mapped_flows[self.node.target_flow_name][self.node.target_node_id]["data"]["name"], + ] diff --git a/backend/chatsky_ui/services/json_converter/pipeline_converter.py b/backend/chatsky_ui/services/json_converter/pipeline_converter.py new file mode 100644 index 00000000..969640fe --- /dev/null +++ b/backend/chatsky_ui/services/json_converter/pipeline_converter.py @@ -0,0 +1,48 @@ +from pathlib import Path + +import yaml + +try: + from yaml import CDumper as Dumper + from yaml import CLoader as Loader +except ImportError: + from yaml import Loader, Dumper + +from ...schemas.front_graph_components.pipeline import Pipeline +from .base_converter import BaseConverter +from .interface_converter import InterfaceConverter +from .script_converter import ScriptConverter +from .slots_converter import SlotsConverter + + +class PipelineConverter(BaseConverter): + def __call__(self, 
input_file: Path, output_dir: Path): + self.from_yaml(file_path=input_file) + + self.pipeline = Pipeline(**self.graph) + self.converted_pipeline = super().__call__() + + self.to_yaml(dir_path=output_dir) + + def from_yaml(self, file_path: Path): + with open(str(file_path), "r", encoding="UTF-8") as file: + self.graph = yaml.load(file, Loader=Loader) + + def to_yaml(self, dir_path: Path): + with open(f"{dir_path}/build.yaml", "w", encoding="UTF-8") as file: + yaml.dump(self.converted_pipeline, file, Dumper=Dumper, default_flow_style=False) + + def _convert(self): + slots_converter = SlotsConverter(self.pipeline.flows) + script_converter = ScriptConverter(self.pipeline.flows) + + slots_conf = slots_converter.map_slots() + start_label, fallback_label = script_converter.extract_start_fallback_labels() + + return { + "script": script_converter(slots_conf=slots_conf), + "messenger_interface": InterfaceConverter(self.pipeline.interface)(), + "slots": slots_converter(), + "start_label": start_label, + "fallback_label": fallback_label, + } diff --git a/backend/chatsky_ui/services/json_converter/script_converter.py b/backend/chatsky_ui/services/json_converter/script_converter.py new file mode 100644 index 00000000..f1f2197d --- /dev/null +++ b/backend/chatsky_ui/services/json_converter/script_converter.py @@ -0,0 +1,51 @@ +from typing import List + +from ...schemas.front_graph_components.script import Script +from .base_converter import BaseConverter +from .flow_converter import FlowConverter + + +class ScriptConverter(BaseConverter): + def __init__(self, flows: List[dict]): + self.script = Script(flows=flows) + self.mapped_flows = self._map_flows() # TODO: think about storing this in a temp file + + def __call__(self, *args, **kwargs): + self.slots_conf = kwargs["slots_conf"] + return super().__call__(*args, **kwargs) + + def _convert(self): + return { + key: value + for flow in self.script.flows + for key, value in FlowConverter(flow)(mapped_flows=self.mapped_flows, 
slots_conf=self.slots_conf).items() + } + + def _map_flows(self): + mapped_flows = {} + for flow in self.script.flows: + mapped_flows[flow["name"]] = {} + for node in flow["data"]["nodes"]: + mapped_flows[flow["name"]][node["id"]] = node + return mapped_flows + + def extract_start_fallback_labels(self): # TODO: refactor this huge method + start_label, fallback_label = None, None + + for flow in self.script.flows: + for node in flow["data"]["nodes"]: + flags = node["data"].get("flags", []) + + if "start" in flags: + if start_label: + raise ValueError("Multiple start nodes found") + start_label = [flow["name"], node["data"]["name"]] + if "fallback" in flags: + if fallback_label: + raise ValueError("Multiple fallback nodes found") + fallback_label = [flow["name"], node["data"]["name"]] + + if start_label and fallback_label: + return start_label, fallback_label + + return start_label, fallback_label # return None, None diff --git a/backend/chatsky_ui/services/json_converter/slots_converter.py b/backend/chatsky_ui/services/json_converter/slots_converter.py new file mode 100644 index 00000000..3bd9970b --- /dev/null +++ b/backend/chatsky_ui/services/json_converter/slots_converter.py @@ -0,0 +1,75 @@ +from typing import List + +from ...schemas.front_graph_components.node import SlotsNode +from ...schemas.front_graph_components.slot import GroupSlot, RegexpSlot +from .base_converter import BaseConverter + + +class SlotsConverter(BaseConverter): + def __init__(self, flows: List[dict]): + def _get_slots_node(flows): + return next( + iter([node for flow in flows for node in flow["data"]["nodes"] if node["type"] == "slots_node"]), + {"id": "999999", "data": {"groups": []}}, + ) + + slots_node = _get_slots_node(flows) + self.slots_node = SlotsNode( + id=slots_node["id"], + groups=slots_node["data"]["groups"], + ) + + def map_slots(self): + mapped_slots = {} + for group in self.slots_node.groups.copy(): + for slot in group["slots"]: + mapped_slots[slot["id"]] = 
".".join([group["name"], slot["name"]]) + return mapped_slots + + def _convert(self): + return {key: value for group in self.slots_node.groups for key, value in GroupSlotConverter(group)().items()} + + +class RegexpSlotConverter(SlotsConverter): + def __init__(self, slot: dict): + self.slot = RegexpSlot( + id=slot["id"], + name=slot["name"], + regexp=slot["value"], + match_group_idx=slot.get("match_group_idx", 1), + ) + + def _convert(self): + return { + self.slot.name: { + "chatsky.slots.RegexpSlot": { + "regexp": self.slot.regexp, + "match_group_idx": self.slot.match_group_idx, + } + } + } + + +class GroupSlotConverter(SlotsConverter): + SLOTS_CONVERTER_TYPES = { + "GroupSlot": "self", # Placeholder, will be replaced in __init__ + "RegexpSlot": RegexpSlotConverter, + } + + def __init__(self, slot: dict): + # Replace the placeholder with the actual class reference + self.SLOTS_CONVERTER_TYPES["GroupSlot"] = GroupSlotConverter + + self.slot = GroupSlot( + name=slot["name"], + slots=slot["slots"], + ) + + def _convert(self): + return { + self.slot.name: { + key: value + for slot in self.slot.slots + for key, value in self.SLOTS_CONVERTER_TYPES[slot["type"]](slot)().items() + } + } diff --git a/backend/chatsky_ui/services/process.py b/backend/chatsky_ui/services/process.py index c3b4a821..7ae7020d 100644 --- a/backend/chatsky_ui/services/process.py +++ b/backend/chatsky_ui/services/process.py @@ -12,17 +12,15 @@ from datetime import datetime from pathlib import Path from typing import Any, Dict, List, Optional -from httpx import AsyncClient -from chatsky.messengers.http_interface import HTTP_INTERFACE_PORT from dotenv import load_dotenv +from httpx import AsyncClient from chatsky_ui.core.config import settings from chatsky_ui.core.logger_config import get_logger, setup_logging from chatsky_ui.db.base import read_conf, write_conf from chatsky_ui.schemas.process_status import Status -from chatsky_ui.utils.git_cmd import get_repo, save_frontend_graph_to_git, 
save_built_script_to_git - +from chatsky_ui.utils.git_cmd import get_repo, save_built_script_to_git load_dotenv() @@ -42,6 +40,7 @@ def __init__(self, id_: int, preset_end_status: str = ""): self.lock: asyncio.Lock = asyncio.Lock() self.process: Optional[asyncio.subprocess.Process] = None self.logger: logging.Logger + self.to_be_terminated = False async def start(self, cmd_to_run: str) -> None: """Starts an asynchronous process with the given command.""" @@ -63,6 +62,7 @@ async def get_full_info(self, attributes: list) -> Dict[str, Any]: Returns: dict: A dictionary containing the values of the attributes mentioned in the list. """ + def _map_to_str(params: Dict[str, Any]): for k, v in params.copy().items(): if isinstance(v, datetime): @@ -84,10 +84,16 @@ async def update_db_info(self): async def periodically_check_status(self) -> None: """Periodically checks the process status and updates the database.""" - while True: + while not self.to_be_terminated: await self.update_db_info() # check status and update db self.logger.info("Status of process '%s': %s", self.id, self.status) - if self.status in [Status.NULL, Status.STOPPED, Status.COMPLETED, Status.FAILED]: + if self.status in [ + Status.NULL, + Status.STOPPED, + Status.COMPLETED, + Status.FAILED, + Status.FAILED_WITH_UNEXPECTED_CODE, + ]: break await asyncio.sleep(2) # TODO: ?sleep time shouldn't be constant @@ -163,7 +169,7 @@ async def stop(self) -> None: self.logger.error("Process group '%s' not found. 
It may have already exited.", self.id) raise ProcessLookupError from exc - def add_new_conf(self, conf: list, params: dict) -> list: #TODO: rename conf everywhere to metadata/meta + def add_new_conf(self, conf: list, params: dict) -> list: # TODO: rename conf everywhere to metadata/meta for run in conf: if run.id == params["id"]: # type: ignore for key, value in params.items(): @@ -196,7 +202,7 @@ async def update_db_info(self) -> None: runs_conf = await read_conf(settings.runs_path) run_params = await self.get_full_info() - runs_conf = self.add_new_conf(runs_conf, run_params) # type: ignore + runs_conf = self.add_new_conf(runs_conf, run_params) # type: ignore await write_conf(runs_conf, settings.runs_path) @@ -209,9 +215,10 @@ async def update_db_info(self) -> None: break await write_conf(builds_conf, settings.builds_path) - + async def is_alive(self) -> bool: """Checks if the process is alive by writing to stdin and reading its stdout.""" + async def check_telegram_readiness(stream, name): async for line in stream: decoded_line = line.decode().strip() @@ -225,12 +232,12 @@ async def check_telegram_readiness(stream, name): async with AsyncClient() as client: try: response = await client.get( - f"http://localhost:{HTTP_INTERFACE_PORT}/health", + f"http://localhost:{settings.chatsky_port}/health", ) return response.json()["status"] == "ok" except Exception as e: self.logger.info( - f"Process '{self.id}' isn't alive on port '{HTTP_INTERFACE_PORT}' yet. " + f"Process '{self.id}' isn't alive on port '{settings.chatsky_port}' yet. " f"Ignore this if you're not connecting via HTTPInterface. 
Exception caught: {e}" ) @@ -274,7 +281,7 @@ async def update_db_info(self) -> None: builds_conf = await read_conf(settings.builds_path) build_params = await self.get_full_info() - builds_conf = self.add_new_conf(builds_conf, build_params) # type: ignore + builds_conf = self.add_new_conf(builds_conf, build_params) # type: ignore await write_conf(builds_conf, settings.builds_path) @@ -284,7 +291,7 @@ def save_built_script_to_git(self, id_: int) -> None: async def periodically_check_status(self) -> None: """Periodically checks the process status and updates the database.""" - while True: + while not self.to_be_terminated: await self.update_db_info() # check status and update db self.logger.info("Status of process '%s': %s", self.id, self.status) if self.status in [Status.NULL, Status.STOPPED, Status.COMPLETED, Status.FAILED]: @@ -293,4 +300,4 @@ async def periodically_check_status(self) -> None: await asyncio.sleep(2) # TODO: ?sleep time shouldn't be constant async def is_alive(self) -> bool: - return False + return False \ No newline at end of file diff --git a/backend/chatsky_ui/services/process_manager.py b/backend/chatsky_ui/services/process_manager.py index 8184fc7d..6f8744e2 100644 --- a/backend/chatsky_ui/services/process_manager.py +++ b/backend/chatsky_ui/services/process_manager.py @@ -6,9 +6,11 @@ starting, stopping, updating, and checking status of processes. Processes themselves are stored in the `processes` dictionary of process managers. 
""" +import os from pathlib import Path from typing import Any, Dict, List, Optional, Union +from dotenv import load_dotenv from omegaconf import OmegaConf from chatsky_ui.core.config import settings @@ -57,10 +59,10 @@ async def stop(self, id_: int) -> None: raise async def stop_all(self) -> None: - self.logger.info("Stopping all process %s", self.processes) for id_, process in self.processes.items(): - if process.process.returncode is None: + if await process.check_status() in [Status.ALIVE, Status.RUNNING]: await self.stop(id_) + await process.update_db_info() async def check_status(self, id_: int, *args, **kwargs) -> None: """Checks the status of the process with the given id by calling the `periodically_check_status` @@ -135,6 +137,8 @@ async def start(self, build_id: int, preset: Preset) -> int: self.last_id += 1 id_ = self.last_id process = RunProcess(id_, build_id, preset.end_status) + + load_dotenv(os.path.join(settings.work_directory, ".env"), override=True) await process.start(cmd_to_run) process.logger.debug("Started process. 
status: '%s'", process.process.returncode) self.processes[id_] = process @@ -178,18 +182,16 @@ async def start(self, preset: Preset) -> int: id_ = self.last_id if self.is_repeated_id(id_): - raise ValueError(f"Build id '{id_}' already exists in the database") + raise ValueError(f"Build id '{id_}' already exists in the database") process = BuildProcess(id_, preset.end_status) if self.is_changed_graph(id_): cmd_to_run = ( - f"chatsky.ui build_bot " - f"--preset {preset.end_status} " - f"--project-dir {settings.work_directory}" + f"chatsky.ui build_bot " f"--preset {preset.end_status} " f"--project-dir {settings.work_directory}" ) await process.start(cmd_to_run) self.processes[id_] = process - + return self.last_id async def check_status(self, id_, *args, **kwargs): diff --git a/backend/chatsky_ui/tests/api/test_bot.py b/backend/chatsky_ui/tests/api/test_bot.py index f6b5c2b4..20f70b5e 100644 --- a/backend/chatsky_ui/tests/api/test_bot.py +++ b/backend/chatsky_ui/tests/api/test_bot.py @@ -6,7 +6,6 @@ _stop_process, check_build_processes, check_run_processes, - connect, get_build_logs, get_run_logs, start_build, @@ -16,8 +15,6 @@ from chatsky_ui.services.process_manager import RunManager PROCESS_ID = 0 -RUN_ID = 0 -BUILD_ID = 0 @pytest.mark.asyncio @@ -63,27 +60,27 @@ async def test_check_process_status(mocker): @pytest.mark.asyncio -async def test_start_build(mocker): +async def test_start_build(mocker, dummy_build_id): build_manager = mocker.MagicMock() preset = mocker.MagicMock() - start = mocker.AsyncMock(return_value=BUILD_ID) + start = mocker.AsyncMock(return_value=dummy_build_id) mocker.patch.multiple(build_manager, start=start, check_status=mocker.AsyncMock()) mocker.patch.multiple(preset, wait_time=0, end_status="loop") response = await start_build(preset, background_tasks=BackgroundTasks(), build_manager=build_manager) start.assert_awaited_once_with(preset) - assert response == {"status": "ok", "build_id": BUILD_ID} + assert response == {"status": "ok", 
"build_id": dummy_build_id} @pytest.mark.asyncio -async def test_check_build_processes_some_info(mocker, pagination): +async def test_check_build_processes_some_info(mocker, pagination, dummy_build_id): build_manager = mocker.AsyncMock() run_manager = mocker.AsyncMock() - await check_build_processes(BUILD_ID, build_manager, run_manager, pagination) + await check_build_processes(dummy_build_id, build_manager, run_manager, pagination) - build_manager.get_build_info.assert_awaited_once_with(BUILD_ID, run_manager) + build_manager.get_build_info.assert_awaited_once_with(dummy_build_id, run_manager) @pytest.mark.asyncio @@ -100,37 +97,37 @@ async def test_check_build_processes_all_info(mocker, pagination): @pytest.mark.asyncio -async def test_get_build_logs(mocker, pagination): +async def test_get_build_logs(mocker, pagination, dummy_build_id): build_manager = mocker.AsyncMock() - await get_build_logs(BUILD_ID, build_manager, pagination) + await get_build_logs(dummy_build_id, build_manager, pagination) - build_manager.fetch_build_logs.assert_awaited_once_with(BUILD_ID, pagination.offset(), pagination.limit) + build_manager.fetch_build_logs.assert_awaited_once_with(dummy_build_id, pagination.offset(), pagination.limit) @pytest.mark.asyncio -async def test_start_run(mocker): +async def test_start_run(mocker, dummy_build_id, dummy_run_id): run_manager = mocker.MagicMock() preset = mocker.MagicMock() - start = mocker.AsyncMock(return_value=RUN_ID) + start = mocker.AsyncMock(return_value=dummy_run_id) mocker.patch.multiple(run_manager, start=start, check_status=mocker.AsyncMock()) mocker.patch.multiple(preset, wait_time=0, end_status="loop") response = await start_run( - build_id=BUILD_ID, preset=preset, background_tasks=BackgroundTasks(), run_manager=run_manager + build_id=dummy_build_id, preset=preset, background_tasks=BackgroundTasks(), run_manager=run_manager ) - start.assert_awaited_once_with(BUILD_ID, preset) - assert response == {"status": "ok", "run_id": RUN_ID} + 
start.assert_awaited_once_with(dummy_build_id, preset) + assert response == {"status": "ok", "run_id": dummy_run_id} @pytest.mark.asyncio -async def test_check_run_processes_some_info(mocker, pagination): +async def test_check_run_processes_some_info(mocker, pagination, dummy_run_id): run_manager = mocker.AsyncMock() - await check_run_processes(RUN_ID, run_manager, pagination) + await check_run_processes(dummy_run_id, run_manager, pagination) - run_manager.get_run_info.assert_awaited_once_with(RUN_ID) + run_manager.get_run_info.assert_awaited_once_with(dummy_run_id) @pytest.mark.asyncio @@ -144,27 +141,9 @@ async def test_check_run_processes_all_info(mocker, pagination): @pytest.mark.asyncio -async def test_get_run_logs(mocker, pagination): +async def test_get_run_logs(mocker, pagination, dummy_run_id): run_manager = mocker.AsyncMock() - await get_run_logs(RUN_ID, run_manager, pagination) + await get_run_logs(dummy_run_id, run_manager, pagination) - run_manager.fetch_run_logs.assert_awaited_once_with(RUN_ID, pagination.offset(), pagination.limit) - - -@pytest.mark.asyncio -async def test_connect(mocker): - websocket = mocker.AsyncMock() - websocket_manager = mocker.AsyncMock() - websocket_manager.disconnect = mocker.MagicMock() - run_manager = mocker.AsyncMock() - run_process = mocker.AsyncMock() - run_manager.processes = {RUN_ID: run_process} - mocker.patch.object(websocket, "query_params", {"run_id": str(RUN_ID)}) - - await connect(websocket, websocket_manager, run_manager) - - websocket_manager.connect.assert_awaited_once_with(websocket) - websocket_manager.send_process_output_to_websocket.assert_awaited_once_with(RUN_ID, run_manager, websocket) - websocket_manager.forward_websocket_messages_to_process.assert_awaited_once_with(RUN_ID, run_manager, websocket) - websocket_manager.disconnect.assert_called_once_with(websocket) + run_manager.fetch_run_logs.assert_awaited_once_with(dummy_run_id, pagination.offset(), pagination.limit) diff --git 
a/backend/chatsky_ui/tests/conftest.py b/backend/chatsky_ui/tests/conftest.py index 2f4d1987..18046da9 100644 --- a/backend/chatsky_ui/tests/conftest.py +++ b/backend/chatsky_ui/tests/conftest.py @@ -15,11 +15,18 @@ from chatsky_ui.main import app from chatsky_ui.schemas.pagination import Pagination from chatsky_ui.schemas.preset import Preset -from chatsky_ui.services.process import RunProcess +from chatsky_ui.services.process import BuildProcess, RunProcess from chatsky_ui.services.process_manager import BuildManager, RunManager -DUMMY_BUILD_ID = -1 +@pytest.fixture(scope="session") +def dummy_build_id() -> int: + return 999999 + + +@pytest.fixture(scope="session") +def dummy_run_id() -> int: + return 999999 async def start_process(async_client: AsyncClient, endpoint, preset_end_status) -> httpx.Response: @@ -29,18 +36,22 @@ async def start_process(async_client: AsyncClient, endpoint, preset_end_status) ) -@asynccontextmanager -async def override_dependency(mocker_obj, get_manager_func): - process_manager = get_manager_func() - process_manager.check_status = mocker_obj.AsyncMock() - app.dependency_overrides[get_manager_func] = lambda: process_manager - try: - yield process_manager - finally: - for _, process in process_manager.processes.items(): - if process.process.returncode is None: - await process.stop() - app.dependency_overrides = {} +@pytest.fixture +def override_dependency(mocker): + @asynccontextmanager + async def _override_dependency(get_manager_func): + process_manager = get_manager_func() + process_manager.check_status = mocker.AsyncMock() + app.dependency_overrides[get_manager_func] = lambda: process_manager + try: + yield process_manager + finally: + for _, process in process_manager.processes.items(): + if process.process.returncode is None: + await process.stop() + app.dependency_overrides = {} + + return _override_dependency @pytest.fixture @@ -63,15 +74,25 @@ def pagination() -> Pagination: @pytest.fixture() -def run_process(): - async def 
_run_process(cmd_to_run): - process = RunProcess(id_=0, build_id=DUMMY_BUILD_ID) +def run_process(dummy_build_id, dummy_run_id): + async def _run_process(cmd_to_run) -> RunProcess: + process = RunProcess(id_=dummy_run_id, build_id=dummy_build_id) await process.start(cmd_to_run) return process return _run_process +@pytest.fixture() +def build_process(dummy_build_id): + async def _build_process(cmd_to_run) -> BuildProcess: + process = BuildProcess(id_=dummy_build_id) + await process.start(cmd_to_run) + return process + + return _build_process + + @pytest.fixture() def run_manager(): manager = RunManager() @@ -82,4 +103,3 @@ def run_manager(): @pytest.fixture() def build_manager(): return BuildManager() - diff --git a/backend/chatsky_ui/tests/integration/test_bot.py b/backend/chatsky_ui/tests/integration/test_bot.py new file mode 100644 index 00000000..4908c8f5 --- /dev/null +++ b/backend/chatsky_ui/tests/integration/test_bot.py @@ -0,0 +1,97 @@ +import asyncio +import os + +import pytest +from dotenv import load_dotenv +from httpx import AsyncClient +from httpx._transports.asgi import ASGITransport + +from chatsky_ui.api.deps import get_build_manager, get_run_manager +from chatsky_ui.core.logger_config import get_logger +from chatsky_ui.main import app +from chatsky_ui.schemas.process_status import Status + +load_dotenv() + +BUILD_COMPLETION_TIMEOUT = float(os.getenv("BUILD_COMPLETION_TIMEOUT", 10)) +RUN_RUNNING_TIMEOUT = float(os.getenv("RUN_RUNNING_TIMEOUT", 5)) + +logger = get_logger(__name__) + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "preset_status, expected_status", + [("failure", Status.FAILED), ("loop", Status.RUNNING), ("success", Status.COMPLETED)], +) +async def test_start_build(mocker, override_dependency, preset_status, expected_status): + async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as async_client: + async with override_dependency(get_build_manager) as process_manager: + 
process_manager.save_built_script_to_git = mocker.MagicMock() + process_manager.is_changed_graph = mocker.MagicMock(return_value=True) + + response = await async_client.post( + "/api/v1/bot/build/start", + json={"wait_time": 0.1, "end_status": preset_status}, + ) + + assert response.json().get("status") == "ok", "Start process response status is not 'ok'" + + process_id = process_manager.last_id + process = process_manager.processes[process_id] + + try: + await asyncio.wait_for(process.process.wait(), timeout=BUILD_COMPLETION_TIMEOUT) + except asyncio.exceptions.TimeoutError as exc: + if preset_status == "loop": + logger.debug("Loop process timed out. Expected behavior.") + assert True + await process.stop() + return + else: + raise Exception( + f"Process with expected end status '{preset_status}' timed out with " + f"return code '{process.process.returncode}'." + ) from exc + + current_status = await process_manager.get_status(process_id) + assert ( + current_status == expected_status + ), f"Current process status '{current_status}' did not match the expected '{expected_status}'" + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "preset_status, expected_status", [("failure", Status.FAILED), ("loop", Status.RUNNING), ("success", Status.ALIVE)] +) +async def test_start_run(override_dependency, preset_status, expected_status, dummy_build_id): + async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as async_client: + async with override_dependency(get_run_manager) as process_manager: + response = await async_client.post( + f"/api/v1/bot/run/start/{dummy_build_id}", + json={"wait_time": 0.1, "end_status": preset_status}, + ) + + assert response.json().get("status") == "ok", "Start process response status is not 'ok'" + + process_id = process_manager.last_id + process = process_manager.processes[process_id] + + try: + await asyncio.wait_for(process.process.wait(), timeout=RUN_RUNNING_TIMEOUT) + except asyncio.exceptions.TimeoutError as 
exc: + if preset_status == "loop": + logger.debug("Loop process timed out. Expected behavior.") + assert True + await process.stop() + return + else: + raise Exception( + f"Process with expected end status '{preset_status}' timed out with " + f"return code '{process.process.returncode}'." + ) from exc + + current_status = await process_manager.get_status(process_id) + assert ( + current_status == expected_status + ), f"Current process status '{current_status}' did not match the expected '{expected_status}'" diff --git a/backend/chatsky_ui/tests/services/test_process.py b/backend/chatsky_ui/tests/services/test_process.py index c0c958db..2fb1af42 100644 --- a/backend/chatsky_ui/tests/services/test_process.py +++ b/backend/chatsky_ui/tests/services/test_process.py @@ -2,10 +2,19 @@ import pytest +from chatsky_ui.core.config import settings +from chatsky_ui.db.base import read_conf from chatsky_ui.schemas.process_status import Status class TestRunProcess: + @pytest.mark.asyncio + async def test_get_full_info(self, run_process): + process = await run_process("sleep 10000") + await asyncio.sleep(2) + info = await process.get_full_info(["status", "timestamp"]) + assert info["status"] == Status.RUNNING.value + @pytest.mark.asyncio @pytest.mark.parametrize( "cmd_to_run, status", @@ -20,10 +29,6 @@ async def test_check_status(self, run_process, cmd_to_run, status): await asyncio.sleep(2) assert await process.check_status() == status - # def test_periodically_check_status(self, run_process): - # process = await run_process("sleep 10000") - # run_process.periodically_check_status() - @pytest.mark.asyncio async def test_stop(self, run_process): process = await run_process("sleep 10000") @@ -31,18 +36,19 @@ async def test_stop(self, run_process): assert process.process.returncode == -15 @pytest.mark.asyncio - async def test_read_stdout(self, run_process): + async def test_update_db_info(self, run_process, dummy_run_id): process = await run_process("echo Hello") - output = await 
process.read_stdout() - assert output.strip().decode() == "Hello" + await process.update_db_info() - @pytest.mark.asyncio - async def test_write_stdout(self, run_process): - process = await run_process("cat") - await process.write_stdin(b"Chatsky-UI team welcome you.\n") - output = await process.process.stdout.readline() - assert output.decode().strip() == "Chatsky-UI team welcome you." + runs_conf = await read_conf(settings.runs_path) + assert dummy_run_id in [conf["id"] for conf in runs_conf] # type: ignore -# class TestBuildProcess: -# pass +class TestBuildProcess: + @pytest.mark.asyncio + async def test_update_db_info(self, build_process, dummy_build_id): + process = await build_process("echo Hello") + await process.update_db_info() + + builds_conf = await read_conf(settings.builds_path) + assert dummy_build_id in [conf["id"] for conf in builds_conf] # type: ignore diff --git a/backend/chatsky_ui/tests/services/test_websocket_manager.py b/backend/chatsky_ui/tests/services/test_websocket_manager.py deleted file mode 100644 index ada2589a..00000000 --- a/backend/chatsky_ui/tests/services/test_websocket_manager.py +++ /dev/null @@ -1,57 +0,0 @@ -import pytest -from fastapi import WebSocket - - -class TestWebSocketManager: - @pytest.mark.asyncio - async def test_connect(self, mocker, websocket_manager): - mocked_websocket = mocker.MagicMock(spec=WebSocket) - - await websocket_manager.connect(mocked_websocket) - - mocked_websocket.accept.assert_awaited_once_with() - assert mocked_websocket in websocket_manager.active_connections - - @pytest.mark.asyncio - async def test_disconnect(self, mocker, websocket_manager): - mocked_websocket = mocker.MagicMock(spec=WebSocket) - websocket_manager.active_connections.append(mocked_websocket) - websocket_manager.pending_tasks[mocked_websocket] = set() - - websocket_manager.disconnect(mocked_websocket) - - assert mocked_websocket not in websocket_manager.pending_tasks - assert mocked_websocket not in 
websocket_manager.active_connections - - @pytest.mark.asyncio - async def test_send_process_output_to_websocket(self, mocker, websocket_manager): - run_id = 42 - awaited_response = "Hello from DF-Designer" - - websocket = mocker.AsyncMock() - run_manager = mocker.MagicMock() - run_process = mocker.MagicMock() - run_process.read_stdout = mocker.AsyncMock(side_effect=[awaited_response.encode(), None]) - run_manager.processes = {run_id: run_process} - - await websocket_manager.send_process_output_to_websocket(run_id, run_manager, websocket) - - assert run_process.read_stdout.call_count == 2 - websocket.send_text.assert_awaited_once_with(awaited_response) - - @pytest.mark.asyncio - async def test_forward_websocket_messages_to_process(self, mocker, websocket_manager): - run_id = 42 - awaited_message = "Hello from DF-Designer" - - websocket = mocker.AsyncMock() - websocket.receive_text = mocker.AsyncMock(side_effect=[awaited_message, None]) - run_manager = mocker.MagicMock() - run_process = mocker.MagicMock() - run_process.write_stdin = mocker.AsyncMock() - run_manager.processes = {run_id: run_process} - - await websocket_manager.forward_websocket_messages_to_process(run_id, run_manager, websocket) - - assert websocket.receive_text.await_count == 2 - run_process.write_stdin.assert_called_once_with(awaited_message.encode() + b"\n") diff --git a/backend/chatsky_ui/tests/unit/conftest.py b/backend/chatsky_ui/tests/unit/conftest.py index 1ccea99b..28008cfd 100644 --- a/backend/chatsky_ui/tests/unit/conftest.py +++ b/backend/chatsky_ui/tests/unit/conftest.py @@ -1,6 +1,6 @@ import pytest +from chatsky import PRE_RESPONSE, PRE_TRANSITION, RESPONSE, TRANSITIONS -from chatsky import PRE_RESPONSE, RESPONSE, TRANSITIONS, PRE_TRANSITION @pytest.fixture def custom_condition(): @@ -138,13 +138,9 @@ def mapped_flow(info_node): @pytest.fixture def telegram_interface(): - return { - "telegram": {} - } + return {"telegram": {}} @pytest.fixture def chatsky_telegram_interface(): - return { 
- "chatsky.messengers.TelegramInterface": {"token": {"external:os.getenv": "TG_BOT_TOKEN"}} - } + return {"chatsky.messengers.TelegramInterface": {"token": {"external:os.getenv": "TG_BOT_TOKEN"}}} diff --git a/backend/chatsky_ui/tests/unit/test_flow_converter.py b/backend/chatsky_ui/tests/unit/test_flow_converter.py index cb53310b..2d87d325 100644 --- a/backend/chatsky_ui/tests/unit/test_flow_converter.py +++ b/backend/chatsky_ui/tests/unit/test_flow_converter.py @@ -1,21 +1,18 @@ -import pytest -from pathlib import Path import os +from pathlib import Path + +import pytest import yaml -from chatsky_ui.services.json_converter_new2.flow_converter import FlowConverter -from chatsky_ui.services.json_converter_new2.script_converter import ScriptConverter -from chatsky_ui.services.json_converter_new2.interface_converter import InterfaceConverter -from chatsky_ui.services.json_converter_new2.pipeline_converter import PipelineConverter +from chatsky_ui.services.json_converter.flow_converter import FlowConverter +from chatsky_ui.services.json_converter.interface_converter import InterfaceConverter +from chatsky_ui.services.json_converter.pipeline_converter import PipelineConverter +from chatsky_ui.services.json_converter.script_converter import ScriptConverter @pytest.fixture def chatsky_flow(chatsky_node): - return { - "test_flow": { - "test_node": chatsky_node - } - } + return {"test_flow": {"test_node": chatsky_node}} class TestFlowConverter: diff --git a/backend/chatsky_ui/tests/unit/test_logic_components.py b/backend/chatsky_ui/tests/unit/test_logic_components.py index b102b0fa..fdd231e6 100644 --- a/backend/chatsky_ui/tests/unit/test_logic_components.py +++ b/backend/chatsky_ui/tests/unit/test_logic_components.py @@ -1,16 +1,18 @@ +from pathlib import Path + import pytest -from chatsky_ui.services.json_converter_new2.logic_component_converter.condition_converter import ( + +from chatsky_ui.services.json_converter.logic_component_converter.condition_converter import ( 
+ BadConditionException, CustomConditionConverter, SlotConditionConverter, - BadConditionException, ) -from chatsky_ui.services.json_converter_new2.logic_component_converter.response_converter import ( - TextResponseConverter, - CustomResponseConverter, +from chatsky_ui.services.json_converter.logic_component_converter.response_converter import ( BadResponseException, + CustomResponseConverter, + TextResponseConverter, ) -from chatsky_ui.services.json_converter_new2.logic_component_converter.service_replacer import store_custom_service -from pathlib import Path +from chatsky_ui.services.json_converter.logic_component_converter.service_replacer import store_custom_service @pytest.fixture diff --git a/backend/chatsky_ui/tests/unit/test_node_converer.py b/backend/chatsky_ui/tests/unit/test_node_converer.py index 882e3984..7bdb9a00 100644 --- a/backend/chatsky_ui/tests/unit/test_node_converer.py +++ b/backend/chatsky_ui/tests/unit/test_node_converer.py @@ -1,4 +1,4 @@ -from chatsky_ui.services.json_converter_new2.node_converter import InfoNodeConverter, LinkNodeConverter +from chatsky_ui.services.json_converter.node_converter import InfoNodeConverter, LinkNodeConverter class TestNodeConverter: diff --git a/backend/chatsky_ui/tests/unit/test_slots_converter.py b/backend/chatsky_ui/tests/unit/test_slots_converter.py index 3e54c2a1..feca20aa 100644 --- a/backend/chatsky_ui/tests/unit/test_slots_converter.py +++ b/backend/chatsky_ui/tests/unit/test_slots_converter.py @@ -1,8 +1,4 @@ -from chatsky_ui.services.json_converter_new2.slots_converter import ( - SlotsConverter, - RegexpSlotConverter, - GroupSlotConverter, -) +from chatsky_ui.services.json_converter.slots_converter import GroupSlotConverter, RegexpSlotConverter, SlotsConverter class TestSlotsConverter: diff --git a/backend/poetry.lock b/backend/poetry.lock index ff1e8617..81f0c1a9 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -413,12 +413,14 @@ files = [ [[package]] name = "chatsky" -version = 
"0.9.0" +version = "0.9.0.dev1" description = "Chatsky is a free and open-source software stack for creating chatbots, released under the terms of Apache License 2.0." optional = false -python-versions = "^3.9,!=3.9.7" -files = [] -develop = false +python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,!=3.8.*,>=3.9" +files = [ + {file = "chatsky-0.9.0.dev1-py3-none-any.whl", hash = "sha256:b49acf9abaf5e12fcdd1f03fd47d8f1b9dc373a4fa2e8ece19961be03152b2e2"}, + {file = "chatsky-0.9.0.dev1.tar.gz", hash = "sha256:a5f7ac9e810095d34788f39f35825799da2857c7698caad1cd12a1c70ef7061c"}, +] [package.dependencies] colorama = "*" @@ -1530,13 +1532,13 @@ unidecode = ["Unidecode (>=1.1.1)"] [[package]] name = "python-telegram-bot" -version = "21.8" +version = "21.7" description = "We have made you a wrapper you can't refuse" optional = false python-versions = ">=3.9" files = [ - {file = "python_telegram_bot-21.8-py3-none-any.whl", hash = "sha256:2fea8e65d97e593f47666e7de81fb15bd517100504e98ca7cb399ee6ce4f3838"}, - {file = "python_telegram_bot-21.8.tar.gz", hash = "sha256:114437f194e32eee0651f25fb1ab1664b605e877ca33adf58547a599afe73b72"}, + {file = "python_telegram_bot-21.7-py3-none-any.whl", hash = "sha256:aff1d7245f1b0d4d12d41c9acff74e86d7100713c2204cd02ff17f8d80d18846"}, + {file = "python_telegram_bot-21.7.tar.gz", hash = "sha256:bc8537b77ae02531fc2ad440caafc023fd13f13cf19e592dfa1a9ff84988a012"}, ] [package.dependencies] @@ -2393,4 +2395,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9,!=3.9.7" -content-hash = "8539df9b57bdf437aebcc0ea06e63d3da12180a44a64282f95a5f903e7d0ab54" +content-hash = "a676df6948689fe5ff0e011484f12ecdde34add2dd2dfca1d290f1d8f8203674" diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 45427923..a089a25a 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "chatsky-ui" -version = "0.3.0" +version = "0.4.0" description = 
"Chatsky-UI is GUI for Chatsky Framework, that is a free and open-source software stack for creating chatbots, released under the terms of Apache License 2.0." license = "Apache-2.0" authors = [ @@ -20,7 +20,7 @@ typer = "^0.9.0" pydantic-settings = "^2.2.1" aiofiles = "^23.2.1" cookiecutter = "^2.6.0" -chatsky = {extras = ["yaml", "telegram"], git = "https://github.com/deeppavlov/chatsky.git", branch = "feat/add-docker-image"} +chatsky = {extras = ["yaml", "telegram"], version = "==0.9.0.dev1"} omegaconf = "^2.3.0" httpx = "^0.27.0" httpx-ws = "^0.6.0" diff --git a/bin/add_ui_to_toml.sh b/bin/add_ui_to_toml.sh deleted file mode 100755 index a0be2353..00000000 --- a/bin/add_ui_to_toml.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -# Find the latest version of the wheel file -VERSION=$(basename $(ls ../backend/dist/chatsky_ui-*.whl) | sed -E 's/chatsky_ui-([^-]+)-.*/\1/' | head -n 1) - -# Add the specific version to my project -poetry add ../backend/dist/chatsky_ui-$VERSION-py3-none-any.whl