feat: Support Pythonic response (#60)
* feat: Support response converting

* fix: Get response text from its data

* chore: Raise error if logs dir doesn't exist

* feat: Convert to dff `random choice` response

* chore: Allow for all-list response data
Ramimashkouk authored Jul 8, 2024
1 parent a350b49 commit d8a1d5a
Showing 3 changed files with 81 additions and 35 deletions.
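
Before the file-by-file diff, a hedged sketch of the three frontend response shapes this commit teaches the converter to handle. The shapes are reconstructed from `update_responses_lines` in `json_converter.py` below; the concrete names and texts are invented for illustration.

```python
# Sketch only: the field names come from this diff; the values are made up.

# type == "python": the function source is appended to bot/custom/responses.py
# and the node's RESPONSE becomes a string reference to it.
python_response = {
    "name": "greet",
    "type": "python",
    "data": [{"python": {"action": "def greet(ctx, pipeline):\n    ..."}}],
}
# -> RESPONSE == "custom_dir.responses.greet"

# type == "text": a single message.
text_response = {"type": "text", "data": [{"text": "Hello!"}]}
# -> RESPONSE == {"dff.Message": {"text": "Hello!"}}

# type == "choice": a list of messages, each of which must carry a "text" field.
choice_response = {"type": "choice", "data": [{"text": "Hi!"}, {"text": "Hey!"}]}
# -> RESPONSE == {"dff.rsp.choice": [{"dff.Message": {"text": "Hi!"}},
#                                    {"dff.Message": {"text": "Hey!"}}]}
```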
4 changes: 3 additions & 1 deletion backend/df_designer/app/cli.py
@@ -10,7 +10,7 @@

 from app.core.config import settings
 from app.core.logger_config import get_logger
-from app.services.json_converter import converter


 cli = typer.Typer()

@@ -61,6 +61,8 @@ def build_bot(build_id: int, project_dir: str = settings.work_directory, preset:

 @cli.command("build_scenario")
 def build_scenario(build_id: int, project_dir: str = ".", call_from_open_event_loop: bool = False):
+    from app.services.json_converter import converter  # pylint: disable=C0415
+
     if call_from_open_event_loop:
         loop = asyncio.get_event_loop()
         loop.create_task(converter(build_id=build_id, project_dir=project_dir))
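
A usage sketch of the two invocation paths `build_scenario` distinguishes; the diff only shows the open-event-loop branch, so the first variant is an assumption, and `build_id=1` and the project directory are placeholders:

```python
import asyncio

from app.services.json_converter import converter

# No loop running yet (the plain CLI case): run the coroutine to completion.
asyncio.run(converter(build_id=1, project_dir="."))

# call_from_open_event_loop=True: a loop is already running, so schedule the
# coroutine as a task on it instead of blocking, as the diff above does.
loop = asyncio.get_event_loop()
task = loop.create_task(converter(build_id=1, project_dir="."))
```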
3 changes: 2 additions & 1 deletion backend/df_designer/app/core/logger_config.py
@@ -35,7 +35,8 @@ def setup_logging(log_type: Literal["builds", "runs"], id_: int, timestamp: date
 def get_logger(name, file_handler_path: Optional[Path] = None):
     if file_handler_path is None:
         file_handler_path = settings.dir_logs / "logs.log"
-    file_handler_path.parent.mkdir(parents=True, exist_ok=True)
+    if not file_handler_path.parent.exists():
+        raise FileNotFoundError(f"Directory {file_handler_path.parent} doesn't exist")
     file_handler_path.touch(exist_ok=True)

     logger = logging.getLogger(name)
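
This flips `get_logger`'s contract: it no longer creates a missing logs directory, it reports it. A minimal sketch of what callers must now guarantee, with a made-up path:

```python
from pathlib import Path

from app.core.logger_config import get_logger

log_file = Path("df_designer/logs/builds/build_1.log")
log_file.parent.mkdir(parents=True, exist_ok=True)  # now the caller's job
logger = get_logger(__name__, file_handler_path=log_file)  # raises FileNotFoundError if the directory is missing
logger.info("logger configured")
```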
109 changes: 76 additions & 33 deletions backend/df_designer/app/services/json_converter.py
@@ -17,22 +17,25 @@
 logger = get_logger(__name__)


-def _get_db_paths(build_id: int, project_dir: Path, custom_dir: str) -> Tuple[Path, Path, Path]:
+def _get_db_paths(build_id: int, project_dir: Path, custom_dir: str) -> Tuple[Path, Path, Path, Path]:
     """Get paths to frontend graph, dff script, and dff custom conditions and responses files."""

     frontend_graph_path = project_dir / "df_designer" / "frontend_flows.yaml"
     custom_conditions_file = project_dir / "bot" / custom_dir / "conditions.py"
+    custom_responses_file = project_dir / "bot" / custom_dir / "responses.py"
     script_path = project_dir / "bot" / "scripts" / f"build_{build_id}.yaml"

     if not frontend_graph_path.exists():
         raise FileNotFoundError(f"File {frontend_graph_path} doesn't exist")
     if not custom_conditions_file.exists():
         raise FileNotFoundError(f"File {custom_conditions_file} doesn't exist")
+    if not custom_responses_file.exists():
+        raise FileNotFoundError(f"File {custom_responses_file} doesn't exist")
     if not script_path.exists():
         script_path.parent.mkdir(parents=True, exist_ok=True)
         script_path.touch()

-    return frontend_graph_path, script_path, custom_conditions_file
+    return frontend_graph_path, script_path, custom_conditions_file, custom_responses_file


 def _organize_graph_according_to_nodes(flow_graph: DictConfig, script: dict) -> dict:
@@ -56,22 +59,13 @@ def _organize_graph_according_to_nodes(flow_graph: DictConfig, script: dict) ->

 def _get_condition(nodes: dict, edge: DictConfig) -> DictConfig | None:
     """Get node's condition from `nodes` according to `edge` info."""
-    try:
-        return next(
-            condition
-            for condition in nodes[edge.source]["info"].data.conditions
-            if condition["id"] == edge.sourceHandle
-        )
-    except StopIteration:
-        logger.debug(
-            "Condition of edge '%s' and id of '%s' is not found in the corresponding node. Expected behavior",
-            edge.source,
-            edge.sourceHandle,
-        )
-        return None
+    return next(
+        (condition for condition in nodes[edge.source]["info"].data.conditions if condition["id"] == edge.sourceHandle),
+        None,
+    )


-def _write_conditions_to_file(conditions_lines: list, custom_conditions_file: Path) -> None:
+def _write_list_to_file(conditions_lines: list, custom_conditions_file: Path) -> None:
     """Write a list of lines (dff custom conditions or responses) to file."""
     # TODO: make reading and writing conditions async
     with open(custom_conditions_file, "w", encoding="UTF-8") as file:
@@ -103,21 +97,22 @@ def _fill_nodes_into_script(nodes: dict, script: dict) -> None:
script[node["flow"]].update(
{
node["info"].data.name: {
"RESPONSE": {"dff.Message": {"text": node["info"].data.response}},
"RESPONSE": node["info"].data.response,
"TRANSITIONS": node["TRANSITIONS"],
}
}
)


def _append_condition(condition: DictConfig, conditions_lines: list) -> list:
def _append(service: DictConfig, services_lines: list) -> list:
"""Append a condition to a list"""
condition_with_newline = "".join([condition.data.python.action + "\n\n"])
if service.type == "python":
service_with_newline = "".join([service.data.python.action + "\n\n"])

logger.debug("Condition to append: %s", condition_with_newline)
logger.debug("conditions_lines before appending: %s", conditions_lines)
logger.debug("Service to append: %s", service_with_newline)
logger.debug("services_lines before appending: %s", services_lines)

all_lines = conditions_lines + condition_with_newline.split("\n")
all_lines = services_lines + service_with_newline.split("\n")
return all_lines


@@ -136,21 +131,22 @@ async def _shift_cnds_in_index(index: Index, cnd_strt_lineno: int, diff_in_lines
     )


-async def _replace_condition(condition: DictConfig, conditions_lines: list, cnd_strt_lineno: int, index: Index) -> list:
-    """Replace a condition in a conditions list with a new one.
+async def _replace(service: DictConfig, services_lines: list, cnd_strt_lineno: int, index: Index) -> list:
+    """Replace a service in a services list with a new one.

     Args:
-        condition: condition to replace. `condition.data.python.action` is a string with the new condition
+        service: service to replace. `service.data.python.action` is a string with the new service (condition or response)
         services_lines: list of service lines
-        cnd_strt_lineno: a pointer to the condition start line in custom conditions file
+        cnd_strt_lineno: a pointer to the service start line in the custom conditions/responses file
         index: index object to update

     Returns:
         list of all services as lines
     """
     cnd_strt_lineno = cnd_strt_lineno - 1  # conversion from file numeration to list numeration
-    all_lines = conditions_lines.copy()
-    condition = "".join([condition.data.python.action + "\n\n"])
+    all_lines = services_lines.copy()
+    if service.type == "python":
+        condition = "".join([service.data.python.action + "\n\n"])
     new_cnd_lines = condition.split("\n")
@@ -173,21 +169,67 @@ async def _replace_condition(condition: DictConfig, conditions_lines: list, cnd_
     return all_lines


+async def update_responses_lines(nodes: dict, responses_lines: list, index: Index) -> tuple[dict, list[str]]:
+    """Organize the responses in nodes in a format that the json-importer accepts.
+
+    If the response type is "python", its function will be added to responses_lines to be written
+    to the custom_responses_file later.
+    * If the response already exists in responses_lines, it will be replaced with the new one.
+    """
+    for node in nodes.values():
+        response = node["info"].data.response
+        logger.debug("response type: %s", response.type)
+        if response.type == "python":
+            response.data = response.data[0]
+            if response.name not in (rsp_names := index.index):
+                logger.debug("Adding response: %s", response.name)
+                rsp_lineno = len(responses_lines)
+                responses_lines = _append(response, responses_lines)
+                await index.indexit(response.name, "response", rsp_lineno + 1)
+            else:
+                logger.debug("Replacing response: %s", response.name)
+                responses_lines = await _replace(response, responses_lines, rsp_names[response.name]["lineno"], index)
+            node["info"].data.response = f"custom_dir.responses.{response.name}"
+        elif response.type == "text":
+            response.data = response.data[0]
+            logger.debug("Adding response: %s", response.data.text)
+            node["info"].data.response = {"dff.Message": {"text": response.data.text}}
+        elif response.type == "choice":
+            dff_responses = []
+            for message in response.data:
+                if "text" in message:
+                    dff_responses.append({"dff.Message": {"text": message["text"]}})
+                else:
+                    raise ValueError("Unknown response type. There must be a 'text' field in each message.")
+            node["info"].data.response = {"dff.rsp.choice": dff_responses.copy()}
+        else:
+            raise ValueError(f"Unknown response type: {response.type}")
+    return nodes, responses_lines


 async def converter(build_id: int, project_dir: str, custom_dir: str = "custom") -> None:
     """Translate frontend flow script into dff script."""
     index = get_index()
     await index.load()
     index.logger.debug("Loaded index '%s'", index.index)

-    frontend_graph_path, script_path, custom_conditions_file = _get_db_paths(build_id, Path(project_dir), custom_dir)
+    frontend_graph_path, script_path, custom_conditions_file, custom_responses_file = _get_db_paths(
+        build_id, Path(project_dir), custom_dir
+    )

     script = {
         "CONFIG": {"custom_dir": "/".join(["..", custom_dir])},
     }
-    flow_graph = await read_conf(frontend_graph_path)
+    flow_graph: DictConfig = await read_conf(frontend_graph_path)  # type: ignore

     nodes = _organize_graph_according_to_nodes(flow_graph, script)

+    with open(custom_responses_file, "r", encoding="UTF-8") as file:
+        responses_lines = file.readlines()
+
+    nodes, responses_lines = await update_responses_lines(nodes, responses_lines, index)
+
     with open(custom_conditions_file, "r", encoding="UTF-8") as file:
         conditions_lines = file.readlines()
Expand All @@ -207,11 +249,11 @@ async def converter(build_id: int, project_dir: str, custom_dir: str = "custom")
             if condition.name not in (cnd_names := index.index):
                 logger.debug("Adding condition: %s", condition.name)
                 cnd_lineno = len(conditions_lines)
-                conditions_lines = _append_condition(condition, conditions_lines)
+                conditions_lines = _append(condition, conditions_lines)
                 await index.indexit(condition.name, "condition", cnd_lineno + 1)
             else:
                 logger.debug("Replacing condition: %s", condition.name)
-                conditions_lines = await _replace_condition(
+                conditions_lines = await _replace(
                     condition, conditions_lines, cnd_names[condition.name]["lineno"], index
                 )
@@ -221,5 +263,6 @@ async def converter(build_id: int, project_dir: str, custom_dir: str = "custom")

     _fill_nodes_into_script(nodes, script)

-    _write_conditions_to_file(conditions_lines, custom_conditions_file)
+    _write_list_to_file(conditions_lines, custom_conditions_file)
+    _write_list_to_file(responses_lines, custom_responses_file)
     await write_conf(script, script_path)
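
For orientation, a hedged sketch of the dict `converter` now assembles and serializes to `bot/scripts/build_<id>.yaml`. Flow and node names are invented, and the `TRANSITIONS` payloads are elided since this diff doesn't show how they are built:

```python
script = {
    "CONFIG": {"custom_dir": "../custom"},  # "/".join(["..", custom_dir])
    "example_flow": {
        "text_node": {
            "RESPONSE": {"dff.Message": {"text": "Hello!"}},  # response type "text"
            "TRANSITIONS": {},  # elided
        },
        "choice_node": {
            "RESPONSE": {  # response type "choice"
                "dff.rsp.choice": [
                    {"dff.Message": {"text": "Hi!"}},
                    {"dff.Message": {"text": "Hey there!"}},
                ]
            },
            "TRANSITIONS": {},  # elided
        },
        "python_node": {
            "RESPONSE": "custom_dir.responses.my_response",  # response type "python"
            "TRANSITIONS": {},  # elided
        },
    },
}
```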
