JSON path prompt debug facilitation (run-llama#9097)
dylan1218 authored and Izuki Matsuba committed Mar 29, 2024
1 parent caf0352 commit 3d38adb
Showing 2 changed files with 33 additions and 1 deletion.
26 changes: 26 additions & 0 deletions llama-index-core/llama_index/core/indices/struct_store/json_query.py
@@ -1,5 +1,6 @@
import json
import logging
import re
from typing import Any, Callable, Dict, List, Optional, Union

from llama_index.core.base.base_query_engine import BaseQueryEngine
@@ -43,6 +44,19 @@
)


def default_output_response_parser(llm_output: str) -> str:
"""Parse the JSON Path expression out of the LLM response. Only applicable when the default JSON Path prompt is used."""
try:
llm_output_parsed = re.search(
pattern=r"JSONPath:\s+(.*)", string=llm_output
).groups()[0]
except Exception as exc:
raise ValueError(
"JSON Path could not be parsed from the LLM response after the 'JSONPath:' marker. "
"Try passing a custom JSON path prompt and output processor."
) from exc
return llm_output_parsed


def default_output_processor(llm_output: str, json_value: JSONType) -> JSONType:
"""Default output processor that extracts values based on JSON Path expressions."""
# Split the given string into separate JSON Path expressions
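To make the new behavior concrete, here is a standalone sketch of the extraction default_output_response_parser performs; the input string is a hypothetical LLM response in the format the updated default prompt requests.

import re

# Hypothetical raw LLM response following the updated default prompt's format.
llm_output = "Response: JSONPath: $.John.age"

# Same pattern used by default_output_response_parser: capture everything
# after the 'JSONPath:' marker.
match = re.search(r"JSONPath:\s+(.*)", llm_output)
if match is None:
    raise ValueError("No 'JSONPath:' marker found in the LLM response.")

json_path = match.group(1)
print(json_path)  # -> $.John.age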
@@ -164,6 +178,12 @@ def _query(self, query_bundle: QueryBundle) -> Response:
**self._output_kwargs,
)

# Strip the 'JSONPath: ' prefix from the LLM response when the default prompt is used.
if self._json_path_prompt == DEFAULT_JSON_PATH_PROMPT:
json_path_response_str = default_output_response_parser(
json_path_response_str
)

if self._verbose:
print_text(f"> JSONPath Output: {json_path_output}\n")
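The "JSONPath Output" printed above is the result of evaluating the extracted expression against the engine's JSON value via the output processor. A minimal sketch of that evaluation, assuming jsonpath-ng as the underlying JSON Path library (the data below is illustrative):

from jsonpath_ng import parse

# Illustrative JSON value matching the example in the default prompt.
json_value = {"John": {"age": 30}}

# Expression as extracted by default_output_response_parser.
expression = parse("$.John.age")
results = [match.value for match in expression.find(json_value)]
print(results)  # -> [30]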

@@ -193,6 +213,12 @@ async def _aquery(self, query_bundle: QueryBundle) -> Response:
query_str=query_bundle.query_str,
)

# Strip the 'JSONPath: ' prefix from the LLM response when the default prompt is used.
if self._json_path_prompt == DEFAULT_JSON_PATH_PROMPT:
json_path_response_str = default_output_response_parser(
json_path_response_str
)

if self._verbose:
print_text(
f"> JSONPath Instructions:\n" f"```\n{json_path_response_str}\n```\n"
8 changes: 7 additions & 1 deletion llama-index-core/llama_index/core/prompts/default_prompts.py
@@ -390,8 +390,14 @@
"{schema}\n"
"Given a task, respond with a JSON Path query that "
"can retrieve data from a JSON value that matches the schema.\n"
"Provide the JSON Path query in the following format: 'JSONPath: <JSONPath>'\n"
"You must include the value 'JSONPath:' before the provided JSON Path query."
"Example Format:\n"
"Task: What is John's age?\n"
"Response: JSONPath: $.John.age\n"
"Let's try this now: \n\n"
"Task: {query_str}\n"
"JSONPath: "
"Response: "
)

DEFAULT_JSON_PATH_PROMPT = PromptTemplate(
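The ValueError added in the first file suggests supplying a custom JSON path prompt and output processor when this 'JSONPath:' format is not wanted; with a non-default prompt the new prefix-stripping guard is skipped entirely. A hedged sketch of such a custom prompt follows; the PromptTemplate import comes from this repository, while the engine wiring in the trailing comment (json_path_prompt, output_processor keyword arguments) is an assumption about JSONQueryEngine's constructor:

from llama_index.core.prompts import PromptTemplate

# A custom prompt that asks for a bare JSON Path expression, with no
# 'JSONPath:' marker, so default_output_response_parser is bypassed by the
# guard shown in _query/_aquery above.
CUSTOM_JSON_PATH_PROMPT = PromptTemplate(
    "We have provided a JSON schema below:\n"
    "{schema}\n"
    "Given a task, respond with only a JSON Path query that can retrieve data "
    "from a JSON value matching the schema. Do not add any prefix or prose.\n"
    "Task: {query_str}\n"
)

# Hypothetical wiring (keyword arguments are assumptions):
# engine = JSONQueryEngine(
#     json_value=...,
#     json_schema=...,
#     json_path_prompt=CUSTOM_JSON_PATH_PROMPT,
#     output_processor=my_output_processor,
# )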
