Skip to content

Commit

Permalink
Added conversion from pydantic to JSON schema
Browse files Browse the repository at this point in the history
  • Loading branch information
ngafar committed Feb 12, 2025
1 parent 6a14b98 commit b86d267
Show file tree
Hide file tree
Showing 2 changed files with 29 additions and 5 deletions.
17 changes: 13 additions & 4 deletions mito-ai/mito_ai/providers.py
Original file line number Diff line number Diff line change
Expand Up @@ -272,13 +272,22 @@ async def request_completions(
)
else:
# If they don't have an Open AI key, use the mito server to get a completion
self.log.debug(f"Requesting completion from Mito server with model {model}.")
self.log.debug(
f"Requesting completion from Mito server with model {model}."
)
global _num_usages
if _num_usages is None:
_num_usages = get_user_field(UJ_AI_MITO_API_NUM_USAGES)

completion_function_params = get_open_ai_completion_function_params(model, request.messages, False, response_format)


if response_format:
completion_function_params = get_open_ai_completion_function_params(
model, request.messages, False, response_format
)
else:
completion_function_params = get_open_ai_completion_function_params(
model, request.messages, False
)

last_message_content = str(request.messages[-1].get("content", "")) if request.messages else None
ai_response = await get_ai_completion_from_mito_server(
last_message_content,
Expand Down
17 changes: 16 additions & 1 deletion mito-ai/mito_ai/utils/open_ai_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -126,9 +126,24 @@ def get_open_ai_completion_function_params(
"model": model,
"stream": stream,
"messages": messages,
"response_format": response_format,
}

# If a response format is provided, we need to convert it to a json schema.
# Pydantic models are supported by the OpenAI API, however, we need to be able to
# serialize it for requests that are going to be sent to the mito server.
# OpenAI expects a very specific schema as seen below.
if response_format:
json_schema = response_format.schema()
completion_function_params["response_format"] = {
"type": "json_schema",
"json_schema": {
"name": "plan_of_attack",
"schema": json_schema,
"strict": True
}
}
completion_function_params["response_format"]["json_schema"]["schema"]["additionalProperties"] = False

# o3-mini will error if we try setting the temperature
if model == "gpt-4o-mini":
completion_function_params["temperature"] = 0.0
Expand Down

0 comments on commit b86d267

Please sign in to comment.