Skip to content

Commit

Permalink
fix: assistant function call when multiple tools are needed in the same… (
Browse files Browse the repository at this point in the history
#4483)

* fix: assistant function call when multiple tools are needed in the same query

* fix: ts

---------

Co-authored-by: Nicolas Burtey <[email protected]>
  • Loading branch information
nicolasburtey and Nicolas Burtey authored May 20, 2024
1 parent 4310a7d commit 39c0111
Show file tree
Hide file tree
Showing 3 changed files with 43 additions and 28 deletions.
2 changes: 2 additions & 0 deletions core/api/src/domain/support/errors.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,8 @@ export class UnknownPineconeError extends SupportError {
export class ChatAssistantError extends SupportError {}
export class ChatAssistantNotFoundError extends SupportError {}

export class TimeoutAssistantError extends SupportError {}

export class UnknownChatAssistantError extends ChatAssistantError {
level = ErrorLevel.Critical
}
1 change: 1 addition & 0 deletions core/api/src/graphql/error-map.ts
Original file line number Diff line number Diff line change
Expand Up @@ -823,6 +823,7 @@ export const mapError = (error: ApplicationError): CustomGraphQLError => {
case "UnknownPineconeError":
case "CallbackServiceError":
case "ChatAssistantNotFoundError":
case "TimeoutAssistantError":
message = `Unknown error occurred (code: ${error.name})`
return new UnknownClientError({ message, logger: baseLogger })

Expand Down
68 changes: 40 additions & 28 deletions core/api/src/services/openai/assistant.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import { sleep } from "@/utils"
import { UnknownDomainError } from "@/domain/shared"
import {
ChatAssistantNotFoundError,
TimeoutAssistantError,
UnknownChatAssistantError,
} from "@/domain/support/errors"

Expand Down Expand Up @@ -134,30 +135,36 @@ export const Assistant = (): ChatAssistant => {
}
}

const processAction = async (run: OpenAI.Beta.Threads.Runs.Run) => {
const processAction = async (run: OpenAI.Beta.Threads.Runs.Run): Promise<string[]> => {
const action = run.required_action
assert(action?.type === "submit_tool_outputs")

const name = action.submit_tool_outputs.tool_calls[0].function.name
assert(name === "queryBlinkKnowledgeBase")
const outputs: string[] = []

const args = action.submit_tool_outputs.tool_calls[0].function.arguments
const query = JSON.parse(args).query_str
for (const toolCall of action.submit_tool_outputs.tool_calls) {
const name = toolCall.function.name
assert(name === "queryBlinkKnowledgeBase")

const vector = await textToVector(query)
if (vector instanceof Error) throw vector
const args = toolCall.function.arguments
const query = JSON.parse(args).query_str

const relatedQueries = await retrieveRelatedQueries(vector)
if (relatedQueries instanceof Error) throw relatedQueries
const vector = await textToVector(query)
if (vector instanceof Error) throw vector

let output = ""
let i = 0
for (const query of relatedQueries) {
output += `Context chunk ${i}:\n${query}\n-----\n`
i += 1
const relatedQueries = await retrieveRelatedQueries(vector)
if (relatedQueries instanceof Error) throw relatedQueries

let output = ""
let i = 0
for (const query of relatedQueries) {
output += `Context chunk ${i}:\n${query}\n-----\n`
i += 1
}

outputs.push(output)
}

return output
return outputs
}

const waitForCompletion = async ({
Expand All @@ -166,8 +173,11 @@ export const Assistant = (): ChatAssistant => {
}: {
runId: string
threadId: string
}) => {
}): Promise<true | ChatAssistantError> => {
let run: OpenAI.Beta.Threads.Runs.Run
const maxRetries = 60 // Assuming a 30-second timeout with 500ms sleep
let retries = 0

try {
run = await openai.beta.threads.runs.retrieve(threadId, runId)
} catch (err) {
Expand All @@ -177,32 +187,34 @@ export const Assistant = (): ChatAssistant => {
while (
["queued", "in_progress", "cancelling", "requires_action"].includes(run.status)
) {
// TODO: max timer for this loop
// add open telemetry here? or is it already present with the http requests?
if (retries >= maxRetries) {
return new TimeoutAssistantError()
}

// Add telemetry here if needed
await sleep(500)
retries += 1

try {
run = await openai.beta.threads.runs.retrieve(threadId, runId)
} catch (err) {
return new UnknownChatAssistantError(err)
}

if (run.status === "requires_action") {
let output: string
let outputs: string[]
try {
output = await processAction(run)
outputs = await processAction(run)
} catch (err) {
return new UnknownChatAssistantError(err)
}

try {
await openai.beta.threads.runs.submitToolOutputs(threadId, runId, {
tool_outputs: [
{
tool_call_id: run.required_action?.submit_tool_outputs.tool_calls[0].id,
output,
},
],
tool_outputs: outputs.map((output, index) => ({
tool_call_id: run.required_action?.submit_tool_outputs.tool_calls[index].id,
output,
})),
})
} catch (err) {
return new UnknownChatAssistantError(err)
Expand All @@ -222,12 +234,12 @@ export const Assistant = (): ChatAssistant => {
const responseThread = messages.data[0]

if (responseThread.content[0]?.type !== "text") {
return new UnknownChatAssistantError("last message is not text")
return new UnknownChatAssistantError("Last message is not text")
}

return true
} else {
return new UnknownChatAssistantError("issue running the assistant")
return new UnknownChatAssistantError("Issue running the assistant")
}
}

Expand Down

0 comments on commit 39c0111

Please sign in to comment.