Skip to content

Commit

Permalink
Add cache option to createChatModel function
Browse files Browse the repository at this point in the history
  • Loading branch information
gramliu committed Nov 27, 2023
1 parent 4d79048 commit 3f55fee
Showing 1 changed file with 5 additions and 3 deletions.
8 changes: 5 additions & 3 deletions apps/agent/src/lib/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@ export interface CreateChatModelParams {
modelName: string;
maxTokens?: number;
temperature?: number;
-  callbacks?: Callbacks
+  callbacks?: Callbacks;
+  cache?: boolean;
}

/**
Expand All @@ -16,9 +17,10 @@ export async function createChatModel({
modelName,
maxTokens,
temperature,
-  callbacks
+  callbacks,
+  cache: shouldCache = true
}: CreateChatModelParams) {
-  const cache = await getCache();
+  const cache = shouldCache ? await getCache() : undefined;

return new ChatOpenAI({
modelName,
Expand Down

0 comments on commit 3f55fee

Please sign in to comment.