From c026b8ee414d61416a15868bbfa553e37146e2a0 Mon Sep 17 00:00:00 2001 From: nerfZael Date: Thu, 28 Dec 2023 15:08:25 +0100 Subject: [PATCH 1/3] reusing evo for different goals --- apps/browser/lib/services/evo/EvoThread.ts | 63 +++++++++++++------ .../lib/services/evo/createEvoInstance.ts | 23 ++++--- 2 files changed, 59 insertions(+), 27 deletions(-) diff --git a/apps/browser/lib/services/evo/EvoThread.ts b/apps/browser/lib/services/evo/EvoThread.ts index 9aaf1ad3..6f3a868c 100644 --- a/apps/browser/lib/services/evo/EvoThread.ts +++ b/apps/browser/lib/services/evo/EvoThread.ts @@ -1,5 +1,5 @@ import { createEvoInstance } from "@/lib/services/evo/createEvoInstance"; -import { GoalApi } from "@/lib/api"; +import { GoalApi, ProxyEmbeddingApi, ProxyLlmApi } from "@/lib/api"; import { ChatLog } from "@/components/Chat"; import { Evo, @@ -7,6 +7,8 @@ import { ChatMessage, Workspace, InMemoryWorkspace, + EmbeddingApi, + LlmApi, } from "@evo-ninja/agents"; export interface EvoThreadConfig { @@ -23,6 +25,9 @@ export interface EvoThreadConfig { export interface EvoThreadState { goal: string | undefined; + evo: Evo | undefined; + llm: LlmApi | undefined; + embedding: EmbeddingApi | undefined; status: string | undefined; isRunning: boolean; isLoading: boolean; @@ -47,6 +52,9 @@ export interface EvoThreadStartOptions { const INIT_STATE: EvoThreadState = { goal: undefined, + evo: undefined, + llm: undefined, + embedding: undefined, status: undefined, isRunning: false, isLoading: false, @@ -141,29 +149,46 @@ export class EvoThread { return; } - // Create an Evo instance - const evo = createEvoInstance( - goalId, - this._state.workspace, - options.openAiApiKey, - this._config.onMessagesAdded, - this._config.onVariableSet, - (chatLog) => this.onChatLog(chatLog), - (status) => this.onStatusUpdate(status), - () => this._callbacks?.onGoalCapReached(), - // onError - (error) => this._callbacks?.onError(error) - ); + if (this._state.evo && this._state.llm && this._state.embedding) { + console.log("Reusing existing Evo instance"); + } else { + console.log("Creating new Evo instance"); + // Create an Evo instance + const result = createEvoInstance( + this._state.workspace, + options.openAiApiKey, + this._config.onMessagesAdded, + this._config.onVariableSet, + (chatLog) => this.onChatLog(chatLog), + (status) => this.onStatusUpdate(status), + () => this._callbacks?.onGoalCapReached(), + // onError + (error) => this._callbacks?.onError(error) + ); - if (!evo) { - this.setIsRunning(false); - return; + if (!result) { + this.setIsRunning(false); + return; + } + + console.log("Evo instance created", result); + + this._state.evo = result.evo; + this._state.llm = result.llm; + this._state.embedding = result.embedding; } - await evo.init(); + if (this._state.llm instanceof ProxyLlmApi) { + console.log("Setting goal ID1", goalId); + this._state.llm.setGoalId(goalId); + } + if (this._state.embedding instanceof ProxyEmbeddingApi) { + console.log("Setting goal ID2", goalId); + this._state.embedding.setGoalId(goalId); + } // Run the evo instance against the goal - await this.runEvo(evo, options.goal); + await this.runEvo(this._state.evo, options.goal); this._state.goal = undefined; } diff --git a/apps/browser/lib/services/evo/createEvoInstance.ts b/apps/browser/lib/services/evo/createEvoInstance.ts index 8c648a87..35cf2246 100644 --- a/apps/browser/lib/services/evo/createEvoInstance.ts +++ b/apps/browser/lib/services/evo/createEvoInstance.ts @@ -24,7 +24,6 @@ import { import cl100k_base from 
"gpt-tokenizer/esm/encoding/cl100k_base"; export function createEvoInstance( - goalId: string, workspace: Workspace, openAiApiKey: string | undefined, onMessagesAdded: (type: ChatLogType, messages: ChatMessage[]) => Promise, @@ -33,7 +32,14 @@ export function createEvoInstance( onStatusUpdate: (status: string) => void, onGoalCapReached: () => void, onError: (error: string) => void -): Evo | undefined { +): { + evo: Evo, + llm: LlmApi, + embedding: EmbeddingApi +} | undefined { + let llm: LlmApi; + let embedding: EmbeddingApi; + try { const browserLogger = new BrowserLogger({ onLog: async (message: string) => { @@ -66,10 +72,8 @@ export function createEvoInstance( MAX_RESPONSE_TOKENS: "4096", }); - let llm: LlmApi; - let embedding: EmbeddingApi; - if (openAiApiKey) { + console.log("Using OpenAI API"); llm = new OpenAILlmApi( env.OPENAI_API_KEY, env.GPT_MODEL as LlmModel, @@ -85,19 +89,18 @@ export function createEvoInstance( env.OPENAI_API_BASE_URL, ); } else { + console.log("Using Evo API"); const llmProxy = new ProxyLlmApi( env.GPT_MODEL as LlmModel, env.CONTEXT_WINDOW_TOKENS, env.MAX_RESPONSE_TOKENS, onGoalCapReached, ); - llmProxy.setGoalId(goalId); llm = llmProxy; const embeddingProxy = new ProxyEmbeddingApi( cl100k_base, onGoalCapReached ); - embeddingProxy.setGoalId(goalId); embedding = embeddingProxy; } @@ -124,7 +127,11 @@ export function createEvoInstance( agentVariables ) ); - return evo; + return { + evo, + llm, + embedding + }; } catch (e: any) { onError(e.message); return undefined; From 25bbf6110c96d03392f401aed9a3e361ed538bca Mon Sep 17 00:00:00 2001 From: nerfZael Date: Fri, 29 Dec 2023 17:36:16 +0100 Subject: [PATCH 2/3] implemented support for multiple goals in browser --- apps/browser/lib/hooks/useEvoService.ts | 15 +++--- apps/browser/lib/queries/useChats.ts | 49 ++++++++++++------- apps/browser/lib/services/evo/EvoThread.ts | 42 ++++++++++------ .../lib/services/evo/createEvoInstance.ts | 2 - .../agents/src/agent-core/llm/chat/Chat.ts | 14 ++++-- 5 files changed, 76 insertions(+), 46 deletions(-) diff --git a/apps/browser/lib/hooks/useEvoService.ts b/apps/browser/lib/hooks/useEvoService.ts index 1fcce57b..875fb8c4 100644 --- a/apps/browser/lib/hooks/useEvoService.ts +++ b/apps/browser/lib/hooks/useEvoService.ts @@ -11,7 +11,7 @@ import { EvoThreadCallbacks, EvoThreadConfig } from "@/lib/services/evo/EvoThrea import { useAddChatLog } from "@/lib/mutations/useAddChatLog"; import { useAddMessages } from "@/lib/mutations/useAddMessages"; import { useAddVariable } from "@/lib/mutations/useAddVariable"; -import { fetchChats, useChats } from "@/lib/queries/useChats"; +import { Chat, fetchChats, useChats } from "@/lib/queries/useChats"; import { SupabaseWorkspace } from "@/lib/supabase/SupabaseWorkspace"; import { useWorkspaceFilesUpdate } from "@/lib/hooks/useWorkspaceFilesUpdate"; import { useWorkspaceUploadUpdate } from "@/lib/hooks/useWorkspaceUploadUpdate"; @@ -89,7 +89,7 @@ export const useEvoService = ( const config: EvoThreadConfig = { chatId, - loadChatLog, + loadChat, loadWorkspace, onChatLogAdded: handleChatLogAdded, onMessagesAdded: handleMessagesAdded, @@ -113,9 +113,9 @@ export const useEvoService = ( setIsConnected(true); }; - const loadChatLog = async (chatId: string) => { + const loadChat = async (chatId: string): Promise => { if (chatId === "") { - return []; + throw new Error("Cannot load chat for anonymous user."); } const { data: chats, error } = await fetchChats(supabase!); @@ -123,16 +123,15 @@ export const useEvoService = ( if (error) { 
       console.error(error);
       setError("Failed to fetch user chats.");
-      return [];
+      throw error;
     }
 
     const currentChat = chats?.find(c => c.id === chatId);
 
     if (!currentChat) {
-      return [];
+      throw new Error(`Chat with id ${chatId} not found.`);
     }
-
-    return currentChat.logs;
+    return currentChat;
   };
 
   async function loadWorkspace(chatId: string): Promise<Workspace> {
diff --git a/apps/browser/lib/queries/useChats.ts b/apps/browser/lib/queries/useChats.ts
index 9bf97c46..2b4494f5 100644
--- a/apps/browser/lib/queries/useChats.ts
+++ b/apps/browser/lib/queries/useChats.ts
@@ -9,11 +9,16 @@ export interface Chat {
   id: string;
   created_at: string;
   title: string | null;
-  messages: ChatMessage[];
+  messages: SavedMessage[];
   logs: ChatLog[];
   variables: Map<string, string>
 }
 
+export interface SavedMessage {
+  msg: ChatMessage,
+  temporary: boolean
+}
+
 interface MessageDTO {
   id: string;
   created_at: string;
@@ -49,42 +54,52 @@ interface ChatDTO {
   messages: MessageDTO[];
 }
 
-const mapMessageDTOtoMessage = (dto: MessageDTO): ChatMessage & { temporary: boolean } => {
+const mapMessageDTOtoMessage = (dto: MessageDTO): SavedMessage => {
   const messageRole = dto.role as "function" | "user" | "tool" | "system" | "assistant"
 
   switch (messageRole) {
     case "user":
    case "system": {
       return {
-        role: messageRole,
-        content: dto.content,
-        temporary: dto.temporary
+        msg: {
+          role: messageRole,
+          content: dto.content,
+        },
+        temporary: dto.temporary,
       }
     }
     case "function": {
       return {
-        role: messageRole,
-        content: dto.content,
+        msg: {
+          role: messageRole,
+          content: dto.content,
+          name: dto.name as string
+        },
         temporary: dto.temporary,
-        name: dto.name as string
       }
     }
     case "assistant": {
       return {
-        role: messageRole,
-        content: dto.content,
+        msg: {
+          role: messageRole,
+          content: dto.content,
+          // TODO: Json casting
+          function_call: dto.function_call as any ?? undefined,
+          tool_calls: dto.tool_calls as any
+            ? dto.tool_calls as any
+            : undefined,
+        },
         temporary: dto.temporary,
-        // TODO: Json casting
-        function_call: dto.function_call as any,
-        tool_calls: dto.tool_calls as any,
       }
     }
     case "tool": {
       return {
-        role: messageRole,
-        content: dto.content,
+        msg: {
+          role: messageRole,
+          content: dto.content,
+          tool_call_id: dto.tool_call_id as string,
+        },
         temporary: dto.temporary,
-        tool_call_id: dto.tool_call_id as string,
       }
     }
   }
@@ -104,7 +119,7 @@ const mapChatDTOtoChat = (dto: ChatDTO): Chat => {
     id: dto.id,
     created_at: dto.created_at,
     title: dto.title,
-    messages,
+    messages: messages,
     variables,
     logs
   }
diff --git a/apps/browser/lib/services/evo/EvoThread.ts b/apps/browser/lib/services/evo/EvoThread.ts
index 30dabd13..14cf552e 100644
--- a/apps/browser/lib/services/evo/EvoThread.ts
+++ b/apps/browser/lib/services/evo/EvoThread.ts
@@ -10,10 +10,11 @@ import {
   EmbeddingApi,
   LlmApi,
 } from "@evo-ninja/agents";
+import { Chat } from "@/lib/queries/useChats";
 
 export interface EvoThreadConfig {
   chatId: string;
-  loadChatLog: (chatId: string) => Promise<ChatLog[]>;
+  loadChat: (chatId: string) => Promise<Chat>;
   loadWorkspace: (chatId: string) => Promise<Workspace>;
   onChatLogAdded: (chatLog: ChatLog) => Promise<void>;
   onMessagesAdded: (
@@ -32,6 +33,7 @@ export interface EvoThreadState {
   isRunning: boolean;
   isLoading: boolean;
   logs: ChatLog[];
+  chat: Chat | undefined;
   workspace: Workspace;
 }
 
@@ -59,6 +61,7 @@ const INIT_STATE: EvoThreadState = {
   isRunning: false,
   isLoading: false,
   logs: [],
+  chat: undefined,
   workspace: new InMemoryWorkspace()
 };
 
@@ -85,12 +88,12 @@ export class EvoThread {
     thread._state.isLoading = true;
 
     const results = await Promise.all<[
-      Promise<ChatLog[]>,
+      Promise<Chat>,
       Promise<Workspace>
     ]>([
-      thread._config.loadChatLog(chatId).catch((reason) => {
+      thread._config.loadChat(chatId).catch((reason) => {
         thread._callbacks?.onError(reason.toString());
-        return [];
+        throw reason;
       }),
       thread._config.loadWorkspace(chatId).catch((reason) => {
         thread._callbacks?.onError(reason.toString());
@@ -98,7 +101,8 @@ export class EvoThread {
       })
     ]);
 
-    thread._state.logs = results[0];
+    thread._state.chat = results[0];
+    thread._state.logs = results[0].logs;
     thread._state.workspace = results[1];
 
     thread._state.isLoading = false;
@@ -177,11 +181,7 @@ export class EvoThread {
       return;
     }
 
-    if (this._state.evo && this._state.llm && this._state.embedding) {
-      console.log("Reusing existing Evo instance");
-    } else {
-      console.log("Creating new Evo instance");
-      // Create an Evo instance
+    if (!this._state.evo || !this._state.llm || !this._state.embedding) {
       const result = createEvoInstance(
         this._state.workspace,
         options.openAiApiKey,
@@ -190,7 +190,6 @@ export class EvoThread {
         (chatLog) => this.onChatLog(chatLog),
         (status) => this.onStatusUpdate(status),
         () => this._callbacks?.onGoalCapReached(),
-        // onError
         (error) => this._callbacks?.onError(error)
       );
 
@@ -199,19 +198,32 @@ export class EvoThread {
         return;
       }
 
-      console.log("Evo instance created", result);
-
       this._state.evo = result.evo;
       this._state.llm = result.llm;
       this._state.embedding = result.embedding;
+
+      if (this._state.chat?.messages.length) {
+        await this._state.evo.context.chat.addWithoutEvents(
+          "persistent",
+          this._state.chat.messages
+            .filter(x => !x.temporary)
+            .map(x => x.msg)
+        );
+        await this._state.evo.context.chat.addWithoutEvents(
+          "temporary",
+          this._state.chat.messages
+            .filter(x => x.temporary)
+            .map(x => x.msg)
+        );
+      } else {
+        await this._state.evo.init();
+      }
     }
 
     if (this._state.llm instanceof ProxyLlmApi) {
-      console.log("Setting goal ID1", goalId);
       this._state.llm.setGoalId(goalId);
     }
     if (this._state.embedding instanceof ProxyEmbeddingApi) {
-      console.log("Setting goal ID2", goalId);
       this._state.embedding.setGoalId(goalId);
     }
 
diff --git a/apps/browser/lib/services/evo/createEvoInstance.ts b/apps/browser/lib/services/evo/createEvoInstance.ts
index 35cf2246..633384ec 100644
--- a/apps/browser/lib/services/evo/createEvoInstance.ts
+++ b/apps/browser/lib/services/evo/createEvoInstance.ts
@@ -73,7 +73,6 @@ export function createEvoInstance(
     });
 
     if (openAiApiKey) {
-      console.log("Using OpenAI API");
       llm = new OpenAILlmApi(
         env.OPENAI_API_KEY,
         env.GPT_MODEL as LlmModel,
@@ -89,7 +88,6 @@ export function createEvoInstance(
         env.OPENAI_API_BASE_URL,
       );
     } else {
-      console.log("Using Evo API");
       const llmProxy = new ProxyLlmApi(
         env.GPT_MODEL as LlmModel,
         env.CONTEXT_WINDOW_TOKENS,
diff --git a/packages/agents/src/agent-core/llm/chat/Chat.ts b/packages/agents/src/agent-core/llm/chat/Chat.ts
index 16c793ac..dc619bee 100644
--- a/packages/agents/src/agent-core/llm/chat/Chat.ts
+++ b/packages/agents/src/agent-core/llm/chat/Chat.ts
@@ -34,6 +34,16 @@ export class Chat {
 
   public async add(type: ChatLogType, msg: ChatMessage | ChatMessage[]): Promise<void> {
     const msgs = Array.isArray(msg) ? msg : [msg];
+
+    this.addWithoutEvents(type, msgs);
+
+    if (this.options?.onMessagesAdded) {
+      await this.options.onMessagesAdded(type, msgs);
+    }
+  }
+
+  public addWithoutEvents(type: ChatLogType, msg: ChatMessage | ChatMessage[]) {
+    const msgs = Array.isArray(msg) ? msg : [msg];
 
     const msgsWithTokens = msgs.map((msg) => {
       const tokens = this._tokenizer.encode(JSON.stringify(msg)).length;
@@ -42,10 +52,6 @@ export class Chat {
     const tokens = msgsWithTokens.map(({ tokens }) => tokens);
 
     this._chatLogs.add(type, msgs, tokens)
-
-    if (this.options?.onMessagesAdded) {
-      await this.options.onMessagesAdded(type, msgs);
-    }
   }
 
   public async persistent(role: ChatRole, content: string): Promise<void>;

From a650d9e8615e286a6ab6f08d51b68bbd66698d15 Mon Sep 17 00:00:00 2001
From: nerfZael
Date: Fri, 29 Dec 2023 19:03:38 +0100
Subject: [PATCH 3/3] using llm and embedding apis from agent context

---
 apps/browser/lib/services/evo/EvoThread.ts | 24 ++++++++-----------
 .../lib/services/evo/createEvoInstance.ts | 12 ++--------
 2 files changed, 12 insertions(+), 24 deletions(-)

diff --git a/apps/browser/lib/services/evo/EvoThread.ts b/apps/browser/lib/services/evo/EvoThread.ts
index 14cf552e..494f368c 100644
--- a/apps/browser/lib/services/evo/EvoThread.ts
+++ b/apps/browser/lib/services/evo/EvoThread.ts
@@ -27,8 +27,6 @@ export interface EvoThreadConfig {
 export interface EvoThreadState {
   goal: string | undefined;
   evo: Evo | undefined;
-  llm: LlmApi | undefined;
-  embedding: EmbeddingApi | undefined;
   status: string | undefined;
   isRunning: boolean;
   isLoading: boolean;
@@ -55,8 +53,6 @@ const INIT_STATE: EvoThreadState = {
   goal: undefined,
   evo: undefined,
-  llm: undefined,
-  embedding: undefined,
   status: undefined,
   isRunning: false,
   isLoading: false,
@@ -181,8 +177,8 @@ export class EvoThread {
       return;
     }
 
-    if (!this._state.evo || !this._state.llm || !this._state.embedding) {
-      const result = createEvoInstance(
+    if (!this._state.evo) {
+      const evo = createEvoInstance(
         this._state.workspace,
         options.openAiApiKey,
         this._config.onMessagesAdded,
@@ -193,14 +189,12 @@ export class EvoThread {
         (chatLog) => this.onChatLog(chatLog),
         (status) => this.onStatusUpdate(status),
         () => this._callbacks?.onGoalCapReached(),
         (error) => this._callbacks?.onError(error)
       );
 
-      if (!result) {
+      if (!evo) {
         this.setIsRunning(false);
         return;
       }
 
-      this._state.evo = result.evo;
-      this._state.llm = 
result.llm; - this._state.embedding = result.embedding; + this._state.evo = evo; if (this._state.chat?.messages.length) { await this._state.evo.context.chat.addWithoutEvents( @@ -220,11 +214,13 @@ export class EvoThread { } } - if (this._state.llm instanceof ProxyLlmApi) { - this._state.llm.setGoalId(goalId); + const { llm, embedding } = this._state.evo.context; + + if (llm instanceof ProxyLlmApi) { + llm.setGoalId(goalId); } - if (this._state.embedding instanceof ProxyEmbeddingApi) { - this._state.embedding.setGoalId(goalId); + if (embedding instanceof ProxyEmbeddingApi) { + embedding.setGoalId(goalId); } // Run the evo instance against the goal diff --git a/apps/browser/lib/services/evo/createEvoInstance.ts b/apps/browser/lib/services/evo/createEvoInstance.ts index 633384ec..57b28348 100644 --- a/apps/browser/lib/services/evo/createEvoInstance.ts +++ b/apps/browser/lib/services/evo/createEvoInstance.ts @@ -32,11 +32,7 @@ export function createEvoInstance( onStatusUpdate: (status: string) => void, onGoalCapReached: () => void, onError: (error: string) => void -): { - evo: Evo, - llm: LlmApi, - embedding: EmbeddingApi -} | undefined { +): Evo | undefined { let llm: LlmApi; let embedding: EmbeddingApi; @@ -125,11 +121,7 @@ export function createEvoInstance( agentVariables ) ); - return { - evo, - llm, - embedding - }; + return evo; } catch (e: any) { onError(e.message); return undefined;
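
Illustration (not part of the patches): the pattern this series converges on is instance reuse with per-goal re-binding: the Evo agent is created once per thread, kept in state, and before every run its proxy LLM/embedding APIs are pointed at the new goal id, with saved chat history restored through addWithoutEvents instead of re-running init(). The TypeScript sketch below shows that shape using simplified, hypothetical stand-ins (FakeProxyLlm, FakeAgent, FakeThread); it is not the evo-ninja API.

// Minimal sketch of the reuse pattern, with hypothetical types.
interface GoalScopedApi {
  setGoalId(goalId: string): void;
}

// Stand-in for ProxyLlmApi: requests are attributed to whichever goal id is currently bound.
class FakeProxyLlm implements GoalScopedApi {
  private goalId: string | undefined;

  setGoalId(goalId: string): void {
    this.goalId = goalId;
  }

  describe(): string {
    return `llm bound to goal ${this.goalId ?? "<none>"}`;
  }
}

// Stand-in for Evo: owns its API clients, here exposed directly instead of via a context object.
class FakeAgent {
  constructor(public readonly llm: FakeProxyLlm) {}
}

// Stand-in for EvoThread.start(): build the agent once, then re-bind it to every new goal.
class FakeThread {
  private agent: FakeAgent | undefined;

  run(goalId: string, goal: string): string {
    if (!this.agent) {
      this.agent = new FakeAgent(new FakeProxyLlm());
    }
    this.agent.llm.setGoalId(goalId);
    return `${this.agent.llm.describe()} -> running: ${goal}`;
  }
}

const thread = new FakeThread();
console.log(thread.run("goal-1", "write a report"));   // first run creates the agent
console.log(thread.run("goal-2", "summarize a file")); // later runs reuse it under a new goal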