feat: add anda_engine_cli
zensh committed Feb 15, 2025
1 parent 5ad830c commit eb1f8e4
Showing 23 changed files with 329 additions and 84 deletions.
20 changes: 20 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 2 additions & 0 deletions Cargo.toml
@@ -3,6 +3,7 @@ resolver = "2"
members = [
"anda_core",
"anda_engine",
"anda_engine_cli",
"anda_engine_server",
"anda_lancedb",
"anda_web3_client",
@@ -36,6 +37,7 @@ axum = { version = "0.8", features = [
"query",
], default-features = true }
bytes = "1"
base64 = "0.22"
candid = "0.10"
ciborium = "0.2"
futures = "0.3"
12 changes: 6 additions & 6 deletions agents/anda_bot/src/main.rs
@@ -409,7 +409,7 @@ async fn bootstrap_local(
let default_agent = character.username.clone();
let knowledge_table: Path = default_agent.to_ascii_lowercase().into();

let web3 = Web3Client::new(&ic_host, id_secret, root_secret, None).await?;
let web3 = Web3Client::new(&ic_host, id_secret, root_secret, None, None).await?;
let my_principal = web3.get_principal();
log::info!(target: LOG_TARGET, "start local service, principal: {:?}", my_principal.to_text());

@@ -559,10 +559,6 @@ async fn connect_knowledge_store(
fn connect_model(cfg: &config::Llm) -> Result<Model, BoxError> {
if cfg.openai_api_key.is_empty() {
Ok(Model::new(
Arc::new(
cohere::Client::new(&cfg.cohere_api_key)
.embedding_model(&cfg.cohere_embedding_model),
),
Arc::new(
deepseek::Client::new(
&cfg.deepseek_api_key,
@@ -578,6 +574,10 @@ fn connect_model(cfg: &config::Llm) -> Result<Model, BoxError> {
&cfg.deepseek_model
}),
),
Arc::new(
cohere::Client::new(&cfg.cohere_api_key)
.embedding_model(&cfg.cohere_embedding_model),
),
))
} else {
let cli = openai::Client::new(
@@ -589,8 +589,8 @@ fn connect_model(cfg: &config::Llm) -> Result<Model, BoxError> {
},
);
Ok(Model::new(
Arc::new(cli.embedding_model(&cfg.openai_embedding_model)),
Arc::new(cli.completion_model(&cfg.openai_completion_model)),
Arc::new(cli.embedding_model(&cfg.openai_embedding_model)),
))
}
}
9 changes: 7 additions & 2 deletions anda_core/src/model.rs
@@ -30,6 +30,10 @@ pub struct AgentOutput {
/// Tool call that this message is responding to. If this message is a response to a tool call, this field should be set to the tool call ID.
#[serde(skip_serializing_if = "Option::is_none")]
pub tool_calls: Option<Vec<ToolCall>>,

/// full_history will not be included in the engine's response
#[serde(skip_serializing_if = "Option::is_none")]
pub full_history: Option<Vec<Value>>,
}

/// Represents a tool call response with its ID, function name, and arguments
@@ -192,13 +196,14 @@ pub struct CompletionRequest {
/// The name of system role
pub system_name: Option<String>,

/// The chat history to be sent to the completion model provider
pub chat_history: Vec<Message>,
/// The chat history (raw message) to be sent to the completion model provider
pub chat_history: Vec<Value>,

/// The documents to embed into the prompt
pub documents: Documents,

/// The prompt to be sent to the completion model provider as "user" role
/// It can be empty.
pub prompt: String,

/// The name of the prompter
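The switch from `Vec<Message>` to `Vec<Value>` for `chat_history`, together with the new `full_history` field on `AgentOutput`, means history entries are provider-shaped JSON rather than the crate's typed messages. A minimal standalone sketch of the idea, using a simplified stand-in for the real `Message` type (the struct below is illustrative, not anda_core's actual definition):

```rust
use serde::Serialize;
use serde_json::{json, Value};

/// Simplified stand-in for the engine's chat message type (illustrative only).
#[derive(Debug, Default, Serialize)]
struct Message {
    role: String,
    content: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    tool_call_id: Option<String>,
}

fn main() {
    // Typed messages are serialized into Values up front...
    let mut chat_history: Vec<Value> = vec![json!(Message {
        role: "user".into(),
        content: "What is the weather in Paris?".into(),
        ..Default::default()
    })];

    // ...so provider-shaped payloads (e.g. an assistant turn carrying tool calls)
    // can live in the same history without a matching typed field for every key.
    chat_history.push(json!({
        "role": "assistant",
        "content": "",
        "tool_calls": [{
            "id": "call_0",
            "type": "function",
            "function": { "name": "get_weather", "arguments": "{\"city\":\"Paris\"}" }
        }]
    }));

    println!("{}", serde_json::to_string_pretty(&chat_history).unwrap());
}
```
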
34 changes: 21 additions & 13 deletions anda_engine/src/context/agent.rs
@@ -29,11 +29,12 @@ use anda_core::{
AgentContext, AgentOutput, AgentSet, BaseContext, BoxError, CacheExpiry, CacheFeatures,
CancellationToken, CanisterCaller, CompletionFeatures, CompletionRequest, Embedding,
EmbeddingFeatures, FunctionDefinition, HttpFeatures, KeysFeatures, Message, ObjectMeta, Path,
PutMode, PutResult, StateFeatures, StoreFeatures, ToolCall, ToolSet,
PutMode, PutResult, StateFeatures, StoreFeatures, ToolCall, ToolSet, Value,
};
use candid::{utils::ArgumentEncoder, CandidType, Principal};
use serde::{de::DeserializeOwned, Serialize};
use serde_bytes::ByteBuf;
use serde_json::json;
use std::{future::Future, sync::Arc, time::Duration};

use super::base::BaseCtx;
@@ -260,27 +261,31 @@ impl CompletionFeatures for AgentCtx {
let mut tool_calls_result: Vec<ToolCall> = Vec::new();
loop {
let mut res = self.model.completion(req.clone()).await?;
// automatically execute tool calls
let mut tool_calls_continue: Vec<Message> = Vec::new();
// automatically executes tools calls
let mut tool_calls_continue: Vec<Value> = Vec::new();
if let Some(tool_calls) = &mut res.tool_calls {
// remove tools that have already been handled
req.tools
.retain(|t| !tool_calls.iter().any(|o| o.name == t.name));
for tool in tool_calls.iter_mut() {
match self.tool_call(&tool.id, tool.args.clone()).await {
if !req.tools.iter().any(|t| t.name == tool.name) {
// tool already called, skip
continue;
}

// remove called tool from req.tools
req.tools.retain(|t| t.name != tool.name);
match self.tool_call(&tool.name, tool.args.clone()).await {
Ok((val, con)) => {
if con {
// the LLM needs to continue processing the tool call result
tool_calls_continue.push(Message {
// need to use LLM to continue processing tool_call result
tool_calls_continue.push(json!(Message {
role: "tool".to_string(),
content: val.clone().into(),
name: Some(tool.name.clone()),
name: None,
tool_call_id: Some(tool.id.clone()),
});
}));
}
tool.result = Some(val);
}
Err(_) => {
Err(_err) => {
// TODO:
// support remote_tool_call
// support agent_run
Expand All @@ -301,7 +306,10 @@ impl CompletionFeatures for AgentCtx {
return Ok(res);
}

// append the tool results to the chat history and let the LLM continue processing
req.system = None;
req.documents.clear();
req.prompt = "".to_string();
req.chat_history = res.full_history.unwrap_or_default();
req.chat_history.append(&mut tool_calls_continue);
}
}
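The reworked loop now executes each requested tool at most once, feeds the results back as "tool" messages, and restarts the round from the model's own `full_history` with the system prompt, documents, and user prompt cleared. A simplified, self-contained sketch of that hand-off (the `Request`/`Output` structs below are stand-ins for `CompletionRequest`/`AgentOutput`, not the real types):

```rust
use serde_json::{json, Value};

/// Illustrative stand-ins; the real engine uses anda_core's CompletionRequest/AgentOutput.
struct Request {
    system: Option<String>,
    prompt: String,
    chat_history: Vec<Value>,
}

struct Output {
    tool_results: Vec<(String, String)>, // (tool_call_id, result JSON)
    full_history: Option<Vec<Value>>,
}

/// Mirrors the tail of the completion loop: the next round is driven entirely by
/// the accumulated history plus the freshly produced "tool" messages.
fn prepare_next_round(req: &mut Request, out: Output) {
    let mut tool_msgs: Vec<Value> = out
        .tool_results
        .into_iter()
        .map(|(id, result)| {
            json!({
                "role": "tool",
                "content": result,
                "tool_call_id": id,
            })
        })
        .collect();

    req.system = None;          // the system prompt is already inside full_history
    req.prompt = String::new(); // no new user turn; the model continues from tool output
    req.chat_history = out.full_history.unwrap_or_default();
    req.chat_history.append(&mut tool_msgs);
}

fn main() {
    let mut req = Request {
        system: Some("You are a helpful agent.".into()),
        prompt: "What is the weather in Paris?".into(),
        chat_history: vec![],
    };
    let out = Output {
        tool_results: vec![("call_0".into(), r#"{"temp_c": 11}"#.into())],
        full_history: Some(vec![
            json!({"role": "system", "content": "You are a helpful agent."}),
            json!({"role": "user", "content": "What is the weather in Paris?"}),
        ]),
    };
    prepare_next_round(&mut req, out);
    assert_eq!(req.chat_history.len(), 3);
    println!("{}", serde_json::to_string_pretty(&req.chat_history).unwrap());
}
```
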
4 changes: 3 additions & 1 deletion anda_engine/src/engine.rs
@@ -159,7 +159,9 @@ impl Engine {
}

let ctx = self.ctx.child_with(&name, user, caller)?;
self.ctx.agents.run(&name, ctx, prompt, attachment).await
let mut res = self.ctx.agents.run(&name, ctx, prompt, attachment).await?;
res.full_history = None; // clear full history
Ok(res)
}

/// Calls a tool by name with the specified arguments.
5 changes: 3 additions & 2 deletions anda_engine/src/extension/character.rs
@@ -34,6 +34,7 @@ use anda_core::{
};
use ic_cose_types::to_cbor_bytes;
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::{fmt::Write, sync::Arc, time::Duration};

use super::{
@@ -545,7 +546,7 @@
.append_tools(tools);

if let Some((user, chat)) = &mut chat_history {
req.chat_history = chat.clone();
req.chat_history = chat.clone().into_iter().map(|m| json!(m)).collect();
chat.push(Message {
role: "user".to_string(),
content: req.prompt.clone().into(),
@@ -568,7 +569,7 @@
chat.push(Message {
role: "tool".to_string(),
content: "".to_string().into(),
name: Some(tool_res.name.clone()),
name: None,
tool_call_id: Some(tool_res.id.clone()),
});
}
1 change: 1 addition & 0 deletions anda_engine/src/extension/segmenter.rs
@@ -117,6 +117,7 @@ impl DocumentSegmenter {
args: res_str.clone(),
result: Some(res_str),
}]),
full_history: None,
},
));
}
Expand Down
51 changes: 31 additions & 20 deletions anda_engine/src/model/deepseek.rs
@@ -82,7 +82,7 @@ impl Client {
}

/// Token usage statistics from DeepSeek API responses
#[derive(Clone, Debug, Deserialize)]
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Usage {
/// Number of tokens used in the prompt
pub prompt_tokens: usize,
@@ -101,7 +101,7 @@ impl std::fmt::Display for Usage {
}

/// Completion response from DeepSeek API
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, Serialize)]
pub struct CompletionResponse {
/// Unique identifier for the completion
pub id: String,
@@ -118,8 +118,9 @@ pub struct CompletionResponse {
}

impl CompletionResponse {
fn try_into(mut self) -> Result<AgentOutput, BoxError> {
fn try_into(mut self, mut full_history: Vec<Value>) -> Result<AgentOutput, BoxError> {
let choice = self.choices.pop().ok_or("No completion choice")?;
full_history.push(json!(choice.message));
let mut output = AgentOutput {
content: choice.message.content,
tool_calls: choice.message.tool_calls.map(|tools| {
@@ -133,6 +134,7 @@ impl CompletionResponse {
})
.collect()
}),
full_history: Some(full_history),
..Default::default()
};

@@ -148,15 +150,15 @@ impl CompletionResponse {
}

/// Individual completion choice from DeepSeek API
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, Serialize)]
pub struct Choice {
pub index: usize,
pub message: MessageOutput,
pub finish_reason: String,
}

/// Output message structure from DeepSeek API
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, Serialize)]
pub struct MessageOutput {
pub role: String,
#[serde(default)]
Expand All @@ -166,7 +168,7 @@ pub struct MessageOutput {
}

/// Tool call output structure from DeepSeek API
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, Serialize)]
pub struct ToolCallOutput {
pub id: String,
pub r#type: String,
@@ -188,7 +190,7 @@ impl From<FunctionDefinition> for ToolDefinition {
}
}

#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, Serialize)]
pub struct Function {
pub name: String,
pub arguments: String,
@@ -231,38 +233,40 @@ impl CompletionFeaturesDyn for CompletionModel {
Box::pin(async move {
// Add system to chat history (if available)
let mut full_history = if let Some(system) = &req.system {
vec![Message {
vec![json!(Message {
role: "system".into(),
content: system.to_owned().into(),
name: req.system_name.clone(),
..Default::default()
}]
})]
} else {
vec![]
};

// Add context documents to chat history
if !req.documents.is_empty() {
full_history.push(Message {
full_history.push(json!(Message {
role: "user".into(),
content: format!("{}", req.documents).into(),
..Default::default()
});
}));
}

// Extend existing chat history
full_history.append(&mut req.chat_history);

full_history.push(Message {
role: "user".into(),
content: req.prompt.into(),
name: req.prompter_name,
..Default::default()
});
if !req.prompt.is_empty() {
full_history.push(json!(Message {
role: "user".into(),
content: req.prompt.into(),
name: req.prompter_name,
..Default::default()
}));
}

let mut body = json!({
"model": model,
"messages": full_history,
"messages": full_history.clone(),
"temperature": req.temperature,
});
let body = body.as_object_mut().unwrap();
@@ -304,14 +308,21 @@ impl CompletionFeaturesDyn for CompletionModel {

if log_enabled!(Debug) {
if let Ok(val) = serde_json::to_string(&body) {
log::debug!("DeepSeek request: {}", val);
log::debug!(request = val; "DeepSeek completions request");
}
}

let response = client.post("/chat/completions").json(body).send().await?;
if response.status().is_success() {
match response.json::<CompletionResponse>().await {
Ok(res) => res.try_into(),
Ok(res) => {
if log_enabled!(Debug) {
if let Ok(val) = serde_json::to_string(&res) {
log::debug!(response = val; "DeepSeek completions response");
}
}
res.try_into(full_history)
}
Err(err) => Err(format!("DeepSeek completions error: {}", err).into()),
}
} else {
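With `chat_history` already holding raw JSON values, the DeepSeek provider can assemble the `messages` array by simple concatenation: an optional system message, the documents as an extra user turn, the prior history, and the prompt only when it is non-empty (it is empty on tool-call continuation rounds). A hypothetical helper sketching that assembly (names simplified; not the provider's actual code):

```rust
use serde_json::{json, Value};

/// Hypothetical helper mirroring how the provider builds `full_history`
/// before sending it as the request's `messages` field.
fn build_messages(
    system: Option<&str>,
    documents: &str,
    mut chat_history: Vec<Value>,
    prompt: &str,
) -> Vec<Value> {
    let mut messages = Vec::new();
    if let Some(system) = system {
        messages.push(json!({ "role": "system", "content": system }));
    }
    if !documents.is_empty() {
        // Context documents ride along as an extra "user" message.
        messages.push(json!({ "role": "user", "content": documents }));
    }
    messages.append(&mut chat_history);
    if !prompt.is_empty() {
        // The prompt is skipped on tool-call continuation rounds, where it is empty.
        messages.push(json!({ "role": "user", "content": prompt }));
    }
    messages
}

fn main() {
    let history = vec![json!({ "role": "assistant", "content": "Hi, how can I help?" })];
    let messages = build_messages(Some("You are concise."), "", history, "Summarize Rust's ownership.");
    let body = json!({ "model": "deepseek-chat", "messages": messages, "temperature": 0.7 });
    println!("{}", serde_json::to_string_pretty(&body).unwrap());
}
```
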
