Commit: tweak complete

neowu committed Jul 12, 2024
1 parent a62bb69, commit 93c8793
Showing 4 changed files with 19 additions and 7 deletions.
2 changes: 1 addition & 1 deletion src/azure/chatgpt.rs

@@ -168,7 +168,7 @@ impl ChatGPT {
             // stream_options: Some(StreamOptions { include_usage: true }),
             stream_options: None,
             stop: None,
-            max_tokens: 800,
+            max_tokens: 4096,
             presence_penalty: 0.0,
             frequency_penalty: 0.0,
             tool_choice: self.tools.is_some().then_some("auto".to_string()),
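
Note on the hunk above: the only change raises the response cap sent in the Azure OpenAI chat completions request from 800 to 4096 tokens. A minimal sketch of the kind of request body this field ends up in, assuming serde/serde_json and using placeholder struct and field values that are not taken from this repository:

// Hedged sketch: ChatRequestSketch and its contents are illustrative placeholders.
#[derive(serde::Serialize)]
struct ChatRequestSketch {
    messages: Vec<serde_json::Value>,
    max_tokens: u32,       // raised from 800 to 4096 by this commit
    presence_penalty: f32,
    frequency_penalty: f32,
}

fn example_request() -> ChatRequestSketch {
    ChatRequestSketch {
        messages: vec![serde_json::json!({"role": "user", "content": "hello"})],
        max_tokens: 4096,
        presence_penalty: 0.0,
        frequency_penalty: 0.0,
    }
}

fn main() {
    // Print the serialized body that would be posted to the endpoint.
    println!("{}", serde_json::to_string_pretty(&example_request()).unwrap());
}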
3 changes: 1 addition & 2 deletions src/command/chat.rs

@@ -1,4 +1,3 @@
-use std::io;
 use std::io::Write;
 use std::mem;
 use std::path::PathBuf;
@@ -74,6 +73,6 @@ impl Chat {
 
 fn print_flush(message: &str) -> Result<(), Exception> {
     print!("{message}");
-    io::stdout().flush()?;
+    std::io::stdout().flush()?;
     Ok(())
 }
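
The refactor above only inlines the std::io path, but print_flush itself is the part that matters for a streaming chat CLI: print! emits no newline, so line-buffered stdout would hold partial output back until the next newline unless it is flushed after every chunk. A small standalone sketch of that behavior, using io::Result instead of this repo's Exception type:

use std::io::Write;

// Flush after each chunk so streamed text appears immediately.
fn print_flush(message: &str) -> std::io::Result<()> {
    print!("{message}");
    std::io::stdout().flush()?;
    Ok(())
}

fn main() -> std::io::Result<()> {
    // Stand-in for tokens arriving one by one from a streaming response.
    for token in ["Hello", ", ", "world", "!\n"] {
        print_flush(token)?;
    }
    Ok(())
}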
18 changes: 14 additions & 4 deletions src/command/complete.rs

@@ -70,15 +70,25 @@ impl Complete {
                     return Err(Exception::ValidationError("system message must be at first".to_string()));
                 }
                 on_system_message = true;
-            } else if line.starts_with("---") || line.starts_with("# file: ") {
+            } else if line.starts_with("# prompt") {
                 if on_system_message {
                     info!("system message: {}", message);
                     model.system_message(message);
                     message = String::new();
                     on_system_message = false;
                 }
-                if line.starts_with("# file: ") {
-                    let file = PathBuf::from(line.strip_prefix("# file: ").unwrap().to_string());
+            } else if line.starts_with("# anwser") {
+                break;
+            } else if line.starts_with("> file: ") {
+                let file = self.prompt.with_file_name(line.strip_prefix("> file: ").unwrap());
+                let extension = file
+                    .extension()
+                    .ok_or_else(|| Exception::ValidationError(format!("file must have extension, path={}", file.to_string_lossy())))?
+                    .to_str()
+                    .unwrap();
+                if extension == "txt" {
+                    message.push_str(&fs::read_to_string(file).await?)
+                } else {
                     info!("file: {}", file.to_string_lossy());
                     files.push(file);
                 }
@@ -93,7 +103,7 @@
         let message = model.chat(message, files).await?;
 
         let mut prompt = fs::OpenOptions::new().append(true).open(&self.prompt).await?;
-        prompt.write_all(b"\n---\n\n").await?;
+        prompt.write_all(format!("\n# anwser ({})\n\n", self.name).as_bytes()).await?;
         prompt.write_all(message.as_bytes()).await?;
 
         Ok(())
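
Taken together, the two hunks above replace the old "---" / "# file:" markers with "# prompt", "# anwser" (spelled that way in the code) and "> file:", inline .txt attachments directly into the message text, resolve attachment paths relative to the prompt file, and append the model's reply under a "# anwser (<name>)" heading. A hedged sketch of how such a prompt file might look and how the new starts_with checks treat each marker; the file name and answer label are made up, and the system-message marker handled earlier in this function is not shown in this hunk:

// Hedged sketch: sample prompt content and labels are illustrative only.
fn main() {
    let prompt = "\
# prompt

Summarize the attached notes.

> file: notes.txt

# anwser (gpt4o)

(previous model output appended by the complete command)
";

    for line in prompt.lines() {
        if line.starts_with("# prompt") {
            println!("user prompt section starts");
        } else if line.starts_with("> file: ") {
            println!("attachment: {}", line.strip_prefix("> file: ").unwrap());
        } else if line.starts_with("# anwser") {
            println!("existing answer section reached; parsing stops");
            break;
        }
    }
}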
3 changes: 3 additions & 0 deletions src/gcloud/gemini.rs

@@ -61,6 +61,9 @@ impl Gemini {
 
     pub async fn chat(&mut self, message: String, files: Option<Vec<PathBuf>>) -> Result<String, Exception> {
        let data = inline_datas(files).await?;
+        if data.is_some() {
+            self.tools = None; // function call is not supported with inline data
+        }
         self.add_message(Content::new_user_text(message, data));
 
         let mut result = self.process().await?;
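
The three added lines drop the tool (function-call) declarations whenever inline file data is attached, since this Gemini client does not combine both in a single request. A self-contained sketch of the same guard with stand-in types; GeminiSketch, Tool, and InlineData are illustrative, not this repo's types:

// Hedged sketch: drop tool declarations when inline data is present.
struct InlineData;
struct Tool;

struct GeminiSketch {
    tools: Option<Vec<Tool>>,
}

impl GeminiSketch {
    fn prepare(&mut self, data: &Option<Vec<InlineData>>) {
        if data.is_some() {
            // function call is not supported with inline data
            self.tools = None;
        }
    }
}

fn main() {
    let mut model = GeminiSketch { tools: Some(vec![Tool]) };
    let data = Some(vec![InlineData]);
    model.prepare(&data);
    assert!(model.tools.is_none());
    println!("tools dropped when inline data is present");
}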
