From 1d9a05fb51ea540c1f6358a25b62f83dec43f07e Mon Sep 17 00:00:00 2001
From: "Ahmad-A0 (aider)" <71089234+Ahmad-A0@users.noreply.github.com>
Date: Sun, 4 Aug 2024 01:41:15 +0100
Subject: [PATCH 1/5] feat: Add gpt-4o-mini model to config

---
 src/config.rs | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/src/config.rs b/src/config.rs
index 9f7d486..3ada810 100644
--- a/src/config.rs
+++ b/src/config.rs
@@ -13,7 +13,8 @@ pub enum Model {
     Claude,
     GPT3,
     Llama,
-    Mixtral
+    Mixtral,
+    GPT4OMini
 }
 
 impl ToString for Model {
@@ -25,7 +26,8 @@ impl ToString for Model {
             Self::Claude => String::from("claude-3-haiku-20240307"),
             Self::GPT3 => String::from("gpt-3.5-turbo-0125"),
             Self::Llama => String::from("meta-llama/Llama-3-70b-chat-hf"),
-            Self::Mixtral => String::from("mistralai/Mixtral-8x7B-Instruct-v0.1")
+            Self::Mixtral => String::from("mistralai/Mixtral-8x7B-Instruct-v0.1"),
+            Self::GPT4OMini => String::from("gpt-4-0125-preview")
         }
     }
 }
@@ -92,4 +94,4 @@ impl Config {
             Ok(conf)
         }
     }
-}
\ No newline at end of file
+}

From 146ea25974daed4b350e5bb6d9cc5f045f1710f2 Mon Sep 17 00:00:00 2001
From: "Ahmad-A0 (aider)" <71089234+Ahmad-A0@users.noreply.github.com>
Date: Sun, 4 Aug 2024 01:41:58 +0100
Subject: [PATCH 2/5] fix: Update GPT4OMini model identifier

---
 src/config.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/config.rs b/src/config.rs
index 3ada810..eb4790d 100644
--- a/src/config.rs
+++ b/src/config.rs
@@ -27,7 +27,7 @@ impl ToString for Model {
             Self::GPT3 => String::from("gpt-3.5-turbo-0125"),
             Self::Llama => String::from("meta-llama/Llama-3-70b-chat-hf"),
             Self::Mixtral => String::from("mistralai/Mixtral-8x7B-Instruct-v0.1"),
-            Self::GPT4OMini => String::from("gpt-4-0125-preview")
+            Self::GPT4OMini => String::from("gpt-4o-mini")
         }
     }
 }

From f0d3ef36e2417e52e4af0041891f771d4c232cf8 Mon Sep 17 00:00:00 2001
From: Ahmad-A0 <71089234+Ahmad-A0@users.noreply.github.com>
Date: Sun, 4 Aug 2024 02:12:50 +0100
Subject: [PATCH 3/5] Disable retrieving the vqd, as it seems to break things

---
 src/cache.rs | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/src/cache.rs b/src/cache.rs
index 1583461..4fbf4ca 100644
--- a/src/cache.rs
+++ b/src/cache.rs
@@ -76,10 +76,11 @@ impl Cache {
     }
 
     pub fn get_last_vqd<'a, T: From<&'a String>>(self: &'a Self) -> Option<T> {
-        if self.last_vqd_time - (chrono::Local::now().timestamp_millis() as u64) < 60000 {
+        None
+        /*if self.last_vqd_time - (chrono::Local::now().timestamp_millis() as u64) < 60000 {
             Some((&self.last_vqd).into())
         } else {
             None
-        }
+        } */
     }
-}
\ No newline at end of file
+}

From c49bbd26927b1a3c978a39881370df962d9b1ead Mon Sep 17 00:00:00 2001
From: Ahmad-A0 <71089234+Ahmad-A0@users.noreply.github.com>
Date: Sun, 4 Aug 2024 02:20:51 +0100
Subject: [PATCH 4/5] Update README and config.rs to mark GPT4o as the newer model

---
 README.md     | 2 +-
 src/config.rs | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index c74b94a..f2c1466 100644
--- a/README.md
+++ b/README.md
@@ -45,7 +45,7 @@ you can set their paths and filenames via `HEY_CONFIG_PATH`, `HEY_CONFIG_FILENAM
 ## config file reference
 
 ```toml
-model = "Claude" # or "GPT3"
+model = "Claude" # or "GPT4OMini"
 tos = false # whether if you agree to ddg chat tos
 ```
 
diff --git a/src/config.rs b/src/config.rs
index eb4790d..644acb8 100644
--- a/src/config.rs
+++ b/src/config.rs
@@ -8,13 +8,13 @@ pub enum Model {
     // outdated
     Claude12,
     GPT35,
+    GPT3,
 
     // current
     Claude,
-    GPT3,
+    GPT4OMini,
     Llama,
-    Mixtral,
-    GPT4OMini
+    Mixtral
 }
 
 impl ToString for Model {

From ffa2bc9bd23fb8ff70b314c63d7646c4b6549b0d Mon Sep 17 00:00:00 2001
From: aa0 <71089234+Ahmad-A0@users.noreply.github.com>
Date: Mon, 5 Aug 2024 06:54:25 +0100
Subject: [PATCH 5/5] Remove commented code in `get_last_vqd`.

---
 src/cache.rs | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/src/cache.rs b/src/cache.rs
index 4fbf4ca..969b9b0 100644
--- a/src/cache.rs
+++ b/src/cache.rs
@@ -77,10 +77,5 @@ impl Cache {
 
     pub fn get_last_vqd<'a, T: From<&'a String>>(self: &'a Self) -> Option<T> {
         None
-        /*if self.last_vqd_time - (chrono::Local::now().timestamp_millis() as u64) < 60000 {
-            Some((&self.last_vqd).into())
-        } else {
-            None
-        } */
     }
 }
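
The net effect of the config.rs changes in this series can be seen in a small standalone sketch. The code below is illustrative only, not the project's actual config.rs: it omits the serde derives, the outdated Claude12/GPT35/GPT3 variants and the Config loader, and keeps just the Model-to-identifier mapping as it stands after patch 5/5, where GPT4OMini resolves to "gpt-4o-mini".

#[allow(dead_code)]
enum Model {
    // current variants only; the real enum also keeps an "outdated" group
    Claude,
    GPT4OMini,
    Llama,
    Mixtral,
}

// The project implements ToString directly (rather than Display),
// mapping each config variant to the upstream model identifier.
impl ToString for Model {
    fn to_string(&self) -> String {
        match self {
            Self::Claude => String::from("claude-3-haiku-20240307"),
            Self::GPT4OMini => String::from("gpt-4o-mini"),
            Self::Llama => String::from("meta-llama/Llama-3-70b-chat-hf"),
            Self::Mixtral => String::from("mistralai/Mixtral-8x7B-Instruct-v0.1"),
        }
    }
}

fn main() {
    // After patch 2/5 the GPT4OMini variant resolves to the "gpt-4o-mini" id.
    assert_eq!(Model::GPT4OMini.to_string(), "gpt-4o-mini");
}

With the real config file, a line such as model = "GPT4OMini" (as documented in the README change of patch 4/5) would presumably select that variant, and to_string() is what turns it into the identifier sent upstream.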