From 5db183132b8e0672ab8d0171bd6474490062f5c6 Mon Sep 17 00:00:00 2001 From: sdcb Date: Sat, 21 Dec 2024 22:16:52 +0800 Subject: [PATCH 1/6] Update model properties and add new records to the ModelReference table MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Updated the IsLegacy property on several models and added the new models o1-2024-12-17, grok-2-1212, and grok-2-vision-1212. Updated the ModelReferences data in the InsertModelReferences method. In 20241221-o1.sql, added several new records to the ModelReference table and updated the IsLegacy field on some existing records. --- src/BE/DB/Init/BasicData.cs | 19 ++++++++++++------- src/scripts/db-migration/2024/20241221-o1.sql | 7 +++++++ 2 files changed, 19 insertions(+), 7 deletions(-) create mode 100644 src/scripts/db-migration/2024/20241221-o1.sql diff --git a/src/BE/DB/Init/BasicData.cs b/src/BE/DB/Init/BasicData.cs index 2db1cc72..bf709495 100644 --- a/src/BE/DB/Init/BasicData.cs +++ b/src/BE/DB/Init/BasicData.cs @@ -107,7 +107,7 @@ private static void InsertTransactionTypes(ChatsDB db) private static void InsertModelReferences(ChatsDB db) { - // Generated from data, hash: 9caa2b35db42ae46c596e6d1b8bfe5119e2d42e846ebe192f59415c35566257a + // Generated from data, hash: c8beb20f27ffd4e4e2a71aa2e3af7741707f29f25d840373fd3ee5d37a3d9238 db.ModelReferences.AddRange( [ new(){ Id=0, ProviderId=0, Name="Test", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=2048, MaxResponseTokens=2048, TokenizerId=1, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, @@ -123,12 +123,13 @@ private static void InsertModelReferences(ChatsDB db) new(){ Id=109, ProviderId=1, Name="gpt-4-32k", ShortName="gpt-4-32k", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=32768, TokenizerId=1, InputTokenPrice1M=60.00000M, OutputTokenPrice1M=120.00000M, CurrencyCode="USD", }, new(){ Id=110, ProviderId=1, Name="gpt-4", ShortName="gpt-4", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=8192, TokenizerId=1, InputTokenPrice1M=30.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="USD", }, new(){ Id=111, ProviderId=1, Name="gpt-4-turbo-2024-04-09", ShortName="gpt-4-turbo", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=30.00000M, CurrencyCode="USD", }, - new(){ Id=112, ProviderId=1, Name="o1-preview-2024-09-12", ShortName="o1-preview", IsLegacy=false, MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=false, AllowStreaming=false, ContextWindow=128000, MaxResponseTokens=32768, TokenizerId=2, InputTokenPrice1M=15.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="USD", }, + new(){ Id=112, ProviderId=1, Name="o1-preview-2024-09-12", ShortName="o1-preview", IsLegacy=true, MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=false, AllowStreaming=false, ContextWindow=128000, MaxResponseTokens=32768, TokenizerId=2, InputTokenPrice1M=15.00000M,
OutputTokenPrice1M=60.00000M, CurrencyCode="USD", }, new(){ Id=113, ProviderId=1, Name="o1-mini-2024-09-12", ShortName="o1-mini", IsLegacy=false, MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=false, AllowStreaming=false, ContextWindow=128000, MaxResponseTokens=65536, TokenizerId=2, InputTokenPrice1M=3.00000M, OutputTokenPrice1M=12.00000M, CurrencyCode="USD", }, new(){ Id=114, ProviderId=1, Name="gpt-4o-mini-2024-07-18", ShortName="gpt-4o", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=16384, TokenizerId=2, InputTokenPrice1M=0.15000M, OutputTokenPrice1M=0.60000M, CurrencyCode="USD", }, new(){ Id=115, ProviderId=1, Name="gpt-4o-2024-05-13", ShortName="gpt-4o", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=2, InputTokenPrice1M=5.00000M, OutputTokenPrice1M=15.00000M, CurrencyCode="USD", }, new(){ Id=116, ProviderId=1, Name="gpt-4o-2024-08-06", ShortName="gpt-4o", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=16384, TokenizerId=2, InputTokenPrice1M=2.50000M, OutputTokenPrice1M=10.00000M, CurrencyCode="USD", }, new(){ Id=117, ProviderId=1, Name="gpt-4o-2024-11-20", ShortName="gpt-4o", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=16384, TokenizerId=2, InputTokenPrice1M=2.50000M, OutputTokenPrice1M=10.00000M, CurrencyCode="USD", }, + new(){ Id=118, ProviderId=1, Name="o1-2024-12-17", ShortName="o1", IsLegacy=false, MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=false, AllowStreaming=false, ContextWindow=200000, MaxResponseTokens=100000, TokenizerId=2, InputTokenPrice1M=15.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="USD", }, new(){ Id=200, ProviderId=2, Name="hunyuan-turbo", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=15.00000M, OutputTokenPrice1M=50.00000M, CurrencyCode="RMB", }, new(){ Id=201, ProviderId=2, Name="hunyuan-pro", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=30.00000M, OutputTokenPrice1M=100.00000M, CurrencyCode="RMB", }, new(){ Id=202, ProviderId=2, Name="hunyuan-standard-256K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=262144, MaxResponseTokens=6144, TokenizerId=null, InputTokenPrice1M=15.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="RMB", }, @@ -162,12 +163,13 @@ private static void InsertModelReferences(ChatsDB db) new(){ Id=509, ProviderId=5, Name="gpt-4-32k", ShortName="gpt-4-32k", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, 
AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=32768, TokenizerId=1, InputTokenPrice1M=60.00000M, OutputTokenPrice1M=120.00000M, CurrencyCode="USD", }, new(){ Id=510, ProviderId=5, Name="gpt-4", ShortName="gpt-4", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=8192, TokenizerId=1, InputTokenPrice1M=30.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="USD", }, new(){ Id=511, ProviderId=5, Name="gpt-4-turbo-2024-04-09", ShortName="gpt-4-turbo", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=30.00000M, CurrencyCode="USD", }, - new(){ Id=512, ProviderId=5, Name="o1-preview-2024-09-12", ShortName="o1-preview", IsLegacy=false, MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=false, AllowStreaming=false, ContextWindow=128000, MaxResponseTokens=32768, TokenizerId=2, InputTokenPrice1M=15.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="USD", }, + new(){ Id=512, ProviderId=5, Name="o1-preview-2024-09-12", ShortName="o1-preview", IsLegacy=true, MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=false, AllowStreaming=false, ContextWindow=128000, MaxResponseTokens=32768, TokenizerId=2, InputTokenPrice1M=15.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="USD", }, new(){ Id=513, ProviderId=5, Name="o1-mini-2024-09-12", ShortName="o1-mini", IsLegacy=false, MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=false, AllowStreaming=false, ContextWindow=128000, MaxResponseTokens=65536, TokenizerId=2, InputTokenPrice1M=3.00000M, OutputTokenPrice1M=12.00000M, CurrencyCode="USD", }, new(){ Id=514, ProviderId=5, Name="gpt-4o-mini-2024-07-18", ShortName="gpt-4o", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=16384, TokenizerId=2, InputTokenPrice1M=0.15000M, OutputTokenPrice1M=0.60000M, CurrencyCode="USD", }, new(){ Id=515, ProviderId=5, Name="gpt-4o-2024-05-13", ShortName="gpt-4o", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=2, InputTokenPrice1M=5.00000M, OutputTokenPrice1M=15.00000M, CurrencyCode="USD", }, new(){ Id=516, ProviderId=5, Name="gpt-4o-2024-08-06", ShortName="gpt-4o", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=16384, TokenizerId=2, InputTokenPrice1M=2.50000M, OutputTokenPrice1M=10.00000M, CurrencyCode="USD", }, new(){ Id=517, ProviderId=5, Name="gpt-4o-2024-11-20", ShortName="gpt-4o", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=16384, TokenizerId=2, InputTokenPrice1M=2.50000M, OutputTokenPrice1M=10.00000M, CurrencyCode="USD", }, + new(){ Id=518, ProviderId=5, Name="o1-2024-12-17", ShortName="o1", IsLegacy=false, MinTemperature=1.00M, 
MaxTemperature=1.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=false, AllowStreaming=false, ContextWindow=200000, MaxResponseTokens=100000, TokenizerId=2, InputTokenPrice1M=15.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="USD", }, new(){ Id=600, ProviderId=6, Name="ernie-4.0-turbo-8k", ShortName="ERNIE-4.0-Turbo-8K", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=30.00000M, OutputTokenPrice1M=90.00000M, CurrencyCode="RMB", }, new(){ Id=601, ProviderId=6, Name="completions_pro", ShortName="ERNIE-4.0-8K", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=20.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="RMB", }, new(){ Id=602, ProviderId=6, Name="completions", ShortName="ERNIE-3.5-8K", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=0.80000M, OutputTokenPrice1M=2.00000M, CurrencyCode="RMB", }, @@ -218,8 +220,10 @@ private static void InsertModelReferences(ChatsDB db) new(){ Id=907, ProviderId=9, Name="glm-4v-plus", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=1024, TokenizerId=null, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=10.00000M, CurrencyCode="RMB", }, new(){ Id=908, ProviderId=9, Name="glm-4v", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=2048, MaxResponseTokens=1024, TokenizerId=null, InputTokenPrice1M=50.00000M, OutputTokenPrice1M=50.00000M, CurrencyCode="RMB", }, new(){ Id=1000, ProviderId=10, Name="deepseek-chat", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=64000, MaxResponseTokens=4000, TokenizerId=null, InputTokenPrice1M=1.00000M, OutputTokenPrice1M=2.00000M, CurrencyCode="RMB", }, - new(){ Id=1100, ProviderId=11, Name="grok-beta", ShortName="grok", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=5.00000M, OutputTokenPrice1M=15.00000M, CurrencyCode="USD", }, - new(){ Id=1101, ProviderId=11, Name="grok-vision-beta", ShortName="grok", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=5.00000M, OutputTokenPrice1M=15.00000M, CurrencyCode="USD", }, + new(){ Id=1100, ProviderId=11, Name="grok-beta", ShortName="grok", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=5.00000M, OutputTokenPrice1M=15.00000M, CurrencyCode="USD", }, + 
new(){ Id=1101, ProviderId=11, Name="grok-vision-beta", ShortName="grok", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=5.00000M, OutputTokenPrice1M=15.00000M, CurrencyCode="USD", }, + new(){ Id=1102, ProviderId=11, Name="grok-2-1212", ShortName="grok", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=2.00000M, OutputTokenPrice1M=10.00000M, CurrencyCode="USD", }, + new(){ Id=1103, ProviderId=11, Name="grok-2-vision-1212", ShortName="grok", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=2.00000M, OutputTokenPrice1M=10.00000M, CurrencyCode="USD", }, new(){ Id=1200, ProviderId=12, Name="AI21-Jamba-1.5-Large", ShortName="AI21-Jamba", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=256000, MaxResponseTokens=4000, TokenizerId=null, InputTokenPrice1M=5.00000M, OutputTokenPrice1M=15.00000M, CurrencyCode="USD", }, new(){ Id=1201, ProviderId=12, Name="AI21-Jamba-1.5-Mini", ShortName="AI21-Jamba", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=256000, MaxResponseTokens=4000, TokenizerId=null, InputTokenPrice1M=1.00000M, OutputTokenPrice1M=2.00000M, CurrencyCode="USD", }, new(){ Id=1202, ProviderId=12, Name="Cohere-command-r", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=1.00000M, OutputTokenPrice1M=2.00000M, CurrencyCode="USD", }, @@ -238,8 +242,9 @@ private static void InsertModelReferences(ChatsDB db) new(){ Id=1215, ProviderId=12, Name="Phi-3.5-MoE-instruct", ShortName="Phi-3.5", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=1.00000M, OutputTokenPrice1M=2.00000M, CurrencyCode="USD", }, new(){ Id=1216, ProviderId=12, Name="Phi-3.5-mini-instruct", ShortName="Phi-3.5", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=0.50000M, OutputTokenPrice1M=1.00000M, CurrencyCode="USD", }, new(){ Id=1217, ProviderId=12, Name="Phi-3.5-vision-instruct", ShortName="Phi-3.5", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=1.00000M, OutputTokenPrice1M=2.00000M, CurrencyCode="USD", }, - new(){ Id=1218, ProviderId=12, Name="o1-preview", ShortName=null, IsLegacy=false, MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=false, 
AllowStreaming=false, ContextWindow=128000, MaxResponseTokens=32768, TokenizerId=null, InputTokenPrice1M=15.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="USD", }, - new(){ Id=1219, ProviderId=12, Name="o1-mini", ShortName=null, IsLegacy=false, MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=false, AllowStreaming=false, ContextWindow=128000, MaxResponseTokens=65536, TokenizerId=null, InputTokenPrice1M=3.00000M, OutputTokenPrice1M=12.00000M, CurrencyCode="USD", } + new(){ Id=1218, ProviderId=12, Name="o1-preview", ShortName=null, IsLegacy=true, MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=false, AllowStreaming=false, ContextWindow=128000, MaxResponseTokens=32768, TokenizerId=null, InputTokenPrice1M=15.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="USD", }, + new(){ Id=1219, ProviderId=12, Name="o1-mini", ShortName=null, IsLegacy=false, MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=false, AllowStreaming=false, ContextWindow=128000, MaxResponseTokens=65536, TokenizerId=null, InputTokenPrice1M=3.00000M, OutputTokenPrice1M=12.00000M, CurrencyCode="USD", }, + new(){ Id=1220, ProviderId=12, Name="o1-2024-12-17", ShortName="o1", IsLegacy=false, MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=false, AllowStreaming=false, ContextWindow=200000, MaxResponseTokens=100000, TokenizerId=2, InputTokenPrice1M=15.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="USD", } ]); } diff --git a/src/scripts/db-migration/2024/20241221-o1.sql b/src/scripts/db-migration/2024/20241221-o1.sql new file mode 100644 index 00000000..fc54d549 --- /dev/null +++ b/src/scripts/db-migration/2024/20241221-o1.sql @@ -0,0 +1,7 @@ +insert into ModelReference values +(118, 1, 'o1-2024-12-17', 'o1', 0, 1, 1, 0, 1, 0, 0, 200000, 100000, 2, 15, 60, 'USD'), +(518, 5, 'o1-2024-12-17', 'o1', 0, 1, 1, 0, 1, 0, 0, 200000, 100000, 2, 15, 60, 'USD'), +(1220, 12, 'o1-2024-12-17', 'o1', 0, 1, 1, 0, 1, 0, 0, 200000, 100000, 2, 15, 60, 'USD'), +(1102, 11, 'grok-2-1212', 'grok', 0, 0, 2, 0, 0, 1, 1, 131072, 4096, NULL, 2, 10, 'USD'), +(1103, 11, 'grok-2-vision-1212', 'grok', 0, 0, 2, 0, 1, 1, 1, 32768, 4096, NULL, 2, 10, 'USD'); +update ModelReference set IsLegacy = 1 where id in (1100, 1101, 112, 512, 1218) \ No newline at end of file From 1908fd4722be66f73e596cf6633a80a9abefa55e Mon Sep 17 00:00:00 2001 From: sdcb Date: Sat, 21 Dec 2024 22:30:08 +0800 Subject: [PATCH 2/6] o1 only supports api version: 2024-12-01-preview --- .../Implementations/OpenAI/AzureChatService.cs | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/src/BE/Services/ChatServices/Implementations/OpenAI/AzureChatService.cs b/src/BE/Services/ChatServices/Implementations/OpenAI/AzureChatService.cs index d9c911d9..73f745d7 100644 --- a/src/BE/Services/ChatServices/Implementations/OpenAI/AzureChatService.cs +++ b/src/BE/Services/ChatServices/Implementations/OpenAI/AzureChatService.cs @@ -3,6 +3,7 @@ using OpenAI; using OpenAI.Chat; using System.ClientModel; +using System.Reflection; namespace Chats.BE.Services.ChatServices.Implementations.OpenAI; @@ -13,7 +14,19 @@ static ChatClient CreateChatClient(Model model) ArgumentException.ThrowIfNullOrWhiteSpace(model.ModelKey.Host, nameof(model.ModelKey.Host)); ArgumentException.ThrowIfNullOrWhiteSpace(model.ModelKey.Secret, nameof(model.ModelKey.Secret)); - OpenAIClient api = new 
AzureOpenAIClient(new Uri(model.ModelKey.Host), new ApiKeyCredential(model.ModelKey.Secret)); + AzureOpenAIClientOptions options = new(); + if (model.ModelReference.Name == "o1") + { + // o1 only supports api version: 2024-12-01-preview + options + .GetType() + .GetField("<Version>k__BackingField", BindingFlags.NonPublic | BindingFlags.Instance) + !.SetValue(options, "2024-12-01-preview"); + } + + OpenAIClient api = new AzureOpenAIClient( + new Uri(model.ModelKey.Host), + new ApiKeyCredential(model.ModelKey.Secret), ); return api.GetChatClient(model.ApiModelId); } } From 1df1a2aaf006b4dedec8c399304f31200a8e1b97 Mon Sep 17 00:00:00 2001 From: sdcb Date: Sat, 21 Dec 2024 22:33:41 +0800 Subject: [PATCH 3/6] fix compilation error. --- .../ChatServices/Implementations/OpenAI/AzureChatService.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/BE/Services/ChatServices/Implementations/OpenAI/AzureChatService.cs b/src/BE/Services/ChatServices/Implementations/OpenAI/AzureChatService.cs index 73f745d7..d130d4c6 100644 --- a/src/BE/Services/ChatServices/Implementations/OpenAI/AzureChatService.cs +++ b/src/BE/Services/ChatServices/Implementations/OpenAI/AzureChatService.cs @@ -26,7 +26,7 @@ static ChatClient CreateChatClient(Model model) OpenAIClient api = new AzureOpenAIClient( new Uri(model.ModelKey.Host), - new ApiKeyCredential(model.ModelKey.Secret), ); + new ApiKeyCredential(model.ModelKey.Secret), options); return api.GetChatClient(model.ApiModelId); } } From 76f1bb44dea9ed598bd306058d6ed3f283970585 Mon Sep 17 00:00:00 2001 From: sdcb Date: Sat, 21 Dec 2024 22:36:22 +0800 Subject: [PATCH 4/6] correct o1 model name --- .../ChatServices/Implementations/OpenAI/AzureChatService.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/BE/Services/ChatServices/Implementations/OpenAI/AzureChatService.cs b/src/BE/Services/ChatServices/Implementations/OpenAI/AzureChatService.cs index d130d4c6..94f8cb27 100644 --- a/src/BE/Services/ChatServices/Implementations/OpenAI/AzureChatService.cs +++ b/src/BE/Services/ChatServices/Implementations/OpenAI/AzureChatService.cs @@ -15,7 +15,7 @@ static ChatClient CreateChatClient(Model model) ArgumentException.ThrowIfNullOrWhiteSpace(model.ModelKey.Secret, nameof(model.ModelKey.Secret)); AzureOpenAIClientOptions options = new(); - if (model.ModelReference.Name == "o1") + if (model.ModelReference.Name == "o1-2024-12-17") { // o1 only supports api version: 2024-12-01-preview options From e1bd7fb3795dd7381b51f3bc6170111535afcb3c Mon Sep 17 00:00:00 2001 From: sdcb Date: Mon, 23 Dec 2024 18:46:24 +0800 Subject: [PATCH 5/6] rename into system prompt --- src/BE/Controllers/Chats/Chats/Dtos/ChatRequest.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/BE/Controllers/Chats/Chats/Dtos/ChatRequest.cs b/src/BE/Controllers/Chats/Chats/Dtos/ChatRequest.cs index d9384590..d0a80ba9 100644 --- a/src/BE/Controllers/Chats/Chats/Dtos/ChatRequest.cs +++ b/src/BE/Controllers/Chats/Chats/Dtos/ChatRequest.cs @@ -51,7 +51,7 @@ public record ChatSpanRequest [JsonPropertyName("spanId")] public required byte Id { get; init; } - [JsonPropertyName("prompt")] + [JsonPropertyName("systemPrompt")] public required string?
SystemPrompt { get; init; } public bool SystemPromptValid => !string.IsNullOrEmpty(SystemPrompt); From e3e02cd7defc47ff7318468c8cb0ac511bed59a9 Mon Sep 17 00:00:00 2001 From: sdcb Date: Mon, 23 Dec 2024 18:46:38 +0800 Subject: [PATCH 6/6] return badrequest when modelstate invalid --- src/BE/Controllers/Chats/Chats/ChatController.cs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/BE/Controllers/Chats/Chats/ChatController.cs b/src/BE/Controllers/Chats/Chats/ChatController.cs index b20d66fd..9ea43738 100644 --- a/src/BE/Controllers/Chats/Chats/ChatController.cs +++ b/src/BE/Controllers/Chats/Chats/ChatController.cs @@ -38,6 +38,11 @@ public async Task StartConversationStreamed( [FromServices] FileUrlProvider fup, CancellationToken cancellationToken) { + if (!ModelState.IsValid) + { + return BadRequest(ModelState); + } + long firstTick = Stopwatch.GetTimestamp(); DecryptedChatRequest req = request.Decrypt(idEncryption);
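Context for the explicit ModelState guard in patch 6: when a controller carries the [ApiController] attribute, ASP.NET Core normally rejects an invalid model with an automatic 400 response before the action runs, so a manual ModelState.IsValid check only takes effect when that automatic filter is suppressed or the attribute is absent. The fragment below is a hypothetical Program.cs configuration, not taken from this repository, under which the manual guard in ChatController would be the code path that actually returns BadRequest.

using Microsoft.AspNetCore.Builder;
using Microsoft.Extensions.DependencyInjection;

var builder = WebApplication.CreateBuilder(args);

builder.Services
    .AddControllers()
    .ConfigureApiBehaviorOptions(options =>
    {
        // Hypothetical setting: disable the automatic 400-on-invalid-ModelState filter,
        // leaving validation handling to explicit checks inside the actions.
        options.SuppressModelStateInvalidFilter = true;
    });

var app = builder.Build();
app.MapControllers();
app.Run();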
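On the API-version workaround in patch 2: the SDK build used here apparently offers no public way to request api-version 2024-12-01-preview, so the patch writes the value straight into the compiler-generated backing field of the options object's Version auto-property via reflection. The C# compiler names such hidden fields "<PropertyName>k__BackingField", which is where the "<Version>k__BackingField" lookup comes from. The sketch below is a minimal, self-contained illustration of that technique against a stand-in class; it is not the real AzureOpenAIClientOptions, whose internal field layout is an implementation detail that can change between Azure.AI.OpenAI releases.

using System;
using System.Reflection;

// Stand-in for an options type whose version is exposed as a get-only auto-property.
public class StubClientOptions
{
    public string Version { get; } = "original-version";
}

public static class BackingFieldDemo
{
    public static void Main()
    {
        StubClientOptions options = new();

        // Auto-property backing fields follow the "<Property>k__BackingField" naming pattern.
        FieldInfo? versionField = options.GetType()
            .GetField("<Version>k__BackingField", BindingFlags.NonPublic | BindingFlags.Instance);

        // Overwrite the otherwise read-only value, mirroring the patch 2 workaround.
        versionField?.SetValue(options, "2024-12-01-preview");

        Console.WriteLine(options.Version); // prints: 2024-12-01-preview
    }
}

If a later Azure.AI.OpenAI release exposes 2024-12-01-preview through the public AzureOpenAIClientOptions.ServiceVersion enum, passing that value to the options constructor would be the supported replacement for the reflection call.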